Commit | Line | Data |
---|---|---|
bccafa26 | 1 | /* Emit RTL for the GCC expander. |
8e8f6434 | 2 | Copyright (C) 1987-2018 Free Software Foundation, Inc. |
15bbde2b | 3 | |
f12b58b3 | 4 | This file is part of GCC. |
15bbde2b | 5 | |
f12b58b3 | 6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 8 | Software Foundation; either version 3, or (at your option) any later |
f12b58b3 | 9 | version. |
15bbde2b | 10 | |
f12b58b3 | 11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
15bbde2b | 15 | |
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
15bbde2b | 19 | |
20 | ||
21 | /* Middle-to-low level generation of rtx code and insns. | |
22 | ||
74efa612 | 23 | This file contains support functions for creating rtl expressions |
24 | and manipulating them in the doubly-linked chain of insns. | |
15bbde2b | 25 | |
26 | The patterns of the insns are created by machine-dependent | |
27 | routines in insn-emit.c, which is generated automatically from | |
74efa612 | 28 | the machine description. These routines make the individual rtx's |
29 | of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch], | |
30 | which are automatically generated from rtl.def; what is machine | |
8fd5918e | 31 | dependent is the kind of rtx's they make and what arguments they |
32 | use. */ | |
15bbde2b | 33 | |
34 | #include "config.h" | |
405711de | 35 | #include "system.h" |
805e22b2 | 36 | #include "coretypes.h" |
ad7b10a2 | 37 | #include "memmodel.h" |
9ef16211 | 38 | #include "backend.h" |
7c29e30e | 39 | #include "target.h" |
15bbde2b | 40 | #include "rtl.h" |
7c29e30e | 41 | #include "tree.h" |
9ef16211 | 42 | #include "df.h" |
7c29e30e | 43 | #include "tm_p.h" |
44 | #include "stringpool.h" | |
7c29e30e | 45 | #include "insn-config.h" |
46 | #include "regs.h" | |
47 | #include "emit-rtl.h" | |
48 | #include "recog.h" | |
9ef16211 | 49 | #include "diagnostic-core.h" |
b20a8bb4 | 50 | #include "alias.h" |
b20a8bb4 | 51 | #include "fold-const.h" |
9ed99284 | 52 | #include "varasm.h" |
94ea8568 | 53 | #include "cfgrtl.h" |
94ea8568 | 54 | #include "tree-eh.h" |
d53441c8 | 55 | #include "explow.h" |
15bbde2b | 56 | #include "expr.h" |
9845d120 | 57 | #include "params.h" |
f7715905 | 58 | #include "builtins.h" |
4073adaa | 59 | #include "rtl-iter.h" |
94f92c36 | 60 | #include "stor-layout.h" |
48a7e3d1 | 61 | #include "opts.h" |
61cb1816 | 62 | #include "predict.h" |
a80726d1 | 63 | #include "rtx-vector-builder.h" |
649d8da6 | 64 | |
679bcc8d | 65 | struct target_rtl default_target_rtl; |
66 | #if SWITCHABLE_TARGET | |
67 | struct target_rtl *this_target_rtl = &default_target_rtl; | |
68 | #endif | |
69 | ||
70 | #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx) | |
71 | ||
399d45d3 | 72 | /* Commonly used modes. */ |
73 | ||
af8303fa | 74 | scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */ |
75 | scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */ | |
76 | scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */ | |
399d45d3 | 77 | |
b079a207 | 78 | /* Data structures maintained for the currently processed function in RTL form. */
79 | ||
fd6ffb7c | 80 | struct rtl_data x_rtl; |
b079a207 | 81 | |
82 | /* Indexed by pseudo register number, gives the rtx for that pseudo. | |
48e1416a | 83 | Allocated in parallel with regno_pointer_align. |
b079a207 | 84 | FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
85 | with the length attribute nested in top-level structures. */ |
86 | ||
87 | rtx * regno_reg_rtx; | |
15bbde2b | 88 | |
89 | /* This is *not* reset after each function. It gives each CODE_LABEL | |
90 | in the entire compilation a unique label number. */ | |
91 | ||
9105005a | 92 | static GTY(()) int label_num = 1; |
15bbde2b | 93 | |
15bbde2b | 94 | /* We record floating-point CONST_DOUBLEs in each floating-point mode for |
95 | the values of 0, 1, and 2. For the integer entries and VOIDmode, we | |
ba8dfb08 | 96 | record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX |
97 | is set only for MODE_INT and MODE_VECTOR_INT modes. */ | |
15bbde2b | 98 | |
ba8dfb08 | 99 | rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE]; |
15bbde2b | 100 | |
1a60f06a | 101 | rtx const_true_rtx; |
102 | ||
15bbde2b | 103 | REAL_VALUE_TYPE dconst0; |
104 | REAL_VALUE_TYPE dconst1; | |
105 | REAL_VALUE_TYPE dconst2; | |
106 | REAL_VALUE_TYPE dconstm1; | |
77e89269 | 107 | REAL_VALUE_TYPE dconsthalf; |
15bbde2b | 108 | |
06f0b99c | 109 | /* Record fixed-point constants 0 and 1. */
110 | FIXED_VALUE_TYPE fconst0[MAX_FCONST0]; | |
111 | FIXED_VALUE_TYPE fconst1[MAX_FCONST1]; | |
112 | ||
15bbde2b | 113 | /* We make one copy of (const_int C) where C is in |
114 | [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] | |
115 | to save space during the compilation and simplify comparisons of | |
116 | integers. */ | |
117 | ||
57c097d5 | 118 | rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1]; |
15bbde2b | 119 | |
7d7b0bac | 120 | /* Standard pieces of rtx, to be substituted directly into things. */ |
121 | rtx pc_rtx; | |
122 | rtx ret_rtx; | |
123 | rtx simple_return_rtx; | |
124 | rtx cc0_rtx; | |
125 | ||
f9a00e9e | 126 | /* Marker used for denoting an INSN, which should never be accessed (i.e., |
127 | this pointer should normally never be dereferenced), but is required to be | |
128 | distinct from NULL_RTX. Currently used by peephole2 pass. */ | |
129 | rtx_insn *invalid_insn_rtx; | |
130 | ||
73f5c1e3 | 131 | /* A hash table storing CONST_INTs whose absolute value is greater |
132 | than MAX_SAVED_CONST_INT. */ | |
133 | ||
eae1ecb4 | 134 | struct const_int_hasher : ggc_cache_ptr_hash<rtx_def> |
f863a586 | 135 | { |
136 | typedef HOST_WIDE_INT compare_type; | |
137 | ||
138 | static hashval_t hash (rtx i); | |
139 | static bool equal (rtx i, HOST_WIDE_INT h); | |
140 | }; | |
73f5c1e3 | 141 | |
f863a586 | 142 | static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab; |
143 | ||
eae1ecb4 | 144 | struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def> |
f863a586 | 145 | { |
146 | static hashval_t hash (rtx x); | |
147 | static bool equal (rtx x, rtx y); | |
148 | }; | |
149 | ||
150 | static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab; | |
e913b5cd | 151 | |
bbad7cd0 | 152 | struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def> |
153 | { | |
154 | typedef std::pair<machine_mode, poly_wide_int_ref> compare_type; | |
155 | ||
156 | static hashval_t hash (rtx x); | |
157 | static bool equal (rtx x, const compare_type &y); | |
158 | }; | |
159 | ||
160 | static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab; | |
161 | ||
ca74b940 | 162 | /* A hash table storing register attribute structures. */ |
eae1ecb4 | 163 | struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs> |
f863a586 | 164 | { |
165 | static hashval_t hash (reg_attrs *x); | |
166 | static bool equal (reg_attrs *a, reg_attrs *b); | |
167 | }; | |
168 | ||
169 | static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab; | |
ca74b940 | 170 | |
2ff23ed0 | 171 | /* A hash table storing all CONST_DOUBLEs. */ |
eae1ecb4 | 172 | struct const_double_hasher : ggc_cache_ptr_hash<rtx_def> |
f863a586 | 173 | { |
174 | static hashval_t hash (rtx x); | |
175 | static bool equal (rtx x, rtx y); | |
176 | }; | |
177 | ||
178 | static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab; | |
2ff23ed0 | 179 | |
e397ad8e | 180 | /* A hash table storing all CONST_FIXEDs. */ |
eae1ecb4 | 181 | struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def> |
f863a586 | 182 | { |
183 | static hashval_t hash (rtx x); | |
184 | static bool equal (rtx x, rtx y); | |
185 | }; | |
186 | ||
187 | static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab; | |
e397ad8e | 188 | |
fd6ffb7c | 189 | #define cur_insn_uid (crtl->emit.x_cur_insn_uid) |
9845d120 | 190 | #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid) |
fd6ffb7c | 191 | #define first_label_num (crtl->emit.x_first_label_num) |
15bbde2b | 192 | |
265be050 | 193 | static void set_used_decls (tree); |
35cb5232 | 194 | static void mark_label_nuses (rtx); |
e913b5cd | 195 | #if TARGET_SUPPORTS_WIDE_INT |
e913b5cd | 196 | static rtx lookup_const_wide_int (rtx); |
197 | #endif | |
35cb5232 | 198 | static rtx lookup_const_double (rtx); |
e397ad8e | 199 | static rtx lookup_const_fixed (rtx); |
3754d046 | 200 | static rtx gen_const_vector (machine_mode, int); |
0e0727c4 | 201 | static void copy_rtx_if_shared_1 (rtx *orig); |
73f5c1e3 | 202 | |
61cb1816 | 203 | /* Probability of the conditional branch currently processed by try_split. */
204 | profile_probability split_branch_probability; | |
649d8da6 | 205 | \f |
73f5c1e3 | 206 | /* Returns a hash code for X (which is really a CONST_INT). */
207 | ||
f863a586 | 208 | hashval_t |
209 | const_int_hasher::hash (rtx x) | |
73f5c1e3 | 210 | { |
f863a586 | 211 | return (hashval_t) INTVAL (x); |
73f5c1e3 | 212 | } |
213 | ||
6ef828f9 | 214 | /* Returns nonzero if the value represented by X (which is really a |
73f5c1e3 | 215 | CONST_INT) is the same as that given by Y (which is really a |
216 | HOST_WIDE_INT). */ |
217 | ||
f863a586 | 218 | bool |
219 | const_int_hasher::equal (rtx x, HOST_WIDE_INT y) | |
73f5c1e3 | 220 | { |
f863a586 | 221 | return (INTVAL (x) == y); |
2ff23ed0 | 222 | } |
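
These descriptor structs are the glue between GCC's `hash_table` template and the objects being interned: the template calls the descriptor's `hash` and `equal` members while probing. As a hedged illustration, the lookup idiom that `gen_rtx_CONST_INT` (further down in this file) builds on looks like this:

```c
/* Illustrative sketch of the interning idiom used throughout this
   file: probe the table with a bare HOST_WIDE_INT key (the descriptor
   declares HOST_WIDE_INT as its compare_type), and fill the slot on
   first use so later requests return the shared object.  */
rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						 INSERT);
if (*slot == 0)
  *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);  /* intern on first use */
return *slot;                                     /* shared thereafter */
```
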
223 | ||
e913b5cd | 224 | #if TARGET_SUPPORTS_WIDE_INT |
225 | /* Returns a hash code for X (which is really a CONST_WIDE_INT). */ |
226 | ||
f863a586 | 227 | hashval_t |
228 | const_wide_int_hasher::hash (rtx x) | |
e913b5cd | 229 | { |
230 | int i; | |
06b8401d | 231 | unsigned HOST_WIDE_INT hash = 0; |
f863a586 | 232 | const_rtx xr = x; |
e913b5cd | 233 | |
234 | for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++) | |
235 | hash += CONST_WIDE_INT_ELT (xr, i); | |
236 | ||
237 | return (hashval_t) hash; | |
238 | } | |
239 | ||
240 | /* Returns nonzero if the value represented by X (which is really a | |
241 | CONST_WIDE_INT) is the same as that given by Y (which is really a | |
242 | CONST_WIDE_INT). */ | |
243 | ||
f863a586 | 244 | bool |
245 | const_wide_int_hasher::equal (rtx x, rtx y) | |
e913b5cd | 246 | { |
247 | int i; | |
f863a586 | 248 | const_rtx xr = x; |
249 | const_rtx yr = y; | |
e913b5cd | 250 | if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr)) |
f863a586 | 251 | return false; |
e913b5cd | 252 | |
253 | for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++) | |
254 | if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i)) | |
f863a586 | 255 | return false; |
ddb1be65 | 256 | |
f863a586 | 257 | return true; |
e913b5cd | 258 | } |
259 | #endif | |
260 | ||
bbad7cd0 | 261 | /* Returns a hash code for CONST_POLY_INT X. */ |
262 | ||
263 | hashval_t | |
264 | const_poly_int_hasher::hash (rtx x) | |
265 | { | |
266 | inchash::hash h; | |
267 | h.add_int (GET_MODE (x)); | |
268 | for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
269 | h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]); | |
270 | return h.end (); | |
271 | } | |
272 | ||
273 | /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */ | |
274 | ||
275 | bool | |
276 | const_poly_int_hasher::equal (rtx x, const compare_type &y) | |
277 | { | |
278 | if (GET_MODE (x) != y.first) | |
279 | return false; | |
280 | for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
281 | if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i]) | |
282 | return false; | |
283 | return true; | |
284 | } | |
285 | ||
2ff23ed0 | 286 | /* Returns a hash code for X (which is really a CONST_DOUBLE). */ |
f863a586 | 287 | hashval_t |
288 | const_double_hasher::hash (rtx x) | |
2ff23ed0 | 289 | { |
f863a586 | 290 | const_rtx const value = x; |
3393215f | 291 | hashval_t h; |
2ff23ed0 | 292 | |
e913b5cd | 293 | if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode) |
3393215f | 294 | h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value); |
295 | else | |
a5760913 | 296 | { |
e2e205b3 | 297 | h = real_hash (CONST_DOUBLE_REAL_VALUE (value)); |
a5760913 | 298 | /* MODE is used in the comparison, so it should be in the hash. */ |
299 | h ^= GET_MODE (value); | |
300 | } | |
2ff23ed0 | 301 | return h; |
302 | } | |
303 | ||
6ef828f9 | 304 | /* Returns nonzero if the value represented by X (really a ...) |
2ff23ed0 | 305 | is the same as that represented by Y (really a ...) */ |
f863a586 | 306 | bool |
307 | const_double_hasher::equal (rtx x, rtx y) | |
2ff23ed0 | 308 | { |
f863a586 | 309 | const_rtx const a = x, b = y; |
2ff23ed0 | 310 | |
311 | if (GET_MODE (a) != GET_MODE (b)) | |
312 | return 0; | |
e913b5cd | 313 | if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode) |
f82a103d | 314 | return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b) |
315 | && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b)); | |
316 | else | |
317 | return real_identical (CONST_DOUBLE_REAL_VALUE (a), | |
318 | CONST_DOUBLE_REAL_VALUE (b)); | |
73f5c1e3 | 319 | } |
320 | ||
e397ad8e | 321 | /* Returns a hash code for X (which is really a CONST_FIXED). */ |
322 | ||
f863a586 | 323 | hashval_t |
324 | const_fixed_hasher::hash (rtx x) | |
e397ad8e | 325 | { |
f863a586 | 326 | const_rtx const value = x; |
e397ad8e | 327 | hashval_t h; |
328 | ||
329 | h = fixed_hash (CONST_FIXED_VALUE (value)); | |
330 | /* MODE is used in the comparison, so it should be in the hash. */ | |
331 | h ^= GET_MODE (value); | |
332 | return h; | |
333 | } | |
334 | ||
f863a586 | 335 | /* Returns nonzero if the value represented by X is the same as that |
336 | represented by Y. */ | |
e397ad8e | 337 | |
f863a586 | 338 | bool |
339 | const_fixed_hasher::equal (rtx x, rtx y) | |
e397ad8e | 340 | { |
f863a586 | 341 | const_rtx const a = x, b = y; |
e397ad8e | 342 | |
343 | if (GET_MODE (a) != GET_MODE (b)) | |
344 | return 0; | |
345 | return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b)); | |
346 | } | |
347 | ||
d72886b5 | 348 | /* Return true if the given memory attributes are equal. */ |
73f5c1e3 | 349 | |
7e304b71 | 350 | bool |
d72886b5 | 351 | mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q) |
73f5c1e3 | 352 | { |
7e304b71 | 353 | if (p == q) |
354 | return true; | |
355 | if (!p || !q) | |
356 | return false; | |
6d58bcba | 357 | return (p->alias == q->alias |
358 | && p->offset_known_p == q->offset_known_p | |
711f137f | 359 | && (!p->offset_known_p || known_eq (p->offset, q->offset)) |
6d58bcba | 360 | && p->size_known_p == q->size_known_p |
711f137f | 361 | && (!p->size_known_p || known_eq (p->size, q->size)) |
6d58bcba | 362 | && p->align == q->align |
bd1a81f7 | 363 | && p->addrspace == q->addrspace |
2f16183e | 364 | && (p->expr == q->expr |
365 | || (p->expr != NULL_TREE && q->expr != NULL_TREE | |
366 | && operand_equal_p (p->expr, q->expr, 0)))); | |
73f5c1e3 | 367 | } |
368 | ||
d72886b5 | 369 | /* Set MEM's memory attributes so that they are the same as ATTRS. */ |
5cc193e7 | 370 | |
d72886b5 | 371 | static void |
372 | set_mem_attrs (rtx mem, mem_attrs *attrs) | |
373 | { | |
d72886b5 | 374 | /* If everything is the default, we can just clear the attributes. */ |
375 | if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)])) | |
376 | { | |
377 | MEM_ATTRS (mem) = 0; | |
378 | return; | |
379 | } | |
c6259b83 | 380 | |
8dc3230c | 381 | if (!MEM_ATTRS (mem) |
382 | || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem))) | |
c6259b83 | 383 | { |
25a27413 | 384 | MEM_ATTRS (mem) = ggc_alloc<mem_attrs> (); |
8dc3230c | 385 | memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs)); |
c6259b83 | 386 | } |
73f5c1e3 | 387 | } |
388 | ||
ca74b940 | 389 | /* Returns a hash code for X (which is really a reg_attrs *). */
390 | ||
f863a586 | 391 | hashval_t |
392 | reg_attr_hasher::hash (reg_attrs *x) | |
ca74b940 | 393 | { |
f863a586 | 394 | const reg_attrs *const p = x; |
ca74b940 | 395 | |
a14d43f8 | 396 | inchash::hash h; |
397 | h.add_ptr (p->decl); | |
398 | h.add_poly_hwi (p->offset); | |
399 | return h.end (); | |
ca74b940 | 400 | } |
401 | ||
f863a586 | 402 | /* Returns nonzero if the value represented by X is the same as that given by |
403 | Y. */ | |
ca74b940 | 404 | |
f863a586 | 405 | bool |
406 | reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y) | |
ca74b940 | 407 | { |
f863a586 | 408 | const reg_attrs *const p = x; |
409 | const reg_attrs *const q = y; | |
ca74b940 | 410 | |
a14d43f8 | 411 | return (p->decl == q->decl && known_eq (p->offset, q->offset)); |
ca74b940 | 412 | } |
413 | /* Allocate a new reg_attrs structure for DECL and OFFSET and insert it |
414 | into the hash table if one identical to it is not already in the |
415 | table. Return the interned structure. */ |
416 | ||
417 | static reg_attrs * | |
a14d43f8 | 418 | get_reg_attrs (tree decl, poly_int64 offset) |
ca74b940 | 419 | { |
420 | reg_attrs attrs; | |
ca74b940 | 421 | |
422 | /* If everything is the default, we can just return zero. */ | |
a14d43f8 | 423 | if (decl == 0 && known_eq (offset, 0)) |
ca74b940 | 424 | return 0; |
425 | ||
426 | attrs.decl = decl; | |
427 | attrs.offset = offset; | |
428 | ||
f863a586 | 429 | reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT); |
ca74b940 | 430 | if (*slot == 0) |
431 | { | |
25a27413 | 432 | *slot = ggc_alloc<reg_attrs> (); |
ca74b940 | 433 | memcpy (*slot, &attrs, sizeof (reg_attrs)); |
434 | } | |
435 | ||
f863a586 | 436 | return *slot; |
ca74b940 | 437 | } |
438 | ||
3072d30e | 439 | |
440 | #if !HAVE_blockage | |
e12b44a3 | 441 | /* Generate an empty ASM_INPUT, which is used to block attempts to schedule, |
442 | and to prevent register equivalences from being used across this insn. |
3072d30e | 443 | |
444 | rtx | |
445 | gen_blockage (void) | |
446 | { | |
447 | rtx x = gen_rtx_ASM_INPUT (VOIDmode, ""); | |
448 | MEM_VOLATILE_P (x) = true; | |
449 | return x; | |
450 | } | |
451 | #endif | |
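
A typical consumer is a target's prologue or epilogue expander, which drops a blockage insn so the scheduler cannot move frame accesses across the stack adjustment. A hedged sketch (the surrounding function is hypothetical):

```c
/* Hypothetical fragment of a target's expand_prologue.  The blockage
   is a volatile ASM_INPUT; nothing may be scheduled across it.  */
static void
example_expand_prologue (void)
{
  /* ... emit stack adjustment and register saves here ...  */
  emit_insn (gen_blockage ());  /* scheduling/equivalence fence */
}
```
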
452 | ||
453 | ||
937ca48e | 454 | /* Set the mode and register number of X to MODE and REGNO. */ |
455 | ||
456 | void | |
457 | set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno) | |
458 | { | |
1c0849e5 | 459 | unsigned int nregs = (HARD_REGISTER_NUM_P (regno) |
92d2aec3 | 460 | ? hard_regno_nregs (regno, mode) |
1c0849e5 | 461 | : 1); |
937ca48e | 462 | PUT_MODE_RAW (x, mode); |
1c0849e5 | 463 | set_regno_raw (x, regno, nregs); |
937ca48e | 464 | } |
465 | ||
22cf44bc | 466 | /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and |
467 | don't attempt to share with the various global pieces of rtl (such as | |
468 | frame_pointer_rtx). */ | |
469 | ||
470 | rtx | |
937ca48e | 471 | gen_raw_REG (machine_mode mode, unsigned int regno) |
22cf44bc | 472 | { |
68095389 | 473 | rtx x = rtx_alloc (REG MEM_STAT_INFO); |
937ca48e | 474 | set_mode_and_regno (x, mode, regno); |
15183fd2 | 475 | REG_ATTRS (x) = NULL; |
22cf44bc | 476 | ORIGINAL_REGNO (x) = regno; |
477 | return x; | |
478 | } | |
479 | ||
7014838c | 480 | /* There are some RTL codes that require special attention; the generation |
481 | functions do the raw handling. If you add to this list, modify | |
482 | special_rtx in gengenrtl.c as well. */ | |
483 | ||
ede4900a | 484 | rtx_expr_list * |
3754d046 | 485 | gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list) |
ede4900a | 486 | { |
487 | return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr, | |
488 | expr_list)); | |
489 | } | |
490 | ||
13be9dc6 | 491 | rtx_insn_list * |
3754d046 | 492 | gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list) |
13be9dc6 | 493 | { |
494 | return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn, | |
495 | insn_list)); | |
496 | } | |
497 | ||
f935868a | 498 | rtx_insn * |
3754d046 | 499 | gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn, |
f935868a | 500 | basic_block bb, rtx pattern, int location, int code, |
501 | rtx reg_notes) | |
502 | { | |
503 | return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode, | |
504 | prev_insn, next_insn, | |
505 | bb, pattern, location, code, | |
506 | reg_notes)); | |
507 | } | |
508 | ||
3ad7bb1c | 509 | rtx |
3754d046 | 510 | gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg) |
3ad7bb1c | 511 | { |
512 | if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT) | |
57c097d5 | 513 | return const_int_rtx[arg + MAX_SAVED_CONST_INT]; |
3ad7bb1c | 514 | |
515 | #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1 | |
516 | if (const_true_rtx && arg == STORE_FLAG_VALUE) | |
517 | return const_true_rtx; | |
518 | #endif | |
519 | ||
73f5c1e3 | 520 | /* Look up the CONST_INT in the hash table. */ |
f863a586 | 521 | rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg, |
522 | INSERT); | |
7f2875d3 | 523 | if (*slot == 0) |
d7c47c0e | 524 | *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg); |
73f5c1e3 | 525 | |
f863a586 | 526 | return *slot; |
3ad7bb1c | 527 | } |
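
One consequence of this caching is that CONST_INTs can be compared by pointer rather than by value; a short illustrative sketch:

```c
/* Sketch: small CONST_INTs come from the static const_int_rtx array,
   larger ones are interned in const_int_htab, so pointer equality
   holds either way.  */
gcc_assert (GEN_INT (0) == const0_rtx);
gcc_assert (GEN_INT (1) == const1_rtx);
gcc_assert (GEN_INT (123456) == GEN_INT (123456));  /* same interned rtx */
```
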
528 | ||
2d232d05 | 529 | rtx |
bbad7cd0 | 530 | gen_int_mode (poly_int64 c, machine_mode mode) |
2d232d05 | 531 | { |
bbad7cd0 | 532 | c = trunc_int_for_mode (c, mode); |
533 | if (c.is_constant ()) | |
534 | return GEN_INT (c.coeffs[0]); | |
535 | unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode)); | |
536 | return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode); | |
2d232d05 | 537 | } |
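
Unlike raw `GEN_INT`, `gen_int_mode` first truncates to the mode (sign-extending from the mode's precision), so the result is in the canonical form the rest of the compiler expects. A worked sketch, assuming the usual 8-bit QImode:

```c
/* Sketch: trunc_int_for_mode sign-extends from bit 7 of QImode, so
   the byte value 0xff is canonicalized to -1...  */
rtx a = gen_int_mode (0xff, QImode);
gcc_assert (a == constm1_rtx);
/* ...whereas GEN_INT (0xff) would keep 255, a CONST_INT that is not
   a valid QImode constant.  Prefer gen_int_mode when the mode is
   known.  */
```
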
538 | ||
2ff23ed0 | 539 | /* CONST_DOUBLEs might be created from pairs of integers, or from |
540 | REAL_VALUE_TYPEs. Also, their length is known only at run time, | |
541 | so we cannot use gen_rtx_raw_CONST_DOUBLE. */ | |
542 | ||
543 | /* Determine whether REAL, a CONST_DOUBLE, already exists in the | |
544 | hash table. If so, return its counterpart; otherwise add it | |
545 | to the hash table and return it. */ | |
546 | static rtx | |
35cb5232 | 547 | lookup_const_double (rtx real) |
2ff23ed0 | 548 | { |
f863a586 | 549 | rtx *slot = const_double_htab->find_slot (real, INSERT); |
2ff23ed0 | 550 | if (*slot == 0) |
551 | *slot = real; | |
552 | ||
f863a586 | 553 | return *slot; |
2ff23ed0 | 554 | } |
7f2875d3 | 555 | |
2ff23ed0 | 556 | /* Return a CONST_DOUBLE rtx for a floating-point value specified by |
557 | VALUE in mode MODE. */ | |
67f2a2eb | 558 | rtx |
3754d046 | 559 | const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode) |
67f2a2eb | 560 | { |
2ff23ed0 | 561 | rtx real = rtx_alloc (CONST_DOUBLE); |
562 | PUT_MODE (real, mode); | |
563 | ||
e8aaae4e | 564 | real->u.rv = value; |
2ff23ed0 | 565 | |
566 | return lookup_const_double (real); | |
567 | } | |
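
The REAL_VALUE_TYPE carries the actual value; the rtx wrapper is interned exactly like CONST_INT. For instance, building 0.5 in DFmode from the `dconsthalf` global declared near the top of this file (illustrative):

```c
/* Sketch: interned CONST_DOUBLE for the value 0.5 in DFmode.  */
rtx half = const_double_from_real_value (dconsthalf, DFmode);
gcc_assert (GET_CODE (half) == CONST_DOUBLE);
/* Repeated requests return the same object, courtesy of
   lookup_const_double.  */
gcc_assert (half == const_double_from_real_value (dconsthalf, DFmode));
```
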
568 | ||
e397ad8e | 569 | /* Determine whether FIXED, a CONST_FIXED, already exists in the |
570 | hash table. If so, return its counterpart; otherwise add it | |
571 | to the hash table and return it. */ | |
572 | ||
573 | static rtx | |
574 | lookup_const_fixed (rtx fixed) | |
575 | { | |
f863a586 | 576 | rtx *slot = const_fixed_htab->find_slot (fixed, INSERT); |
e397ad8e | 577 | if (*slot == 0) |
578 | *slot = fixed; | |
579 | ||
f863a586 | 580 | return *slot; |
e397ad8e | 581 | } |
582 | ||
583 | /* Return a CONST_FIXED rtx for a fixed-point value specified by | |
584 | VALUE in mode MODE. */ | |
585 | ||
586 | rtx | |
3754d046 | 587 | const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode) |
e397ad8e | 588 | { |
589 | rtx fixed = rtx_alloc (CONST_FIXED); | |
590 | PUT_MODE (fixed, mode); | |
591 | ||
592 | fixed->u.fv = value; | |
593 | ||
594 | return lookup_const_fixed (fixed); | |
595 | } | |
596 | ||
e913b5cd | 597 | #if TARGET_SUPPORTS_WIDE_INT == 0 |
33274180 | 598 | /* Constructs double_int from rtx CST. */ |
599 | ||
600 | double_int | |
601 | rtx_to_double_int (const_rtx cst) | |
602 | { | |
603 | double_int r; | |
604 | ||
605 | if (CONST_INT_P (cst)) | |
cf8f0e63 | 606 | r = double_int::from_shwi (INTVAL (cst)); |
78f1962f | 607 | else if (CONST_DOUBLE_AS_INT_P (cst)) |
33274180 | 608 | { |
609 | r.low = CONST_DOUBLE_LOW (cst); | |
610 | r.high = CONST_DOUBLE_HIGH (cst); | |
611 | } | |
612 | else | |
613 | gcc_unreachable (); | |
614 | ||
615 | return r; | |
616 | } | |
e913b5cd | 617 | #endif |
618 | ||
619 | #if TARGET_SUPPORTS_WIDE_INT | |
a342dbb2 | 620 | /* Determine whether CONST_WIDE_INT WINT already exists in the hash table. |
621 | If so, return its counterpart; otherwise add it to the hash table and | |
e913b5cd | 622 | return it. */ |
33274180 | 623 | |
e913b5cd | 624 | static rtx |
625 | lookup_const_wide_int (rtx wint) | |
626 | { | |
f863a586 | 627 | rtx *slot = const_wide_int_htab->find_slot (wint, INSERT); |
e913b5cd | 628 | if (*slot == 0) |
629 | *slot = wint; | |
33274180 | 630 | |
f863a586 | 631 | return *slot; |
e913b5cd | 632 | } |
633 | #endif | |
3e052aec | 634 | |
a342dbb2 | 635 | /* Return an rtx constant for V, given that the constant has mode MODE. |
636 | The returned rtx will be a CONST_INT if V fits, otherwise it will be | |
637 | a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT | |
638 | (if TARGET_SUPPORTS_WIDE_INT). */ | |
639 | ||
bbad7cd0 | 640 | static rtx |
641 | immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode) | |
3e052aec | 642 | { |
e913b5cd | 643 | unsigned int len = v.get_len (); |
074473dd | 644 | /* Not scalar_int_mode because we also allow pointer bound modes. */ |
645 | unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode)); | |
e913b5cd | 646 | |
647 | /* Allow truncation but not extension since we do not know if the | |
648 | number is signed or unsigned. */ | |
649 | gcc_assert (prec <= v.get_precision ()); | |
650 | ||
651 | if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT) | |
652 | return gen_int_mode (v.elt (0), mode); | |
653 | ||
654 | #if TARGET_SUPPORTS_WIDE_INT | |
655 | { | |
656 | unsigned int i; | |
657 | rtx value; | |
ddb1be65 | 658 | unsigned int blocks_needed |
e913b5cd | 659 | = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT; |
660 | ||
661 | if (len > blocks_needed) | |
662 | len = blocks_needed; | |
663 | ||
664 | value = const_wide_int_alloc (len); | |
665 | ||
666 | /* It is so tempting to just put the mode in here. Must control | |
667 | myself ... */ | |
668 | PUT_MODE (value, VOIDmode); | |
05c25ee6 | 669 | CWI_PUT_NUM_ELEM (value, len); |
e913b5cd | 670 | |
671 | for (i = 0; i < len; i++) | |
05363b4a | 672 | CONST_WIDE_INT_ELT (value, i) = v.elt (i); |
e913b5cd | 673 | |
674 | return lookup_const_wide_int (value); | |
675 | } | |
676 | #else | |
05363b4a | 677 | return immed_double_const (v.elt (0), v.elt (1), mode); |
e913b5cd | 678 | #endif |
3e052aec | 679 | } |
680 | ||
e913b5cd | 681 | #if TARGET_SUPPORTS_WIDE_INT == 0 |
2ff23ed0 | 682 | /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair |
683 | of ints: I0 is the low-order word and I1 is the high-order word. | |
24cd46a7 | 684 | For values that are larger than HOST_BITS_PER_DOUBLE_INT, the |
db20fb47 | 685 | implied upper bits are copies of the high bit of i1. The value |
686 | itself is neither signed nor unsigned. Do not use this routine for | |
687 | non-integer modes; convert to REAL_VALUE_TYPE and use | |
d5f9611d | 688 | const_double_from_real_value. */ |
2ff23ed0 | 689 | |
690 | rtx | |
3754d046 | 691 | immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode) |
2ff23ed0 | 692 | { |
693 | rtx value; | |
694 | unsigned int i; | |
695 | ||
b1ca4af4 | 696 | /* There are the following cases (note that there are no modes with |
24cd46a7 | 697 | HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT): |
b1ca4af4 | 698 | |
699 | 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use | |
700 | gen_int_mode. | |
db20fb47 | 701 | 2) If the value of the integer fits into HOST_WIDE_INT anyway |
702 | (i.e., i1 consists only from copies of the sign bit, and sign | |
703 | of i0 and i1 are the same), then we return a CONST_INT for i0. | |
b1ca4af4 | 704 | 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */ |
074473dd | 705 | scalar_mode smode; |
706 | if (is_a <scalar_mode> (mode, &smode) | |
707 | && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT) | |
708 | return gen_int_mode (i0, mode); | |
2ff23ed0 | 709 | |
710 | /* If this integer fits in one word, return a CONST_INT. */ | |
711 | if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0)) | |
712 | return GEN_INT (i0); | |
713 | ||
714 | /* We use VOIDmode for integers. */ | |
715 | value = rtx_alloc (CONST_DOUBLE); | |
716 | PUT_MODE (value, VOIDmode); | |
717 | ||
718 | CONST_DOUBLE_LOW (value) = i0; | |
719 | CONST_DOUBLE_HIGH (value) = i1; | |
720 | ||
721 | for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++) | |
722 | XWINT (value, i) = 0; | |
723 | ||
724 | return lookup_const_double (value); | |
67f2a2eb | 725 | } |
e913b5cd | 726 | #endif |
67f2a2eb | 727 | |
bbad7cd0 | 728 | /* Return an rtx representation of C in mode MODE. */ |
729 | ||
730 | rtx | |
731 | immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode) | |
732 | { | |
733 | if (c.is_constant ()) | |
734 | return immed_wide_int_const_1 (c.coeffs[0], mode); | |
735 | ||
736 | /* Not scalar_int_mode because we also allow pointer bound modes. */ | |
737 | unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode)); | |
738 | ||
739 | /* Allow truncation but not extension since we do not know if the | |
740 | number is signed or unsigned. */ | |
741 | gcc_assert (prec <= c.coeffs[0].get_precision ()); | |
742 | poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED); | |
743 | ||
744 | /* See whether we already have an rtx for this constant. */ | |
745 | inchash::hash h; | |
746 | h.add_int (mode); | |
747 | for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
748 | h.add_wide_int (newc.coeffs[i]); | |
749 | const_poly_int_hasher::compare_type typed_value (mode, newc); | |
750 | rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value, | |
751 | h.end (), INSERT); | |
752 | rtx x = *slot; | |
753 | if (x) | |
754 | return x; | |
755 | ||
756 | /* Create a new rtx. There's a choice to be made here between installing | |
757 | the actual mode of the rtx or leaving it as VOIDmode (for consistency | |
758 | with CONST_INT). In practice the handling of the codes is different | |
759 | enough that we get no benefit from using VOIDmode, and various places | |
760 | assume that VOIDmode implies CONST_INT. Using the real mode seems like | |
761 | the right long-term direction anyway. */ | |
762 | typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi; | |
763 | size_t extra_size = twi::extra_size (prec); | |
764 | x = rtx_alloc_v (CONST_POLY_INT, | |
765 | sizeof (struct const_poly_int_def) + extra_size); | |
766 | PUT_MODE (x, mode); | |
767 | CONST_POLY_INT_COEFFS (x).set_precision (prec); | |
768 | for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
769 | CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i]; | |
770 | ||
771 | *slot = x; | |
772 | return x; | |
773 | } | |
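
The net effect of the dispatch above: anything that fits a single HOST_WIDE_INT block comes back as a shared CONST_INT; wider values become CONST_WIDE_INT (or CONST_DOUBLE on targets without wide-int support); and genuinely polynomial values get the CONST_POLY_INT created here. A hedged sketch, assuming a 64-bit HOST_WIDE_INT and 64-bit DImode:

```c
/* Sketch: the representation is chosen purely by width.  */
rtx small = immed_wide_int_const (wi::shwi (42, 64), DImode);
gcc_assert (CONST_INT_P (small));   /* one block -> plain CONST_INT */
/* A full 128-bit value needs two HOST_WIDE_INT blocks and so becomes
   CONST_WIDE_INT (or CONST_DOUBLE if !TARGET_SUPPORTS_WIDE_INT).  */
```
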
774 | ||
3ad7bb1c | 775 | rtx |
3754d046 | 776 | gen_rtx_REG (machine_mode mode, unsigned int regno) |
3ad7bb1c | 777 | { |
778 | /* In case the MD file explicitly references the frame pointer, have | |
779 | all such references point to the same frame pointer. This is | |
780 | used during frame pointer elimination to distinguish the explicit | |
781 | references to these registers from pseudos that happened to be | |
782 | assigned to them. | |
783 | ||
784 | If we have eliminated the frame pointer or arg pointer, we will | |
785 | be using it as a normal register, for example as a spill | |
786 | register. In such cases, we might be accessing it in a mode that | |
787 | is not Pmode and therefore cannot use the pre-allocated rtx. | |
788 | ||
789 | Also don't do this when we are making new REGs in reload, since | |
790 | we don't want to get confused with the real pointers. */ | |
791 | ||
c6a6cdaa | 792 | if (mode == Pmode && !reload_in_progress && !lra_in_progress) |
3ad7bb1c | 793 | { |
71801afc | 794 | if (regno == FRAME_POINTER_REGNUM |
795 | && (!reload_completed || frame_pointer_needed)) | |
3ad7bb1c | 796 | return frame_pointer_rtx; |
f703b3d6 | 797 | |
798 | if (!HARD_FRAME_POINTER_IS_FRAME_POINTER | |
799 | && regno == HARD_FRAME_POINTER_REGNUM | |
71801afc | 800 | && (!reload_completed || frame_pointer_needed)) |
3ad7bb1c | 801 | return hard_frame_pointer_rtx; |
c6bb296a | 802 | #if !HARD_FRAME_POINTER_IS_ARG_POINTER |
803 | if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
804 | && regno == ARG_POINTER_REGNUM) | |
3ad7bb1c | 805 | return arg_pointer_rtx; |
806 | #endif | |
807 | #ifdef RETURN_ADDRESS_POINTER_REGNUM | |
e8b59353 | 808 | if (regno == RETURN_ADDRESS_POINTER_REGNUM) |
3ad7bb1c | 809 | return return_address_pointer_rtx; |
810 | #endif | |
3473aefe | 811 | if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM |
8d43ad05 | 812 | && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM |
6ea47475 | 813 | && fixed_regs[PIC_OFFSET_TABLE_REGNUM]) |
d4c5e26d | 814 | return pic_offset_table_rtx; |
e8b59353 | 815 | if (regno == STACK_POINTER_REGNUM) |
3ad7bb1c | 816 | return stack_pointer_rtx; |
817 | } | |
818 | ||
32b53d83 | 819 | #if 0 |
90295bd2 | 820 | /* If the per-function register table has been set up, try to re-use |
32b53d83 | 821 | an existing entry in that table to avoid useless generation of RTL. |
822 | ||
823 | This code is disabled for now until we can fix the various backends | |
824 | which depend on having non-shared hard registers in some cases. Long | |
825 | term we want to re-enable this code as it can significantly cut down | |
71801afc | 826 | on the amount of useless RTL that gets generated. |
827 | ||
828 | We'll also need to fix some code that runs after reload that wants to | |
829 | set ORIGINAL_REGNO. */ | |
830 | ||
90295bd2 | 831 | if (cfun |
832 | && cfun->emit | |
833 | && regno_reg_rtx | |
834 | && regno < FIRST_PSEUDO_REGISTER | |
835 | && reg_raw_mode[regno] == mode) | |
836 | return regno_reg_rtx[regno]; | |
32b53d83 | 837 | #endif |
90295bd2 | 838 | |
22cf44bc | 839 | return gen_raw_REG (mode, regno); |
3ad7bb1c | 840 | } |
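
During normal expansion (neither reload nor LRA in progress) the special-casing above means Pmode requests for these registers hand back the pre-allocated globals, so pointer comparisons against `frame_pointer_rtx` and friends are reliable. Illustrative sketch:

```c
/* Sketch: before reload, Pmode references to the special hard
   registers resolve to the shared global rtxs.  */
rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
gcc_assert (fp == frame_pointer_rtx);
rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
gcc_assert (sp == stack_pointer_rtx);
```
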
841 | ||
b5ba9f3a | 842 | rtx |
3754d046 | 843 | gen_rtx_MEM (machine_mode mode, rtx addr) |
b5ba9f3a | 844 | { |
845 | rtx rt = gen_rtx_raw_MEM (mode, addr); | |
846 | ||
847 | /* This field is not cleared by the mere allocation of the rtx, so | |
848 | we clear it here. */ | |
c6259b83 | 849 | MEM_ATTRS (rt) = 0; |
b5ba9f3a | 850 | |
851 | return rt; | |
852 | } | |
701e46d0 | 853 | |
e265a6da | 854 | /* Generate a memory referring to non-trapping constant memory. */ |
855 | ||
856 | rtx | |
3754d046 | 857 | gen_const_mem (machine_mode mode, rtx addr) |
e265a6da | 858 | { |
859 | rtx mem = gen_rtx_MEM (mode, addr); | |
860 | MEM_READONLY_P (mem) = 1; | |
861 | MEM_NOTRAP_P (mem) = 1; | |
862 | return mem; | |
863 | } | |
864 | ||
00060fc2 | 865 | /* Generate a MEM referring to fixed portions of the frame, e.g., register |
866 | save areas. */ | |
867 | ||
868 | rtx | |
3754d046 | 869 | gen_frame_mem (machine_mode mode, rtx addr) |
00060fc2 | 870 | { |
871 | rtx mem = gen_rtx_MEM (mode, addr); | |
872 | MEM_NOTRAP_P (mem) = 1; | |
873 | set_mem_alias_set (mem, get_frame_alias_set ()); | |
874 | return mem; | |
875 | } | |
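
The MEM constructors differ only in which attribute bits they pre-set; a hedged side-by-side sketch, where `addr` stands for any valid address rtx already in hand:

```c
/* Sketch: same MEM shape, different default attributes.  */
rtx m1 = gen_rtx_MEM (SImode, addr);    /* no flags preset */
rtx m2 = gen_const_mem (SImode, addr);  /* MEM_READONLY_P + MEM_NOTRAP_P */
rtx m3 = gen_frame_mem (SImode, addr);  /* MEM_NOTRAP_P + frame alias set */
gcc_assert (MEM_READONLY_P (m2) && MEM_NOTRAP_P (m2));
gcc_assert (MEM_NOTRAP_P (m3) && !MEM_READONLY_P (m3));
```
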
876 | ||
877 | /* Generate a MEM referring to a temporary use of the stack, not part | |
878 | of the fixed stack frame. For example, something which is pushed | |
879 | by a target splitter. */ | |
880 | rtx | |
3754d046 | 881 | gen_tmp_stack_mem (machine_mode mode, rtx addr) |
00060fc2 | 882 | { |
883 | rtx mem = gen_rtx_MEM (mode, addr); | |
884 | MEM_NOTRAP_P (mem) = 1; | |
18d50ae6 | 885 | if (!cfun->calls_alloca) |
00060fc2 | 886 | set_mem_alias_set (mem, get_frame_alias_set ()); |
887 | return mem; | |
888 | } | |
889 | ||
2166bbaa | 890 | /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if |
891 | this construct would be valid, and false otherwise. */ | |
892 | ||
893 | bool | |
3754d046 | 894 | validate_subreg (machine_mode omode, machine_mode imode, |
9edf7ea8 | 895 | const_rtx reg, poly_uint64 offset) |
701e46d0 | 896 | { |
68cc7e7b | 897 | poly_uint64 isize = GET_MODE_SIZE (imode); |
898 | poly_uint64 osize = GET_MODE_SIZE (omode); | |
899 | ||
900 | /* The sizes must be ordered, so that we know whether the subreg | |
901 | is partial, paradoxical or complete. */ | |
902 | if (!ordered_p (isize, osize)) | |
903 | return false; | |
2166bbaa | 904 | |
905 | /* All subregs must be aligned. */ | |
9edf7ea8 | 906 | if (!multiple_p (offset, osize)) |
2166bbaa | 907 | return false; |
908 | ||
909 | /* The subreg offset cannot be outside the inner object. */ | |
9edf7ea8 | 910 | if (maybe_ge (offset, isize)) |
2166bbaa | 911 | return false; |
912 | ||
68cc7e7b | 913 | poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode); |
44ce7b27 | 914 | |
2166bbaa | 915 | /* ??? This should not be here. Temporarily continue to allow word_mode |
916 | subregs of anything. The most common offender is (subreg:SI (reg:DF)). | |
917 | Generally, backends are doing something sketchy but it'll take time to | |
918 | fix them all. */ | |
919 | if (omode == word_mode) | |
920 | ; | |
921 | /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field | |
922 | is the culprit here, and not the backends. */ | |
68cc7e7b | 923 | else if (known_ge (osize, regsize) && known_ge (isize, osize)) |
2166bbaa | 924 | ; |
925 | /* Allow component subregs of complex and vector. Though given the below | |
926 | extraction rules, it's not always clear what that means. */ | |
927 | else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode)) | |
928 | && GET_MODE_INNER (imode) == omode) | |
929 | ; | |
930 | /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs, | |
931 | i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to | |
932 | represent this. It's questionable if this ought to be represented at | |
933 | all -- why can't this all be hidden in post-reload splitters that make | |
934 | arbitrary mode changes to the registers themselves. */ |
935 | else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode) | |
936 | ; | |
937 | /* Subregs involving floating point modes are not allowed to | |
938 | change size. Therefore (subreg:DI (reg:DF) 0) is fine, but | |
939 | (subreg:SI (reg:DF) 0) isn't. */ | |
940 | else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode)) | |
941 | { | |
68cc7e7b | 942 | if (! (known_eq (isize, osize) |
c6a6cdaa | 943 | /* LRA can use subreg to store a floating point value in |
944 | an integer mode. Although the floating point and the | |
945 | integer modes need the same number of hard registers, | |
946 | the size of the floating point mode can be less than that of the |
947 | integer mode. LRA also uses subregs for a register that |
948 | should be used in a different mode in one insn. */ |
949 | || lra_in_progress)) | |
2166bbaa | 950 | return false; |
951 | } | |
701e46d0 | 952 | |
2166bbaa | 953 | /* Paradoxical subregs must have offset zero. */ |
68cc7e7b | 954 | if (maybe_gt (osize, isize)) |
9edf7ea8 | 955 | return known_eq (offset, 0U); |
2166bbaa | 956 | |
957 | /* This is a normal subreg. Verify that the offset is representable. */ | |
958 | ||
959 | /* For hard registers, we already have most of these rules collected in | |
960 | subreg_offset_representable_p. */ | |
961 | if (reg && REG_P (reg) && HARD_REGISTER_P (reg)) | |
962 | { | |
963 | unsigned int regno = REGNO (reg); | |
964 | ||
2166bbaa | 965 | if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode)) |
966 | && GET_MODE_INNER (imode) == omode) | |
967 | ; | |
b56a9dbc | 968 | else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode)) |
2166bbaa | 969 | return false; |
2166bbaa | 970 | |
971 | return subreg_offset_representable_p (regno, imode, offset, omode); | |
972 | } | |
973 | ||
68cc7e7b | 974 | /* The outer size must be ordered wrt the register size, otherwise |
975 | we wouldn't know at compile time how many registers the outer | |
976 | mode occupies. */ | |
977 | if (!ordered_p (osize, regsize)) | |
978 | return false; | |
979 | ||
2166bbaa | 980 | /* For pseudo registers, we want most of the same checks. Namely: |
44ce7b27 | 981 | |
982 | Assume that the pseudo register will be allocated to hard registers | |
983 | that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE, | |
984 | the remainder must correspond to the lowpart of the containing hard | |
985 | register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset, | |
986 | otherwise it is at the lowest offset. | |
987 | ||
988 | Given that we've already checked the mode and offset alignment, | |
989 | we only have to check subblock subregs here. */ | |
68cc7e7b | 990 | if (maybe_lt (osize, regsize) |
c6a6cdaa | 991 | && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode)))) |
2166bbaa | 992 | { |
68cc7e7b | 993 | /* It is invalid for the target to pick a register size for a mode |
994 | that isn't ordered wrt the size of that mode. */ |
995 | poly_uint64 block_size = ordered_min (isize, regsize); | |
9edf7ea8 | 996 | unsigned int start_reg; |
997 | poly_uint64 offset_within_reg; | |
998 | if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg) | |
999 | || (BYTES_BIG_ENDIAN | |
1000 | ? maybe_ne (offset_within_reg, block_size - osize) | |
1001 | : maybe_ne (offset_within_reg, 0U))) | |
2166bbaa | 1002 | return false; |
1003 | } | |
1004 | return true; | |
1005 | } | |
1006 | ||
1007 | rtx | |
9edf7ea8 | 1008 | gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset) |
2166bbaa | 1009 | { |
1010 | gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset)); | |
2ff23ed0 | 1011 | return gen_rtx_raw_SUBREG (mode, reg, offset); |
701e46d0 | 1012 | } |
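
Concretely, on a 32-bit little-endian target a DImode pseudo spans two words and each word is addressable as a subreg, while a paradoxical subreg (outer mode wider than inner) must sit at offset zero. A hedged sketch under those assumptions:

```c
/* Sketch (32-bit little-endian target): byte offsets select the
   word-sized pieces of a DImode pseudo.  */
rtx reg64 = gen_reg_rtx (DImode);
rtx lo = gen_rtx_SUBREG (SImode, reg64, 0);  /* least significant word */
rtx hi = gen_rtx_SUBREG (SImode, reg64, 4);  /* most significant word */
/* Paradoxical direction: offset must be 0.  */
rtx wide = gen_rtx_SUBREG (DImode, gen_reg_rtx (SImode), 0);
```
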
1013 | ||
c6259b83 | 1014 | /* Generate a SUBREG representing the least-significant part of REG if MODE |
1015 | is smaller than the mode of REG, otherwise a paradoxical SUBREG. */ |
1016 | ||
701e46d0 | 1017 | rtx |
3754d046 | 1018 | gen_lowpart_SUBREG (machine_mode mode, rtx reg) |
701e46d0 | 1019 | { |
3754d046 | 1020 | machine_mode inmode; |
701e46d0 | 1021 | |
1022 | inmode = GET_MODE (reg); | |
1023 | if (inmode == VOIDmode) | |
1024 | inmode = mode; | |
81802af6 | 1025 | return gen_rtx_SUBREG (mode, reg, |
1026 | subreg_lowpart_offset (mode, inmode)); | |
701e46d0 | 1027 | } |
e1398578 | 1028 | |
1029 | rtx | |
3754d046 | 1030 | gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc, |
e1398578 | 1031 | enum var_init_status status) |
1032 | { | |
1033 | rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc); | |
1034 | PAT_VAR_LOCATION_STATUS (x) = status; | |
1035 | return x; | |
1036 | } | |
7014838c | 1037 | \f |
15bbde2b | 1038 | |
cf9ac040 | 1039 | /* Create an rtvec and store within it the RTXen passed in the arguments. */
1040 | ||
15bbde2b | 1041 | rtvec |
ee582a61 | 1042 | gen_rtvec (int n, ...) |
15bbde2b | 1043 | { |
cf9ac040 | 1044 | int i; |
1045 | rtvec rt_val; | |
ee582a61 | 1046 | va_list p; |
15bbde2b | 1047 | |
ee582a61 | 1048 | va_start (p, n); |
15bbde2b | 1049 | |
cf9ac040 | 1050 | /* Don't allocate an empty rtvec... */ |
15bbde2b | 1051 | if (n == 0) |
451c8e2f | 1052 | { |
1053 | va_end (p); | |
1054 | return NULL_RTVEC; | |
1055 | } | |
15bbde2b | 1056 | |
cf9ac040 | 1057 | rt_val = rtvec_alloc (n); |
e5fcd76a | 1058 | |
15bbde2b | 1059 | for (i = 0; i < n; i++) |
cf9ac040 | 1060 | rt_val->elem[i] = va_arg (p, rtx); |
7ad77798 | 1061 | |
ee582a61 | 1062 | va_end (p); |
cf9ac040 | 1063 | return rt_val; |
15bbde2b | 1064 | } |
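
rtvecs mostly feed the vector-operand rtx constructors; the classic use is bundling several SETs into one PARALLEL pattern. A hedged sketch, where `set1` and `set2` stand for SET rtxes built elsewhere:

```c
/* Sketch: wrap two hypothetical SETs in a single PARALLEL.  */
rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2));
gcc_assert (XVECLEN (par, 0) == 2);
gcc_assert (XVECEXP (par, 0, 0) == set1);
```
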
1065 | ||
1066 | rtvec | |
35cb5232 | 1067 | gen_rtvec_v (int n, rtx *argp) |
15bbde2b | 1068 | { |
19cb6b50 | 1069 | int i; |
1070 | rtvec rt_val; | |
15bbde2b | 1071 | |
cf9ac040 | 1072 | /* Don't allocate an empty rtvec... */ |
15bbde2b | 1073 | if (n == 0) |
cf9ac040 | 1074 | return NULL_RTVEC; |
15bbde2b | 1075 | |
cf9ac040 | 1076 | rt_val = rtvec_alloc (n); |
15bbde2b | 1077 | |
1078 | for (i = 0; i < n; i++) | |
a4070a91 | 1079 | rt_val->elem[i] = *argp++; |
15bbde2b | 1080 | |
1081 | return rt_val; | |
1082 | } | |
f17e3fff | 1083 | |
1084 | rtvec | |
1085 | gen_rtvec_v (int n, rtx_insn **argp) | |
1086 | { | |
1087 | int i; | |
1088 | rtvec rt_val; | |
1089 | ||
1090 | /* Don't allocate an empty rtvec... */ | |
1091 | if (n == 0) | |
1092 | return NULL_RTVEC; | |
1093 | ||
1094 | rt_val = rtvec_alloc (n); | |
1095 | ||
1096 | for (i = 0; i < n; i++) | |
1097 | rt_val->elem[i] = *argp++; | |
1098 | ||
1099 | return rt_val; | |
1100 | } | |
1101 | ||
15bbde2b | 1102 | \f |
80c70e76 | 1103 | /* Return the number of bytes between the start of an OUTER_MODE |
1104 | in-memory value and the start of an INNER_MODE in-memory value, | |
1105 | given that the former is a lowpart of the latter. It may be a | |
1106 | paradoxical lowpart, in which case the offset will be negative | |
1107 | on big-endian targets. */ | |
1108 | ||
9edf7ea8 | 1109 | poly_int64 |
3754d046 | 1110 | byte_lowpart_offset (machine_mode outer_mode, |
1111 | machine_mode inner_mode) | |
80c70e76 | 1112 | { |
d0257d43 | 1113 | if (paradoxical_subreg_p (outer_mode, inner_mode)) |
80c70e76 | 1114 | return -subreg_lowpart_offset (inner_mode, outer_mode); |
d0257d43 | 1115 | else |
1116 | return subreg_lowpart_offset (outer_mode, inner_mode); | |
80c70e76 | 1117 | } |
57689c10 | 1118 | |
1119 | /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET) | |
1120 | from address X. For paradoxical big-endian subregs this is a | |
1121 | negative value, otherwise it's the same as OFFSET. */ | |
1122 | ||
9edf7ea8 | 1123 | poly_int64 |
57689c10 | 1124 | subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode, |
9edf7ea8 | 1125 | poly_uint64 offset) |
57689c10 | 1126 | { |
1127 | if (paradoxical_subreg_p (outer_mode, inner_mode)) | |
1128 | { | |
9edf7ea8 | 1129 | gcc_assert (known_eq (offset, 0U)); |
57689c10 | 1130 | return -subreg_lowpart_offset (inner_mode, outer_mode); |
1131 | } | |
1132 | return offset; | |
1133 | } | |
1134 | ||
1135 | /* As above, but return the offset that existing subreg X would have | |
1136 | if SUBREG_REG (X) were stored in memory. The only significant thing | |
1137 | about the current SUBREG_REG is its mode. */ | |
1138 | ||
9edf7ea8 | 1139 | poly_int64 |
57689c10 | 1140 | subreg_memory_offset (const_rtx x) |
1141 | { | |
1142 | return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)), | |
1143 | SUBREG_BYTE (x)); | |
1144 | } | |
80c70e76 | 1145 | \f |
15bbde2b | 1146 | /* Generate a REG rtx for a new pseudo register of mode MODE. |
1147 | This pseudo is assigned the next sequential register number. */ | |
1148 | ||
1149 | rtx | |
3754d046 | 1150 | gen_reg_rtx (machine_mode mode) |
15bbde2b | 1151 | { |
19cb6b50 | 1152 | rtx val; |
27a7a23a | 1153 | unsigned int align = GET_MODE_ALIGNMENT (mode); |
15bbde2b | 1154 | |
1b7ff857 | 1155 | gcc_assert (can_create_pseudo_p ()); |
15bbde2b | 1156 | |
27a7a23a | 1157 | /* If a virtual register with bigger mode alignment is generated, |
1158 | increase stack alignment estimation because it might be spilled | |
1159 | to stack later. */ | |
48e1416a | 1160 | if (SUPPORTS_STACK_ALIGNMENT |
27a7a23a | 1161 | && crtl->stack_alignment_estimated < align |
1162 | && !crtl->stack_realign_processed) | |
8645d3e7 | 1163 | { |
1164 | unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align); | |
1165 | if (crtl->stack_alignment_estimated < min_align) | |
1166 | crtl->stack_alignment_estimated = min_align; | |
1167 | } | |
27a7a23a | 1168 | |
316bc009 | 1169 | if (generating_concat_p |
1170 | && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT | |
1171 | || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)) | |
76c37538 | 1172 | { |
1173 | /* For complex modes, don't make a single pseudo. | |
1174 | Instead, make a CONCAT of two pseudos. | |
1175 | This allows noncontiguous allocation of the real and imaginary parts, | |
1176 | which makes much better code. Besides, allocating DCmode | |
1177 | pseudos overstrains reload on some machines like the 386. */ | |
1178 | rtx realpart, imagpart; | |
3754d046 | 1179 | machine_mode partmode = GET_MODE_INNER (mode); |
76c37538 | 1180 | |
1181 | realpart = gen_reg_rtx (partmode); | |
1182 | imagpart = gen_reg_rtx (partmode); | |
3ad7bb1c | 1183 | return gen_rtx_CONCAT (mode, realpart, imagpart); |
76c37538 | 1184 | } |
1185 | ||
b4c6ce9b | 1186 | /* Do not call gen_reg_rtx with uninitialized crtl. */ |
1187 | gcc_assert (crtl->emit.regno_pointer_align_length); | |
1188 | ||
cd769037 | 1189 | crtl->emit.ensure_regno_capacity (); |
1190 | gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length); | |
15bbde2b | 1191 | |
cd769037 | 1192 | val = gen_raw_REG (mode, reg_rtx_no); |
1193 | regno_reg_rtx[reg_rtx_no++] = val; | |
1194 | return val; | |
1195 | } | |
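
Note the complex-mode special case: while `generating_concat_p` is set (i.e., during expansion), a complex pseudo is really a CONCAT of two part-mode pseudos. A short illustrative sketch:

```c
/* Sketch: complex pseudos become CONCATs so the real and imaginary
   parts can be allocated independently.  */
rtx c = gen_reg_rtx (DCmode);                   /* complex double */
gcc_assert (GET_CODE (c) == CONCAT);
gcc_assert (GET_MODE (XEXP (c, 0)) == DFmode);  /* real part */
gcc_assert (GET_MODE (XEXP (c, 1)) == DFmode);  /* imaginary part */
```
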
fcdc122e | 1196 | |
cd769037 | 1197 | /* Make sure m_regno_pointer_align and regno_reg_rtx are large
1198 | enough to have elements in the range 0 <= idx <= reg_rtx_no. */ | |
0a893c29 | 1199 | |
cd769037 | 1200 | void |
1201 | emit_status::ensure_regno_capacity () | |
1202 | { | |
1203 | int old_size = regno_pointer_align_length; | |
15bbde2b | 1204 | |
cd769037 | 1205 | if (reg_rtx_no < old_size) |
1206 | return; | |
15bbde2b | 1207 | |
cd769037 | 1208 | int new_size = old_size * 2; |
1209 | while (reg_rtx_no >= new_size) | |
1210 | new_size *= 2; | |
1211 | ||
1212 | char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size); | |
1213 | memset (tmp + old_size, 0, new_size - old_size); | |
1214 | regno_pointer_align = (unsigned char *) tmp; | |
1215 | ||
1216 | rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size); | |
1217 | memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx)); | |
1218 | regno_reg_rtx = new1; | |
1219 | ||
1220 | crtl->emit.regno_pointer_align_length = new_size; | |
15bbde2b | 1221 | } |
1222 | ||
ea239197 | 1223 | /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1224 | ||
1225 | bool | |
1226 | reg_is_parm_p (rtx reg) | |
1227 | { | |
1228 | tree decl; | |
1229 | ||
1230 | gcc_assert (REG_P (reg)); | |
1231 | decl = REG_EXPR (reg); | |
1232 | return (decl && TREE_CODE (decl) == PARM_DECL); | |
1233 | } | |
1234 | ||
80c70e76 | 1235 | /* Update NEW with the same attributes as REG, but with OFFSET added |
1236 | to the REG_OFFSET. */ | |
ca74b940 | 1237 | |
1a6a0f2a | 1238 | static void |
a14d43f8 | 1239 | update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset) |
ca74b940 | 1240 | { |
9ce37fa7 | 1241 | REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg), |
a14d43f8 | 1242 | REG_OFFSET (reg) + offset); |
1a6a0f2a | 1243 | } |
1244 | ||
80c70e76 | 1245 | /* Generate a register with same attributes as REG, but with OFFSET |
1246 | added to the REG_OFFSET. */ | |
1a6a0f2a | 1247 | |
1248 | rtx | |
3754d046 | 1249 | gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno, |
a14d43f8 | 1250 | poly_int64 offset) |
1a6a0f2a | 1251 | { |
9ce37fa7 | 1252 | rtx new_rtx = gen_rtx_REG (mode, regno); |
1a6a0f2a | 1253 | |
9ce37fa7 | 1254 | update_reg_offset (new_rtx, reg, offset); |
1255 | return new_rtx; | |
1a6a0f2a | 1256 | } |
1257 | ||
1258 | /* Generate a new pseudo-register with the same attributes as REG, but | |
80c70e76 | 1259 | with OFFSET added to the REG_OFFSET. */ |
1a6a0f2a | 1260 | |
1261 | rtx | |
3754d046 | 1262 | gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset) |
1a6a0f2a | 1263 | { |
9ce37fa7 | 1264 | rtx new_rtx = gen_reg_rtx (mode); |
1a6a0f2a | 1265 | |
9ce37fa7 | 1266 | update_reg_offset (new_rtx, reg, offset); |
1267 | return new_rtx; | |
ca74b940 | 1268 | } |
1269 | ||
80c70e76 | 1270 | /* Adjust REG in-place so that it has mode MODE. It is assumed that the |
1271 | new register is a (possibly paradoxical) lowpart of the old one. */ | |
ca74b940 | 1272 | |
1273 | void | |
3754d046 | 1274 | adjust_reg_mode (rtx reg, machine_mode mode) |
ca74b940 | 1275 | { |
80c70e76 | 1276 | update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg))); |
1277 | PUT_MODE (reg, mode); | |
1278 | } | |
1279 | ||
1280 | /* Copy REG's attributes from X, if X has any attributes. If REG and X | |
1281 | have different modes, REG is a (possibly paradoxical) lowpart of X. */ | |
1282 | ||
1283 | void | |
1284 | set_reg_attrs_from_value (rtx reg, rtx x) | |
1285 | { | |
a14d43f8 | 1286 | poly_int64 offset; |
e623c80a | 1287 | bool can_be_reg_pointer = true; |
1288 | ||
1289 | /* Don't call mark_reg_pointer for incompatible pointer sign | |
1290 | extension. */ | |
1291 | while (GET_CODE (x) == SIGN_EXTEND | |
1292 | || GET_CODE (x) == ZERO_EXTEND | |
1293 | || GET_CODE (x) == TRUNCATE | |
1294 | || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x))) | |
1295 | { | |
4dd7c283 | 1296 | #if defined(POINTERS_EXTEND_UNSIGNED) |
1297 | if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED) | |
afcace5c | 1298 | || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED) |
1299 | || (paradoxical_subreg_p (x) | |
1300 | && ! (SUBREG_PROMOTED_VAR_P (x) | |
1301 | && SUBREG_CHECK_PROMOTED_SIGN (x, | |
1302 | POINTERS_EXTEND_UNSIGNED)))) | |
4dd7c283 | 1303 | && !targetm.have_ptr_extend ()) |
e623c80a | 1304 | can_be_reg_pointer = false; |
1305 | #endif | |
1306 | x = XEXP (x, 0); | |
1307 | } | |
80c70e76 | 1308 | |
ac56145e | 1309 | /* Hard registers can be reused for multiple purposes within the same |
1310 | function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN | |
1311 | on them is wrong. */ | |
1312 | if (HARD_REGISTER_P (reg)) | |
1313 | return; | |
1314 | ||
80c70e76 | 1315 | offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x)); |
ae12ddda | 1316 | if (MEM_P (x)) |
1317 | { | |
da443c27 | 1318 | if (MEM_OFFSET_KNOWN_P (x)) |
1319 | REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x), | |
1320 | MEM_OFFSET (x) + offset); | |
e623c80a | 1321 | if (can_be_reg_pointer && MEM_POINTER (x)) |
40b93dba | 1322 | mark_reg_pointer (reg, 0); |
ae12ddda | 1323 | } |
1324 | else if (REG_P (x)) | |
1325 | { | |
1326 | if (REG_ATTRS (x)) | |
1327 | update_reg_offset (reg, x, offset); | |
e623c80a | 1328 | if (can_be_reg_pointer && REG_POINTER (x)) |
ae12ddda | 1329 | mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x))); |
1330 | } | |
1331 | } | |
1332 | ||
1333 | /* Generate a REG rtx for a new pseudo register, copying the mode | |
1334 | and attributes from X. */ | |
1335 | ||
1336 | rtx | |
1337 | gen_reg_rtx_and_attrs (rtx x) | |
1338 | { | |
1339 | rtx reg = gen_reg_rtx (GET_MODE (x)); | |
1340 | set_reg_attrs_from_value (reg, x); | |
1341 | return reg; | |
ca74b940 | 1342 | } |
1343 | ||
263c416c | 1344 | /* Set the register attributes for registers contained in PARM_RTX. |
1345 | Use needed values from memory attributes of MEM. */ | |
1346 | ||
1347 | void | |
35cb5232 | 1348 | set_reg_attrs_for_parm (rtx parm_rtx, rtx mem) |
263c416c | 1349 | { |
8ad4c111 | 1350 | if (REG_P (parm_rtx)) |
80c70e76 | 1351 | set_reg_attrs_from_value (parm_rtx, mem); |
263c416c | 1352 | else if (GET_CODE (parm_rtx) == PARALLEL) |
1353 | { | |
1354 | /* Check for a NULL entry in the first slot, used to indicate that the | |
1355 | parameter goes both on the stack and in registers. */ | |
1356 | int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1; | |
1357 | for (; i < XVECLEN (parm_rtx, 0); i++) | |
1358 | { | |
1359 | rtx x = XVECEXP (parm_rtx, 0, i); | |
8ad4c111 | 1360 | if (REG_P (XEXP (x, 0))) |
263c416c | 1361 | REG_ATTRS (XEXP (x, 0)) |
1362 | = get_reg_attrs (MEM_EXPR (mem), | |
1363 | INTVAL (XEXP (x, 1))); | |
1364 | } | |
1365 | } | |
1366 | } | |
1367 | ||
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

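/* ALIGN is in bits.  For example (illustrative only): after
   mark_reg_pointer (reg, 64), the pointer is recorded as 64-bit
   aligned; a later mark_reg_pointer (reg, 32) lowers the recorded
   alignment to 32, since the weaker guarantee is all that can still be
   promised, while mark_reg_pointer (reg, 128) leaves the recorded
   64-bit alignment untouched.  */
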
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* For use by the RTL function loader, when mingling with normal
   functions.
   Ensure that label_num is greater than the label num of X, to avoid
   duplicate labels in the generated assembler.  */

void
maybe_set_max_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) >= label_num)
    label_num = CODE_LABEL_NUMBER (x) + 1;
}

\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && known_le (msize * BITS_PER_UNIT,
		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* The size of the outer and inner modes must be ordered.  */
  poly_uint64 xsize = GET_MODE_SIZE (innermode);
  if (!ordered_p (msize, xsize))
    return 0;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (maybe_gt (msize, xsize))
	return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.  */
      poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs, xregs;
      if (!can_div_away_from_zero_p (msize, regsize, &mregs)
	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
	  || mregs > xregs)
	return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
	   || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
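
/* For instance (an illustrative sketch): given an SImode register R,
   gen_lowpart_common (SImode, (sign_extend:DI R)) simply returns R;
   gen_lowpart_common (HImode, ...) recurses and yields (subreg:HI R 0)
   on a little-endian target; and for (sign_extend:TI (reg:HI H)),
   asking for DImode builds the narrower extension (sign_extend:DI H).  */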
\f
/* Return a value representing the high-order part of X in MODE.  */

rtx
gen_highpart (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP's operand in case EXP
   is a VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}

/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_assert (known_ge (inner_bytes, outer_bytes));

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
					(inner_bytes - outer_bytes)
					* BITS_PER_UNIT);
}

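/* Worked example (illustrative): for a DImode inner value
   (INNER_BYTES == 8) and an SImode outer mode (OUTER_BYTES == 4),
   the lowpart offset is 0 and the highpart offset is 4 on a
   little-endian target; on a big-endian target the lowpart offset
   is 4 and the highpart offset is 0.  */
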
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return known_eq (subreg_lowpart_offset (GET_MODE (x),
					  GET_MODE (SUBREG_REG (x))),
		   SUBREG_BYTE (x));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.  Nowadays most uses of this function
   can be replaced by simplify_subreg.  */

rtx
operand_subword (rtx op, poly_uint64 offset, int validate_address,
		 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

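/* For example (a sketch, assuming a 32-bit little-endian target with
   UNITS_PER_WORD == 4 and word_mode == SImode): for a DImode pseudo D,

     operand_subword (d, 0, 1, DImode)  ->  (subreg:SI (reg:DI D) 0)
     operand_subword (d, 1, 1, DImode)  ->  (subreg:SI (reg:DI D) 4)
     operand_subword (d, 2, 1, DImode)  ->  const0_rtx  (outside OP)

   with word_mode supplying the mode of each subword.  */
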
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
mem_attrs::mem_attrs ()
  : expr (NULL_TREE),
    offset (0),
    size (0),
    alias (0),
    align (0),
    addrspace (ADDR_SPACE_GENERIC),
    offset_known_p (false),
    size_known_p (false)
{}

/* Return 1 if the two MEM_EXPR trees EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  poly_uint64 offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	   || (MAX (MEM_ALIGN (mem),
		    MAX (align, get_object_alignment (MEM_EXPR (mem))))
	       < align))
	 return -1;
       else
	 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  poly_uint64 suboffset;
	  if (!byte_offset
	      || !poly_int_tree_p (byte_offset, &suboffset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += suboffset;
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  HOST_WIDE_INT misalign;
  if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
    return -1;
  return misalign;
}

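/* Worked example (illustrative): for a MEM whose MEM_EXPR is a decl
   with DECL_ALIGN of 256 bits and whose MEM_OFFSET is 12, asking
   get_mem_align_offset (mem, 64) for 64-bit (8-byte) alignment
   returns 12 % 8 == 4, i.e. the address is 4 bytes past an 8-byte
   boundary; asking for more alignment than the decl provides
   (e.g. 512 bits) returns -1.  */
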
/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 poly_int64 bitpos)
{
  poly_int64 apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object or if
     this is an INDIRECT_REF.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF)
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  new_size = DECL_SIZE_UNIT (t);
	}

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
	;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2)
	      || (TREE_CODE (t2) == COMPONENT_REF
		  /* For trailing arrays t2 doesn't have a size that
		     covers all valid accesses.  */
		  && ! array_at_struct_end_p (t)))
	    {
	      attrs.expr = t2;
	      attrs.offset_known_p = false;
	      if (poly_int_tree_p (off_tree, &attrs.offset))
		{
		  attrs.offset_known_p = true;
		  apply_bitpos = bitpos;
		}
	    }
	  /* Else do not record a MEM_EXPR.  */
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
      if (diff_align != 0)
	obj_align = MIN (obj_align, diff_align);
      attrs.align = MAX (attrs.align, obj_align);
    }

  poly_uint64 const_size;
  if (poly_int_tree_p (new_size, &const_size))
    {
      attrs.size_known_p = true;
      attrs.size = const_size;
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (maybe_ne (apply_bitpos, 0))
    {
      gcc_assert (attrs.offset_known_p);
      poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
      attrs.offset -= bytepos;
      if (attrs.size_known_p)
	attrs.size += bytepos;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, poly_int64 offset)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, poly_int64 size)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
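
/* All of the setters above follow the same copy-modify-commit pattern:
   take a local copy of the shared mem_attrs, change one field, and let
   set_mem_attrs re-hash it.  An illustrative (hypothetical) combined
   use:

     mem_attrs attrs (*get_mem_attrs (mem));
     attrs.align = 64;
     attrs.size_known_p = true;
     attrs.size = 8;
     set_mem_attrs (mem, &attrs);

   updates several fields with a single re-hash instead of going
   through set_mem_align and set_mem_size separately.  */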
\f
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
		  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in the most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF's base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
		  int validate, int adjust_address, int adjust_object,
		  poly_int64 size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  scalar_int_mode address_mode;
  struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  scalar_int_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref)
      && known_eq (offset, 0)
      && (known_eq (size, 0)
	  || (attrs.size_known_p && known_eq (attrs.size, size)))
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  offset = trunc_int_for_mode (offset, address_mode);

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode
	  && GET_CODE (addr) == LO_SUM
	  && known_in_range_p (offset,
			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
				   / BITS_PER_UNIT)))
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (address_mode,
					      XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
	 the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
	       && GET_CODE (addr) == ZERO_EXTEND
	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
	addr = gen_rtx_ZERO_EXTEND (address_mode,
				    plus_constant (pointer_mode,
						   XEXP (addr, 0), offset));
#endif
      else
	addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && maybe_ne (offset, 0))
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && maybe_lt (attrs.offset, 0))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (maybe_ne (offset, 0))
    {
      max_align = known_alignment (offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (maybe_ne (size, 0))
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && maybe_gt (offset + size, attrs.size))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
	 so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}

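/* Typical use is through the adjust_address macro (a sketch, assuming a
   little-endian target): given MEM, an SImode memory reference,

     rtx byte2 = adjust_address (mem, QImode, 2);

   produces a QImode reference two bytes past MEM's address, with
   MEM_OFFSET (if known) increased by 2, MEM_SIZE reset to 1 from
   QImode, and MEM_ALIGN capped at 16 bits because the lowest set bit
   of the offset is 2.  */
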
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
			     poly_int64 offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  poly_uint64 size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  mem_attrs attrs (*get_mem_attrs (new_rtx));

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
	      && known_ge (attrs.offset, 0))
	    break;

	  poly_uint64 suboffset;
	  if (!poly_int_tree_p (offset, &suboffset))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += suboffset;
	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
			    size)
	       && known_ge (attrs.offset, 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
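
/* For instance (an illustrative sketch): a target without byte loads
   can turn a QImode reference to *(base + 3) into

     widen_memory_access (mem, SImode, -3)

   to load the enclosing word and then mask out the wanted byte; the
   attributes are rechecked above so that the widened access does not
   appear to stay within a field it has in fact outgrown.  */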
\f
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  rtx addr;

  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
       (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  strip_offset (addr, &attrs.offset);

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
\f
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
				NULL, label_num++, NULL));
}
\f
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
\f
d823ba47 | 2761 | /* Go through all the RTL insn bodies and copy any invalid shared |
2d96a59a | 2762 | structure. This routine should only be called once. */ |
15bbde2b | 2763 | |
a40c0eeb | 2764 | static void |
58945f46 | 2765 | unshare_all_rtl_1 (rtx_insn *insn) |
15bbde2b | 2766 | { |
2d96a59a | 2767 | /* Unshare just about everything else. */ |
1cd4cfea | 2768 | unshare_all_rtl_in_chain (insn); |
d823ba47 | 2769 | |
15bbde2b | 2770 | /* Make sure the addresses of stack slots found outside the insn chain |
2771 | (such as, in DECL_RTL of a variable) are not shared | |
2772 | with the insn chain. | |
2773 | ||
2774 | This special care is necessary when the stack slot MEM does not | |
2775 | actually appear in the insn chain. If it does appear, its address | |
2776 | is unshared from all else at that point. */ | |
84f4f7bf | 2777 | unsigned int i; |
2778 | rtx temp; | |
2779 | FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp) | |
2780 | (*stack_slot_list)[i] = copy_rtx_if_shared (temp); | |
15bbde2b | 2781 | } |
2782 | ||
d823ba47 | 2783 | /* Go through all the RTL insn bodies and copy any invalid shared |
2d96a59a | 2784 | structure, again. This is a fairly expensive thing to do so it |
2785 | should be done sparingly. */ | |
2786 | ||
2787 | void | |
58945f46 | 2788 | unshare_all_rtl_again (rtx_insn *insn) |
2d96a59a | 2789 | { |
58945f46 | 2790 | rtx_insn *p; |
5244079b | 2791 | tree decl; |
2792 | ||
2d96a59a | 2793 | for (p = insn; p; p = NEXT_INSN (p)) |
9204e736 | 2794 | if (INSN_P (p)) |
2d96a59a | 2795 | { |
2796 | reset_used_flags (PATTERN (p)); | |
2797 | reset_used_flags (REG_NOTES (p)); | |
6d2a4bac | 2798 | if (CALL_P (p)) |
2799 | reset_used_flags (CALL_INSN_FUNCTION_USAGE (p)); | |
2d96a59a | 2800 | } |
5244079b | 2801 | |
01dc9f0c | 2802 | /* Make sure that virtual stack slots are not shared. */ |
265be050 | 2803 | set_used_decls (DECL_INITIAL (cfun->decl)); |
01dc9f0c | 2804 | |
5244079b | 2805 | /* Make sure that virtual parameters are not shared. */ |
1767a056 | 2806 | for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl)) |
265be050 | 2807 | set_used_flags (DECL_RTL (decl)); |
5244079b | 2808 | |
84f4f7bf | 2809 | rtx temp; |
2810 | unsigned int i; | |
2811 | FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp) | |
2812 | reset_used_flags (temp); | |
5244079b | 2813 | |
df329266 | 2814 | unshare_all_rtl_1 (insn); |
a40c0eeb | 2815 | } |
2816 | ||
2a1990e9 | 2817 | unsigned int |
a40c0eeb | 2818 | unshare_all_rtl (void) |
2819 | { | |
df329266 | 2820 | unshare_all_rtl_1 (get_insns ()); |
607381a9 | 2821 | |
2822 | for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl)) | |
2823 | { | |
2824 | if (DECL_RTL_SET_P (decl)) | |
2825 | SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl))); | |
2826 | DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl)); | |
2827 | } | |
2828 | ||
2a1990e9 | 2829 | return 0; |
2d96a59a | 2830 | } |
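/* Sketch of typical use (illustrative): a pass that may have created
   shared structure within a freshly built chain can unshare just that
   chain:

     unshare_all_rtl_in_chain (get_insns ());

   whereas unshare_all_rtl_again must be used when the used-flags are in
   an unknown state, since it first resets them on all insns, DECL_RTLs
   and stack slots before unsharing.  */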
2831 | ||
77fce4cd | 2832 | |
1cd4cfea | 2833 | /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2834 | Recursively does the same for subexpressions. */ | |
2835 | ||
2836 | static void | |
2837 | verify_rtx_sharing (rtx orig, rtx insn) | |
2838 | { | |
2839 | rtx x = orig; | |
2840 | int i; | |
2841 | enum rtx_code code; | |
2842 | const char *format_ptr; | |
2843 | ||
2844 | if (x == 0) | |
2845 | return; | |
2846 | ||
2847 | code = GET_CODE (x); | |
2848 | ||
2849 | /* These types may be freely shared. */ | |
2850 | ||
2851 | switch (code) | |
2852 | { | |
2853 | case REG: | |
688ff29b | 2854 | case DEBUG_EXPR: |
2855 | case VALUE: | |
0349edce | 2856 | CASE_CONST_ANY: |
1cd4cfea | 2857 | case SYMBOL_REF: |
2858 | case LABEL_REF: | |
2859 | case CODE_LABEL: | |
2860 | case PC: | |
2861 | case CC0: | |
1a860023 | 2862 | case RETURN: |
9cb2517e | 2863 | case SIMPLE_RETURN: |
1cd4cfea | 2864 | case SCRATCH: |
c09425a0 | 2865 | /* SCRATCHes must be shared because they represent distinct values. */
b291008a | 2866 | return; |
c09425a0 | 2867 | case CLOBBER: |
ccd6679f | 2868 | case CLOBBER_HIGH: |
b291008a | 2869 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
2870 | clobbers or clobbers of hard registers that originated as pseudos. | |
2871 | This is needed to allow safe register renaming. */ | |
2b5f32ae | 2872 | if (REG_P (XEXP (x, 0)) |
2873 | && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0))) | |
2874 | && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0)))) | |
c09425a0 | 2875 | return; |
2876 | break; | |
1cd4cfea | 2877 | |
2878 | case CONST: | |
3072d30e | 2879 | if (shared_const_p (orig)) |
1cd4cfea | 2880 | return; |
2881 | break; | |
2882 | ||
2883 | case MEM: | |
2884 | /* A MEM is allowed to be shared if its address is constant. */ | |
2885 | if (CONSTANT_ADDRESS_P (XEXP (x, 0)) | |
2886 | || reload_completed || reload_in_progress) | |
2887 | return; | |
2888 | ||
2889 | break; | |
2890 | ||
2891 | default: | |
2892 | break; | |
2893 | } | |
2894 | ||
2895 | /* This rtx may not be shared. If it has already been seen,
2896 | report the invalid sharing. */
382ecba7 | 2897 | if (flag_checking && RTX_FLAG (x, used)) |
1cd4cfea | 2898 | { |
0a81f5a0 | 2899 | error ("invalid rtl sharing found in the insn"); |
1cd4cfea | 2900 | debug_rtx (insn); |
0a81f5a0 | 2901 | error ("shared rtx"); |
1cd4cfea | 2902 | debug_rtx (x); |
0a81f5a0 | 2903 | internal_error ("internal consistency failure"); |
1cd4cfea | 2904 | } |
9cee7c3f | 2905 | gcc_assert (!RTX_FLAG (x, used)); |
48e1416a | 2906 | |
1cd4cfea | 2907 | RTX_FLAG (x, used) = 1; |
2908 | ||
8b332087 | 2909 | /* Now scan the subexpressions recursively. */ |
1cd4cfea | 2910 | |
2911 | format_ptr = GET_RTX_FORMAT (code); | |
2912 | ||
2913 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
2914 | { | |
2915 | switch (*format_ptr++) | |
2916 | { | |
2917 | case 'e': | |
2918 | verify_rtx_sharing (XEXP (x, i), insn); | |
2919 | break; | |
2920 | ||
2921 | case 'E': | |
2922 | if (XVEC (x, i) != NULL) | |
2923 | { | |
2924 | int j; | |
2925 | int len = XVECLEN (x, i); | |
2926 | ||
2927 | for (j = 0; j < len; j++) | |
2928 | { | |
9cee7c3f | 2929 | /* We allow sharing of ASM_OPERANDS inside a single
2930 | instruction. */ | |
1cd4cfea | 2931 | if (j && GET_CODE (XVECEXP (x, i, j)) == SET |
9cee7c3f | 2932 | && (GET_CODE (SET_SRC (XVECEXP (x, i, j))) |
2933 | == ASM_OPERANDS)) | |
1cd4cfea | 2934 | verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn); |
2935 | else | |
2936 | verify_rtx_sharing (XVECEXP (x, i, j), insn); | |
2937 | } | |
2938 | } | |
2939 | break; | |
2940 | } | |
2941 | } | |
2942 | return; | |
2943 | } | |
2944 | ||
1e9af25c | 2945 | /* Reset used-flags for INSN. */ |
2946 | ||
2947 | static void | |
2948 | reset_insn_used_flags (rtx insn) | |
2949 | { | |
2950 | gcc_assert (INSN_P (insn)); | |
2951 | reset_used_flags (PATTERN (insn)); | |
2952 | reset_used_flags (REG_NOTES (insn)); | |
2953 | if (CALL_P (insn)) | |
2954 | reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn)); | |
2955 | } | |
2956 | ||
7cdd84a2 | 2957 | /* Go through all the RTL insn bodies and clear all the USED bits. */ |
1cd4cfea | 2958 | |
7cdd84a2 | 2959 | static void |
2960 | reset_all_used_flags (void) | |
1cd4cfea | 2961 | { |
4cd001d5 | 2962 | rtx_insn *p; |
1cd4cfea | 2963 | |
2964 | for (p = get_insns (); p; p = NEXT_INSN (p)) | |
2965 | if (INSN_P (p)) | |
2966 | { | |
1e9af25c | 2967 | rtx pat = PATTERN (p); |
2968 | if (GET_CODE (pat) != SEQUENCE) | |
2969 | reset_insn_used_flags (p); | |
2970 | else | |
764f640f | 2971 | { |
1e9af25c | 2972 | gcc_assert (REG_NOTES (p) == NULL); |
2973 | for (int i = 0; i < XVECLEN (pat, 0); i++) | |
11c8949c | 2974 | { |
2975 | rtx insn = XVECEXP (pat, 0, i); | |
2976 | if (INSN_P (insn)) | |
2977 | reset_insn_used_flags (insn); | |
2978 | } | |
764f640f | 2979 | } |
1cd4cfea | 2980 | } |
7cdd84a2 | 2981 | } |
2982 | ||
1e9af25c | 2983 | /* Verify sharing in INSN. */ |
2984 | ||
2985 | static void | |
2986 | verify_insn_sharing (rtx insn) | |
2987 | { | |
2988 | gcc_assert (INSN_P (insn)); | |
44bf3f4e | 2989 | verify_rtx_sharing (PATTERN (insn), insn); |
2990 | verify_rtx_sharing (REG_NOTES (insn), insn); | |
1e9af25c | 2991 | if (CALL_P (insn)) |
44bf3f4e | 2992 | verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn); |
1e9af25c | 2993 | } |
2994 | ||
7cdd84a2 | 2995 | /* Go through all the RTL insn bodies and check that there is no unexpected |
2996 | sharing in between the subexpressions. */ | |
2997 | ||
2998 | DEBUG_FUNCTION void | |
2999 | verify_rtl_sharing (void) | |
3000 | { | |
4cd001d5 | 3001 | rtx_insn *p; |
7cdd84a2 | 3002 | |
3003 | timevar_push (TV_VERIFY_RTL_SHARING); | |
3004 | ||
3005 | reset_all_used_flags (); | |
1cd4cfea | 3006 | |
3007 | for (p = get_insns (); p; p = NEXT_INSN (p)) | |
3008 | if (INSN_P (p)) | |
3009 | { | |
1e9af25c | 3010 | rtx pat = PATTERN (p); |
3011 | if (GET_CODE (pat) != SEQUENCE) | |
3012 | verify_insn_sharing (p); | |
3013 | else | |
3014 | for (int i = 0; i < XVECLEN (pat, 0); i++) | |
11c8949c | 3015 | { |
3016 | rtx insn = XVECEXP (pat, 0, i); | |
3017 | if (INSN_P (insn)) | |
3018 | verify_insn_sharing (insn); | |
3019 | } | |
1cd4cfea | 3020 | } |
4b366dd3 | 3021 | |
7cdd84a2 | 3022 | reset_all_used_flags (); |
3023 | ||
4b366dd3 | 3024 | timevar_pop (TV_VERIFY_RTL_SHARING); |
1cd4cfea | 3025 | } |
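/* Illustrative sketch: a pass that edits patterns in place can check
   its work in a checking build with

     if (flag_checking)
       verify_rtl_sharing ();

   which aborts with "invalid rtl sharing found in the insn" on the
   first rtx reached twice.  */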
3026 | ||
2d96a59a | 3027 | /* Go through all the RTL insn bodies and copy any invalid shared structure. |
3028 | Assumes the mark bits are cleared at entry. */ | |
3029 | ||
1cd4cfea | 3030 | void |
4cd001d5 | 3031 | unshare_all_rtl_in_chain (rtx_insn *insn) |
2d96a59a | 3032 | { |
3033 | for (; insn; insn = NEXT_INSN (insn)) | |
9204e736 | 3034 | if (INSN_P (insn)) |
2d96a59a | 3035 | { |
3036 | PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn)); | |
3037 | REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn)); | |
6d2a4bac | 3038 | if (CALL_P (insn)) |
3039 | CALL_INSN_FUNCTION_USAGE (insn) | |
3040 | = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn)); | |
2d96a59a | 3041 | } |
3042 | } | |
3043 | ||
01dc9f0c | 3044 | /* Go through all virtual stack slots of a function and mark them as |
265be050 | 3045 | shared. We never replace the DECL_RTLs themselves with a copy, |
3046 | but expressions mentioned in a DECL_RTL cannot be shared with
3047 | expressions in the instruction stream. | |
3048 | ||
3049 | Note that reload may convert pseudo registers into memories in-place. | |
3050 | Pseudo registers are always shared, but MEMs never are. Thus if we | |
3051 | reset the used flags on MEMs in the instruction stream, we must set | |
3052 | them again on MEMs that appear in DECL_RTLs. */ | |
3053 | ||
01dc9f0c | 3054 | static void |
265be050 | 3055 | set_used_decls (tree blk) |
01dc9f0c | 3056 | { |
3057 | tree t; | |
3058 | ||
3059 | /* Mark decls. */ | |
1767a056 | 3060 | for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t)) |
0e8e37b2 | 3061 | if (DECL_RTL_SET_P (t)) |
265be050 | 3062 | set_used_flags (DECL_RTL (t)); |
01dc9f0c | 3063 | |
3064 | /* Now process sub-blocks. */ | |
93110716 | 3065 | for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t)) |
265be050 | 3066 | set_used_decls (t); |
01dc9f0c | 3067 | } |
3068 | ||
15bbde2b | 3069 | /* Mark ORIG as in use, and return a copy of it if it was already in use. |
7ba6ce7a | 3070 | Recursively does the same for subexpressions. Uses |
3071 | copy_rtx_if_shared_1 to reduce stack space. */ | |
15bbde2b | 3072 | |
3073 | rtx | |
35cb5232 | 3074 | copy_rtx_if_shared (rtx orig) |
15bbde2b | 3075 | { |
0e0727c4 | 3076 | copy_rtx_if_shared_1 (&orig); |
3077 | return orig; | |
3078 | } | |
3079 | ||
7ba6ce7a | 3080 | /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in |
3081 | use. Recursively does the same for subexpressions. */ | |
3082 | ||
0e0727c4 | 3083 | static void |
3084 | copy_rtx_if_shared_1 (rtx *orig1) | |
3085 | { | |
3086 | rtx x; | |
19cb6b50 | 3087 | int i; |
3088 | enum rtx_code code; | |
0e0727c4 | 3089 | rtx *last_ptr; |
19cb6b50 | 3090 | const char *format_ptr; |
15bbde2b | 3091 | int copied = 0; |
0e0727c4 | 3092 | int length; |
3093 | ||
3094 | /* Repeat is used to turn tail-recursion into iteration. */ | |
3095 | repeat: | |
3096 | x = *orig1; | |
15bbde2b | 3097 | |
3098 | if (x == 0) | |
0e0727c4 | 3099 | return; |
15bbde2b | 3100 | |
3101 | code = GET_CODE (x); | |
3102 | ||
3103 | /* These types may be freely shared. */ | |
3104 | ||
3105 | switch (code) | |
3106 | { | |
3107 | case REG: | |
688ff29b | 3108 | case DEBUG_EXPR: |
3109 | case VALUE: | |
0349edce | 3110 | CASE_CONST_ANY: |
15bbde2b | 3111 | case SYMBOL_REF: |
1cd4cfea | 3112 | case LABEL_REF: |
15bbde2b | 3113 | case CODE_LABEL: |
3114 | case PC: | |
3115 | case CC0: | |
e0691b9a | 3116 | case RETURN: |
9cb2517e | 3117 | case SIMPLE_RETURN: |
15bbde2b | 3118 | case SCRATCH: |
a92771b8 | 3119 | /* SCRATCHes must be shared because they represent distinct values. */
0e0727c4 | 3120 | return; |
c09425a0 | 3121 | case CLOBBER: |
ccd6679f | 3122 | case CLOBBER_HIGH: |
b291008a | 3123 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
3124 | clobbers or clobbers of hard registers that originated as pseudos. | |
3125 | This is needed to allow safe register renaming. */ | |
2b5f32ae | 3126 | if (REG_P (XEXP (x, 0)) |
3127 | && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0))) | |
3128 | && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0)))) | |
c09425a0 | 3129 | return; |
3130 | break; | |
15bbde2b | 3131 | |
f63d12e3 | 3132 | case CONST: |
3072d30e | 3133 | if (shared_const_p (x)) |
0e0727c4 | 3134 | return; |
f63d12e3 | 3135 | break; |
3136 | ||
9845d120 | 3137 | case DEBUG_INSN: |
15bbde2b | 3138 | case INSN: |
3139 | case JUMP_INSN: | |
3140 | case CALL_INSN: | |
3141 | case NOTE: | |
15bbde2b | 3142 | case BARRIER: |
3143 | /* The chain of insns is not being copied. */ | |
0e0727c4 | 3144 | return; |
15bbde2b | 3145 | |
0dbd1c74 | 3146 | default: |
3147 | break; | |
15bbde2b | 3148 | } |
3149 | ||
3150 | /* This rtx may not be shared. If it has already been seen, | |
3151 | replace it with a copy of itself. */ | |
3152 | ||
7c25cb91 | 3153 | if (RTX_FLAG (x, used)) |
15bbde2b | 3154 | { |
f2d0e9f1 | 3155 | x = shallow_copy_rtx (x); |
15bbde2b | 3156 | copied = 1; |
3157 | } | |
7c25cb91 | 3158 | RTX_FLAG (x, used) = 1; |
15bbde2b | 3159 | |
3160 | /* Now scan the subexpressions recursively. | |
3161 | We can store any replaced subexpressions directly into X | |
3162 | since we know X is not shared! Any vectors in X | |
3163 | must be copied if X was copied. */ | |
3164 | ||
3165 | format_ptr = GET_RTX_FORMAT (code); | |
0e0727c4 | 3166 | length = GET_RTX_LENGTH (code); |
3167 | last_ptr = NULL; | |
48e1416a | 3168 | |
0e0727c4 | 3169 | for (i = 0; i < length; i++) |
15bbde2b | 3170 | { |
3171 | switch (*format_ptr++) | |
3172 | { | |
3173 | case 'e': | |
0e0727c4 | 3174 | if (last_ptr) |
3175 | copy_rtx_if_shared_1 (last_ptr); | |
3176 | last_ptr = &XEXP (x, i); | |
15bbde2b | 3177 | break; |
3178 | ||
3179 | case 'E': | |
3180 | if (XVEC (x, i) != NULL) | |
3181 | { | |
19cb6b50 | 3182 | int j; |
ffe0869b | 3183 | int len = XVECLEN (x, i); |
48e1416a | 3184 | |
8b332087 | 3185 | /* Copy the vector iff we copied the rtx and the length
3186 | is nonzero. */ | |
ffe0869b | 3187 | if (copied && len > 0) |
a4070a91 | 3188 | XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem); |
48e1416a | 3189 | |
d632b59a | 3190 | /* Call recursively on everything inside the vector. */
ffe0869b | 3191 | for (j = 0; j < len; j++) |
0e0727c4 | 3192 | { |
3193 | if (last_ptr) | |
3194 | copy_rtx_if_shared_1 (last_ptr); | |
3195 | last_ptr = &XVECEXP (x, i, j); | |
3196 | } | |
15bbde2b | 3197 | } |
3198 | break; | |
3199 | } | |
3200 | } | |
0e0727c4 | 3201 | *orig1 = x; |
3202 | if (last_ptr) | |
3203 | { | |
3204 | orig1 = last_ptr; | |
3205 | goto repeat; | |
3206 | } | |
3207 | return; | |
15bbde2b | 3208 | } |
3209 | ||
709947e6 | 3210 | /* Set the USED bit in X and its non-shareable subparts to FLAG. */ |
15bbde2b | 3211 | |
709947e6 | 3212 | static void |
3213 | mark_used_flags (rtx x, int flag) | |
15bbde2b | 3214 | { |
19cb6b50 | 3215 | int i, j; |
3216 | enum rtx_code code; | |
3217 | const char *format_ptr; | |
0e0727c4 | 3218 | int length; |
15bbde2b | 3219 | |
0e0727c4 | 3220 | /* Repeat is used to turn tail-recursion into iteration. */ |
3221 | repeat: | |
15bbde2b | 3222 | if (x == 0) |
3223 | return; | |
3224 | ||
3225 | code = GET_CODE (x); | |
3226 | ||
c3418f42 | 3227 | /* These types may be freely shared so we needn't do any resetting |
15bbde2b | 3228 | for them. */ |
3229 | ||
3230 | switch (code) | |
3231 | { | |
3232 | case REG: | |
688ff29b | 3233 | case DEBUG_EXPR: |
3234 | case VALUE: | |
0349edce | 3235 | CASE_CONST_ANY: |
15bbde2b | 3236 | case SYMBOL_REF: |
3237 | case CODE_LABEL: | |
3238 | case PC: | |
3239 | case CC0: | |
e0691b9a | 3240 | case RETURN: |
9cb2517e | 3241 | case SIMPLE_RETURN: |
15bbde2b | 3242 | return; |
3243 | ||
9845d120 | 3244 | case DEBUG_INSN: |
15bbde2b | 3245 | case INSN: |
3246 | case JUMP_INSN: | |
3247 | case CALL_INSN: | |
3248 | case NOTE: | |
3249 | case LABEL_REF: | |
3250 | case BARRIER: | |
3251 | /* The chain of insns is not being copied. */ | |
3252 | return; | |
d823ba47 | 3253 | |
0dbd1c74 | 3254 | default: |
3255 | break; | |
15bbde2b | 3256 | } |
3257 | ||
709947e6 | 3258 | RTX_FLAG (x, used) = flag; |
15bbde2b | 3259 | |
3260 | format_ptr = GET_RTX_FORMAT (code); | |
0e0727c4 | 3261 | length = GET_RTX_LENGTH (code); |
48e1416a | 3262 | |
0e0727c4 | 3263 | for (i = 0; i < length; i++) |
15bbde2b | 3264 | { |
3265 | switch (*format_ptr++) | |
3266 | { | |
3267 | case 'e': | |
0e0727c4 | 3268 | if (i == length-1) |
3269 | { | |
3270 | x = XEXP (x, i); | |
3271 | goto repeat; | |
3272 | } | |
709947e6 | 3273 | mark_used_flags (XEXP (x, i), flag); |
15bbde2b | 3274 | break; |
3275 | ||
3276 | case 'E': | |
3277 | for (j = 0; j < XVECLEN (x, i); j++) | |
709947e6 | 3278 | mark_used_flags (XVECEXP (x, i, j), flag); |
15bbde2b | 3279 | break; |
3280 | } | |
3281 | } | |
3282 | } | |
1cd4cfea | 3283 | |
709947e6 | 3284 | /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used |
1cd4cfea | 3285 | to look for shared sub-parts. */ |
3286 | ||
3287 | void | |
709947e6 | 3288 | reset_used_flags (rtx x) |
1cd4cfea | 3289 | { |
709947e6 | 3290 | mark_used_flags (x, 0); |
3291 | } | |
1cd4cfea | 3292 | |
709947e6 | 3293 | /* Set all the USED bits in X to allow copy_rtx_if_shared to be used |
3294 | to look for shared sub-parts. */ | |
1cd4cfea | 3295 | |
709947e6 | 3296 | void |
3297 | set_used_flags (rtx x) | |
3298 | { | |
3299 | mark_used_flags (x, 1); | |
1cd4cfea | 3300 | } |
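/* Sketch of the used-flag protocol (illustrative): to unshare X with
   respect to some existing expression Y, clear the flags, mark Y, and
   copy whatever parts of X are already reachable from Y:

     reset_used_flags (x);
     set_used_flags (y);
     x = copy_rtx_if_shared (x);

   This is the same protocol unshare_all_rtl_again applies to whole
   insn chains.  */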
15bbde2b | 3301 | \f |
3302 | /* Copy X if necessary so that it won't be altered by changes in OTHER. | |
3303 | Return X or the rtx for the pseudo reg the value of X was copied into. | |
3304 | OTHER must be valid as a SET_DEST. */ | |
3305 | ||
3306 | rtx | |
35cb5232 | 3307 | make_safe_from (rtx x, rtx other) |
15bbde2b | 3308 | { |
3309 | while (1) | |
3310 | switch (GET_CODE (other)) | |
3311 | { | |
3312 | case SUBREG: | |
3313 | other = SUBREG_REG (other); | |
3314 | break; | |
3315 | case STRICT_LOW_PART: | |
3316 | case SIGN_EXTEND: | |
3317 | case ZERO_EXTEND: | |
3318 | other = XEXP (other, 0); | |
3319 | break; | |
3320 | default: | |
3321 | goto done; | |
3322 | } | |
3323 | done: | |
e16ceb8e | 3324 | if ((MEM_P (other) |
15bbde2b | 3325 | && ! CONSTANT_P (x) |
8ad4c111 | 3326 | && !REG_P (x) |
15bbde2b | 3327 | && GET_CODE (x) != SUBREG) |
8ad4c111 | 3328 | || (REG_P (other) |
15bbde2b | 3329 | && (REGNO (other) < FIRST_PSEUDO_REGISTER |
3330 | || reg_mentioned_p (other, x)))) | |
3331 | { | |
3332 | rtx temp = gen_reg_rtx (GET_MODE (x)); | |
3333 | emit_move_insn (temp, x); | |
3334 | return temp; | |
3335 | } | |
3336 | return x; | |
3337 | } | |
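/* Usage sketch (illustrative): before emitting code that stores into
   TARGET, protect a source value that might overlap it:

     value = make_safe_from (value, target);
     ... emit insns that clobber TARGET ...
     emit_move_insn (target, value);

   If VALUE could have been altered by the intervening stores, it has
   been copied into a fresh pseudo first.  */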
3338 | \f | |
3339 | /* Emission of insns (adding them to the doubly-linked list). */ | |
3340 | ||
15bbde2b | 3341 | /* Return the last insn emitted, even if it is in a sequence now pushed. */ |
3342 | ||
447ab0fc | 3343 | rtx_insn * |
35cb5232 | 3344 | get_last_insn_anywhere (void) |
15bbde2b | 3345 | { |
c36aa54b | 3346 | struct sequence_stack *seq; |
3347 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
3348 | if (seq->last != 0) | |
3349 | return seq->last; | |
15bbde2b | 3350 | return 0; |
3351 | } | |
3352 | ||
70545de4 | 3353 | /* Return the first nonnote insn emitted in the current sequence or current
3354 | function. This routine looks inside SEQUENCEs. */ | |
3355 | ||
2eb8c261 | 3356 | rtx_insn * |
35cb5232 | 3357 | get_first_nonnote_insn (void) |
70545de4 | 3358 | { |
4cd001d5 | 3359 | rtx_insn *insn = get_insns (); |
f86e856e | 3360 | |
3361 | if (insn) | |
3362 | { | |
3363 | if (NOTE_P (insn)) | |
3364 | for (insn = next_insn (insn); | |
3365 | insn && NOTE_P (insn); | |
3366 | insn = next_insn (insn)) | |
3367 | continue; | |
3368 | else | |
3369 | { | |
1c14a50e | 3370 | if (NONJUMP_INSN_P (insn) |
f86e856e | 3371 | && GET_CODE (PATTERN (insn)) == SEQUENCE) |
4cd001d5 | 3372 | insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
f86e856e | 3373 | } |
3374 | } | |
70545de4 | 3375 | |
3376 | return insn; | |
3377 | } | |
3378 | ||
3379 | /* Return the last nonnote insn emitted in the current sequence or current
3380 | function. This routine looks inside SEQUENCEs. */ | |
3381 | ||
2eb8c261 | 3382 | rtx_insn * |
35cb5232 | 3383 | get_last_nonnote_insn (void) |
70545de4 | 3384 | { |
4cd001d5 | 3385 | rtx_insn *insn = get_last_insn (); |
f86e856e | 3386 | |
3387 | if (insn) | |
3388 | { | |
3389 | if (NOTE_P (insn)) | |
3390 | for (insn = previous_insn (insn); | |
3391 | insn && NOTE_P (insn); | |
3392 | insn = previous_insn (insn)) | |
3393 | continue; | |
3394 | else | |
3395 | { | |
4cd001d5 | 3396 | if (NONJUMP_INSN_P (insn)) |
3397 | if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn))) | |
3398 | insn = seq->insn (seq->len () - 1); | |
f86e856e | 3399 | } |
3400 | } | |
70545de4 | 3401 | |
3402 | return insn; | |
3403 | } | |
3404 | ||
9845d120 | 3405 | /* Return the number of actual (non-debug) insns emitted in this |
3406 | function. */ | |
3407 | ||
3408 | int | |
3409 | get_max_insn_count (void) | |
3410 | { | |
3411 | int n = cur_insn_uid; | |
3412 | ||
3413 | /* The table size must be stable across -g, to avoid codegen | |
3414 | differences due to debug insns, and not be affected by | |
3415 | -fmin-insn-uid, to avoid excessive table size and to simplify | |
3416 | debugging of -fcompare-debug failures. */ | |
3417 | if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID) | |
3418 | n -= cur_debug_insn_uid; | |
3419 | else | |
3420 | n -= MIN_NONDEBUG_INSN_UID; | |
3421 | ||
3422 | return n; | |
3423 | } | |
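/* Worked example (illustrative): with MIN_NONDEBUG_INSN_UID == 20,
   cur_insn_uid == 120 and cur_debug_insn_uid == 5, no debug insn has
   overflowed the reserved range, so the result is 120 - 20 == 100.
   Had cur_debug_insn_uid grown to 30, the result would instead be
   120 - 30 == 90.  */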
3424 | ||
15bbde2b | 3425 | \f |
3426 | /* Return the next insn. If it is a SEQUENCE, return the first insn | |
3427 | of the sequence. */ | |
3428 | ||
7bac25b3 | 3429 | rtx_insn * |
50895eab | 3430 | next_insn (rtx_insn *insn) |
15bbde2b | 3431 | { |
ce4469fa | 3432 | if (insn) |
3433 | { | |
3434 | insn = NEXT_INSN (insn); | |
3435 | if (insn && NONJUMP_INSN_P (insn) | |
3436 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
4cd001d5 | 3437 | insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
ce4469fa | 3438 | } |
15bbde2b | 3439 | |
4cd001d5 | 3440 | return insn; |
15bbde2b | 3441 | } |
3442 | ||
3443 | /* Return the previous insn. If it is a SEQUENCE, return the last insn | |
3444 | of the sequence. */ | |
3445 | ||
7bac25b3 | 3446 | rtx_insn * |
50895eab | 3447 | previous_insn (rtx_insn *insn) |
15bbde2b | 3448 | { |
ce4469fa | 3449 | if (insn) |
3450 | { | |
3451 | insn = PREV_INSN (insn); | |
4cd001d5 | 3452 | if (insn && NONJUMP_INSN_P (insn)) |
3453 | if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn))) | |
3454 | insn = seq->insn (seq->len () - 1); | |
ce4469fa | 3455 | } |
15bbde2b | 3456 | |
4cd001d5 | 3457 | return insn; |
15bbde2b | 3458 | } |
3459 | ||
3460 | /* Return the next insn after INSN that is not a NOTE. This routine does not | |
3461 | look inside SEQUENCEs. */ | |
3462 | ||
7bac25b3 | 3463 | rtx_insn * |
4066f31e | 3464 | next_nonnote_insn (rtx_insn *insn) |
15bbde2b | 3465 | { |
ce4469fa | 3466 | while (insn) |
3467 | { | |
3468 | insn = NEXT_INSN (insn); | |
3469 | if (insn == 0 || !NOTE_P (insn)) | |
3470 | break; | |
3471 | } | |
15bbde2b | 3472 | |
4cd001d5 | 3473 | return insn; |
15bbde2b | 3474 | } |
3475 | ||
18fc6357 | 3476 | /* Return the next insn after INSN that is not a DEBUG_INSN. This |
3477 | routine does not look inside SEQUENCEs. */ | |
c4d13c5c | 3478 | |
7bac25b3 | 3479 | rtx_insn * |
18fc6357 | 3480 | next_nondebug_insn (rtx_insn *insn) |
c4d13c5c | 3481 | { |
3482 | while (insn) | |
3483 | { | |
3484 | insn = NEXT_INSN (insn); | |
18fc6357 | 3485 | if (insn == 0 || !DEBUG_INSN_P (insn)) |
c4d13c5c | 3486 | break; |
c4d13c5c | 3487 | } |
3488 | ||
4cd001d5 | 3489 | return insn; |
c4d13c5c | 3490 | } |
3491 | ||
15bbde2b | 3492 | /* Return the previous insn before INSN that is not a NOTE. This routine does |
3493 | not look inside SEQUENCEs. */ | |
3494 | ||
7bac25b3 | 3495 | rtx_insn * |
4066f31e | 3496 | prev_nonnote_insn (rtx_insn *insn) |
15bbde2b | 3497 | { |
ce4469fa | 3498 | while (insn) |
3499 | { | |
3500 | insn = PREV_INSN (insn); | |
3501 | if (insn == 0 || !NOTE_P (insn)) | |
3502 | break; | |
3503 | } | |
15bbde2b | 3504 | |
4cd001d5 | 3505 | return insn; |
15bbde2b | 3506 | } |
3507 | ||
18fc6357 | 3508 | /* Return the previous insn before INSN that is not a DEBUG_INSN. |
3509 | This routine does not look inside SEQUENCEs. */ | |
bcc66782 | 3510 | |
7bac25b3 | 3511 | rtx_insn * |
18fc6357 | 3512 | prev_nondebug_insn (rtx_insn *insn) |
bcc66782 | 3513 | { |
3514 | while (insn) | |
3515 | { | |
3516 | insn = PREV_INSN (insn); | |
18fc6357 | 3517 | if (insn == 0 || !DEBUG_INSN_P (insn)) |
bcc66782 | 3518 | break; |
bcc66782 | 3519 | } |
3520 | ||
4cd001d5 | 3521 | return insn; |
bcc66782 | 3522 | } |
3523 | ||
18fc6357 | 3524 | /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN. |
3525 | This routine does not look inside SEQUENCEs. */ | |
9845d120 | 3526 | |
7bac25b3 | 3527 | rtx_insn * |
18fc6357 | 3528 | next_nonnote_nondebug_insn (rtx_insn *insn) |
9845d120 | 3529 | { |
3530 | while (insn) | |
3531 | { | |
3532 | insn = NEXT_INSN (insn); | |
18fc6357 | 3533 | if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) |
9845d120 | 3534 | break; |
3535 | } | |
3536 | ||
4cd001d5 | 3537 | return insn; |
9845d120 | 3538 | } |
3539 | ||
18fc6357 | 3540 | /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN, |
3541 | but stop the search before we enter another basic block. This | |
3542 | routine does not look inside SEQUENCEs. */ | |
9845d120 | 3543 | |
7bac25b3 | 3544 | rtx_insn * |
18fc6357 | 3545 | next_nonnote_nondebug_insn_bb (rtx_insn *insn) |
9845d120 | 3546 | { |
3547 | while (insn) | |
3548 | { | |
18fc6357 | 3549 | insn = NEXT_INSN (insn); |
3550 | if (insn == 0) | |
3551 | break; | |
3552 | if (DEBUG_INSN_P (insn)) | |
3553 | continue; | |
3554 | if (!NOTE_P (insn)) | |
9845d120 | 3555 | break; |
18fc6357 | 3556 | if (NOTE_INSN_BASIC_BLOCK_P (insn)) |
3557 | return NULL; | |
9845d120 | 3558 | } |
3559 | ||
4cd001d5 | 3560 | return insn; |
9845d120 | 3561 | } |
3562 | ||
18fc6357 | 3563 | /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN. |
5b8537a8 | 3564 | This routine does not look inside SEQUENCEs. */ |
3565 | ||
7bac25b3 | 3566 | rtx_insn * |
18fc6357 | 3567 | prev_nonnote_nondebug_insn (rtx_insn *insn) |
5b8537a8 | 3568 | { |
3569 | while (insn) | |
3570 | { | |
18fc6357 | 3571 | insn = PREV_INSN (insn); |
5b8537a8 | 3572 | if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) |
3573 | break; | |
3574 | } | |
3575 | ||
4cd001d5 | 3576 | return insn; |
5b8537a8 | 3577 | } |
3578 | ||
18fc6357 | 3579 | /* Return the previous insn before INSN that is not a NOTE nor |
3580 | DEBUG_INSN, but stop the search before we enter another basic | |
3581 | block. This routine does not look inside SEQUENCEs. */ | |
5b8537a8 | 3582 | |
7bac25b3 | 3583 | rtx_insn * |
18fc6357 | 3584 | prev_nonnote_nondebug_insn_bb (rtx_insn *insn) |
5b8537a8 | 3585 | { |
3586 | while (insn) | |
3587 | { | |
3588 | insn = PREV_INSN (insn); | |
18fc6357 | 3589 | if (insn == 0) |
5b8537a8 | 3590 | break; |
18fc6357 | 3591 | if (DEBUG_INSN_P (insn)) |
3592 | continue; | |
3593 | if (!NOTE_P (insn)) | |
3594 | break; | |
3595 | if (NOTE_INSN_BASIC_BLOCK_P (insn)) | |
3596 | return NULL; | |
5b8537a8 | 3597 | } |
3598 | ||
4cd001d5 | 3599 | return insn; |
5b8537a8 | 3600 | } |
3601 | ||
3ba5631b | 3602 | /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN; |
15bbde2b | 3603 | or 0, if there is none. This routine does not look inside |
a92771b8 | 3604 | SEQUENCEs. */ |
15bbde2b | 3605 | |
7bac25b3 | 3606 | rtx_insn * |
924b3c83 | 3607 | next_real_insn (rtx_insn *insn) |
15bbde2b | 3608 | { |
ce4469fa | 3609 | while (insn) |
3610 | { | |
3611 | insn = NEXT_INSN (insn); | |
3612 | if (insn == 0 || INSN_P (insn)) | |
3613 | break; | |
3614 | } | |
15bbde2b | 3615 | |
4cd001d5 | 3616 | return insn; |
15bbde2b | 3617 | } |
3618 | ||
3ba5631b | 3619 | /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN; |
15bbde2b | 3620 | or 0, if there is none. This routine does not look inside |
3621 | SEQUENCEs. */ | |
3622 | ||
7bac25b3 | 3623 | rtx_insn * |
4067fcc6 | 3624 | prev_real_insn (rtx_insn *insn) |
15bbde2b | 3625 | { |
ce4469fa | 3626 | while (insn) |
3627 | { | |
3628 | insn = PREV_INSN (insn); | |
3629 | if (insn == 0 || INSN_P (insn)) | |
3630 | break; | |
3631 | } | |
15bbde2b | 3632 | |
4cd001d5 | 3633 | return insn; |
15bbde2b | 3634 | } |
3635 | ||
3ba5631b | 3636 | /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN; |
3637 | or 0, if there is none. This routine does not look inside | |
3638 | SEQUENCEs. */ | |
3639 | ||
3640 | rtx_insn * | |
3641 | next_real_nondebug_insn (rtx uncast_insn) | |
3642 | { | |
3643 | rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn); | |
3644 | ||
3645 | while (insn) | |
3646 | { | |
3647 | insn = NEXT_INSN (insn); | |
3648 | if (insn == 0 || NONDEBUG_INSN_P (insn)) | |
3649 | break; | |
3650 | } | |
3651 | ||
3652 | return insn; | |
3653 | } | |
3654 | ||
3655 | /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN; | |
3656 | or 0, if there is none. This routine does not look inside | |
3657 | SEQUENCEs. */ | |
3658 | ||
3659 | rtx_insn * | |
3660 | prev_real_nondebug_insn (rtx_insn *insn) | |
3661 | { | |
3662 | while (insn) | |
3663 | { | |
3664 | insn = PREV_INSN (insn); | |
3665 | if (insn == 0 || NONDEBUG_INSN_P (insn)) | |
3666 | break; | |
3667 | } | |
3668 | ||
3669 | return insn; | |
3670 | } | |
3671 | ||
d5f9786f | 3672 | /* Return the last CALL_INSN in the current list, or 0 if there is none. |
3673 | This routine does not look inside SEQUENCEs. */ | |
3674 | ||
ec22da62 | 3675 | rtx_call_insn * |
35cb5232 | 3676 | last_call_insn (void) |
d5f9786f | 3677 | { |
ec22da62 | 3678 | rtx_insn *insn; |
d5f9786f | 3679 | |
3680 | for (insn = get_last_insn (); | |
6d7dc5b9 | 3681 | insn && !CALL_P (insn); |
d5f9786f | 3682 | insn = PREV_INSN (insn)) |
3683 | ; | |
3684 | ||
ec22da62 | 3685 | return safe_as_a <rtx_call_insn *> (insn); |
d5f9786f | 3686 | } |
3687 | ||
15bbde2b | 3688 | /* Find the next insn after INSN that really does something. This routine |
084950ee | 3689 | does not look inside SEQUENCEs. After reload this also skips over |
3690 | standalone USE and CLOBBER insns. */
15bbde2b | 3691 | |
2215ca0d | 3692 | int |
41503955 | 3693 | active_insn_p (const rtx_insn *insn) |
2215ca0d | 3694 | { |
6d7dc5b9 | 3695 | return (CALL_P (insn) || JUMP_P (insn) |
91f71fa3 | 3696 | || JUMP_TABLE_DATA_P (insn) /* FIXME */ |
6d7dc5b9 | 3697 | || (NONJUMP_INSN_P (insn) |
3a66feab | 3698 | && (! reload_completed |
3699 | || (GET_CODE (PATTERN (insn)) != USE | |
3700 | && GET_CODE (PATTERN (insn)) != CLOBBER)))); | |
2215ca0d | 3701 | } |
3702 | ||
7bac25b3 | 3703 | rtx_insn * |
41503955 | 3704 | next_active_insn (rtx_insn *insn) |
15bbde2b | 3705 | { |
ce4469fa | 3706 | while (insn) |
3707 | { | |
3708 | insn = NEXT_INSN (insn); | |
3709 | if (insn == 0 || active_insn_p (insn)) | |
3710 | break; | |
3711 | } | |
15bbde2b | 3712 | |
4cd001d5 | 3713 | return insn; |
15bbde2b | 3714 | } |
3715 | ||
3716 | /* Find the last insn before INSN that really does something. This routine | |
084950ee | 3717 | does not look inside SEQUENCEs. After reload this also skips over |
3718 | standalone USE and CLOBBER insns. */
15bbde2b | 3719 | |
7bac25b3 | 3720 | rtx_insn * |
41503955 | 3721 | prev_active_insn (rtx_insn *insn) |
15bbde2b | 3722 | { |
ce4469fa | 3723 | while (insn) |
3724 | { | |
3725 | insn = PREV_INSN (insn); | |
3726 | if (insn == 0 || active_insn_p (insn)) | |
3727 | break; | |
3728 | } | |
15bbde2b | 3729 | |
4cd001d5 | 3730 | return insn; |
15bbde2b | 3731 | } |
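/* Iteration sketch (illustrative): walking the active insns of a basic
   block BB from its head, where process () stands for some hypothetical
   per-insn work:

     for (rtx_insn *insn = next_active_insn (BB_HEAD (bb));
          insn && BLOCK_FOR_INSN (insn) == bb;
          insn = next_active_insn (insn))
       process (insn);

   The BLOCK_FOR_INSN test stops the walk once next_active_insn has
   crossed into a following block.  */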
15bbde2b | 3732 | \f |
15bbde2b | 3733 | /* Return the next insn that uses CC0 after INSN, which is assumed to |
3734 | set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter | |
3735 | applied to the result of this function should yield INSN). | |
3736 | ||
3737 | Normally, this is simply the next insn. However, if a REG_CC_USER note | |
3738 | is present, it contains the insn that uses CC0. | |
3739 | ||
3740 | Return 0 if we can't find the insn. */ | |
3741 | ||
0be88abd | 3742 | rtx_insn * |
924a5cee | 3743 | next_cc0_user (rtx_insn *insn) |
15bbde2b | 3744 | { |
b572011e | 3745 | rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX); |
15bbde2b | 3746 | |
3747 | if (note) | |
0be88abd | 3748 | return safe_as_a <rtx_insn *> (XEXP (note, 0)); |
15bbde2b | 3749 | |
3750 | insn = next_nonnote_insn (insn); | |
6d7dc5b9 | 3751 | if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
4cd001d5 | 3752 | insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
15bbde2b | 3753 | |
9204e736 | 3754 | if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn))) |
4cd001d5 | 3755 | return insn; |
15bbde2b | 3756 | |
3757 | return 0; | |
3758 | } | |
3759 | ||
3760 | /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER | |
3761 | note, it is the previous insn. */ | |
3762 | ||
0be88abd | 3763 | rtx_insn * |
fd8b0a1a | 3764 | prev_cc0_setter (rtx_insn *insn) |
15bbde2b | 3765 | { |
b572011e | 3766 | rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX); |
15bbde2b | 3767 | |
3768 | if (note) | |
0be88abd | 3769 | return safe_as_a <rtx_insn *> (XEXP (note, 0)); |
15bbde2b | 3770 | |
3771 | insn = prev_nonnote_insn (insn); | |
611234b4 | 3772 | gcc_assert (sets_cc0_p (PATTERN (insn))); |
15bbde2b | 3773 | |
4cd001d5 | 3774 | return insn; |
15bbde2b | 3775 | } |
344dc2fa | 3776 | |
698ff1f0 | 3777 | /* Return true if X contains an RTX_AUTOINC class rtx whose operand matches REG. */
3778 | ||
3779 | static bool
4073adaa | 3780 | find_auto_inc (const_rtx x, const_rtx reg) |
698ff1f0 | 3781 | { |
4073adaa | 3782 | subrtx_iterator::array_type array; |
3783 | FOR_EACH_SUBRTX (iter, array, x, NONCONST) | |
698ff1f0 | 3784 | { |
4073adaa | 3785 | const_rtx x = *iter; |
3786 | if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC | |
3787 | && rtx_equal_p (reg, XEXP (x, 0))) | |
3788 | return true; | |
698ff1f0 | 3789 | } |
4073adaa | 3790 | return false; |
698ff1f0 | 3791 | } |
698ff1f0 | 3792 | |
344dc2fa | 3793 | /* Increment the label uses for all labels present in X. */
3794 | ||
3795 | static void | |
35cb5232 | 3796 | mark_label_nuses (rtx x) |
344dc2fa | 3797 | { |
19cb6b50 | 3798 | enum rtx_code code; |
3799 | int i, j; | |
3800 | const char *fmt; | |
344dc2fa | 3801 | |
3802 | code = GET_CODE (x); | |
c7799456 | 3803 | if (code == LABEL_REF && LABEL_P (label_ref_label (x))) |
3804 | LABEL_NUSES (label_ref_label (x))++; | |
344dc2fa | 3805 | |
3806 | fmt = GET_RTX_FORMAT (code); | |
3807 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3808 | { | |
3809 | if (fmt[i] == 'e') | |
ff385626 | 3810 | mark_label_nuses (XEXP (x, i)); |
344dc2fa | 3811 | else if (fmt[i] == 'E') |
ff385626 | 3812 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
344dc2fa | 3813 | mark_label_nuses (XVECEXP (x, i, j)); |
3814 | } | |
3815 | } | |
3816 | ||
15bbde2b | 3817 | \f |
3818 | /* Try splitting insns that can be split for better scheduling. | |
3819 | PAT is the pattern which might split. | |
3820 | TRIAL is the insn providing PAT. | |
6ef828f9 | 3821 | LAST is nonzero if we should return the last insn of the sequence produced. |
15bbde2b | 3822 | |
3823 | If this routine succeeds in splitting, it returns the first or last | |
0e69a50a | 3824 | replacement insn depending on the value of LAST. Otherwise, it |
15bbde2b | 3825 | returns TRIAL. If the insn to be returned can be split, it will be. */ |
3826 | ||
bffa1357 | 3827 | rtx_insn * |
58a87a29 | 3828 | try_split (rtx pat, rtx_insn *trial, int last) |
15bbde2b | 3829 | { |
3b50f202 | 3830 | rtx_insn *before, *after; |
4cd001d5 | 3831 | rtx note; |
3832 | rtx_insn *seq, *tem; | |
61cb1816 | 3833 | profile_probability probability; |
4cd001d5 | 3834 | rtx_insn *insn_last, *insn; |
e13693ec | 3835 | int njumps = 0; |
9ed997be | 3836 | rtx_insn *call_insn = NULL; |
3cd757b1 | 3837 | |
25e880b1 | 3838 | /* We're not good at redistributing frame information. */ |
3839 | if (RTX_FRAME_RELATED_P (trial)) | |
4cd001d5 | 3840 | return trial; |
25e880b1 | 3841 | |
3cd757b1 | 3842 | if (any_condjump_p (trial) |
3843 | && (note = find_reg_note (trial, REG_BR_PROB, 0))) | |
61cb1816 | 3844 | split_branch_probability |
3845 | = profile_probability::from_reg_br_prob_note (XINT (note, 0)); | |
3846 | else | |
3847 | split_branch_probability = profile_probability::uninitialized (); | |
3848 | ||
3cd757b1 | 3849 | probability = split_branch_probability; |
3850 | ||
58a87a29 | 3851 | seq = split_insns (pat, trial); |
3cd757b1 | 3852 | |
61cb1816 | 3853 | split_branch_probability = profile_probability::uninitialized (); |
15bbde2b | 3854 | |
e13693ec | 3855 | if (!seq) |
4cd001d5 | 3856 | return trial; |
e13693ec | 3857 | |
3858 | /* Avoid infinite loop if any insn of the result matches | |
3859 | the original pattern. */ | |
3860 | insn_last = seq; | |
3861 | while (1) | |
15bbde2b | 3862 | { |
e13693ec | 3863 | if (INSN_P (insn_last) |
3864 | && rtx_equal_p (PATTERN (insn_last), pat)) | |
4cd001d5 | 3865 | return trial; |
e13693ec | 3866 | if (!NEXT_INSN (insn_last)) |
3867 | break; | |
3868 | insn_last = NEXT_INSN (insn_last); | |
3869 | } | |
d823ba47 | 3870 | |
3072d30e | 3871 | /* We will be adding the new sequence to the function. The splitters |
3872 | may have introduced invalid RTL sharing, so unshare the sequence now. */ | |
3873 | unshare_all_rtl_in_chain (seq); | |
3874 | ||
8f869004 | 3875 | /* Mark labels and copy flags. */ |
e13693ec | 3876 | for (insn = insn_last; insn ; insn = PREV_INSN (insn)) |
3877 | { | |
6d7dc5b9 | 3878 | if (JUMP_P (insn)) |
e13693ec | 3879 | { |
8f869004 | 3880 | if (JUMP_P (trial)) |
3881 | CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial); | |
e13693ec | 3882 | mark_jump_label (PATTERN (insn), insn, 0); |
3883 | njumps++; | |
61cb1816 | 3884 | if (probability.initialized_p () |
e13693ec | 3885 | && any_condjump_p (insn) |
3886 | && !find_reg_note (insn, REG_BR_PROB, 0)) | |
31d3e01c | 3887 | { |
e13693ec | 3888 | /* We can preserve the REG_BR_PROB notes only if exactly |
3889 | one jump is created; otherwise the machine description
3890 | is responsible for this step using the
3891 | split_branch_probability variable. */ | |
611234b4 | 3892 | gcc_assert (njumps == 1); |
61cb1816 | 3893 | add_reg_br_prob_note (insn, probability); |
31d3e01c | 3894 | } |
e13693ec | 3895 | } |
3896 | } | |
3897 | ||
3898 | /* If we are splitting a CALL_INSN, look for the CALL_INSN | |
b0bd0491 | 3899 | in SEQ and copy any additional information across. */ |
6d7dc5b9 | 3900 | if (CALL_P (trial)) |
e13693ec | 3901 | { |
3902 | for (insn = insn_last; insn ; insn = PREV_INSN (insn)) | |
6d7dc5b9 | 3903 | if (CALL_P (insn)) |
e13693ec | 3904 | { |
2e3b0d0f | 3905 | gcc_assert (call_insn == NULL_RTX); |
3906 | call_insn = insn; | |
3907 | ||
b0bd0491 | 3908 | /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the |
3909 | target may have explicitly specified. */ | |
ddc5a1df | 3910 | rtx *p = &CALL_INSN_FUNCTION_USAGE (insn); |
0bb5a6cd | 3911 | while (*p) |
3912 | p = &XEXP (*p, 1); | |
3913 | *p = CALL_INSN_FUNCTION_USAGE (trial); | |
b0bd0491 | 3914 | |
3915 | /* If the old call was a sibling call, the new one must | |
3916 | be too. */ | |
e13693ec | 3917 | SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial); |
3918 | } | |
3919 | } | |
5262c253 | 3920 | |
e13693ec | 3921 | /* Copy notes, particularly those related to the CFG. */ |
3922 | for (note = REG_NOTES (trial); note; note = XEXP (note, 1)) | |
3923 | { | |
3924 | switch (REG_NOTE_KIND (note)) | |
3925 | { | |
3926 | case REG_EH_REGION: | |
e38def9c | 3927 | copy_reg_eh_region_note_backward (note, insn_last, NULL); |
e13693ec | 3928 | break; |
381eb1e7 | 3929 | |
e13693ec | 3930 | case REG_NORETURN: |
3931 | case REG_SETJMP: | |
4c0315d0 | 3932 | case REG_TM: |
3c0f15b4 | 3933 | case REG_CALL_NOCF_CHECK: |
ddc5a1df | 3934 | case REG_CALL_ARG_LOCATION: |
698ff1f0 | 3935 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
381eb1e7 | 3936 | { |
6d7dc5b9 | 3937 | if (CALL_P (insn)) |
a1ddb869 | 3938 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
381eb1e7 | 3939 | } |
e13693ec | 3940 | break; |
5bb27a4b | 3941 | |
e13693ec | 3942 | case REG_NON_LOCAL_GOTO: |
698ff1f0 | 3943 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
31d3e01c | 3944 | { |
6d7dc5b9 | 3945 | if (JUMP_P (insn)) |
a1ddb869 | 3946 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
31d3e01c | 3947 | } |
e13693ec | 3948 | break; |
344dc2fa | 3949 | |
698ff1f0 | 3950 | case REG_INC: |
32aa77d9 | 3951 | if (!AUTO_INC_DEC) |
3952 | break; | |
3953 | ||
698ff1f0 | 3954 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
3955 | { | |
3956 | rtx reg = XEXP (note, 0); | |
3957 | if (!FIND_REG_INC_NOTE (insn, reg) | |
4073adaa | 3958 | && find_auto_inc (PATTERN (insn), reg)) |
a1ddb869 | 3959 | add_reg_note (insn, REG_INC, reg); |
698ff1f0 | 3960 | } |
3961 | break; | |
698ff1f0 | 3962 | |
dfe00a8f | 3963 | case REG_ARGS_SIZE: |
f6a1fc98 | 3964 | fixup_args_size_notes (NULL, insn_last, get_args_size (note)); |
dfe00a8f | 3965 | break; |
3966 | ||
2e3b0d0f | 3967 | case REG_CALL_DECL: |
3968 | gcc_assert (call_insn != NULL_RTX); | |
3969 | add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0)); | |
3970 | break; | |
3971 | ||
e13693ec | 3972 | default: |
3973 | break; | |
15bbde2b | 3974 | } |
e13693ec | 3975 | } |
3976 | ||
3977 | /* If there are labels inside the split insns, increment the
3978 | usage count so we don't delete the label. */ | |
19d2fe05 | 3979 | if (INSN_P (trial)) |
e13693ec | 3980 | { |
3981 | insn = insn_last; | |
3982 | while (insn != NULL_RTX) | |
15bbde2b | 3983 | { |
19d2fe05 | 3984 | /* JUMP_P insns have already been "marked" above. */ |
6d7dc5b9 | 3985 | if (NONJUMP_INSN_P (insn)) |
e13693ec | 3986 | mark_label_nuses (PATTERN (insn)); |
15bbde2b | 3987 | |
e13693ec | 3988 | insn = PREV_INSN (insn); |
3989 | } | |
15bbde2b | 3990 | } |
3991 | ||
3b50f202 | 3992 | before = PREV_INSN (trial); |
3993 | after = NEXT_INSN (trial); | |
3994 | ||
5169661d | 3995 | tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial)); |
e13693ec | 3996 | |
3997 | delete_insn (trial); | |
e13693ec | 3998 | |
3999 | /* Recursively call try_split for each new insn created; by the | |
4000 | time control returns here that insn will be fully split, so | |
4001 | set LAST and continue from the insn after the one returned. | |
4002 | We can't use next_active_insn here since AFTER may be a note. | |
4003 | Ignore deleted insns, which can occur if not optimizing. */
4004 | for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem)) | |
dd1286fb | 4005 | if (! tem->deleted () && INSN_P (tem)) |
e13693ec | 4006 | tem = try_split (PATTERN (tem), tem, 1); |
4007 | ||
4008 | /* Return either the first or the last insn, depending on which was | |
4009 | requested. */ | |
4010 | return last | |
06f9d6ef | 4011 | ? (after ? PREV_INSN (after) : get_last_insn ()) |
e13693ec | 4012 | : NEXT_INSN (before); |
15bbde2b | 4013 | } |
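/* Usage sketch (illustrative): a splitting pass calls this roughly as

     rtx_insn *next = NEXT_INSN (insn);
     rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   and then continues its scan from NEXT; if no splitter matched, LAST
   is simply INSN itself and nothing has changed.  */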
4014 | \f | |
4015 | /* Make and return an INSN rtx, initializing all its slots. | |
6a84e367 | 4016 | Store PATTERN in the pattern slot. */
15bbde2b | 4017 | |
2c57d586 | 4018 | rtx_insn * |
35cb5232 | 4019 | make_insn_raw (rtx pattern) |
15bbde2b | 4020 | { |
2c57d586 | 4021 | rtx_insn *insn; |
15bbde2b | 4022 | |
2c57d586 | 4023 | insn = as_a <rtx_insn *> (rtx_alloc (INSN)); |
15bbde2b | 4024 | |
575333f9 | 4025 | INSN_UID (insn) = cur_insn_uid++; |
15bbde2b | 4026 | PATTERN (insn) = pattern; |
4027 | INSN_CODE (insn) = -1; | |
fc92fa61 | 4028 | REG_NOTES (insn) = NULL; |
5169661d | 4029 | INSN_LOCATION (insn) = curr_insn_location (); |
ab87d1bc | 4030 | BLOCK_FOR_INSN (insn) = NULL; |
15bbde2b | 4031 | |
fe7f701d | 4032 | #ifdef ENABLE_RTL_CHECKING |
4033 | if (insn | |
9204e736 | 4034 | && INSN_P (insn) |
fe7f701d | 4035 | && (returnjump_p (insn) |
4036 | || (GET_CODE (insn) == SET | |
4037 | && SET_DEST (insn) == pc_rtx))) | |
4038 | { | |
c3ceba8e | 4039 | warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n"); |
fe7f701d | 4040 | debug_rtx (insn); |
4041 | } | |
4042 | #endif | |
d823ba47 | 4043 | |
15bbde2b | 4044 | return insn; |
4045 | } | |
4046 | ||
9845d120 | 4047 | /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */ |
4048 | ||
2c57d586 | 4049 | static rtx_insn * |
9845d120 | 4050 | make_debug_insn_raw (rtx pattern) |
4051 | { | |
2c57d586 | 4052 | rtx_debug_insn *insn; |
9845d120 | 4053 | |
2c57d586 | 4054 | insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN)); |
9845d120 | 4055 | INSN_UID (insn) = cur_debug_insn_uid++; |
4056 | if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID) | |
4057 | INSN_UID (insn) = cur_insn_uid++; | |
4058 | ||
4059 | PATTERN (insn) = pattern; | |
4060 | INSN_CODE (insn) = -1; | |
4061 | REG_NOTES (insn) = NULL; | |
5169661d | 4062 | INSN_LOCATION (insn) = curr_insn_location (); |
9845d120 | 4063 | BLOCK_FOR_INSN (insn) = NULL; |
4064 | ||
4065 | return insn; | |
4066 | } | |
4067 | ||
31d3e01c | 4068 | /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */ |
15bbde2b | 4069 | |
2c57d586 | 4070 | static rtx_insn * |
35cb5232 | 4071 | make_jump_insn_raw (rtx pattern) |
15bbde2b | 4072 | { |
2c57d586 | 4073 | rtx_jump_insn *insn; |
15bbde2b | 4074 | |
2c57d586 | 4075 | insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN)); |
fc92fa61 | 4076 | INSN_UID (insn) = cur_insn_uid++; |
15bbde2b | 4077 | |
4078 | PATTERN (insn) = pattern; | |
4079 | INSN_CODE (insn) = -1; | |
fc92fa61 | 4080 | REG_NOTES (insn) = NULL; |
4081 | JUMP_LABEL (insn) = NULL; | |
5169661d | 4082 | INSN_LOCATION (insn) = curr_insn_location (); |
ab87d1bc | 4083 | BLOCK_FOR_INSN (insn) = NULL; |
15bbde2b | 4084 | |
4085 | return insn; | |
4086 | } | |
6e911104 | 4087 | |
31d3e01c | 4088 | /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */ |
6e911104 | 4089 | |
2c57d586 | 4090 | static rtx_insn * |
35cb5232 | 4091 | make_call_insn_raw (rtx pattern) |
6e911104 | 4092 | { |
2c57d586 | 4093 | rtx_call_insn *insn; |
6e911104 | 4094 | |
2c57d586 | 4095 | insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN)); |
6e911104 | 4096 | INSN_UID (insn) = cur_insn_uid++; |
4097 | ||
4098 | PATTERN (insn) = pattern; | |
4099 | INSN_CODE (insn) = -1; | |
6e911104 | 4100 | REG_NOTES (insn) = NULL; |
4101 | CALL_INSN_FUNCTION_USAGE (insn) = NULL; | |
5169661d | 4102 | INSN_LOCATION (insn) = curr_insn_location (); |
ab87d1bc | 4103 | BLOCK_FOR_INSN (insn) = NULL; |
6e911104 | 4104 | |
4105 | return insn; | |
4106 | } | |
35f3420b | 4107 | |
4108 | /* Like `make_insn_raw' but make a NOTE instead of an insn. */ | |
4109 | ||
cef3d8ad | 4110 | static rtx_note * |
35f3420b | 4111 | make_note_raw (enum insn_note subtype) |
4112 | { | |
4113 | /* Some notes are never created this way at all. These notes are | |
4114 | only created by patching out insns. */ | |
4115 | gcc_assert (subtype != NOTE_INSN_DELETED_LABEL | |
4116 | && subtype != NOTE_INSN_DELETED_DEBUG_LABEL); | |
4117 | ||
cef3d8ad | 4118 | rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE)); |
35f3420b | 4119 | INSN_UID (note) = cur_insn_uid++; |
4120 | NOTE_KIND (note) = subtype; | |
4121 | BLOCK_FOR_INSN (note) = NULL; | |
4122 | memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note))); | |
4123 | return note; | |
4124 | } | |
15bbde2b | 4125 | \f |
35f3420b | 4126 | /* Link INSN into the doubly-linked list between PREV and NEXT.
4127 | INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects, | |
4128 | but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */ | |
4129 | ||
4130 | static inline void | |
3e75e92b | 4131 | link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next) |
35f3420b | 4132 | { |
4a57a2e8 | 4133 | SET_PREV_INSN (insn) = prev; |
4134 | SET_NEXT_INSN (insn) = next; | |
35f3420b | 4135 | if (prev != NULL) |
4136 | { | |
4a57a2e8 | 4137 | SET_NEXT_INSN (prev) = insn; |
35f3420b | 4138 | if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE) |
4139 | { | |
f17e3fff | 4140 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev)); |
4141 | SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn; | |
35f3420b | 4142 | } |
4143 | } | |
4144 | if (next != NULL) | |
4145 | { | |
4a57a2e8 | 4146 | SET_PREV_INSN (next) = insn; |
35f3420b | 4147 | if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) |
f17e3fff | 4148 | { |
4149 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next)); | |
4150 | SET_PREV_INSN (sequence->insn (0)) = insn; | |
4151 | } | |
35f3420b | 4152 | } |
34f5b9ac | 4153 | |
4154 | if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
4155 | { | |
f17e3fff | 4156 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn)); |
4157 | SET_PREV_INSN (sequence->insn (0)) = prev; | |
4158 | SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next; | |
34f5b9ac | 4159 | } |
35f3420b | 4160 | } |
4161 | ||
15bbde2b | 4162 | /* Add INSN to the end of the doubly-linked list. |
4163 | INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */ | |
4164 | ||
4165 | void | |
3e75e92b | 4166 | add_insn (rtx_insn *insn) |
15bbde2b | 4167 | { |
3e75e92b | 4168 | rtx_insn *prev = get_last_insn (); |
35f3420b | 4169 | link_insn_into_chain (insn, prev, NULL); |
c9281ef8 | 4170 | if (get_insns () == NULL) |
06f9d6ef | 4171 | set_first_insn (insn); |
06f9d6ef | 4172 | set_last_insn (insn); |
15bbde2b | 4173 | } |
4174 | ||
35f3420b | 4175 | /* Add INSN into the doubly-linked list after insn AFTER. */ |
15bbde2b | 4176 | |
35f3420b | 4177 | static void |
3e75e92b | 4178 | add_insn_after_nobb (rtx_insn *insn, rtx_insn *after) |
15bbde2b | 4179 | { |
3e75e92b | 4180 | rtx_insn *next = NEXT_INSN (after); |
15bbde2b | 4181 | |
dd1286fb | 4182 | gcc_assert (!optimize || !after->deleted ()); |
f65c10c0 | 4183 | |
35f3420b | 4184 | link_insn_into_chain (insn, after, next); |
15bbde2b | 4185 | |
35f3420b | 4186 | if (next == NULL) |
15bbde2b | 4187 | { |
c36aa54b | 4188 | struct sequence_stack *seq; |
4189 | ||
4190 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
4191 | if (after == seq->last) | |
4192 | { | |
4193 | seq->last = insn; | |
4194 | break; | |
4195 | } | |
15bbde2b | 4196 | } |
35f3420b | 4197 | } |
4198 | ||
4199 | /* Add INSN into the doubly-linked list before insn BEFORE. */ | |
4200 | ||
4201 | static void | |
3e75e92b | 4202 | add_insn_before_nobb (rtx_insn *insn, rtx_insn *before) |
35f3420b | 4203 | { |
3e75e92b | 4204 | rtx_insn *prev = PREV_INSN (before); |
35f3420b | 4205 | |
dd1286fb | 4206 | gcc_assert (!optimize || !before->deleted ()); |
35f3420b | 4207 | |
4208 | link_insn_into_chain (insn, prev, before); | |
4209 | ||
4210 | if (prev == NULL) | |
15bbde2b | 4211 | { |
c36aa54b | 4212 | struct sequence_stack *seq; |
312de84d | 4213 | |
c36aa54b | 4214 | for (seq = get_current_sequence (); seq; seq = seq->next) |
4215 | if (before == seq->first) | |
4216 | { | |
4217 | seq->first = insn; | |
4218 | break; | |
4219 | } | |
4220 | ||
4221 | gcc_assert (seq); | |
15bbde2b | 4222 | } |
35f3420b | 4223 | } |
4224 | ||
4225 | /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN. | |
4226 | If BB is NULL, an attempt is made to infer the bb from AFTER.
4227 | ||
4228 | This and the next function should be the only functions called | |
4229 | to insert an insn once delay slots have been filled since only | |
4230 | they know how to update a SEQUENCE. */ | |
15bbde2b | 4231 | |
35f3420b | 4232 | void |
924b3c83 | 4233 | add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb) |
35f3420b | 4234 | { |
4235 | add_insn_after_nobb (insn, after); | |
6d7dc5b9 | 4236 | if (!BARRIER_P (after) |
4237 | && !BARRIER_P (insn) | |
9dda7915 | 4238 | && (bb = BLOCK_FOR_INSN (after))) |
4239 | { | |
4240 | set_block_for_insn (insn, bb); | |
308f9b79 | 4241 | if (INSN_P (insn)) |
3072d30e | 4242 | df_insn_rescan (insn); |
9dda7915 | 4243 | /* Should not happen, as the first insn in the BB is always
3fb1e43b | 4244 | either a NOTE or a LABEL. */
5496dbfc | 4245 | if (BB_END (bb) == after |
9dda7915 | 4246 | /* Avoid clobbering of structure when creating new BB. */ |
6d7dc5b9 | 4247 | && !BARRIER_P (insn) |
ad4583d9 | 4248 | && !NOTE_INSN_BASIC_BLOCK_P (insn)) |
26bb3cb2 | 4249 | BB_END (bb) = insn; |
9dda7915 | 4250 | } |
15bbde2b | 4251 | } |
4252 | ||
35f3420b | 4253 | /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN. |
4254 | If BB is NULL, an attempt is made to infer the bb from BEFORE.
4255 | ||
4256 | This and the previous function should be the only functions called | |
4257 | to insert an insn once delay slots have been filled since only | |
4258 | they know how to update a SEQUENCE. */ | |
312de84d | 4259 | |
4260 | void | |
924b3c83 | 4261 | add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb) |
312de84d | 4262 | { |
35f3420b | 4263 | add_insn_before_nobb (insn, before); |
312de84d | 4264 | |
48e1416a | 4265 | if (!bb |
3072d30e | 4266 | && !BARRIER_P (before) |
4267 | && !BARRIER_P (insn)) | |
4268 | bb = BLOCK_FOR_INSN (before); | |
4269 | ||
4270 | if (bb) | |
9dda7915 | 4271 | { |
4272 | set_block_for_insn (insn, bb); | |
308f9b79 | 4273 | if (INSN_P (insn)) |
3072d30e | 4274 | df_insn_rescan (insn); |
611234b4 | 4275 | /* Should not happen, as the first insn in the BB is always either a NOTE
ba821eb1 | 4276 | or a LABEL. */
611234b4 | 4277 | gcc_assert (BB_HEAD (bb) != insn |
4278 | /* Avoid clobbering of structure when creating new BB. */ | |
4279 | || BARRIER_P (insn) | |
ad4583d9 | 4280 | || NOTE_INSN_BASIC_BLOCK_P (insn)); |
9dda7915 | 4281 | } |
312de84d | 4282 | } |
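/* Sketch (illustrative): inserting a freshly made insn right after
   AFTER while keeping basic-block bookkeeping consistent:

     add_insn_after (insn, after, NULL);

   Passing NULL for BB makes the function recover the block from AFTER
   via BLOCK_FOR_INSN and update BB_END if AFTER ended the block.  */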
4283 | ||
3072d30e | 4284 | /* Replace INSN with a deleted instruction note. */
4285 | ||
fc3d1695 | 4286 | void |
6b63fbbe | 4287 | set_insn_deleted (rtx_insn *insn) |
3072d30e | 4288 | { |
91f71fa3 | 4289 | if (INSN_P (insn)) |
6b63fbbe | 4290 | df_insn_delete (insn); |
3072d30e | 4291 | PUT_CODE (insn, NOTE); |
4292 | NOTE_KIND (insn) = NOTE_INSN_DELETED; | |
4293 | } | |
4294 | ||
4295 | ||
93ff53d3 | 4296 | /* Unlink INSN from the insn chain. |
4297 | ||
4298 | This function knows how to handle sequences. | |
4299 | ||
4300 | This function does not invalidate data flow information associated with | |
4301 | INSN (i.e. does not call df_insn_delete). That makes this function | |
4302 | usable for merely disconnecting an insn from the chain so that it |
4303 | can be re-emitted elsewhere later. |
4304 | ||
4305 | To later insert INSN elsewhere in the insn chain via add_insn and | |
4306 | similar functions, PREV_INSN and NEXT_INSN must be nullified by | |
4307 | the caller. Nullifying them here breaks many insn chain walks. | |
4308 | ||
4309 | To really delete an insn and related DF information, use delete_insn. */ | |
4310 | ||
7ddcf2bf | 4311 | void |
924b3c83 | 4312 | remove_insn (rtx_insn *insn) |
7ddcf2bf | 4313 | { |
26bb3cb2 | 4314 | rtx_insn *next = NEXT_INSN (insn); |
4315 | rtx_insn *prev = PREV_INSN (insn); | |
e4bf866d | 4316 | basic_block bb; |
4317 | ||
7ddcf2bf | 4318 | if (prev) |
4319 | { | |
4a57a2e8 | 4320 | SET_NEXT_INSN (prev) = next; |
6d7dc5b9 | 4321 | if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE) |
7ddcf2bf | 4322 | { |
f17e3fff | 4323 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev)); |
4324 | SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next; | |
7ddcf2bf | 4325 | } |
4326 | } | |
7ddcf2bf | 4327 | else |
4328 | { | |
c36aa54b | 4329 | struct sequence_stack *seq; |
4330 | ||
4331 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
4332 | if (insn == seq->first) | |
7ddcf2bf | 4333 | { |
c36aa54b | 4334 | seq->first = next; |
7ddcf2bf | 4335 | break; |
4336 | } | |
4337 | ||
c36aa54b | 4338 | gcc_assert (seq); |
7ddcf2bf | 4339 | } |
4340 | ||
4341 | if (next) | |
4342 | { | |
4a57a2e8 | 4343 | SET_PREV_INSN (next) = prev; |
6d7dc5b9 | 4344 | if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) |
f17e3fff | 4345 | { |
4346 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next)); | |
4347 | SET_PREV_INSN (sequence->insn (0)) = prev; | |
4348 | } | |
7ddcf2bf | 4349 | } |
7ddcf2bf | 4350 | else |
4351 | { | |
c36aa54b | 4352 | struct sequence_stack *seq; |
4353 | ||
4354 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
4355 | if (insn == seq->last) | |
7ddcf2bf | 4356 | { |
c36aa54b | 4357 | seq->last = prev; |
7ddcf2bf | 4358 | break; |
4359 | } | |
4360 | ||
c36aa54b | 4361 | gcc_assert (seq); |
7ddcf2bf | 4362 | } |
b983ea33 | 4363 | |
b983ea33 | 4364 | /* Fix up basic block boundaries, if necessary. */ |
6d7dc5b9 | 4365 | if (!BARRIER_P (insn) |
e4bf866d | 4366 | && (bb = BLOCK_FOR_INSN (insn))) |
4367 | { | |
5496dbfc | 4368 | if (BB_HEAD (bb) == insn) |
e4bf866d | 4369 | { |
f4aee538 | 4370 | /* Never ever delete the basic block note without deleting whole |
4371 | basic block. */ | |
611234b4 | 4372 | gcc_assert (!NOTE_P (insn)); |
26bb3cb2 | 4373 | BB_HEAD (bb) = next; |
e4bf866d | 4374 | } |
5496dbfc | 4375 | if (BB_END (bb) == insn) |
26bb3cb2 | 4376 | BB_END (bb) = prev; |
e4bf866d | 4377 | } |
7ddcf2bf | 4378 | } |
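/* A minimal sketch of the detach-and-reinsert idiom described above;
   AFTER is a hypothetical destination insn chosen by the caller:

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, after, NULL);  */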
4379 | ||
d5f9786f | 4380 | /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */ |
4381 | ||
4382 | void | |
35cb5232 | 4383 | add_function_usage_to (rtx call_insn, rtx call_fusage) |
d5f9786f | 4384 | { |
611234b4 | 4385 | gcc_assert (call_insn && CALL_P (call_insn)); |
d5f9786f | 4386 | |
4387 | /* Put the register usage information on the CALL. If there is already | |
4388 | some usage information, put ours at the end. */ | |
4389 | if (CALL_INSN_FUNCTION_USAGE (call_insn)) | |
4390 | { | |
4391 | rtx link; | |
4392 | ||
4393 | for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0; | |
4394 | link = XEXP (link, 1)) | |
4395 | ; | |
4396 | ||
4397 | XEXP (link, 1) = call_fusage; | |
4398 | } | |
4399 | else | |
4400 | CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage; | |
4401 | } | |
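/* A sketch of the usual caller pattern: build up a chain of USEs with
   use_reg and attach it to an already-emitted call.  REG here is a
   hypothetical register that the call implicitly reads:

     rtx fusage = NULL_RTX;
     use_reg (&fusage, reg);
     add_function_usage_to (call_insn, fusage);  */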
4402 | ||
15bbde2b | 4403 | /* Delete all insns made since FROM. |
4404 | FROM becomes the new last instruction. */ | |
4405 | ||
4406 | void | |
57c26b3a | 4407 | delete_insns_since (rtx_insn *from) |
15bbde2b | 4408 | { |
4409 | if (from == 0) | |
06f9d6ef | 4410 | set_first_insn (0); |
15bbde2b | 4411 | else |
4a57a2e8 | 4412 | SET_NEXT_INSN (from) = 0; |
06f9d6ef | 4413 | set_last_insn (from); |
15bbde2b | 4414 | } |
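/* This enables the common rollback idiom: record the last insn, emit a
   tentative expansion, and discard it wholesale on failure.  A sketch,
   where expand_attempt is a hypothetical expander:

     rtx_insn *last = get_last_insn ();
     if (!expand_attempt ())
       delete_insns_since (last);  */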
4415 | ||
34e2ddcd | 4416 | /* This function is deprecated; please use sequences instead. |
4417 | ||
4418 | Move a consecutive bunch of insns to a different place in the chain. | |
15bbde2b | 4419 | The insns to be moved are those between FROM and TO. |
4420 | They are moved to a new position after the insn AFTER. | |
4421 | AFTER must not be FROM or TO or any insn in between. | |
4422 | ||
4423 | This function does not know about SEQUENCEs and hence should not be | |
4424 | called after delay-slot filling has been done. */ | |
4425 | ||
4426 | void | |
57c26b3a | 4427 | reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after) |
15bbde2b | 4428 | { |
382ecba7 | 4429 | if (flag_checking) |
4430 | { | |
4431 | for (rtx_insn *x = from; x != to; x = NEXT_INSN (x)) | |
4432 | gcc_assert (after != x); | |
4433 | gcc_assert (after != to); | |
4434 | } | |
7f6ca11f | 4435 | |
15bbde2b | 4436 | /* Splice this bunch out of where it is now. */ |
4437 | if (PREV_INSN (from)) | |
4a57a2e8 | 4438 | SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to); |
15bbde2b | 4439 | if (NEXT_INSN (to)) |
4a57a2e8 | 4440 | SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from); |
06f9d6ef | 4441 | if (get_last_insn () == to) |
4442 | set_last_insn (PREV_INSN (from)); | |
4443 | if (get_insns () == from) | |
4444 | set_first_insn (NEXT_INSN (to)); | |
15bbde2b | 4445 | |
4446 | /* Make the new neighbors point to it and it to them. */ | |
4447 | if (NEXT_INSN (after)) | |
4a57a2e8 | 4448 | SET_PREV_INSN (NEXT_INSN (after)) = to; |
15bbde2b | 4449 | |
4a57a2e8 | 4450 | SET_NEXT_INSN (to) = NEXT_INSN (after); |
4451 | SET_PREV_INSN (from) = after; | |
4452 | SET_NEXT_INSN (after) = from; | |
9af5ce0c | 4453 | if (after == get_last_insn ()) |
06f9d6ef | 4454 | set_last_insn (to); |
15bbde2b | 4455 | } |
4456 | ||
9dda7915 | 4457 | /* Same as function above, but take care to update BB boundaries. */ |
4458 | void | |
4a3fb716 | 4459 | reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after) |
9dda7915 | 4460 | { |
4a3fb716 | 4461 | rtx_insn *prev = PREV_INSN (from); |
9dda7915 | 4462 | basic_block bb, bb2; |
4463 | ||
4464 | reorder_insns_nobb (from, to, after); | |
4465 | ||
6d7dc5b9 | 4466 | if (!BARRIER_P (after) |
9dda7915 | 4467 | && (bb = BLOCK_FOR_INSN (after))) |
4468 | { | |
e149ca56 | 4469 | rtx_insn *x; |
3072d30e | 4470 | df_set_bb_dirty (bb); |
d4c5e26d | 4471 | |
6d7dc5b9 | 4472 | if (!BARRIER_P (from) |
9dda7915 | 4473 | && (bb2 = BLOCK_FOR_INSN (from))) |
4474 | { | |
5496dbfc | 4475 | if (BB_END (bb2) == to) |
26bb3cb2 | 4476 | BB_END (bb2) = prev; |
3072d30e | 4477 | df_set_bb_dirty (bb2); |
9dda7915 | 4478 | } |
4479 | ||
5496dbfc | 4480 | if (BB_END (bb) == after) |
26bb3cb2 | 4481 | BB_END (bb) = to; |
9dda7915 | 4482 | |
4483 | for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x)) | |
7097dd0c | 4484 | if (!BARRIER_P (x)) |
a2bdd643 | 4485 | df_insn_change_bb (x, bb); |
9dda7915 | 4486 | } |
4487 | } | |
4488 | ||
15bbde2b | 4489 | \f |
31d3e01c | 4490 | /* Emit insn(s) of given code and pattern |
4491 | at a specified place within the doubly-linked list. | |
15bbde2b | 4492 | |
31d3e01c | 4493 | All of the emit_foo global entry points accept an object |
4494 | X which is either an insn list or a PATTERN of a single | |
4495 | instruction. | |
15bbde2b | 4496 | |
31d3e01c | 4497 | There are thus a few canonical ways to generate code and |
4498 | emit it at a specific place in the instruction stream. For | |
4499 | example, consider the instruction named SPOT and the fact that | |
4500 | we would like to emit some instructions before SPOT. We might | |
4501 | do it like this: | |
15bbde2b | 4502 | |
31d3e01c | 4503 | start_sequence (); |
4504 | ... emit the new instructions ... | |
4505 | insns_head = get_insns (); | |
4506 | end_sequence (); | |
15bbde2b | 4507 | |
31d3e01c | 4508 | emit_insn_before (insns_head, SPOT); |
15bbde2b | 4509 | |
31d3e01c | 4510 | It used to be common to generate SEQUENCE rtl instead, but that |
4511 | is a relic of the past which no longer occurs. The reason is that | |
4512 | SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE |
4513 | generated would almost certainly die right after it was created. */ | |
15bbde2b | 4514 | |
722334ea | 4515 | static rtx_insn * |
924b3c83 | 4516 | emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last, |
4517 | basic_block bb, | |
2c57d586 | 4518 | rtx_insn *(*make_raw) (rtx)) |
15bbde2b | 4519 | { |
2c57d586 | 4520 | rtx_insn *insn; |
15bbde2b | 4521 | |
611234b4 | 4522 | gcc_assert (before); |
31d3e01c | 4523 | |
4524 | if (x == NULL_RTX) | |
924b3c83 | 4525 | return last; |
31d3e01c | 4526 | |
4527 | switch (GET_CODE (x)) | |
15bbde2b | 4528 | { |
9845d120 | 4529 | case DEBUG_INSN: |
31d3e01c | 4530 | case INSN: |
4531 | case JUMP_INSN: | |
4532 | case CALL_INSN: | |
4533 | case CODE_LABEL: | |
4534 | case BARRIER: | |
4535 | case NOTE: | |
2c57d586 | 4536 | insn = as_a <rtx_insn *> (x); |
31d3e01c | 4537 | while (insn) |
4538 | { | |
2c57d586 | 4539 | rtx_insn *next = NEXT_INSN (insn); |
3072d30e | 4540 | add_insn_before (insn, before, bb); |
31d3e01c | 4541 | last = insn; |
4542 | insn = next; | |
4543 | } | |
4544 | break; | |
4545 | ||
4546 | #ifdef ENABLE_RTL_CHECKING | |
4547 | case SEQUENCE: | |
611234b4 | 4548 | gcc_unreachable (); |
31d3e01c | 4549 | break; |
4550 | #endif | |
4551 | ||
4552 | default: | |
5f7c5ddd | 4553 | last = (*make_raw) (x); |
3072d30e | 4554 | add_insn_before (last, before, bb); |
31d3e01c | 4555 | break; |
15bbde2b | 4556 | } |
4557 | ||
924b3c83 | 4558 | return last; |
15bbde2b | 4559 | } |
4560 | ||
5f7c5ddd | 4561 | /* Make X be output before the instruction BEFORE. */ |
4562 | ||
722334ea | 4563 | rtx_insn * |
c9a09955 | 4564 | emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb) |
5f7c5ddd | 4565 | { |
4566 | return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw); | |
4567 | } | |
4568 | ||
31d3e01c | 4569 | /* Make an instruction with body X and code JUMP_INSN |
15bbde2b | 4570 | and output it before the instruction BEFORE. */ |
4571 | ||
f9a00e9e | 4572 | rtx_jump_insn * |
c9a09955 | 4573 | emit_jump_insn_before_noloc (rtx x, rtx_insn *before) |
15bbde2b | 4574 | { |
f9a00e9e | 4575 | return as_a <rtx_jump_insn *> ( |
924b3c83 | 4576 | emit_pattern_before_noloc (x, before, NULL, NULL, |
f9a00e9e | 4577 | make_jump_insn_raw)); |
15bbde2b | 4578 | } |
4579 | ||
31d3e01c | 4580 | /* Make an instruction with body X and code CALL_INSN |
cd0fe062 | 4581 | and output it before the instruction BEFORE. */ |
4582 | ||
722334ea | 4583 | rtx_insn * |
c9a09955 | 4584 | emit_call_insn_before_noloc (rtx x, rtx_insn *before) |
cd0fe062 | 4585 | { |
924b3c83 | 4586 | return emit_pattern_before_noloc (x, before, NULL, NULL, |
5f7c5ddd | 4587 | make_call_insn_raw); |
cd0fe062 | 4588 | } |
4589 | ||
9845d120 | 4590 | /* Make an instruction with body X and code DEBUG_INSN |
4591 | and output it before the instruction BEFORE. */ | |
4592 | ||
722334ea | 4593 | rtx_insn * |
924b3c83 | 4594 | emit_debug_insn_before_noloc (rtx x, rtx_insn *before) |
9845d120 | 4595 | { |
924b3c83 | 4596 | return emit_pattern_before_noloc (x, before, NULL, NULL, |
5f7c5ddd | 4597 | make_debug_insn_raw); |
9845d120 | 4598 | } |
4599 | ||
15bbde2b | 4600 | /* Make an insn of code BARRIER |
71caadc0 | 4601 | and output it before the insn BEFORE. */ |
15bbde2b | 4602 | |
722334ea | 4603 | rtx_barrier * |
924b3c83 | 4604 | emit_barrier_before (rtx_insn *before) |
15bbde2b | 4605 | { |
722334ea | 4606 | rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
15bbde2b | 4607 | |
4608 | INSN_UID (insn) = cur_insn_uid++; | |
4609 | ||
3072d30e | 4610 | add_insn_before (insn, before, NULL); |
15bbde2b | 4611 | return insn; |
4612 | } | |
4613 | ||
71caadc0 | 4614 | /* Emit the label LABEL before the insn BEFORE. */ |
4615 | ||
f9a00e9e | 4616 | rtx_code_label * |
924b3c83 | 4617 | emit_label_before (rtx_code_label *label, rtx_insn *before) |
71caadc0 | 4618 | { |
596ef494 | 4619 | gcc_checking_assert (INSN_UID (label) == 0); |
4620 | INSN_UID (label) = cur_insn_uid++; | |
4621 | add_insn_before (label, before, NULL); | |
924b3c83 | 4622 | return label; |
71caadc0 | 4623 | } |
15bbde2b | 4624 | \f |
31d3e01c | 4625 | /* Helper for emit_insn_after, handles lists of instructions |
4626 | efficiently. */ | |
15bbde2b | 4627 | |
f17e3fff | 4628 | static rtx_insn * |
924b3c83 | 4629 | emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb) |
15bbde2b | 4630 | { |
26bb3cb2 | 4631 | rtx_insn *last; |
4632 | rtx_insn *after_after; | |
3072d30e | 4633 | if (!bb && !BARRIER_P (after)) |
4634 | bb = BLOCK_FOR_INSN (after); | |
15bbde2b | 4635 | |
3072d30e | 4636 | if (bb) |
15bbde2b | 4637 | { |
3072d30e | 4638 | df_set_bb_dirty (bb); |
31d3e01c | 4639 | for (last = first; NEXT_INSN (last); last = NEXT_INSN (last)) |
6d7dc5b9 | 4640 | if (!BARRIER_P (last)) |
3072d30e | 4641 | { |
4642 | set_block_for_insn (last, bb); | |
4643 | df_insn_rescan (last); | |
4644 | } | |
6d7dc5b9 | 4645 | if (!BARRIER_P (last)) |
3072d30e | 4646 | { |
4647 | set_block_for_insn (last, bb); | |
4648 | df_insn_rescan (last); | |
4649 | } | |
5496dbfc | 4650 | if (BB_END (bb) == after) |
26bb3cb2 | 4651 | BB_END (bb) = last; |
15bbde2b | 4652 | } |
4653 | else | |
31d3e01c | 4654 | for (last = first; NEXT_INSN (last); last = NEXT_INSN (last)) |
4655 | continue; | |
4656 | ||
4657 | after_after = NEXT_INSN (after); | |
4658 | ||
4a57a2e8 | 4659 | SET_NEXT_INSN (after) = first; |
4660 | SET_PREV_INSN (first) = after; | |
4661 | SET_NEXT_INSN (last) = after_after; | |
31d3e01c | 4662 | if (after_after) |
4a57a2e8 | 4663 | SET_PREV_INSN (after_after) = last; |
31d3e01c | 4664 | |
9af5ce0c | 4665 | if (after == get_last_insn ()) |
06f9d6ef | 4666 | set_last_insn (last); |
e1ab7874 | 4667 | |
31d3e01c | 4668 | return last; |
4669 | } | |
4670 | ||
722334ea | 4671 | static rtx_insn * |
924b3c83 | 4672 | emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb, |
2c57d586 | 4673 | rtx_insn *(*make_raw)(rtx)) |
31d3e01c | 4674 | { |
f17e3fff | 4675 | rtx_insn *last = after; |
31d3e01c | 4676 | |
611234b4 | 4677 | gcc_assert (after); |
31d3e01c | 4678 | |
4679 | if (x == NULL_RTX) | |
f17e3fff | 4680 | return last; |
31d3e01c | 4681 | |
4682 | switch (GET_CODE (x)) | |
15bbde2b | 4683 | { |
9845d120 | 4684 | case DEBUG_INSN: |
31d3e01c | 4685 | case INSN: |
4686 | case JUMP_INSN: | |
4687 | case CALL_INSN: | |
4688 | case CODE_LABEL: | |
4689 | case BARRIER: | |
4690 | case NOTE: | |
26bb3cb2 | 4691 | last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb); |
31d3e01c | 4692 | break; |
4693 | ||
4694 | #ifdef ENABLE_RTL_CHECKING | |
4695 | case SEQUENCE: | |
611234b4 | 4696 | gcc_unreachable (); |
31d3e01c | 4697 | break; |
4698 | #endif | |
4699 | ||
4700 | default: | |
5f7c5ddd | 4701 | last = (*make_raw) (x); |
3072d30e | 4702 | add_insn_after (last, after, bb); |
31d3e01c | 4703 | break; |
15bbde2b | 4704 | } |
4705 | ||
f17e3fff | 4706 | return last; |
15bbde2b | 4707 | } |
4708 | ||
5f7c5ddd | 4709 | /* Make X be output after the insn AFTER and set the BB of insn. If |
4710 | BB is NULL, an attempt is made to infer the BB from AFTER. */ | |
4711 | ||
722334ea | 4712 | rtx_insn * |
924b3c83 | 4713 | emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb) |
5f7c5ddd | 4714 | { |
4715 | return emit_pattern_after_noloc (x, after, bb, make_insn_raw); | |
4716 | } | |
4717 | ||
1bea98fb | 4718 | |
31d3e01c | 4719 | /* Make an insn of code JUMP_INSN with body X |
15bbde2b | 4720 | and output it after the insn AFTER. */ |
4721 | ||
f9a00e9e | 4722 | rtx_jump_insn * |
924b3c83 | 4723 | emit_jump_insn_after_noloc (rtx x, rtx_insn *after) |
15bbde2b | 4724 | { |
f9a00e9e | 4725 | return as_a <rtx_jump_insn *> ( |
4726 | emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw)); | |
31d3e01c | 4727 | } |
4728 | ||
4729 | /* Make an instruction with body X and code CALL_INSN | |
4730 | and output it after the instruction AFTER. */ | |
4731 | ||
722334ea | 4732 | rtx_insn * |
924b3c83 | 4733 | emit_call_insn_after_noloc (rtx x, rtx_insn *after) |
31d3e01c | 4734 | { |
5f7c5ddd | 4735 | return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw); |
15bbde2b | 4736 | } |
4737 | ||
9845d120 | 4738 | /* Make an instruction with body X and code DEBUG_INSN |
4739 | and output it after the instruction AFTER. */ | |
4740 | ||
722334ea | 4741 | rtx_insn * |
924b3c83 | 4742 | emit_debug_insn_after_noloc (rtx x, rtx_insn *after) |
9845d120 | 4743 | { |
5f7c5ddd | 4744 | return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw); |
9845d120 | 4745 | } |
4746 | ||
15bbde2b | 4747 | /* Make an insn of code BARRIER |
4748 | and output it after the insn AFTER. */ | |
4749 | ||
722334ea | 4750 | rtx_barrier * |
924b3c83 | 4751 | emit_barrier_after (rtx_insn *after) |
15bbde2b | 4752 | { |
722334ea | 4753 | rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
15bbde2b | 4754 | |
4755 | INSN_UID (insn) = cur_insn_uid++; | |
4756 | ||
3072d30e | 4757 | add_insn_after (insn, after, NULL); |
15bbde2b | 4758 | return insn; |
4759 | } | |
4760 | ||
4761 | /* Emit the label LABEL after the insn AFTER. */ | |
4762 | ||
722334ea | 4763 | rtx_insn * |
924b3c83 | 4764 | emit_label_after (rtx_insn *label, rtx_insn *after) |
15bbde2b | 4765 | { |
596ef494 | 4766 | gcc_checking_assert (INSN_UID (label) == 0); |
4767 | INSN_UID (label) = cur_insn_uid++; | |
4768 | add_insn_after (label, after, NULL); | |
924b3c83 | 4769 | return label; |
15bbde2b | 4770 | } |
35f3420b | 4771 | \f |
4772 | /* Notes require a bit of special handling: Some notes need to have their | |
4773 | BLOCK_FOR_INSN set, others should never have it set, and some should | |
4774 | have it set or clear depending on the context. */ | |
4775 | ||
4776 | /* Return true iff a note of kind SUBTYPE should be emitted with routines | |
4777 | that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the | |
4778 | caller is asked to emit a note before BB_HEAD, or after BB_END. */ | |
4779 | ||
4780 | static bool | |
4781 | note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p) | |
4782 | { | |
4783 | switch (subtype) | |
4784 | { | |
4785 | /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */ | |
4786 | case NOTE_INSN_SWITCH_TEXT_SECTIONS: | |
4787 | return true; | |
4788 | ||
4789 | /* Notes for var tracking and EH region markers can appear between or | |
4790 | inside basic blocks. If the caller is emitting on the basic block | |
4791 | boundary, do not set BLOCK_FOR_INSN on the new note. */ | |
4792 | case NOTE_INSN_VAR_LOCATION: | |
35f3420b | 4793 | case NOTE_INSN_EH_REGION_BEG: |
4794 | case NOTE_INSN_EH_REGION_END: | |
4795 | return on_bb_boundary_p; | |
4796 | ||
4797 | /* Otherwise, BLOCK_FOR_INSN must be set. */ | |
4798 | default: | |
4799 | return false; | |
4800 | } | |
4801 | } | |
15bbde2b | 4802 | |
4803 | /* Emit a note of subtype SUBTYPE after the insn AFTER. */ | |
4804 | ||
cef3d8ad | 4805 | rtx_note * |
4d86329d | 4806 | emit_note_after (enum insn_note subtype, rtx_insn *after) |
15bbde2b | 4807 | { |
cef3d8ad | 4808 | rtx_note *note = make_note_raw (subtype); |
35f3420b | 4809 | basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after); |
4810 | bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after); | |
4811 | ||
4812 | if (note_outside_basic_block_p (subtype, on_bb_boundary_p)) | |
4813 | add_insn_after_nobb (note, after); | |
4814 | else | |
4815 | add_insn_after (note, after, bb); | |
4816 | return note; | |
4817 | } | |
4818 | ||
4819 | /* Emit a note of subtype SUBTYPE before the insn BEFORE. */ | |
4820 | ||
cef3d8ad | 4821 | rtx_note * |
1dc26636 | 4822 | emit_note_before (enum insn_note subtype, rtx_insn *before) |
35f3420b | 4823 | { |
cef3d8ad | 4824 | rtx_note *note = make_note_raw (subtype); |
35f3420b | 4825 | basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before); |
4826 | bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before); | |
4827 | ||
4828 | if (note_outside_basic_block_p (subtype, on_bb_boundary_p)) | |
4829 | add_insn_before_nobb (note, before); | |
4830 | else | |
4831 | add_insn_before (note, before, bb); | |
15bbde2b | 4832 | return note; |
4833 | } | |
15bbde2b | 4834 | \f |
ede4ebcb | 4835 | /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC. |
4836 | MAKE_RAW indicates how to turn PATTERN into a real insn. */ | |
4837 | ||
722334ea | 4838 | static rtx_insn * |
924b3c83 | 4839 | emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc, |
2c57d586 | 4840 | rtx_insn *(*make_raw) (rtx)) |
d321a68b | 4841 | { |
9ed997be | 4842 | rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw); |
d321a68b | 4843 | |
0891f67c | 4844 | if (pattern == NULL_RTX || !loc) |
9ed997be | 4845 | return last; |
ca154f3f | 4846 | |
31d3e01c | 4847 | after = NEXT_INSN (after); |
4848 | while (1) | |
4849 | { | |
57e999d9 | 4850 | if (active_insn_p (after) |
4851 | && !JUMP_TABLE_DATA_P (after) /* FIXME */ | |
4852 | && !INSN_LOCATION (after)) | |
5169661d | 4853 | INSN_LOCATION (after) = loc; |
31d3e01c | 4854 | if (after == last) |
4855 | break; | |
4856 | after = NEXT_INSN (after); | |
4857 | } | |
9ed997be | 4858 | return last; |
d321a68b | 4859 | } |
4860 | ||
ede4ebcb | 4861 | /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN |
4862 | into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after | |
4863 | any DEBUG_INSNs. */ | |
4864 | ||
722334ea | 4865 | static rtx_insn * |
6b63fbbe | 4866 | emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns, |
2c57d586 | 4867 | rtx_insn *(*make_raw) (rtx)) |
0891f67c | 4868 | { |
4cd001d5 | 4869 | rtx_insn *prev = after; |
9845d120 | 4870 | |
ede4ebcb | 4871 | if (skip_debug_insns) |
4872 | while (DEBUG_INSN_P (prev)) | |
4873 | prev = PREV_INSN (prev); | |
9845d120 | 4874 | |
4875 | if (INSN_P (prev)) | |
5169661d | 4876 | return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev), |
ede4ebcb | 4877 | make_raw); |
0891f67c | 4878 | else |
ede4ebcb | 4879 | return emit_pattern_after_noloc (pattern, after, NULL, make_raw); |
0891f67c | 4880 | } |
4881 | ||
5169661d | 4882 | /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4883 | rtx_insn * |
924b3c83 | 4884 | emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc) |
d321a68b | 4885 | { |
ede4ebcb | 4886 | return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw); |
4887 | } | |
31d3e01c | 4888 | |
5169661d | 4889 | /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
722334ea | 4890 | rtx_insn * |
6b63fbbe | 4891 | emit_insn_after (rtx pattern, rtx_insn *after) |
ede4ebcb | 4892 | { |
4893 | return emit_pattern_after (pattern, after, true, make_insn_raw); | |
4894 | } | |
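/* The two entry points above differ only in where the location comes
   from.  A sketch: to stamp the new insn with the location of some
   other insn SOURCE rather than of AFTER itself, use the _setloc
   variant:

     emit_insn_after_setloc (pat, after, INSN_LOCATION (source));

   whereas emit_insn_after (pat, after) copies the location from AFTER
   (or from the previous nondebug insn when AFTER is a debug insn).  */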
ca154f3f | 4895 | |
5169661d | 4896 | /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
f9a00e9e | 4897 | rtx_jump_insn * |
924b3c83 | 4898 | emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc) |
ede4ebcb | 4899 | { |
f9a00e9e | 4900 | return as_a <rtx_jump_insn *> ( |
4901 | emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw)); | |
d321a68b | 4902 | } |
4903 | ||
5169661d | 4904 | /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
f9a00e9e | 4905 | rtx_jump_insn * |
6b63fbbe | 4906 | emit_jump_insn_after (rtx pattern, rtx_insn *after) |
0891f67c | 4907 | { |
f9a00e9e | 4908 | return as_a <rtx_jump_insn *> ( |
4909 | emit_pattern_after (pattern, after, true, make_jump_insn_raw)); | |
0891f67c | 4910 | } |
4911 | ||
5169661d | 4912 | /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4913 | rtx_insn * |
924b3c83 | 4914 | emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc) |
d321a68b | 4915 | { |
ede4ebcb | 4916 | return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw); |
d321a68b | 4917 | } |
4918 | ||
5169661d | 4919 | /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
722334ea | 4920 | rtx_insn * |
6b63fbbe | 4921 | emit_call_insn_after (rtx pattern, rtx_insn *after) |
0891f67c | 4922 | { |
ede4ebcb | 4923 | return emit_pattern_after (pattern, after, true, make_call_insn_raw); |
0891f67c | 4924 | } |
4925 | ||
5169661d | 4926 | /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4927 | rtx_insn * |
924b3c83 | 4928 | emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc) |
9845d120 | 4929 | { |
ede4ebcb | 4930 | return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw); |
9845d120 | 4931 | } |
4932 | ||
5169661d | 4933 | /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
722334ea | 4934 | rtx_insn * |
6b63fbbe | 4935 | emit_debug_insn_after (rtx pattern, rtx_insn *after) |
9845d120 | 4936 | { |
ede4ebcb | 4937 | return emit_pattern_after (pattern, after, false, make_debug_insn_raw); |
9845d120 | 4938 | } |
4939 | ||
ede4ebcb | 4940 | /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC. |
4941 | MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP | |
4942 | indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN, | |
4943 | CALL_INSN, etc. */ | |
4944 | ||
722334ea | 4945 | static rtx_insn * |
924b3c83 | 4946 | emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc, |
4947 | bool insnp, rtx_insn *(*make_raw) (rtx)) | |
d321a68b | 4948 | { |
4cd001d5 | 4949 | rtx_insn *first = PREV_INSN (before); |
4950 | rtx_insn *last = emit_pattern_before_noloc (pattern, before, | |
924b3c83 | 4951 | insnp ? before : NULL, |
4cd001d5 | 4952 | NULL, make_raw); |
0891f67c | 4953 | |
4954 | if (pattern == NULL_RTX || !loc) | |
4cd001d5 | 4955 | return last; |
0891f67c | 4956 | |
4486418e | 4957 | if (!first) |
4958 | first = get_insns (); | |
4959 | else | |
4960 | first = NEXT_INSN (first); | |
0891f67c | 4961 | while (1) |
4962 | { | |
57e999d9 | 4963 | if (active_insn_p (first) |
4964 | && !JUMP_TABLE_DATA_P (first) /* FIXME */ | |
4965 | && !INSN_LOCATION (first)) | |
5169661d | 4966 | INSN_LOCATION (first) = loc; |
0891f67c | 4967 | if (first == last) |
4968 | break; | |
4969 | first = NEXT_INSN (first); | |
4970 | } | |
4cd001d5 | 4971 | return last; |
0891f67c | 4972 | } |
4973 | ||
ede4ebcb | 4974 | /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN |
4975 | into a real insn. SKIP_DEBUG_INSNS indicates whether to insert | |
4976 | before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an | |
4977 | INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */ | |
4978 | ||
722334ea | 4979 | static rtx_insn * |
6b63fbbe | 4980 | emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns, |
2c57d586 | 4981 | bool insnp, rtx_insn *(*make_raw) (rtx)) |
0891f67c | 4982 | { |
4cd001d5 | 4983 | rtx_insn *next = before; |
9845d120 | 4984 | |
ede4ebcb | 4985 | if (skip_debug_insns) |
4986 | while (DEBUG_INSN_P (next)) | |
4987 | next = PREV_INSN (next); | |
9845d120 | 4988 | |
4989 | if (INSN_P (next)) | |
5169661d | 4990 | return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next), |
ede4ebcb | 4991 | insnp, make_raw); |
0891f67c | 4992 | else |
ede4ebcb | 4993 | return emit_pattern_before_noloc (pattern, before, |
924b3c83 | 4994 | insnp ? before : NULL, |
ede4ebcb | 4995 | NULL, make_raw); |
0891f67c | 4996 | } |
4997 | ||
5169661d | 4998 | /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4999 | rtx_insn * |
924b3c83 | 5000 | emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc) |
0891f67c | 5001 | { |
ede4ebcb | 5002 | return emit_pattern_before_setloc (pattern, before, loc, true, |
5003 | make_insn_raw); | |
5004 | } | |
0891f67c | 5005 | |
5169661d | 5006 | /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ |
722334ea | 5007 | rtx_insn * |
6b63fbbe | 5008 | emit_insn_before (rtx pattern, rtx_insn *before) |
ede4ebcb | 5009 | { |
5010 | return emit_pattern_before (pattern, before, true, true, make_insn_raw); | |
5011 | } | |
0891f67c | 5012 | |
5169661d | 5013 | /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
f9a00e9e | 5014 | rtx_jump_insn * |
924b3c83 | 5015 | emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc) |
ede4ebcb | 5016 | { |
f9a00e9e | 5017 | return as_a <rtx_jump_insn *> ( |
5018 | emit_pattern_before_setloc (pattern, before, loc, false, | |
5019 | make_jump_insn_raw)); | |
0891f67c | 5020 | } |
5021 | ||
5169661d | 5022 | /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ |
f9a00e9e | 5023 | rtx_jump_insn * |
6b63fbbe | 5024 | emit_jump_insn_before (rtx pattern, rtx_insn *before) |
0891f67c | 5025 | { |
f9a00e9e | 5026 | return as_a <rtx_jump_insn *> ( |
5027 | emit_pattern_before (pattern, before, true, false, | |
5028 | make_jump_insn_raw)); | |
0891f67c | 5029 | } |
5030 | ||
5169661d | 5031 | /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 5032 | rtx_insn * |
924b3c83 | 5033 | emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc) |
0891f67c | 5034 | { |
ede4ebcb | 5035 | return emit_pattern_before_setloc (pattern, before, loc, false, |
5036 | make_call_insn_raw); | |
d321a68b | 5037 | } |
0891f67c | 5038 | |
ede4ebcb | 5039 | /* Like emit_call_insn_before_noloc, |
5169661d | 5040 | but set INSN_LOCATION according to BEFORE. */ |
722334ea | 5041 | rtx_insn * |
c9a09955 | 5042 | emit_call_insn_before (rtx pattern, rtx_insn *before) |
0891f67c | 5043 | { |
ede4ebcb | 5044 | return emit_pattern_before (pattern, before, true, false, |
5045 | make_call_insn_raw); | |
0891f67c | 5046 | } |
9845d120 | 5047 | |
5169661d | 5048 | /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 5049 | rtx_insn * |
924b3c83 | 5050 | emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc) |
9845d120 | 5051 | { |
ede4ebcb | 5052 | return emit_pattern_before_setloc (pattern, before, loc, false, |
5053 | make_debug_insn_raw); | |
9845d120 | 5054 | } |
5055 | ||
ede4ebcb | 5056 | /* Like emit_debug_insn_before_noloc, |
5169661d | 5057 | but set INSN_LOCATION according to BEFORE. */ |
722334ea | 5058 | rtx_insn * |
5518cf83 | 5059 | emit_debug_insn_before (rtx pattern, rtx_insn *before) |
9845d120 | 5060 | { |
ede4ebcb | 5061 | return emit_pattern_before (pattern, before, false, false, |
5062 | make_debug_insn_raw); | |
9845d120 | 5063 | } |
d321a68b | 5064 | \f |
31d3e01c | 5065 | /* Take X and emit it at the end of the doubly-linked |
5066 | INSN list. | |
15bbde2b | 5067 | |
5068 | Returns the last insn emitted. */ | |
5069 | ||
722334ea | 5070 | rtx_insn * |
35cb5232 | 5071 | emit_insn (rtx x) |
15bbde2b | 5072 | { |
722334ea | 5073 | rtx_insn *last = get_last_insn (); |
5074 | rtx_insn *insn; | |
15bbde2b | 5075 | |
31d3e01c | 5076 | if (x == NULL_RTX) |
5077 | return last; | |
15bbde2b | 5078 | |
31d3e01c | 5079 | switch (GET_CODE (x)) |
5080 | { | |
9845d120 | 5081 | case DEBUG_INSN: |
31d3e01c | 5082 | case INSN: |
5083 | case JUMP_INSN: | |
5084 | case CALL_INSN: | |
5085 | case CODE_LABEL: | |
5086 | case BARRIER: | |
5087 | case NOTE: | |
722334ea | 5088 | insn = as_a <rtx_insn *> (x); |
31d3e01c | 5089 | while (insn) |
15bbde2b | 5090 | { |
722334ea | 5091 | rtx_insn *next = NEXT_INSN (insn); |
15bbde2b | 5092 | add_insn (insn); |
31d3e01c | 5093 | last = insn; |
5094 | insn = next; | |
15bbde2b | 5095 | } |
31d3e01c | 5096 | break; |
15bbde2b | 5097 | |
31d3e01c | 5098 | #ifdef ENABLE_RTL_CHECKING |
91f71fa3 | 5099 | case JUMP_TABLE_DATA: |
31d3e01c | 5100 | case SEQUENCE: |
611234b4 | 5101 | gcc_unreachable (); |
31d3e01c | 5102 | break; |
5103 | #endif | |
15bbde2b | 5104 | |
31d3e01c | 5105 | default: |
5106 | last = make_insn_raw (x); | |
5107 | add_insn (last); | |
5108 | break; | |
15bbde2b | 5109 | } |
5110 | ||
5111 | return last; | |
5112 | } | |
5113 | ||
9845d120 | 5114 | /* Make an insn of code DEBUG_INSN with pattern X |
5115 | and add it to the end of the doubly-linked list. */ | |
5116 | ||
722334ea | 5117 | rtx_insn * |
9845d120 | 5118 | emit_debug_insn (rtx x) |
5119 | { | |
722334ea | 5120 | rtx_insn *last = get_last_insn (); |
5121 | rtx_insn *insn; | |
9845d120 | 5122 | |
5123 | if (x == NULL_RTX) | |
5124 | return last; | |
5125 | ||
5126 | switch (GET_CODE (x)) | |
5127 | { | |
5128 | case DEBUG_INSN: | |
5129 | case INSN: | |
5130 | case JUMP_INSN: | |
5131 | case CALL_INSN: | |
5132 | case CODE_LABEL: | |
5133 | case BARRIER: | |
5134 | case NOTE: | |
722334ea | 5135 | insn = as_a <rtx_insn *> (x); |
9845d120 | 5136 | while (insn) |
5137 | { | |
722334ea | 5138 | rtx_insn *next = NEXT_INSN (insn); |
9845d120 | 5139 | add_insn (insn); |
5140 | last = insn; | |
5141 | insn = next; | |
5142 | } | |
5143 | break; | |
5144 | ||
5145 | #ifdef ENABLE_RTL_CHECKING | |
91f71fa3 | 5146 | case JUMP_TABLE_DATA: |
9845d120 | 5147 | case SEQUENCE: |
5148 | gcc_unreachable (); | |
5149 | break; | |
5150 | #endif | |
5151 | ||
5152 | default: | |
5153 | last = make_debug_insn_raw (x); | |
5154 | add_insn (last); | |
5155 | break; | |
5156 | } | |
5157 | ||
5158 | return last; | |
5159 | } | |
5160 | ||
31d3e01c | 5161 | /* Make an insn of code JUMP_INSN with pattern X |
5162 | and add it to the end of the doubly-linked list. */ | |
15bbde2b | 5163 | |
722334ea | 5164 | rtx_insn * |
35cb5232 | 5165 | emit_jump_insn (rtx x) |
15bbde2b | 5166 | { |
722334ea | 5167 | rtx_insn *last = NULL; |
5168 | rtx_insn *insn; | |
15bbde2b | 5169 | |
31d3e01c | 5170 | switch (GET_CODE (x)) |
15bbde2b | 5171 | { |
9845d120 | 5172 | case DEBUG_INSN: |
31d3e01c | 5173 | case INSN: |
5174 | case JUMP_INSN: | |
5175 | case CALL_INSN: | |
5176 | case CODE_LABEL: | |
5177 | case BARRIER: | |
5178 | case NOTE: | |
722334ea | 5179 | insn = as_a <rtx_insn *> (x); |
31d3e01c | 5180 | while (insn) |
5181 | { | |
722334ea | 5182 | rtx_insn *next = NEXT_INSN (insn); |
31d3e01c | 5183 | add_insn (insn); |
5184 | last = insn; | |
5185 | insn = next; | |
5186 | } | |
5187 | break; | |
b36b07d8 | 5188 | |
31d3e01c | 5189 | #ifdef ENABLE_RTL_CHECKING |
91f71fa3 | 5190 | case JUMP_TABLE_DATA: |
31d3e01c | 5191 | case SEQUENCE: |
611234b4 | 5192 | gcc_unreachable (); |
31d3e01c | 5193 | break; |
5194 | #endif | |
b36b07d8 | 5195 | |
31d3e01c | 5196 | default: |
5197 | last = make_jump_insn_raw (x); | |
5198 | add_insn (last); | |
5199 | break; | |
9dda7915 | 5200 | } |
b36b07d8 | 5201 | |
5202 | return last; | |
5203 | } | |
5204 | ||
31d3e01c | 5205 | /* Make an insn of code CALL_INSN with pattern X |
15bbde2b | 5206 | and add it to the end of the doubly-linked list. */ |
5207 | ||
722334ea | 5208 | rtx_insn * |
35cb5232 | 5209 | emit_call_insn (rtx x) |
15bbde2b | 5210 | { |
722334ea | 5211 | rtx_insn *insn; |
31d3e01c | 5212 | |
5213 | switch (GET_CODE (x)) | |
15bbde2b | 5214 | { |
9845d120 | 5215 | case DEBUG_INSN: |
31d3e01c | 5216 | case INSN: |
5217 | case JUMP_INSN: | |
5218 | case CALL_INSN: | |
5219 | case CODE_LABEL: | |
5220 | case BARRIER: | |
5221 | case NOTE: | |
5222 | insn = emit_insn (x); | |
5223 | break; | |
15bbde2b | 5224 | |
31d3e01c | 5225 | #ifdef ENABLE_RTL_CHECKING |
5226 | case SEQUENCE: | |
91f71fa3 | 5227 | case JUMP_TABLE_DATA: |
611234b4 | 5228 | gcc_unreachable (); |
31d3e01c | 5229 | break; |
5230 | #endif | |
15bbde2b | 5231 | |
31d3e01c | 5232 | default: |
5233 | insn = make_call_insn_raw (x); | |
15bbde2b | 5234 | add_insn (insn); |
31d3e01c | 5235 | break; |
15bbde2b | 5236 | } |
31d3e01c | 5237 | |
5238 | return insn; | |
15bbde2b | 5239 | } |
5240 | ||
5241 | /* Add the label LABEL to the end of the doubly-linked list. */ | |
5242 | ||
f9a00e9e | 5243 | rtx_code_label * |
5244 | emit_label (rtx uncast_label) | |
15bbde2b | 5245 | { |
f9a00e9e | 5246 | rtx_code_label *label = as_a <rtx_code_label *> (uncast_label); |
5247 | ||
596ef494 | 5248 | gcc_checking_assert (INSN_UID (label) == 0); |
5249 | INSN_UID (label) = cur_insn_uid++; | |
f9a00e9e | 5250 | add_insn (label); |
5251 | return label; | |
15bbde2b | 5252 | } |
5253 | ||
91f71fa3 | 5254 | /* Make an insn of code JUMP_TABLE_DATA |
5255 | and add it to the end of the doubly-linked list. */ | |
5256 | ||
e41badc0 | 5257 | rtx_jump_table_data * |
91f71fa3 | 5258 | emit_jump_table_data (rtx table) |
5259 | { | |
e41badc0 | 5260 | rtx_jump_table_data *jump_table_data = |
5261 | as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA)); | |
91f71fa3 | 5262 | INSN_UID (jump_table_data) = cur_insn_uid++; |
5263 | PATTERN (jump_table_data) = table; | |
5264 | BLOCK_FOR_INSN (jump_table_data) = NULL; | |
5265 | add_insn (jump_table_data); | |
5266 | return jump_table_data; | |
5267 | } | |
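/* A sketch of typical use when expanding a switch statement: wrap the
   vector of label refs in an ADDR_VEC (or ADDR_DIFF_VEC for
   pc-relative tables) and emit it after the table's CODE_LABEL.  VEC
   is a hypothetical rtvec built by the caller:

     emit_jump_table_data (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE, vec));  */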
5268 | ||
15bbde2b | 5269 | /* Make an insn of code BARRIER |
5270 | and add it to the end of the doubly-linked list. */ | |
5271 | ||
722334ea | 5272 | rtx_barrier * |
35cb5232 | 5273 | emit_barrier (void) |
15bbde2b | 5274 | { |
722334ea | 5275 | rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
15bbde2b | 5276 | INSN_UID (barrier) = cur_insn_uid++; |
5277 | add_insn (barrier); | |
5278 | return barrier; | |
5279 | } | |
5280 | ||
2f57e3d9 | 5281 | /* Emit a copy of note ORIG. */ |
35cb5232 | 5282 | |
cef3d8ad | 5283 | rtx_note * |
5284 | emit_note_copy (rtx_note *orig) | |
2f57e3d9 | 5285 | { |
35f3420b | 5286 | enum insn_note kind = (enum insn_note) NOTE_KIND (orig); |
cef3d8ad | 5287 | rtx_note *note = make_note_raw (kind); |
2f57e3d9 | 5288 | NOTE_DATA (note) = NOTE_DATA (orig); |
2f57e3d9 | 5289 | add_insn (note); |
31b97e8f | 5290 | return note; |
15bbde2b | 5291 | } |
5292 | ||
31b97e8f | 5293 | /* Make an insn of code NOTE with kind KIND |
5294 | and add it to the end of the doubly-linked list. */ |
15bbde2b | 5295 | |
cef3d8ad | 5296 | rtx_note * |
ad4583d9 | 5297 | emit_note (enum insn_note kind) |
15bbde2b | 5298 | { |
cef3d8ad | 5299 | rtx_note *note = make_note_raw (kind); |
15bbde2b | 5300 | add_insn (note); |
5301 | return note; | |
5302 | } | |
5303 | ||
18b42941 | 5304 | /* Emit a clobber of lvalue X. */ |
5305 | ||
722334ea | 5306 | rtx_insn * |
18b42941 | 5307 | emit_clobber (rtx x) |
5308 | { | |
5309 | /* CONCATs should not appear in the insn stream. */ | |
5310 | if (GET_CODE (x) == CONCAT) | |
5311 | { | |
5312 | emit_clobber (XEXP (x, 0)); | |
5313 | return emit_clobber (XEXP (x, 1)); | |
5314 | } | |
5315 | return emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); | |
5316 | } | |
5317 | ||
5318 | /* Return a sequence of insns to clobber lvalue X. */ | |
5319 | ||
722334ea | 5320 | rtx_insn * |
18b42941 | 5321 | gen_clobber (rtx x) |
5322 | { | |
722334ea | 5323 | rtx_insn *seq; |
18b42941 | 5324 | |
5325 | start_sequence (); | |
5326 | emit_clobber (x); | |
5327 | seq = get_insns (); | |
5328 | end_sequence (); | |
5329 | return seq; | |
5330 | } | |
5331 | ||
5332 | /* Emit a use of rvalue X. */ | |
5333 | ||
722334ea | 5334 | rtx_insn * |
18b42941 | 5335 | emit_use (rtx x) |
5336 | { | |
5337 | /* CONCATs should not appear in the insn stream. */ | |
5338 | if (GET_CODE (x) == CONCAT) | |
5339 | { | |
5340 | emit_use (XEXP (x, 0)); | |
5341 | return emit_use (XEXP (x, 1)); | |
5342 | } | |
5343 | return emit_insn (gen_rtx_USE (VOIDmode, x)); | |
5344 | } | |
5345 | ||
5346 | /* Return a sequence of insns to use rvalue X. */ | |
5347 | ||
722334ea | 5348 | rtx_insn * |
18b42941 | 5349 | gen_use (rtx x) |
5350 | { | |
722334ea | 5351 | rtx_insn *seq; |
18b42941 | 5352 | |
5353 | start_sequence (); | |
5354 | emit_use (x); | |
5355 | seq = get_insns (); | |
5356 | end_sequence (); | |
5357 | return seq; | |
5358 | } | |
5359 | ||
3a286419 | 5360 | /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction. |
5361 | Return the set in INSN that such notes describe, or NULL if the notes | |
5362 | have no meaning for INSN. */ | |
5363 | ||
5364 | rtx | |
5365 | set_for_reg_notes (rtx insn) | |
5366 | { | |
5367 | rtx pat, reg; | |
5368 | ||
5369 | if (!INSN_P (insn)) | |
5370 | return NULL_RTX; | |
5371 | ||
5372 | pat = PATTERN (insn); | |
5373 | if (GET_CODE (pat) == PARALLEL) | |
5374 | { | |
5375 | /* We do not use single_set because that ignores SETs of unused | |
5376 | registers. REG_EQUAL and REG_EQUIV notes really do require the | |
5377 | PARALLEL to have a single SET. */ | |
5378 | if (multiple_sets (insn)) | |
5379 | return NULL_RTX; | |
5380 | pat = XVECEXP (pat, 0, 0); | |
5381 | } | |
5382 | ||
5383 | if (GET_CODE (pat) != SET) | |
5384 | return NULL_RTX; | |
5385 | ||
5386 | reg = SET_DEST (pat); | |
5387 | ||
5388 | /* Notes apply to the contents of a STRICT_LOW_PART. */ | |
f2c7e335 | 5389 | if (GET_CODE (reg) == STRICT_LOW_PART |
5390 | || GET_CODE (reg) == ZERO_EXTRACT) | |
3a286419 | 5391 | reg = XEXP (reg, 0); |
5392 | ||
5393 | /* Check that we have a register. */ | |
5394 | if (!(REG_P (reg) || GET_CODE (reg) == SUBREG)) | |
5395 | return NULL_RTX; | |
5396 | ||
5397 | return pat; | |
5398 | } | |
5399 | ||
f1934a33 | 5400 | /* Place a note of KIND on insn INSN with DATUM as the datum. If a |
6312a35e | 5401 | note of this kind already exists, its datum is replaced. */ |
f1934a33 | 5402 | |
c080d8f0 | 5403 | rtx |
35cb5232 | 5404 | set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum) |
f1934a33 | 5405 | { |
5406 | rtx note = find_reg_note (insn, kind, NULL_RTX); | |
5407 | ||
7e6224ab | 5408 | switch (kind) |
5409 | { | |
5410 | case REG_EQUAL: | |
5411 | case REG_EQUIV: | |
7b0b2add | 5412 | /* We need to support the REG_EQUAL on USE trick of find_reloads. */ |
5413 | if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE) | |
3a286419 | 5414 | return NULL_RTX; |
7e6224ab | 5415 | |
5416 | /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes. | |
5417 | It serves no useful purpose and breaks eliminate_regs. */ | |
5418 | if (GET_CODE (datum) == ASM_OPERANDS) | |
5419 | return NULL_RTX; | |
2f8cf22c | 5420 | |
5421 | /* Notes with side effects are dangerous. Even if the side-effect | |
5422 | initially mirrors one in PATTERN (INSN), later optimizations | |
5423 | might alter the way that the final register value is calculated | |
5424 | and so move or alter the side-effect in some way. The note would | |
5425 | then no longer be a valid substitution for SET_SRC. */ | |
5426 | if (side_effects_p (datum)) | |
5427 | return NULL_RTX; | |
7e6224ab | 5428 | break; |
5429 | ||
5430 | default: | |
5431 | break; | |
5432 | } | |
c080d8f0 | 5433 | |
3a286419 | 5434 | if (note) |
5435 | XEXP (note, 0) = datum; | |
5436 | else | |
5437 | { | |
5438 | add_reg_note (insn, kind, datum); | |
5439 | note = REG_NOTES (insn); | |
5440 | } | |
3072d30e | 5441 | |
5442 | switch (kind) | |
c080d8f0 | 5443 | { |
3072d30e | 5444 | case REG_EQUAL: |
5445 | case REG_EQUIV: | |
e149ca56 | 5446 | df_notes_rescan (as_a <rtx_insn *> (insn)); |
3072d30e | 5447 | break; |
5448 | default: | |
5449 | break; | |
c080d8f0 | 5450 | } |
f1934a33 | 5451 | |
3a286419 | 5452 | return note; |
f1934a33 | 5453 | } |
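/* The classic use is recording the value computed by a multi-insn
   expansion on its last insn (a sketch; SRC is the expression the
   destination register ends up holding):

     set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (src));  */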
41cf444a | 5454 | |
5455 | /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */ | |
5456 | rtx | |
5457 | set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst) | |
5458 | { | |
3a286419 | 5459 | rtx set = set_for_reg_notes (insn); |
41cf444a | 5460 | |
5461 | if (set && SET_DEST (set) == dst) | |
5462 | return set_unique_reg_note (insn, kind, datum); | |
5463 | return NULL_RTX; | |
5464 | } | |
15bbde2b | 5465 | \f |
16d83c02 | 5466 | /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a |
5467 | following barrier if the instruction needs one and if ALLOW_BARRIER_P | |
5468 | is true. | |
5469 | ||
15bbde2b | 5470 | If X is a label, it is simply added into the insn chain. */ |
5471 | ||
722334ea | 5472 | rtx_insn * |
16d83c02 | 5473 | emit (rtx x, bool allow_barrier_p) |
15bbde2b | 5474 | { |
5475 | enum rtx_code code = classify_insn (x); | |
5476 | ||
611234b4 | 5477 | switch (code) |
15bbde2b | 5478 | { |
611234b4 | 5479 | case CODE_LABEL: |
5480 | return emit_label (x); | |
5481 | case INSN: | |
5482 | return emit_insn (x); | |
5483 | case JUMP_INSN: | |
5484 | { | |
722334ea | 5485 | rtx_insn *insn = emit_jump_insn (x); |
16d83c02 | 5486 | if (allow_barrier_p |
5487 | && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)) | |
611234b4 | 5488 | return emit_barrier (); |
5489 | return insn; | |
5490 | } | |
5491 | case CALL_INSN: | |
5492 | return emit_call_insn (x); | |
9845d120 | 5493 | case DEBUG_INSN: |
5494 | return emit_debug_insn (x); | |
611234b4 | 5495 | default: |
5496 | gcc_unreachable (); | |
15bbde2b | 5497 | } |
15bbde2b | 5498 | } |
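/* classify_insn chooses the code from the pattern alone.  A sketch: an
   unconditional jump to LABEL both emits the JUMP_INSN and, because
   ALLOW_BARRIER_P is true, the barrier that must follow it:

     emit (gen_rtx_SET (pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)), true);  */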
5499 | \f | |
1f3233d1 | 5500 | /* Space for free sequence stack entries. */ |
7035b2ab | 5501 | static GTY ((deletable)) struct sequence_stack *free_sequence_stack; |
1f3233d1 | 5502 | |
735f4358 | 5503 | /* Begin emitting insns to a sequence. If this sequence will contain |
5504 | something that might cause the compiler to pop arguments to function | |
5505 | calls (because those pops have previously been deferred; see | |
5506 | INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust | |
5507 | before calling this function. That will ensure that the deferred | |
5508 | pops are not accidentally emitted in the middle of this sequence. */ | |
15bbde2b | 5509 | |
5510 | void | |
35cb5232 | 5511 | start_sequence (void) |
15bbde2b | 5512 | { |
5513 | struct sequence_stack *tem; | |
5514 | ||
1f3233d1 | 5515 | if (free_sequence_stack != NULL) |
5516 | { | |
5517 | tem = free_sequence_stack; | |
5518 | free_sequence_stack = tem->next; | |
5519 | } | |
5520 | else | |
25a27413 | 5521 | tem = ggc_alloc<sequence_stack> (); |
15bbde2b | 5522 | |
c36aa54b | 5523 | tem->next = get_current_sequence ()->next; |
06f9d6ef | 5524 | tem->first = get_insns (); |
5525 | tem->last = get_last_insn (); | |
c36aa54b | 5526 | get_current_sequence ()->next = tem; |
15bbde2b | 5527 | |
06f9d6ef | 5528 | set_first_insn (0); |
5529 | set_last_insn (0); | |
15bbde2b | 5530 | } |
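/* A sketch of the caveat above, for a sequence that may contain calls:

     do_pending_stack_adjust ();
     start_sequence ();
     ... emit insns, possibly including calls ...
     rtx_insn *seq = get_insns ();
     end_sequence ();  */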
5531 | ||
b49854c6 | 5532 | /* Set up the insn chain starting with FIRST as the current sequence, |
5533 | saving the previously current one. See the documentation for | |
5534 | start_sequence for more information about how to use this function. */ | |
15bbde2b | 5535 | |
5536 | void | |
57c26b3a | 5537 | push_to_sequence (rtx_insn *first) |
15bbde2b | 5538 | { |
57c26b3a | 5539 | rtx_insn *last; |
15bbde2b | 5540 | |
5541 | start_sequence (); | |
5542 | ||
3c802a1e | 5543 | for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last)) |
5544 | ; | |
15bbde2b | 5545 | |
06f9d6ef | 5546 | set_first_insn (first); |
5547 | set_last_insn (last); | |
15bbde2b | 5548 | } |
5549 | ||
28bf151d | 5550 | /* Like push_to_sequence, but take the last insn as an argument to avoid |
5551 | looping through the list. */ | |
5552 | ||
5553 | void | |
57c26b3a | 5554 | push_to_sequence2 (rtx_insn *first, rtx_insn *last) |
28bf151d | 5555 | { |
5556 | start_sequence (); | |
5557 | ||
06f9d6ef | 5558 | set_first_insn (first); |
5559 | set_last_insn (last); | |
28bf151d | 5560 | } |
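/* A sketch of the intended use: a caller that has kept both ends of a
   detached insn chain can reopen it without the O(n) walk:

     push_to_sequence2 (first, last);
     emit_insn (pat);
     last = get_last_insn ();
     end_sequence ();  */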
5561 | ||
ab74c92f | 5562 | /* Set up the outer-level insn chain |
5563 | as the current sequence, saving the previously current one. */ | |
5564 | ||
5565 | void | |
35cb5232 | 5566 | push_topmost_sequence (void) |
ab74c92f | 5567 | { |
c36aa54b | 5568 | struct sequence_stack *top; |
ab74c92f | 5569 | |
5570 | start_sequence (); | |
5571 | ||
c36aa54b | 5572 | top = get_topmost_sequence (); |
06f9d6ef | 5573 | set_first_insn (top->first); |
5574 | set_last_insn (top->last); | |
ab74c92f | 5575 | } |
5576 | ||
5577 | /* After emitting to the outer-level insn chain, update the outer-level | |
5578 | insn chain, and restore the previous saved state. */ | |
5579 | ||
5580 | void | |
35cb5232 | 5581 | pop_topmost_sequence (void) |
ab74c92f | 5582 | { |
c36aa54b | 5583 | struct sequence_stack *top; |
ab74c92f | 5584 | |
c36aa54b | 5585 | top = get_topmost_sequence (); |
06f9d6ef | 5586 | top->first = get_insns (); |
5587 | top->last = get_last_insn (); | |
ab74c92f | 5588 | |
5589 | end_sequence (); | |
5590 | } | |
5591 | ||
15bbde2b | 5592 | /* After emitting to a sequence, restore previous saved state. |
5593 | ||
b49854c6 | 5594 | To get the contents of the sequence just made, you must call |
31d3e01c | 5595 | `get_insns' *before* calling here. |
b49854c6 | 5596 | |
5597 | If the compiler might have deferred popping arguments while | |
5598 | generating this sequence, and this sequence will not be immediately | |
5599 | inserted into the instruction stream, use do_pending_stack_adjust | |
31d3e01c | 5600 | before calling get_insns. That will ensure that the deferred |
b49854c6 | 5601 | pops are inserted into this sequence, and not into some random |
5602 | location in the instruction stream. See INHIBIT_DEFER_POP for more | |
5603 | information about deferred popping of arguments. */ | |
15bbde2b | 5604 | |
5605 | void | |
35cb5232 | 5606 | end_sequence (void) |
15bbde2b | 5607 | { |
c36aa54b | 5608 | struct sequence_stack *tem = get_current_sequence ()->next; |
15bbde2b | 5609 | |
06f9d6ef | 5610 | set_first_insn (tem->first); |
5611 | set_last_insn (tem->last); | |
c36aa54b | 5612 | get_current_sequence ()->next = tem->next; |
15bbde2b | 5613 | |
1f3233d1 | 5614 | memset (tem, 0, sizeof (*tem)); |
5615 | tem->next = free_sequence_stack; | |
5616 | free_sequence_stack = tem; | |
15bbde2b | 5617 | } |
5618 | ||
5619 | /* Return 1 if currently emitting into a sequence. */ | |
5620 | ||
5621 | int | |
35cb5232 | 5622 | in_sequence_p (void) |
15bbde2b | 5623 | { |
c36aa54b | 5624 | return get_current_sequence ()->next != 0; |
15bbde2b | 5625 | } |
15bbde2b | 5626 | \f |
02ebfa52 | 5627 | /* Put the various virtual registers into REGNO_REG_RTX. */ |
5628 | ||
2f3874ce | 5629 | static void |
b079a207 | 5630 | init_virtual_regs (void) |
02ebfa52 | 5631 | { |
b079a207 | 5632 | regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx; |
5633 | regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx; | |
5634 | regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx; | |
5635 | regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx; | |
5636 | regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx; | |
60778e62 | 5637 | regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM] |
5638 | = virtual_preferred_stack_boundary_rtx; | |
0a893c29 | 5639 | } |
5640 | ||
928d57e3 | 5641 | \f |
5642 | /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */ | |
5643 | static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS]; | |
5644 | static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS]; | |
5645 | static int copy_insn_n_scratches; | |
5646 | ||
5647 | /* When an insn is being copied by copy_insn_1, this is nonzero if we have | |
5648 | copied an ASM_OPERANDS. | |
5649 | In that case, it is the original input-operand vector. */ | |
5650 | static rtvec orig_asm_operands_vector; | |
5651 | ||
5652 | /* When an insn is being copied by copy_insn_1, this is nonzero if we have | |
5653 | copied an ASM_OPERANDS. | |
5654 | In that case, it is the copied input-operand vector. */ | |
5655 | static rtvec copy_asm_operands_vector; | |
5656 | ||
5657 | /* Likewise for the constraints vector. */ | |
5658 | static rtvec orig_asm_constraints_vector; | |
5659 | static rtvec copy_asm_constraints_vector; | |
5660 | ||
5661 | /* Recursively create a new copy of an rtx for copy_insn. | |
5662 | This function differs from copy_rtx in that it handles SCRATCHes and | |
5663 | ASM_OPERANDs properly. | |
5664 | Normally, this function is not used directly; use copy_insn as front end. | |
5665 | However, you could first copy an insn pattern with copy_insn and then use | |
5666 | this function afterwards to properly copy any REG_NOTEs containing | |
5667 | SCRATCHes. */ | |
5668 | ||
5669 | rtx | |
35cb5232 | 5670 | copy_insn_1 (rtx orig) |
928d57e3 | 5671 | { |
19cb6b50 | 5672 | rtx copy; |
5673 | int i, j; | |
5674 | RTX_CODE code; | |
5675 | const char *format_ptr; | |
928d57e3 | 5676 | |
25e880b1 | 5677 | if (orig == NULL) |
5678 | return NULL; | |
5679 | ||
928d57e3 | 5680 | code = GET_CODE (orig); |
5681 | ||
5682 | switch (code) | |
5683 | { | |
5684 | case REG: | |
d7fce3c8 | 5685 | case DEBUG_EXPR: |
0349edce | 5686 | CASE_CONST_ANY: |
928d57e3 | 5687 | case SYMBOL_REF: |
5688 | case CODE_LABEL: | |
5689 | case PC: | |
5690 | case CC0: | |
e0691b9a | 5691 | case RETURN: |
9cb2517e | 5692 | case SIMPLE_RETURN: |
928d57e3 | 5693 | return orig; |
c09425a0 | 5694 | case CLOBBER: |
ccd6679f | 5695 | case CLOBBER_HIGH: |
b291008a | 5696 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
5697 | clobbers or clobbers of hard registers that originated as pseudos. | |
5698 | This is needed to allow safe register renaming. */ | |
2b5f32ae | 5699 | if (REG_P (XEXP (orig, 0)) |
5700 | && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))) | |
5701 | && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0)))) | |
c09425a0 | 5702 | return orig; |
5703 | break; | |
928d57e3 | 5704 | |
5705 | case SCRATCH: | |
5706 | for (i = 0; i < copy_insn_n_scratches; i++) | |
5707 | if (copy_insn_scratch_in[i] == orig) | |
5708 | return copy_insn_scratch_out[i]; | |
5709 | break; | |
5710 | ||
5711 | case CONST: | |
3072d30e | 5712 | if (shared_const_p (orig)) |
928d57e3 | 5713 | return orig; |
5714 | break; | |
d823ba47 | 5715 | |
928d57e3 | 5716 | /* A MEM with a constant address is not sharable. The problem is that |
5717 | the constant address may need to be reloaded. If the mem is shared, | |
5718 | then reloading one copy of this mem will cause all copies to appear | |
5719 | to have been reloaded. */ | |
5720 | ||
5721 | default: | |
5722 | break; | |
5723 | } | |
5724 | ||
f2d0e9f1 | 5725 | /* Copy the various flags, fields, and other information. We assume |
5726 | that all fields need copying, and then clear the fields that should | |
928d57e3 | 5727 | not be copied. That is the sensible default behavior, and forces |
5728 | us to explicitly document why we are *not* copying a flag. */ | |
f2d0e9f1 | 5729 | copy = shallow_copy_rtx (orig); |
928d57e3 | 5730 | |
928d57e3 | 5731 | /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */ |
6720e96c | 5732 | if (INSN_P (orig)) |
928d57e3 | 5733 | { |
7c25cb91 | 5734 | RTX_FLAG (copy, jump) = 0; |
5735 | RTX_FLAG (copy, call) = 0; | |
5736 | RTX_FLAG (copy, frame_related) = 0; | |
928d57e3 | 5737 | } |
d823ba47 | 5738 | |
928d57e3 | 5739 | format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); |
5740 | ||
5741 | for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++) | |
f2d0e9f1 | 5742 | switch (*format_ptr++) |
5743 | { | |
5744 | case 'e': | |
5745 | if (XEXP (orig, i) != NULL) | |
5746 | XEXP (copy, i) = copy_insn_1 (XEXP (orig, i)); | |
5747 | break; | |
928d57e3 | 5748 | |
f2d0e9f1 | 5749 | case 'E': |
5750 | case 'V': | |
5751 | if (XVEC (orig, i) == orig_asm_constraints_vector) | |
5752 | XVEC (copy, i) = copy_asm_constraints_vector; | |
5753 | else if (XVEC (orig, i) == orig_asm_operands_vector) | |
5754 | XVEC (copy, i) = copy_asm_operands_vector; | |
5755 | else if (XVEC (orig, i) != NULL) | |
5756 | { | |
5757 | XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); | |
5758 | for (j = 0; j < XVECLEN (copy, i); j++) | |
5759 | XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j)); | |
5760 | } | |
5761 | break; | |
928d57e3 | 5762 | |
f2d0e9f1 | 5763 | case 't': |
5764 | case 'w': | |
5765 | case 'i': | |
9edf7ea8 | 5766 | case 'p': |
f2d0e9f1 | 5767 | case 's': |
5768 | case 'S': | |
5769 | case 'u': | |
5770 | case '0': | |
5771 | /* These are left unchanged. */ | |
5772 | break; | |
928d57e3 | 5773 | |
f2d0e9f1 | 5774 | default: |
5775 | gcc_unreachable (); | |
5776 | } | |
928d57e3 | 5777 | |
5778 | if (code == SCRATCH) | |
5779 | { | |
5780 | i = copy_insn_n_scratches++; | |
611234b4 | 5781 | gcc_assert (i < MAX_RECOG_OPERANDS); |
928d57e3 | 5782 | copy_insn_scratch_in[i] = orig; |
5783 | copy_insn_scratch_out[i] = copy; | |
5784 | } | |
5785 | else if (code == ASM_OPERANDS) | |
5786 | { | |
d91f2122 | 5787 | orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig); |
5788 | copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy); | |
5789 | orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig); | |
5790 | copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy); | |
928d57e3 | 5791 | } |
5792 | ||
5793 | return copy; | |
5794 | } | |
5795 | ||
5796 | /* Create a new copy of an rtx. | |
5797 | This function differs from copy_rtx in that it handles SCRATCHes and | |
5798 | ASM_OPERANDs properly. | |
5799 | INSN doesn't really have to be a full INSN; it could be just the | |
5800 | pattern. */ | |
5801 | rtx | |
35cb5232 | 5802 | copy_insn (rtx insn) |
928d57e3 | 5803 | { |
5804 | copy_insn_n_scratches = 0; | |
5805 | orig_asm_operands_vector = 0; | |
5806 | orig_asm_constraints_vector = 0; | |
5807 | copy_asm_operands_vector = 0; | |
5808 | copy_asm_constraints_vector = 0; | |
5809 | return copy_insn_1 (insn); | |
5810 | } | |
02ebfa52 | 5811 | |
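/* Illustrative sketch, added commentary: the point of copy_insn over
   copy_rtx is that shared SCRATCHes stay shared in the copy.  The
   V4SImode mode below is an assumption; any mode would do.  */
#if 0
  rtx scratch = gen_rtx_SCRATCH (V4SImode);
  rtx par = gen_rtx_PARALLEL (VOIDmode,
			      gen_rtvec (2,
					 gen_rtx_CLOBBER (VOIDmode, scratch),
					 gen_rtx_CLOBBER (VOIDmode, scratch)));
  rtx copy = copy_insn (par);
  /* Both CLOBBERs in COPY name the same fresh SCRATCH, mirroring PAR.  */
  gcc_checking_assert (XEXP (XVECEXP (copy, 0, 0), 0)
		       == XEXP (XVECEXP (copy, 0, 1), 0));
#endif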
a9abe1f1 | 5812 | /* Return a copy of INSN that can be used in a SEQUENCE delay slot, |
5813 | on the assumption that INSN itself remains in its original place. */ |
5814 | ||
575a12f2 | 5815 | rtx_insn * |
5816 | copy_delay_slot_insn (rtx_insn *insn) | |
a9abe1f1 | 5817 | { |
5818 | /* Copy INSN with its rtx_code, all its notes, location etc. */ | |
575a12f2 | 5819 | insn = as_a <rtx_insn *> (copy_rtx (insn)); |
a9abe1f1 | 5820 | INSN_UID (insn) = cur_insn_uid++; |
5821 | return insn; | |
5822 | } | |
5823 | ||
15bbde2b | 5824 | /* Initialize data structures and variables in this file |
5825 | before generating rtl for each function. */ | |
5826 | ||
5827 | void | |
35cb5232 | 5828 | init_emit (void) |
15bbde2b | 5829 | { |
06f9d6ef | 5830 | set_first_insn (NULL); |
5831 | set_last_insn (NULL); | |
9845d120 | 5832 | if (MIN_NONDEBUG_INSN_UID) |
5833 | cur_insn_uid = MIN_NONDEBUG_INSN_UID; | |
5834 | else | |
5835 | cur_insn_uid = 1; | |
5836 | cur_debug_insn_uid = 1; | |
15bbde2b | 5837 | reg_rtx_no = LAST_VIRTUAL_REGISTER + 1; |
15bbde2b | 5838 | first_label_num = label_num; |
c36aa54b | 5839 | get_current_sequence ()->next = NULL; |
15bbde2b | 5840 | |
15bbde2b | 5841 | /* Init the tables that describe all the pseudo regs. */ |
5842 | ||
fd6ffb7c | 5843 | crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101; |
15bbde2b | 5844 | |
fd6ffb7c | 5845 | crtl->emit.regno_pointer_align |
2457c754 | 5846 | = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length); |
d4c332ff | 5847 | |
cd769037 | 5848 | regno_reg_rtx |
5849 | = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length); | |
fcdc122e | 5850 | |
936082bb | 5851 | /* Put copies of all the hard registers into regno_reg_rtx. */ |
90295bd2 | 5852 | memcpy (regno_reg_rtx, |
679bcc8d | 5853 | initial_regno_reg_rtx, |
90295bd2 | 5854 | FIRST_PSEUDO_REGISTER * sizeof (rtx)); |
936082bb | 5855 | |
15bbde2b | 5856 | /* Put copies of all the virtual register rtx into regno_reg_rtx. */ |
b079a207 | 5857 | init_virtual_regs (); |
888e0d33 | 5858 | |
5859 | /* Indicate that the virtual registers and stack locations are | |
5860 | all pointers. */ | |
e61a0a7f | 5861 | REG_POINTER (stack_pointer_rtx) = 1; |
5862 | REG_POINTER (frame_pointer_rtx) = 1; | |
5863 | REG_POINTER (hard_frame_pointer_rtx) = 1; | |
5864 | REG_POINTER (arg_pointer_rtx) = 1; | |
888e0d33 | 5865 | |
e61a0a7f | 5866 | REG_POINTER (virtual_incoming_args_rtx) = 1; |
5867 | REG_POINTER (virtual_stack_vars_rtx) = 1; | |
5868 | REG_POINTER (virtual_stack_dynamic_rtx) = 1; | |
5869 | REG_POINTER (virtual_outgoing_args_rtx) = 1; | |
5870 | REG_POINTER (virtual_cfa_rtx) = 1; | |
89525da0 | 5871 | |
d4c332ff | 5872 | #ifdef STACK_BOUNDARY |
80909c64 | 5873 | REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY; |
5874 | REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY; | |
5875 | REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY; | |
5876 | REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY; | |
5877 | ||
5878 | REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY; | |
5879 | REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY; | |
5880 | REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY; | |
5881 | REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY; | |
213d1448 | 5882 | |
80909c64 | 5883 | REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD; |
d4c332ff | 5884 | #endif |
5885 | ||
89525da0 | 5886 | #ifdef INIT_EXPANDERS |
5887 | INIT_EXPANDERS; | |
5888 | #endif | |
15bbde2b | 5889 | } |
5890 | ||
0f78b37a | 5891 | /* Return the value of element I of CONST_VECTOR X as a wide_int. */ |
5892 | ||
5893 | wide_int | |
5894 | const_vector_int_elt (const_rtx x, unsigned int i) | |
5895 | { | |
5896 | /* First handle elements that are directly encoded. */ | |
5897 | machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x)); | |
5898 | if (i < (unsigned int) XVECLEN (x, 0)) | |
5899 | return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode); | |
5900 | ||
5901 | /* Identify the pattern that contains element I and work out the index of | |
5902 | the last encoded element for that pattern. */ | |
5903 | unsigned int encoded_nelts = const_vector_encoded_nelts (x); | |
5904 | unsigned int npatterns = CONST_VECTOR_NPATTERNS (x); | |
5905 | unsigned int count = i / npatterns; | |
5906 | unsigned int pattern = i % npatterns; | |
5907 | unsigned int final_i = encoded_nelts - npatterns + pattern; | |
5908 | ||
5909 | /* If there are no steps, the final encoded value is the right one. */ | |
5910 | if (!CONST_VECTOR_STEPPED_P (x)) | |
5911 | return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode); | |
5912 | ||
5913 | /* Otherwise work out the value from the last two encoded elements. */ | |
5914 | rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns); | |
5915 | rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i); | |
5916 | wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode), | |
5917 | rtx_mode_t (v1, elt_mode)); | |
5918 | return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff); | |
5919 | } | |
5920 | ||
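/* Worked example, added as commentary: a V8SImode constant
   {1, 2, 3, 4, 5, 6, 7, 8} is encoded with npatterns == 1 and the
   leading elements {1, 2, 3}.  For i == 6 the code above computes
   count == 6, pattern == 0 and final_i == 2, giving
   3 + (6 - 2) * (3 - 2) == 7, which is indeed element 6 of the
   full vector.  */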
5921 | /* Return the value of element I of CONST_VECTOR X. */ | |
5922 | ||
5923 | rtx | |
5924 | const_vector_elt (const_rtx x, unsigned int i) | |
5925 | { | |
5926 | /* First handle elements that are directly encoded. */ | |
5927 | if (i < (unsigned int) XVECLEN (x, 0)) | |
5928 | return CONST_VECTOR_ENCODED_ELT (x, i); | |
5929 | ||
5930 | /* If there are no steps, the final encoded value is the right one. */ | |
5931 | if (!CONST_VECTOR_STEPPED_P (x)) | |
5932 | { | |
5933 | /* Identify the pattern that contains element I and work out the index of | |
5934 | the last encoded element for that pattern. */ | |
5935 | unsigned int encoded_nelts = const_vector_encoded_nelts (x); | |
5936 | unsigned int npatterns = CONST_VECTOR_NPATTERNS (x); | |
5937 | unsigned int pattern = i % npatterns; | |
5938 | unsigned int final_i = encoded_nelts - npatterns + pattern; | |
5939 | return CONST_VECTOR_ENCODED_ELT (x, final_i); | |
5940 | } | |
5941 | ||
5942 | /* Otherwise work out the value from the last two encoded elements. */ | |
5943 | return immed_wide_int_const (const_vector_int_elt (x, i), | |
5944 | GET_MODE_INNER (GET_MODE (x))); | |
5945 | } | |
5946 | ||
ca4bb72c | 5947 | /* Return true if X is a valid element for a CONST_VECTOR of the given |
5948 | mode. */ | |
67c52133 | 5949 | |
5950 | bool | |
ca4bb72c | 5951 | valid_for_const_vector_p (machine_mode, rtx x) |
67c52133 | 5952 | { |
5953 | return (CONST_SCALAR_INT_P (x) | |
5954 | || CONST_DOUBLE_AS_FLOAT_P (x) | |
5955 | || CONST_FIXED_P (x)); | |
5956 | } | |
5957 | ||
0b51f5ce | 5958 | /* Generate a vector constant of mode MODE in which every element has |
5959 | value ELT. */ | |
886cfd4f | 5960 | |
0b51f5ce | 5961 | rtx |
5962 | gen_const_vec_duplicate (machine_mode mode, rtx elt) | |
5963 | { | |
a80726d1 | 5964 | rtx_vector_builder builder (mode, 1, 1); |
5965 | builder.quick_push (elt); | |
5966 | return builder.build (); | |
0b51f5ce | 5967 | } |
5968 | ||
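/* Added usage note: the duplicate encoding is independent of the
   number of elements, so (assuming the target has V4SImode)
   gen_const_vec_duplicate (V4SImode, const1_rtx) stores a single
   pattern with a single element and lets readers such as
   const_vector_elt expand {1, 1, 1, 1} on demand.  */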
5969 | /* Return a vector rtx of mode MODE in which every element has value X. | |
5970 | The result will be a constant if X is constant. */ | |
5971 | ||
5972 | rtx | |
5973 | gen_vec_duplicate (machine_mode mode, rtx x) | |
5974 | { | |
ca4bb72c | 5975 | if (valid_for_const_vector_p (mode, x)) |
0b51f5ce | 5976 | return gen_const_vec_duplicate (mode, x); |
5977 | return gen_rtx_VEC_DUPLICATE (mode, x); | |
5978 | } | |
069b07bf | 5979 | |
a80726d1 | 5980 | /* A subroutine of const_vec_series_p that handles the case in which: |
5981 | ||
5982 | (GET_CODE (X) == CONST_VECTOR | |
5983 | && CONST_VECTOR_NPATTERNS (X) == 1 | |
5984 | && !CONST_VECTOR_DUPLICATE_P (X)) | |
5985 | ||
5986 | is known to hold. */ | |
ccc2ef18 | 5987 | |
5988 | bool | |
5989 | const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out) | |
5990 | { | |
a80726d1 | 5991 | /* Stepped sequences are only defined for integers, to avoid specifying |
5992 | rounding behavior. */ | |
5993 | if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT) | |
5994 | return false; | |
5995 | ||
5996 | /* A non-duplicated vector with two elements can always be seen as a | |
5997 | series with a nonzero step. Longer vectors must have a stepped | |
5998 | encoding. */ | |
ba7efd65 | 5999 | if (maybe_ne (CONST_VECTOR_NUNITS (x), 2) |
a80726d1 | 6000 | && !CONST_VECTOR_STEPPED_P (x)) |
ccc2ef18 | 6001 | return false; |
6002 | ||
a80726d1 | 6003 | /* Calculate the step between the first and second elements. */ |
ccc2ef18 | 6004 | scalar_mode inner = GET_MODE_INNER (GET_MODE (x)); |
6005 | rtx base = CONST_VECTOR_ELT (x, 0); | |
6006 | rtx step = simplify_binary_operation (MINUS, inner, | |
a80726d1 | 6007 | CONST_VECTOR_ENCODED_ELT (x, 1), base); |
ccc2ef18 | 6008 | if (rtx_equal_p (step, CONST0_RTX (inner))) |
6009 | return false; | |
6010 | ||
a80726d1 | 6011 | /* If we have a stepped encoding, check that the step between the |
6012 | second and third elements is the same as STEP. */ | |
6013 | if (CONST_VECTOR_STEPPED_P (x)) | |
ccc2ef18 | 6014 | { |
6015 | rtx diff = simplify_binary_operation (MINUS, inner, | |
a80726d1 | 6016 | CONST_VECTOR_ENCODED_ELT (x, 2), |
6017 | CONST_VECTOR_ENCODED_ELT (x, 1)); | |
ccc2ef18 | 6018 | if (!rtx_equal_p (step, diff)) |
6019 | return false; | |
6020 | } | |
6021 | ||
6022 | *base_out = base; | |
6023 | *step_out = step; | |
6024 | return true; | |
6025 | } | |
6026 | ||
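/* Example of the checks above, added for illustration: for a V4SImode
   constant {0, 2, 4, 6}, BASE is 0 and the first difference gives
   STEP == 2; the stepped encoding's third element then confirms
   4 - 2 == 2, so the function reports a series with base 0 and
   step 2.  */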
6027 | /* Generate a vector constant of mode MODE in which element I has | |
6028 | the value BASE + I * STEP. */ | |
6029 | ||
6030 | rtx | |
6031 | gen_const_vec_series (machine_mode mode, rtx base, rtx step) | |
6032 | { | |
8c5096cc | 6033 | gcc_assert (valid_for_const_vector_p (mode, base) |
6034 | && valid_for_const_vector_p (mode, step)); | |
ccc2ef18 | 6035 | |
a80726d1 | 6036 | rtx_vector_builder builder (mode, 1, 3); |
6037 | builder.quick_push (base); | |
6038 | for (int i = 1; i < 3; ++i) | |
6039 | builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode), | |
6040 | builder[i - 1], step)); | |
6041 | return builder.build (); | |
ccc2ef18 | 6042 | } |
6043 | ||
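/* Minimal usage sketch, added here; V4SImode is an assumption and any
   integer vector mode would serve.  Only {10, 13, 16} is stored; the
   rest of the series is extrapolated on access.  */
#if 0
  rtx series = gen_const_vec_series (V4SImode, GEN_INT (10), GEN_INT (3));
  /* Element 3 is 10 + 3 * 3.  */
  gcc_checking_assert (INTVAL (const_vector_elt (series, 3)) == 19);
#endif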
6044 | /* Generate a vector of mode MODE in which element I has the value | |
6045 | BASE + I * STEP. The result will be a constant if BASE and STEP | |
6046 | are both constants. */ | |
6047 | ||
6048 | rtx | |
6049 | gen_vec_series (machine_mode mode, rtx base, rtx step) | |
6050 | { | |
6051 | if (step == const0_rtx) | |
6052 | return gen_vec_duplicate (mode, base); | |
8c5096cc | 6053 | if (valid_for_const_vector_p (mode, base) |
6054 | && valid_for_const_vector_p (mode, step)) | |
ccc2ef18 | 6055 | return gen_const_vec_series (mode, base, step); |
6056 | return gen_rtx_VEC_SERIES (mode, base, step); | |
6057 | } | |
6058 | ||
0b51f5ce | 6059 | /* Generate a new vector constant for mode MODE and constant value |
6060 | CONSTANT. */ | |
886cfd4f | 6061 | |
0b51f5ce | 6062 | static rtx |
6063 | gen_const_vector (machine_mode mode, int constant) | |
6064 | { | |
6065 | machine_mode inner = GET_MODE_INNER (mode); | |
886cfd4f | 6066 | |
0b51f5ce | 6067 | gcc_assert (!DECIMAL_FLOAT_MODE_P (inner)); |
6068 | ||
6069 | rtx el = const_tiny_rtx[constant][(int) inner]; | |
6070 | gcc_assert (el); | |
886cfd4f | 6071 | |
a80726d1 | 6072 | return gen_const_vec_duplicate (mode, el); |
886cfd4f | 6073 | } |
6074 | ||
9426b612 | 6075 | /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when |
6e68dcb2 | 6076 | all elements are zero, and the one vector when all elements are one. */ |
9426b612 | 6077 | rtx |
3754d046 | 6078 | gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v) |
9426b612 | 6079 | { |
ba7efd65 | 6080 | gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v))); |
6e68dcb2 | 6081 | |
6082 | /* If the values are all the same, check to see if we can use one of the | |
6083 | standard constant vectors. */ | |
0b51f5ce | 6084 | if (rtvec_all_equal_p (v)) |
6085 | return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0)); | |
6e68dcb2 | 6086 | |
a80726d1 | 6087 | unsigned int nunits = GET_NUM_ELEM (v); |
6088 | rtx_vector_builder builder (mode, nunits, 1); | |
6089 | for (unsigned int i = 0; i < nunits; ++i) | |
6090 | builder.quick_push (RTVEC_ELT (v, i)); | |
6091 | return builder.build (v); | |
9426b612 | 6092 | } |
6093 | ||
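/* Added note, based on the comment above: when every element of V is
   zero, the call collapses to the shared zero-vector constant, so
   callers may compare the result against CONST0_RTX (mode) by pointer
   identity rather than walking the elements.  */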
6d8b68a3 | 6094 | /* Initialize global register information required by all functions. */ |
6095 | ||
6096 | void | |
6097 | init_emit_regs (void) | |
6098 | { | |
6099 | int i; | |
3754d046 | 6100 | machine_mode mode; |
d83fcaa1 | 6101 | mem_attrs *attrs; |
6d8b68a3 | 6102 | |
6103 | /* Reset register attributes. */ |
f863a586 | 6104 | reg_attrs_htab->empty (); |
6d8b68a3 | 6105 | |
6106 | /* We need reg_raw_mode, so initialize the modes now. */ | |
6107 | init_reg_modes_target (); | |
6108 | ||
6109 | /* Assign register numbers to the globally defined register rtx. */ | |
6d8b68a3 | 6110 | stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM); |
6111 | frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM); | |
6112 | hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM); | |
6113 | arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM); | |
6114 | virtual_incoming_args_rtx = | |
6115 | gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM); | |
6116 | virtual_stack_vars_rtx = | |
6117 | gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM); | |
6118 | virtual_stack_dynamic_rtx = | |
6119 | gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM); | |
6120 | virtual_outgoing_args_rtx = | |
6121 | gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM); | |
6122 | virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM); | |
60778e62 | 6123 | virtual_preferred_stack_boundary_rtx = |
6124 | gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM); | |
6d8b68a3 | 6125 | |
6126 | /* Initialize RTL for commonly used hard registers. These are | |
6127 | copied into regno_reg_rtx as we begin to compile each function. */ | |
6128 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
679bcc8d | 6129 | initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i); |
6d8b68a3 | 6130 | |
6131 | #ifdef RETURN_ADDRESS_POINTER_REGNUM | |
6132 | return_address_pointer_rtx | |
6133 | = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM); | |
6134 | #endif | |
6135 | ||
639f32a2 | 6136 | pic_offset_table_rtx = NULL_RTX; |
6d8b68a3 | 6137 | if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM) |
6138 | pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM); | |
d83fcaa1 | 6139 | |
6140 | for (i = 0; i < (int) MAX_MACHINE_MODE; i++) | |
6141 | { | |
3754d046 | 6142 | mode = (machine_mode) i; |
25a27413 | 6143 | attrs = ggc_cleared_alloc<mem_attrs> (); |
d83fcaa1 | 6144 | attrs->align = BITS_PER_UNIT; |
6145 | attrs->addrspace = ADDR_SPACE_GENERIC; | |
de7513fb | 6146 | if (mode != BLKmode && mode != VOIDmode) |
d83fcaa1 | 6147 | { |
6d58bcba | 6148 | attrs->size_known_p = true; |
6149 | attrs->size = GET_MODE_SIZE (mode); | |
d83fcaa1 | 6150 | if (STRICT_ALIGNMENT) |
6151 | attrs->align = GET_MODE_ALIGNMENT (mode); | |
6152 | } | |
6153 | mode_mem_attrs[i] = attrs; | |
6154 | } | |
15b08c01 | 6155 | |
6156 | split_branch_probability = profile_probability::uninitialized (); | |
6d8b68a3 | 6157 | } |
6158 | ||
8059b95a | 6159 | /* Initialize global machine_mode variables. */ |
6160 | ||
6161 | void | |
6162 | init_derived_machine_modes (void) | |
6163 | { | |
af8303fa | 6164 | opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode; |
6165 | FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT) | |
8059b95a | 6166 | { |
af8303fa | 6167 | scalar_int_mode mode = mode_iter.require (); |
6168 | ||
8059b95a | 6169 | if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT |
af8303fa | 6170 | && !opt_byte_mode.exists ()) |
6171 | opt_byte_mode = mode; | |
8059b95a | 6172 | |
6173 | if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD | |
af8303fa | 6174 | && !opt_word_mode.exists ()) |
6175 | opt_word_mode = mode; | |
8059b95a | 6176 | } |
6177 | ||
af8303fa | 6178 | byte_mode = opt_byte_mode.require (); |
6179 | word_mode = opt_word_mode.require (); | |
db22dc71 | 6180 | ptr_mode = as_a <scalar_int_mode> |
6181 | (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ()); | |
8059b95a | 6182 | } |
6183 | ||
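/* Concrete instance, added for illustration: on a typical port with
   8-bit bytes, 32-bit words and 32-bit pointers, the loop above yields
   byte_mode == QImode and word_mode == SImode, and ptr_mode becomes
   SImode.  Taking the class from Pmode keeps this correct for ports
   whose Pmode is a partial integer mode.  */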
01703575 | 6184 | /* Create some permanent unique rtl objects shared between all functions. */ |
15bbde2b | 6185 | |
6186 | void | |
01703575 | 6187 | init_emit_once (void) |
15bbde2b | 6188 | { |
6189 | int i; | |
3754d046 | 6190 | machine_mode mode; |
99d671f4 | 6191 | scalar_float_mode double_mode; |
2b8f5b8a | 6192 | opt_scalar_mode smode_iter; |
15bbde2b | 6193 | |
e913b5cd | 6194 | /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE, |
6195 | CONST_FIXED, and memory attribute hash tables. */ | |
f863a586 | 6196 | const_int_htab = hash_table<const_int_hasher>::create_ggc (37); |
c6259b83 | 6197 | |
e913b5cd | 6198 | #if TARGET_SUPPORTS_WIDE_INT |
f863a586 | 6199 | const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37); |
e913b5cd | 6200 | #endif |
f863a586 | 6201 | const_double_htab = hash_table<const_double_hasher>::create_ggc (37); |
2ff23ed0 | 6202 | |
bbad7cd0 | 6203 | if (NUM_POLY_INT_COEFFS > 1) |
6204 | const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37); | |
6205 | ||
f863a586 | 6206 | const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37); |
e397ad8e | 6207 | |
f863a586 | 6208 | reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37); |
77695070 | 6209 | |
57c097d5 | 6210 | #ifdef INIT_EXPANDERS |
ab5beff9 | 6211 | /* This is to initialize {init|mark|free}_machine_status before the first |
6212 | call to push_function_context_to. This is needed by the Chill front | |
3fb1e43b | 6213 | end which calls push_function_context_to before the first call to |
57c097d5 | 6214 | init_function_start. */ |
6215 | INIT_EXPANDERS; | |
6216 | #endif | |
6217 | ||
15bbde2b | 6218 | /* Create the unique rtx's for certain rtx codes and operand values. */ |
6219 | ||
48a7e3d1 | 6220 | /* Process stack-limiting command-line options. */ |
6221 | if (opt_fstack_limit_symbol_arg != NULL) | |
6222 | stack_limit_rtx | |
6223 | = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg)); | |
6224 | if (opt_fstack_limit_register_no >= 0) | |
6225 | stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no); | |
6226 | ||
8fd5918e | 6227 | /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case |
7014838c | 6228 | tries to use these variables. */ |
15bbde2b | 6229 | for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++) |
d823ba47 | 6230 | const_int_rtx[i + MAX_SAVED_CONST_INT] = |
a717d5b4 | 6231 | gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i); |
15bbde2b | 6232 | |
1a60f06a | 6233 | if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT |
6234 | && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT) | |
57c097d5 | 6235 | const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT]; |
1a60f06a | 6236 | else |
3ad7bb1c | 6237 | const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE); |
15bbde2b | 6238 | |
99d671f4 | 6239 | double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require (); |
8059b95a | 6240 | |
cc69d08a | 6241 | real_from_integer (&dconst0, double_mode, 0, SIGNED); |
6242 | real_from_integer (&dconst1, double_mode, 1, SIGNED); | |
6243 | real_from_integer (&dconst2, double_mode, 2, SIGNED); | |
3fa759a9 | 6244 | |
6245 | dconstm1 = dconst1; | |
6246 | dconstm1.sign = 1; | |
77e89269 | 6247 | |
6248 | dconsthalf = dconst1; | |
9d96125b | 6249 | SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1); |
15bbde2b | 6250 | |
ba8dfb08 | 6251 | for (i = 0; i < 3; i++) |
15bbde2b | 6252 | { |
3fa759a9 | 6253 | const REAL_VALUE_TYPE *const r = |
badfe841 | 6254 | (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2); |
6255 | ||
19a4dce4 | 6256 | FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT) |
069b07bf | 6257 | const_tiny_rtx[i][(int) mode] = |
d5f9611d | 6258 | const_double_from_real_value (*r, mode); |
069b07bf | 6259 | |
19a4dce4 | 6260 | FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT) |
2ff23ed0 | 6261 | const_tiny_rtx[i][(int) mode] = |
d5f9611d | 6262 | const_double_from_real_value (*r, mode); |
15bbde2b | 6263 | |
b572011e | 6264 | const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i); |
15bbde2b | 6265 | |
19a4dce4 | 6266 | FOR_EACH_MODE_IN_CLASS (mode, MODE_INT) |
b572011e | 6267 | const_tiny_rtx[i][(int) mode] = GEN_INT (i); |
7540dcc4 | 6268 | |
8c20007a | 6269 | for (mode = MIN_MODE_PARTIAL_INT; |
6270 | mode <= MAX_MODE_PARTIAL_INT; | |
3754d046 | 6271 | mode = (machine_mode)((int)(mode) + 1)) |
7540dcc4 | 6272 | const_tiny_rtx[i][(int) mode] = GEN_INT (i); |
15bbde2b | 6273 | } |
6274 | ||
ba8dfb08 | 6275 | const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx; |
6276 | ||
19a4dce4 | 6277 | FOR_EACH_MODE_IN_CLASS (mode, MODE_INT) |
ba8dfb08 | 6278 | const_tiny_rtx[3][(int) mode] = constm1_rtx; |
6279 | ||
8464736b | 6280 | /* For BImode, 1 and -1 are unsigned and signed interpretations |
6281 | of the same value. */ | |
6282 | const_tiny_rtx[0][(int) BImode] = const0_rtx; | |
6283 | const_tiny_rtx[1][(int) BImode] = const_true_rtx; | |
6284 | const_tiny_rtx[3][(int) BImode] = const_true_rtx; | |
6285 | ||
8c20007a | 6286 | for (mode = MIN_MODE_PARTIAL_INT; |
6287 | mode <= MAX_MODE_PARTIAL_INT; | |
3754d046 | 6288 | mode = (machine_mode)((int)(mode) + 1)) |
dd276d20 | 6289 | const_tiny_rtx[3][(int) mode] = constm1_rtx; |
19a4dce4 | 6290 | |
6291 | FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT) | |
4248fc32 | 6292 | { |
6293 | rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)]; | |
6294 | const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner); | |
6295 | } | |
6296 | ||
19a4dce4 | 6297 | FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT) |
4248fc32 | 6298 | { |
6299 | rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)]; | |
6300 | const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner); | |
6301 | } | |
6302 | ||
8464736b | 6303 | /* As for BImode, "all 1" and "all -1" are unsigned and signed |
6304 | interpretations of the same value. */ | |
6305 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL) | |
6306 | { | |
6307 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6308 | const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3); | |
6309 | const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode]; | |
6310 | } | |
6311 | ||
19a4dce4 | 6312 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT) |
6e68dcb2 | 6313 | { |
6314 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6315 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
ba8dfb08 | 6316 | const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3); |
6e68dcb2 | 6317 | } |
886cfd4f | 6318 | |
19a4dce4 | 6319 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT) |
6e68dcb2 | 6320 | { |
6321 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6322 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
6323 | } | |
886cfd4f | 6324 | |
2b8f5b8a | 6325 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT) |
06f0b99c | 6326 | { |
2b8f5b8a | 6327 | scalar_mode smode = smode_iter.require (); |
6328 | FCONST0 (smode).data.high = 0; | |
6329 | FCONST0 (smode).data.low = 0; | |
6330 | FCONST0 (smode).mode = smode; | |
6331 | const_tiny_rtx[0][(int) smode] | |
6332 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
06f0b99c | 6333 | } |
6334 | ||
2b8f5b8a | 6335 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT) |
06f0b99c | 6336 | { |
2b8f5b8a | 6337 | scalar_mode smode = smode_iter.require (); |
6338 | FCONST0 (smode).data.high = 0; | |
6339 | FCONST0 (smode).data.low = 0; | |
6340 | FCONST0 (smode).mode = smode; | |
6341 | const_tiny_rtx[0][(int) smode] | |
6342 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
06f0b99c | 6343 | } |
6344 | ||
2b8f5b8a | 6345 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM) |
06f0b99c | 6346 | { |
2b8f5b8a | 6347 | scalar_mode smode = smode_iter.require (); |
6348 | FCONST0 (smode).data.high = 0; | |
6349 | FCONST0 (smode).data.low = 0; | |
6350 | FCONST0 (smode).mode = smode; | |
6351 | const_tiny_rtx[0][(int) smode] | |
6352 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
06f0b99c | 6353 | |
6354 | /* We store the value 1. */ | |
2b8f5b8a | 6355 | FCONST1 (smode).data.high = 0; |
6356 | FCONST1 (smode).data.low = 0; | |
6357 | FCONST1 (smode).mode = smode; | |
6358 | FCONST1 (smode).data | |
6359 | = double_int_one.lshift (GET_MODE_FBIT (smode), | |
d67b7119 | 6360 | HOST_BITS_PER_DOUBLE_INT, |
2b8f5b8a | 6361 | SIGNED_FIXED_POINT_MODE_P (smode)); |
6362 | const_tiny_rtx[1][(int) smode] | |
6363 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode); | |
06f0b99c | 6364 | } |
6365 | ||
2b8f5b8a | 6366 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM) |
06f0b99c | 6367 | { |
2b8f5b8a | 6368 | scalar_mode smode = smode_iter.require (); |
6369 | FCONST0 (smode).data.high = 0; | |
6370 | FCONST0 (smode).data.low = 0; | |
6371 | FCONST0 (smode).mode = smode; | |
6372 | const_tiny_rtx[0][(int) smode] | |
6373 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
06f0b99c | 6374 | |
6375 | /* We store the value 1. */ | |
2b8f5b8a | 6376 | FCONST1 (smode).data.high = 0; |
6377 | FCONST1 (smode).data.low = 0; | |
6378 | FCONST1 (smode).mode = smode; | |
6379 | FCONST1 (smode).data | |
6380 | = double_int_one.lshift (GET_MODE_FBIT (smode), | |
d67b7119 | 6381 | HOST_BITS_PER_DOUBLE_INT, |
2b8f5b8a | 6382 | SIGNED_FIXED_POINT_MODE_P (smode)); |
6383 | const_tiny_rtx[1][(int) smode] | |
6384 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode); | |
e397ad8e | 6385 | } |
6386 | ||
19a4dce4 | 6387 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT) |
e397ad8e | 6388 | { |
6389 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6390 | } | |
6391 | ||
19a4dce4 | 6392 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT) |
e397ad8e | 6393 | { |
6394 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6395 | } | |
6396 | ||
19a4dce4 | 6397 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM) |
e397ad8e | 6398 | { |
6399 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6400 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
6401 | } | |
6402 | ||
19a4dce4 | 6403 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM) |
e397ad8e | 6404 | { |
6405 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6406 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
06f0b99c | 6407 | } |
6408 | ||
0fd4500a | 6409 | for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i) |
3754d046 | 6410 | if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC) |
0fd4500a | 6411 | const_tiny_rtx[0][i] = const0_rtx; |
15bbde2b | 6412 | |
7d7b0bac | 6413 | pc_rtx = gen_rtx_fmt_ (PC, VOIDmode); |
6414 | ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode); | |
6415 | simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode); | |
6416 | cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode); | |
f9a00e9e | 6417 | invalid_insn_rtx = gen_rtx_INSN (VOIDmode, |
6418 | /*prev_insn=*/NULL, | |
6419 | /*next_insn=*/NULL, | |
6420 | /*bb=*/NULL, | |
6421 | /*pattern=*/NULL_RTX, | |
6422 | /*location=*/-1, | |
6423 | CODE_FOR_nothing, | |
6424 | /*reg_notes=*/NULL_RTX); | |
15bbde2b | 6425 | } |
ac6c481d | 6426 | \f |
cd0fe062 | 6427 | /* Produce an exact duplicate of insn INSN after AFTER. |
6428 | Take care to update libcall regions if present. */ |
6429 | ||
722334ea | 6430 | rtx_insn * |
5e9c670f | 6431 | emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after) |
cd0fe062 | 6432 | { |
722334ea | 6433 | rtx_insn *new_rtx; |
6434 | rtx link; | |
cd0fe062 | 6435 | |
6436 | switch (GET_CODE (insn)) | |
6437 | { | |
6438 | case INSN: | |
9ce37fa7 | 6439 | new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after); |
cd0fe062 | 6440 | break; |
6441 | ||
6442 | case JUMP_INSN: | |
9ce37fa7 | 6443 | new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after); |
01762951 | 6444 | CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn); |
cd0fe062 | 6445 | break; |
6446 | ||
9845d120 | 6447 | case DEBUG_INSN: |
6448 | new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after); | |
6449 | break; | |
6450 | ||
cd0fe062 | 6451 | case CALL_INSN: |
9ce37fa7 | 6452 | new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after); |
cd0fe062 | 6453 | if (CALL_INSN_FUNCTION_USAGE (insn)) |
9ce37fa7 | 6454 | CALL_INSN_FUNCTION_USAGE (new_rtx) |
cd0fe062 | 6455 | = copy_insn (CALL_INSN_FUNCTION_USAGE (insn)); |
9ce37fa7 | 6456 | SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn); |
6457 | RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn); | |
6458 | RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn); | |
48e1416a | 6459 | RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx) |
9c2a0c05 | 6460 | = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn); |
cd0fe062 | 6461 | break; |
6462 | ||
6463 | default: | |
611234b4 | 6464 | gcc_unreachable (); |
cd0fe062 | 6465 | } |
6466 | ||
6467 | /* Update LABEL_NUSES. */ | |
9ce37fa7 | 6468 | mark_jump_label (PATTERN (new_rtx), new_rtx, 0); |
cd0fe062 | 6469 | |
5169661d | 6470 | INSN_LOCATION (new_rtx) = INSN_LOCATION (insn); |
ab87d1bc | 6471 | |
98116afd | 6472 | /* If the old insn is frame related, then so is the new one. This is |
6473 | primarily needed for IA-64 unwind info which marks epilogue insns, | |
6474 | which may be duplicated by the basic block reordering code. */ | |
9ce37fa7 | 6475 | RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn); |
98116afd | 6476 | |
bb99ba64 | 6477 | /* Locate the end of existing REG_NOTES in NEW_RTX. */ |
6478 | rtx *ptail = ®_NOTES (new_rtx); | |
6479 | while (*ptail != NULL_RTX) | |
6480 | ptail = &XEXP (*ptail, 1); | |
6481 | ||
19d2fe05 | 6482 | /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label |
6483 | will make them. REG_LABEL_TARGETs are created there too, but are | |
6484 | supposed to be sticky, so we copy them. */ | |
cd0fe062 | 6485 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
19d2fe05 | 6486 | if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND) |
cd0fe062 | 6487 | { |
bb99ba64 | 6488 | *ptail = duplicate_reg_note (link); |
6489 | ptail = &XEXP (*ptail, 1); | |
cd0fe062 | 6490 | } |
6491 | ||
9ce37fa7 | 6492 | INSN_CODE (new_rtx) = INSN_CODE (insn); |
6493 | return new_rtx; | |
cd0fe062 | 6494 | } |
1f3233d1 | 6495 | |
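/* Added usage sketch: a pass that peels or duplicates code can clone
   an insn in place like this; the note-copying loop above preserves
   the original order of INSN's REG_NOTES in the copy.  */
#if 0
  rtx_insn *copy = emit_copy_of_insn_after (insn, insn);
#endif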
7035b2ab | 6496 | static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER]; |
c09425a0 | 6497 | rtx |
3754d046 | 6498 | gen_hard_reg_clobber (machine_mode mode, unsigned int regno) |
c09425a0 | 6499 | { |
6500 | if (hard_reg_clobbers[mode][regno]) | |
6501 | return hard_reg_clobbers[mode][regno]; | |
6502 | else | |
6503 | return (hard_reg_clobbers[mode][regno] = | |
6504 | gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno))); | |
6505 | } | |
6506 | ||
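/* Added sketch of the memoization above: repeated requests for the
   same (mode, register) pair return one GC-managed CLOBBER.  */
#if 0
  rtx c1 = gen_hard_reg_clobber (word_mode, 0);
  rtx c2 = gen_hard_reg_clobber (word_mode, 0);
  gcc_checking_assert (c1 == c2);
#endif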
ccd6679f | 6507 | static GTY((deletable)) rtx |
6508 | hard_reg_clobbers_high[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER]; | |
6509 | ||
6510 | /* Return a CLOBBER_HIGH expression for register REGNO that clobbers MODE, | |
6511 | caching into HARD_REG_CLOBBERS_HIGH. */ | |
6512 | rtx | |
6513 | gen_hard_reg_clobber_high (machine_mode mode, unsigned int regno) | |
6514 | { | |
6515 | if (hard_reg_clobbers_high[mode][regno]) | |
6516 | return hard_reg_clobbers_high[mode][regno]; | |
6517 | else | |
6518 | return (hard_reg_clobbers_high[mode][regno] | |
6519 | = gen_rtx_CLOBBER_HIGH (VOIDmode, gen_rtx_REG (mode, regno))); | |
6520 | } | |
6521 | ||
5169661d | 6522 | location_t prologue_location; |
6523 | location_t epilogue_location; | |
23a070f3 | 6524 | |
6525 | /* Hold current location information and last location information, so the | |
6526 | datastructures are built lazily only when some instructions in given | |
6527 | place are needed. */ | |
c7abeac5 | 6528 | static location_t curr_location; |
23a070f3 | 6529 | |
5169661d | 6530 | /* Allocate the insn location data structure. */ |
23a070f3 | 6531 | void |
5169661d | 6532 | insn_locations_init (void) |
23a070f3 | 6533 | { |
5169661d | 6534 | prologue_location = epilogue_location = 0; |
23a070f3 | 6535 | curr_location = UNKNOWN_LOCATION; |
23a070f3 | 6536 | } |
6537 | ||
6538 | /* At the end of the emit stage, clear the current location. */ |
6539 | void | |
5169661d | 6540 | insn_locations_finalize (void) |
23a070f3 | 6541 | { |
5169661d | 6542 | epilogue_location = curr_location; |
6543 | curr_location = UNKNOWN_LOCATION; | |
23a070f3 | 6544 | } |
6545 | ||
6546 | /* Set current location. */ | |
6547 | void | |
5169661d | 6548 | set_curr_insn_location (location_t location) |
23a070f3 | 6549 | { |
23a070f3 | 6550 | curr_location = location; |
6551 | } | |
6552 | ||
6553 | /* Get current location. */ | |
6554 | location_t | |
5169661d | 6555 | curr_insn_location (void) |
23a070f3 | 6556 | { |
6557 | return curr_location; | |
6558 | } | |
6559 | ||
23a070f3 | 6560 | /* Return the lexical scope block INSN belongs to. */ |
6561 | tree | |
5e9c670f | 6562 | insn_scope (const rtx_insn *insn) |
23a070f3 | 6563 | { |
5169661d | 6564 | return LOCATION_BLOCK (INSN_LOCATION (insn)); |
23a070f3 | 6565 | } |
6566 | ||
6567 | /* Return line number of the statement that produced this insn. */ | |
6568 | int | |
5e9c670f | 6569 | insn_line (const rtx_insn *insn) |
23a070f3 | 6570 | { |
5169661d | 6571 | return LOCATION_LINE (INSN_LOCATION (insn)); |
23a070f3 | 6572 | } |
6573 | ||
6574 | /* Return source file of the statement that produced this insn. */ | |
6575 | const char * | |
5e9c670f | 6576 | insn_file (const rtx_insn *insn) |
23a070f3 | 6577 | { |
5169661d | 6578 | return LOCATION_FILE (INSN_LOCATION (insn)); |
23a070f3 | 6579 | } |
30c3c442 | 6580 | |
0e7ae557 | 6581 | /* Return expanded location of the statement that produced this insn. */ |
6582 | expanded_location | |
5e9c670f | 6583 | insn_location (const rtx_insn *insn) |
0e7ae557 | 6584 | { |
6585 | return expand_location (INSN_LOCATION (insn)); | |
6586 | } | |
6587 | ||
30c3c442 | 6588 | /* Return true if memory model MODEL requires a pre-operation (release-style) |
6589 | barrier or a post-operation (acquire-style) barrier. While not universal, | |
6590 | this function matches the behavior of several targets. */ |
6591 | ||
6592 | bool | |
6593 | need_atomic_barrier_p (enum memmodel model, bool pre) | |
6594 | { | |
e205c62d | 6595 | switch (model & MEMMODEL_BASE_MASK) |
30c3c442 | 6596 | { |
6597 | case MEMMODEL_RELAXED: | |
6598 | case MEMMODEL_CONSUME: | |
6599 | return false; | |
6600 | case MEMMODEL_RELEASE: | |
6601 | return pre; | |
6602 | case MEMMODEL_ACQUIRE: | |
6603 | return !pre; | |
6604 | case MEMMODEL_ACQ_REL: | |
6605 | case MEMMODEL_SEQ_CST: | |
6606 | return true; | |
6607 | default: | |
6608 | gcc_unreachable (); | |
6609 | } | |
6610 | } | |
2add0b64 | 6611 | |
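/* Example of the mapping above, added as commentary: when expanding an
   atomic store with MEMMODEL_RELEASE, need_atomic_barrier_p (model,
   true) requests a barrier before the store, while the post-operation
   query returns false, so no trailing barrier is emitted.  */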
bd39703a | 6612 | /* Return a constant shift amount for shifting a value of mode MODE |
6613 | by VALUE bits. */ | |
6614 | ||
6615 | rtx | |
bbad7cd0 | 6616 | gen_int_shift_amount (machine_mode, poly_int64 value) |
bd39703a | 6617 | { |
6618 | /* Use a 64-bit mode, to avoid any truncation. | |
6619 | ||
6620 | ??? Perhaps this should be automatically derived from the .md files | |
6621 | instead, or perhaps have a target hook. */ | |
6622 | scalar_int_mode shift_mode = (BITS_PER_UNIT == 8 | |
6623 | ? DImode | |
6624 | : int_mode_for_size (64, 0).require ()); | |
6625 | return gen_int_mode (value, shift_mode); | |
6626 | } | |
6627 | ||
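/* Added usage sketch: produce the count for shifting an SImode value
   left by three bits; the wide shift mode chosen above means the
   constant cannot be truncated however wide the shifted mode is.  */
#if 0
  rtx count = gen_int_shift_amount (SImode, 3);
#endif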
2add0b64 | 6628 | /* Initialize fields of rtl_data related to stack alignment. */ |
6629 | ||
6630 | void | |
6631 | rtl_data::init_stack_alignment () | |
6632 | { | |
6633 | stack_alignment_needed = STACK_BOUNDARY; | |
6634 | max_used_stack_slot_alignment = STACK_BOUNDARY; | |
6635 | stack_alignment_estimated = 0; | |
6636 | preferred_stack_boundary = STACK_BOUNDARY; | |
6637 | } | |
6638 | ||
30c3c442 | 6639 | \f |
1f3233d1 | 6640 | #include "gt-emit-rtl.h" |