/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "params.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"
#include "predict.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in a top level
   structure.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  */
profile_probability split_branch_probability;

/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}

/* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}

/* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
   into the hash table if one identical to it is not already in the
   table; return the shared structure.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
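
/* Since reg_attrs are shared via the hash table, callers can rely on
   pointer identity; e.g. (illustrative sketch, with DECL standing for
   any variable's tree node):

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);
     gcc_assert (a == b);  */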

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences from being propagated across this
   insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs (regno, mode)
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
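
/* Because of this sharing, CONST_INTs can be compared with pointer
   equality: GEN_INT (2) always returns the single shared rtx for 2,
   and values outside [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are
   created once and then reused via const_int_htab.  For example:

     gcc_assert (GEN_INT (2) == const2_rtx);
     gcc_assert (GEN_INT (12345) == GEN_INT (12345));  */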

rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}
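
/* Unlike plain GEN_INT, gen_int_mode canonicalizes the value for MODE
   by sign-truncating it to the mode's precision, so two values that
   are equal in MODE map to the same shared CONST_INT.  For example,
   with 8-bit QImode:

     gen_int_mode (255, QImode)  == constm1_rtx      (i.e. (const_int -1))
     gen_int_mode (128, QImode)  == GEN_INT (-128)  */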

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
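
/* As an illustration (for a !TARGET_SUPPORTS_WIDE_INT configuration
   with 64-bit HOST_WIDE_INT): immed_double_const (5, 0, TImode) yields
   (const_int 5), since the pair fits in one word, whereas
   immed_double_const (0, 1, TImode) -- a genuinely 128-bit value --
   produces a VOIDmode CONST_DOUBLE holding the two words.  */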

/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
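
/* Thus, outside of reload/LRA, Pmode requests for the special registers
   come back shared:

     gcc_assert (gen_rtx_REG (Pmode, STACK_POINTER_REGNUM)
		 == stack_pointer_rtx);

   while everything else gets a freshly allocated REG from gen_raw_REG.  */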

rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a MEM referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
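
/* A typical use is a constant-pool reference -- force_const_mem in
   varasm.c builds its MEM this way.  Sketch, with POOL_ADDR standing
   for the pool entry's address rtx:

     rtx mem = gen_const_mem (mode, pool_addr);

   The read-only/non-trapping marks let later passes hoist and CSE
   the load freely.  */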

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  unsigned int regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= regsize && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use a subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than
		that of the integer mode.  LRA also uses subregs for a
		register that should be used in a different mode in an
		insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (osize < regsize
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      unsigned int block_size = MIN (isize, regsize);
      unsigned int offset_within_block = offset % block_size;
      if (BYTES_BIG_ENDIAN
	  ? offset_within_block != block_size - osize
	  : offset_within_block != 0)
	return false;
    }
  return true;
}

rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
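
/* For example, for a 64-bit pseudo (reg:DI 100),

     gen_lowpart_SUBREG (SImode, reg)

   yields (subreg:SI (reg:DI 100) 0) on little-endian targets and
   (subreg:SI (reg:DI 100) 4) on typical big-endian ones, since there
   the least-significant part lives at the highest offset.  */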

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
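
/* A common use is building a PARALLEL; e.g. (sketch, with SET and
   CLOBBER rtxes built beforehand):

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber));  */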

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
}
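
/* Concretely, on a typical big-endian target:

     byte_lowpart_offset (SImode, DImode)  ==  4    (normal lowpart)
     byte_lowpart_offset (DImode, SImode)  == -4    (paradoxical)

   while both are 0 on a little-endian target.  */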

/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

int
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
		      unsigned int offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (offset == 0);
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}

/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

int
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
}

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
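
/* So, when generating_concat_p, gen_reg_rtx (DCmode) returns
   (concat:DC (reg:DF N) (reg:DF N+1)) built from two consecutive
   DFmode pseudos, while gen_reg_rtx (SImode) simply returns
   (reg:SI N) for the next free pseudo number N.  */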

/* Make sure m_regno_pointer_align and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}

/* Return TRUE if the REG_EXPR of REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    poly_int64 offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
1437 | ||
d4c332ff | 1438 | /* Identify REG as a probable pointer register and show its alignment |
1439 | as ALIGN, if nonzero. */ | |
15bbde2b | 1440 | |
1441 | void | |
35cb5232 | 1442 | mark_reg_pointer (rtx reg, int align) |
15bbde2b | 1443 | { |
e61a0a7f | 1444 | if (! REG_POINTER (reg)) |
612409a6 | 1445 | { |
e61a0a7f | 1446 | REG_POINTER (reg) = 1; |
d4c332ff | 1447 | |
612409a6 | 1448 | if (align) |
1449 | REGNO_POINTER_ALIGN (REGNO (reg)) = align; | |
1450 | } | |
1451 | else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg))) | |
8b332087 | 1452 | /* We can no longer be sure just how aligned this pointer is. */ |
d4c332ff | 1453 | REGNO_POINTER_ALIGN (REGNO (reg)) = align; |
15bbde2b | 1454 | } |
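/* Illustrative sketch (an addition, not original GCC code): a typical
   use when expanding a pointer-valued temporary whose alignment is
   known.  ALIGN is measured in bits, matching REGNO_POINTER_ALIGN.  */

static ATTRIBUTE_UNUSED rtx
example_pointer_temp (void)
{
  rtx reg = gen_reg_rtx (Pmode);
  /* Record that REG probably holds a pointer aligned like Pmode.  */
  mark_reg_pointer (reg, GET_MODE_ALIGNMENT (Pmode));
  return reg;
}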
1455 | ||
1456 | /* Return 1 plus largest pseudo reg number used in the current function. */ | |
1457 | ||
1458 | int | |
35cb5232 | 1459 | max_reg_num (void) |
15bbde2b | 1460 | { |
1461 | return reg_rtx_no; | |
1462 | } | |
1463 | ||
1464 | /* Return 1 + the largest label number used so far in the current function. */ | |
1465 | ||
1466 | int | |
35cb5232 | 1467 | max_label_num (void) |
15bbde2b | 1468 | { |
15bbde2b | 1469 | return label_num; |
1470 | } | |
1471 | ||
1472 | /* Return first label number used in this function (if any were used). */ | |
1473 | ||
1474 | int | |
35cb5232 | 1475 | get_first_label_num (void) |
15bbde2b | 1476 | { |
1477 | return first_label_num; | |
1478 | } | |
4ee9c684 | 1479 | |
1480 | /* If the rtx for label was created during the expansion of a nested | |
1481 | function, then first_label_num won't include this label number. | |
f0b5f617 | 1482 | Fix this now so that array indices work later. */ |
4ee9c684 | 1483 | |
1484 | void | |
6313d5da | 1485 | maybe_set_first_label_num (rtx_code_label *x) |
4ee9c684 | 1486 | { |
1487 | if (CODE_LABEL_NUMBER (x) < first_label_num) | |
1488 | first_label_num = CODE_LABEL_NUMBER (x); | |
1489 | } | |
836c1c68 | 1490 | |
1491 | /* For use by the RTL function loader, when mingling with normal | |
1492 | functions. | |
1493 | Ensure that label_num is greater than the label num of X, to avoid | |
1494 | duplicate labels in the generated assembler. */ | |
1495 | ||
1496 | void | |
1497 | maybe_set_max_label_num (rtx_code_label *x) | |
1498 | { | |
1499 | if (CODE_LABEL_NUMBER (x) >= label_num) | |
1500 | label_num = CODE_LABEL_NUMBER (x) + 1; | |
1501 | } | |
1502 | ||
15bbde2b | 1503 | \f |
1504 | /* Return a value representing some low-order bits of X, where the number | |
1505 | of low-order bits is given by MODE. Note that no conversion is done | |
d823ba47 | 1506 | between floating-point and fixed-point values; rather, the bit |
15bbde2b | 1507 | representation is returned. |
1508 | ||
1509 | This function handles the cases in common between gen_lowpart, below, | |
1510 | and two variants in cse.c and combine.c. These are the cases that can | |
1511 | be safely handled at all points in the compilation. | |
1512 | ||
1513 | If this is not a case we can handle, return 0. */ | |
1514 | ||
1515 | rtx | |
3754d046 | 1516 | gen_lowpart_common (machine_mode mode, rtx x) |
15bbde2b | 1517 | { |
701e46d0 | 1518 | int msize = GET_MODE_SIZE (mode); |
791172c5 | 1519 | int xsize; |
3754d046 | 1520 | machine_mode innermode; |
791172c5 | 1521 | |
1522 | /* Unfortunately, this routine doesn't take a parameter for the mode of X, | |
1523 | so we have to make one up. Yuk. */ | |
1524 | innermode = GET_MODE (x); | |
971ba038 | 1525 | if (CONST_INT_P (x) |
6c799a83 | 1526 | && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT) |
517be012 | 1527 | innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require (); |
791172c5 | 1528 | else if (innermode == VOIDmode) |
517be012 | 1529 | innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require (); |
48e1416a | 1530 | |
791172c5 | 1531 | xsize = GET_MODE_SIZE (innermode); |
1532 | ||
611234b4 | 1533 | gcc_assert (innermode != VOIDmode && innermode != BLKmode); |
15bbde2b | 1534 | |
791172c5 | 1535 | if (innermode == mode) |
15bbde2b | 1536 | return x; |
1537 | ||
44ce7b27 | 1538 | if (SCALAR_FLOAT_MODE_P (mode)) |
1539 | { | |
1540 | /* Don't allow paradoxical FLOAT_MODE subregs. */ | |
1541 | if (msize > xsize) | |
1542 | return 0; | |
1543 | } | |
1544 | else | |
1545 | { | |
1546 | /* MODE must occupy no more of the underlying registers than X. */ | |
1547 | unsigned int regsize = REGMODE_NATURAL_SIZE (innermode); | |
1548 | unsigned int mregs = CEIL (msize, regsize); | |
1549 | unsigned int xregs = CEIL (xsize, regsize); | |
1550 | if (mregs > xregs) | |
1551 | return 0; | |
1552 | } | |
9abe1e73 | 1553 | |
58a70f63 | 1554 | scalar_int_mode int_mode, int_innermode, from_mode; |
15bbde2b | 1555 | if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND) |
58a70f63 | 1556 | && is_a <scalar_int_mode> (mode, &int_mode) |
1557 | && is_a <scalar_int_mode> (innermode, &int_innermode) | |
1558 | && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode)) | |
15bbde2b | 1559 | { |
1560 | /* If we are getting the low-order part of something that has been | |
1561 | sign- or zero-extended, we can either just use the object being | |
1562 | extended or make a narrower extension. If we want an even smaller | |
1563 | piece than the size of the object being extended, call ourselves | |
1564 | recursively. | |
1565 | ||
1566 | This case is used mostly by combine and cse. */ | |
1567 | ||
58a70f63 | 1568 | if (from_mode == int_mode) |
15bbde2b | 1569 | return XEXP (x, 0); |
58a70f63 | 1570 | else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode)) |
1571 | return gen_lowpart_common (int_mode, XEXP (x, 0)); | |
1572 | else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode)) | |
1573 | return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0)); | |
15bbde2b | 1574 | } |
8ad4c111 | 1575 | else if (GET_CODE (x) == SUBREG || REG_P (x) |
b58a8b74 | 1576 | || GET_CODE (x) == CONCAT || const_vec_p (x) |
bbad7cd0 | 1577 | || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x) |
1578 | || CONST_POLY_INT_P (x)) | |
a8a727ad | 1579 | return lowpart_subreg (mode, x, innermode); |
4a307dd5 | 1580 | |
15bbde2b | 1581 | /* Otherwise, we can't do this. */ |
1582 | return 0; | |
1583 | } | |
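/* Illustrative sketch (an addition): take the low SImode part of a
   DImode pseudo.  On a little-endian target the result is
   (subreg:SI (reg:DI N) 0); a big-endian target uses byte offset 4.
   A null result means gen_lowpart_common cannot handle the case.  */

static ATTRIBUTE_UNUSED rtx
example_low_si_of_di (rtx di_reg)
{
  gcc_checking_assert (REG_P (di_reg) && GET_MODE (di_reg) == DImode);
  return gen_lowpart_common (SImode, di_reg);
}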
1584 | \f | |
d56d0ca2 | 1585 | rtx |
3754d046 | 1586 | gen_highpart (machine_mode mode, rtx x) |
d56d0ca2 | 1587 | { |
701e46d0 | 1588 | unsigned int msize = GET_MODE_SIZE (mode); |
81802af6 | 1589 | rtx result; |
701e46d0 | 1590 | |
d56d0ca2 | 1591 | /* This case loses if X is a subreg. To catch bugs early, |
1592 | complain if an invalid MODE is used even in other cases. */ | |
611234b4 | 1593 | gcc_assert (msize <= UNITS_PER_WORD |
1594 | || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x))); | |
701e46d0 | 1595 | |
81802af6 | 1596 | result = simplify_gen_subreg (mode, x, GET_MODE (x), |
1597 | subreg_highpart_offset (mode, GET_MODE (x))); | |
611234b4 | 1598 | gcc_assert (result); |
48e1416a | 1599 | |
a8c36ab2 | 1600 | /* simplify_gen_subreg is not guaranteed to return a valid operand for |
1601 | the target if we have a MEM. gen_highpart must return a valid operand, | |
1602 | emitting code if necessary to do so. */ | |
611234b4 | 1603 | if (MEM_P (result)) |
1604 | { | |
1605 | result = validize_mem (result); | |
1606 | gcc_assert (result); | |
1607 | } | |
48e1416a | 1608 | |
81802af6 | 1609 | return result; |
1610 | } | |
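/* Illustrative sketch (an addition): fetch the most significant word
   of a double-word value, as a port might when splitting a DImode
   operation into word_mode halves.  */

static ATTRIBUTE_UNUSED rtx
example_high_word (rtx di_val)
{
  return gen_highpart (word_mode, di_val);
}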
704fcf2b | 1611 | |
29d56731 | 1612 | /* Like gen_highpart, but accept the mode of EXP as INNERMODE, in case |
704fcf2b | 1613 | EXP is a VOIDmode constant. */ |
1614 | rtx | |
3754d046 | 1615 | gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp) |
704fcf2b | 1616 | { |
1617 | if (GET_MODE (exp) != VOIDmode) | |
1618 | { | |
611234b4 | 1619 | gcc_assert (GET_MODE (exp) == innermode); |
704fcf2b | 1620 | return gen_highpart (outermode, exp); |
1621 | } | |
1622 | return simplify_gen_subreg (outermode, exp, innermode, | |
1623 | subreg_highpart_offset (outermode, innermode)); | |
1624 | } | |
d4c5e26d | 1625 | |
ca99c787 | 1626 | /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has |
1627 | OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */ | |
10ef59ac | 1628 | |
81802af6 | 1629 | unsigned int |
ca99c787 | 1630 | subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes) |
81802af6 | 1631 | { |
ca99c787 | 1632 | if (outer_bytes > inner_bytes) |
1633 | /* Paradoxical subregs always have a SUBREG_BYTE of 0. */ | |
1634 | return 0; | |
701e46d0 | 1635 | |
ca99c787 | 1636 | if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN) |
1637 | return inner_bytes - outer_bytes; | |
1638 | else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN) | |
1639 | return 0; | |
1640 | else | |
1641 | return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0); | |
d56d0ca2 | 1642 | } |
64ab453f | 1643 | |
ca99c787 | 1644 | /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has |
1645 | OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */ | |
1646 | ||
81802af6 | 1647 | unsigned int |
ca99c787 | 1648 | subreg_size_highpart_offset (unsigned int outer_bytes, |
1649 | unsigned int inner_bytes) | |
64ab453f | 1650 | { |
ca99c787 | 1651 | gcc_assert (inner_bytes >= outer_bytes); |
64ab453f | 1652 | |
ca99c787 | 1653 | if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN) |
1654 | return 0; | |
1655 | else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN) | |
1656 | return inner_bytes - outer_bytes; | |
1657 | else | |
1658 | return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, | |
1659 | (inner_bytes - outer_bytes) | |
1660 | * BITS_PER_UNIT); | |
64ab453f | 1661 | } |
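/* Worked example (an addition): for an SImode value inside a DImode
   one, OUTER_BYTES == 4 and INNER_BYTES == 8.  On targets where bytes
   and words share endianness, the lowpart sits at byte 0 (little
   endian) or byte 4 (big endian) and the highpart at the complementary
   offset, so the two offsets sum to 4.  */

static ATTRIBUTE_UNUSED void
example_si_in_di_offsets (void)
{
  unsigned int low = subreg_size_lowpart_offset (4, 8);
  unsigned int high = subreg_size_highpart_offset (4, 8);
  if (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
    gcc_checking_assert (low + high == 4);
}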
d56d0ca2 | 1662 | |
15bbde2b | 1663 | /* Return 1 iff X, assumed to be a SUBREG, |
1664 | refers to the least significant part of its containing reg. | |
1665 | If X is not a SUBREG, always return 1 (it is its own low part!). */ | |
1666 | ||
1667 | int | |
b7bf20db | 1668 | subreg_lowpart_p (const_rtx x) |
15bbde2b | 1669 | { |
1670 | if (GET_CODE (x) != SUBREG) | |
1671 | return 1; | |
7e14c1bf | 1672 | else if (GET_MODE (SUBREG_REG (x)) == VOIDmode) |
1673 | return 0; | |
15bbde2b | 1674 | |
81802af6 | 1675 | return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x))) |
1676 | == SUBREG_BYTE (x)); | |
15bbde2b | 1677 | } |
1678 | \f | |
701e46d0 | 1679 | /* Return subword OFFSET of operand OP. |
1680 | The word number, OFFSET, is interpreted as the word number starting | |
1681 | at the low-order address. OFFSET 0 is the low-order word if not | |
1682 | WORDS_BIG_ENDIAN, otherwise it is the high-order word. | |
1683 | ||
1684 | If we cannot extract the required word, we return zero. Otherwise, | |
1685 | an rtx corresponding to the requested word will be returned. | |
1686 | ||
1687 | VALIDATE_ADDRESS is nonzero if the address should be validated. Before | |
1688 | reload has completed, a valid address will always be returned. After | |
1689 | reload, if a valid address cannot be returned, we return zero. | |
1690 | ||
1691 | If VALIDATE_ADDRESS is zero, we simply form the required address; validating | |
1692 | it is the responsibility of the caller. | |
1693 | ||
1694 | MODE is the mode of OP in case it is a CONST_INT. | |
1695 | ||
1696 | ??? This is still rather broken for some cases. The problem for the | |
1697 | moment is that all callers of this thing provide no 'goal mode' to | |
1698 | tell us to work with. This exists because all callers were written | |
84e81e84 | 1699 | in a word-based SUBREG world. |
1700 | Most uses of this function can nowadays be replaced by |
1701 | simplify_subreg. |
1702 | */ | |
701e46d0 | 1703 | |
1704 | rtx | |
3754d046 | 1705 | operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode) |
701e46d0 | 1706 | { |
1707 | if (mode == VOIDmode) | |
1708 | mode = GET_MODE (op); | |
1709 | ||
611234b4 | 1710 | gcc_assert (mode != VOIDmode); |
701e46d0 | 1711 | |
6312a35e | 1712 | /* If OP is narrower than a word, fail. */ |
701e46d0 | 1713 | if (mode != BLKmode |
1714 | && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)) | |
1715 | return 0; | |
1716 | ||
6312a35e | 1717 | /* If we want a word outside OP, return zero. */ |
701e46d0 | 1718 | if (mode != BLKmode |
1719 | && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode)) | |
1720 | return const0_rtx; | |
1721 | ||
701e46d0 | 1722 | /* Form a new MEM at the requested address. */ |
e16ceb8e | 1723 | if (MEM_P (op)) |
701e46d0 | 1724 | { |
9ce37fa7 | 1725 | rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD); |
701e46d0 | 1726 | |
e4e86ec5 | 1727 | if (! validate_address) |
9ce37fa7 | 1728 | return new_rtx; |
e4e86ec5 | 1729 | |
1730 | else if (reload_completed) | |
701e46d0 | 1731 | { |
bd1a81f7 | 1732 | if (! strict_memory_address_addr_space_p (word_mode, |
1733 | XEXP (new_rtx, 0), | |
1734 | MEM_ADDR_SPACE (op))) | |
e4e86ec5 | 1735 | return 0; |
701e46d0 | 1736 | } |
e4e86ec5 | 1737 | else |
9ce37fa7 | 1738 | return replace_equiv_address (new_rtx, XEXP (new_rtx, 0)); |
701e46d0 | 1739 | } |
1740 | ||
84e81e84 | 1741 | /* Rest can be handled by simplify_subreg. */ |
1742 | return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD)); | |
701e46d0 | 1743 | } |
1744 | ||
89f18f73 | 1745 | /* Similar to `operand_subword', but never return 0. If we can't |
1746 | extract the required subword, put OP into a register and try again. | |
1747 | The second attempt must succeed. We always validate the address in | |
1748 | this case. | |
15bbde2b | 1749 | |
1750 | MODE is the mode of OP, in case it is CONST_INT. */ | |
1751 | ||
1752 | rtx | |
3754d046 | 1753 | operand_subword_force (rtx op, unsigned int offset, machine_mode mode) |
15bbde2b | 1754 | { |
701e46d0 | 1755 | rtx result = operand_subword (op, offset, 1, mode); |
15bbde2b | 1756 | |
1757 | if (result) | |
1758 | return result; | |
1759 | ||
1760 | if (mode != BLKmode && mode != VOIDmode) | |
ac825d29 | 1761 | { |
1762 | /* If this is a register that cannot be accessed by words, copy it |
1763 | to a pseudo register. */ | |
8ad4c111 | 1764 | if (REG_P (op)) |
ac825d29 | 1765 | op = copy_to_reg (op); |
1766 | else | |
1767 | op = force_reg (mode, op); | |
1768 | } | |
15bbde2b | 1769 | |
701e46d0 | 1770 | result = operand_subword (op, offset, 1, mode); |
611234b4 | 1771 | gcc_assert (result); |
15bbde2b | 1772 | |
1773 | return result; | |
1774 | } | |
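/* Illustrative sketch (an addition): split a DImode value into its two
   word-sized pieces on a target where UNITS_PER_WORD == 4, so DImode
   spans two words.  Word 0 is the low-order word unless
   WORDS_BIG_ENDIAN.  operand_subword_force never returns null; it
   copies OP into a fresh pseudo when direct extraction fails.  */

static ATTRIBUTE_UNUSED void
example_split_di (rtx op, rtx *word0, rtx *word1)
{
  *word0 = operand_subword_force (op, 0, DImode);
  *word1 = operand_subword_force (op, 1, DImode);
}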
1775 | \f | |
b3ff8d90 | 1776 | /* Return 1 if the MEM_EXPRs EXPR1 and EXPR2 can be considered equal, |
1777 | and 0 otherwise. */ |
1778 | ||
1779 | int | |
52d07779 | 1780 | mem_expr_equal_p (const_tree expr1, const_tree expr2) |
b3ff8d90 | 1781 | { |
1782 | if (expr1 == expr2) | |
1783 | return 1; | |
1784 | ||
1785 | if (! expr1 || ! expr2) | |
1786 | return 0; | |
1787 | ||
1788 | if (TREE_CODE (expr1) != TREE_CODE (expr2)) | |
1789 | return 0; | |
1790 | ||
3a443843 | 1791 | return operand_equal_p (expr1, expr2, 0); |
b3ff8d90 | 1792 | } |
1793 | ||
ad0a178f | 1794 | /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN |
1795 | bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or | |
1796 | -1 if not known. */ | |
1797 | ||
1798 | int | |
7cfdc2f0 | 1799 | get_mem_align_offset (rtx mem, unsigned int align) |
ad0a178f | 1800 | { |
1801 | tree expr; | |
1802 | unsigned HOST_WIDE_INT offset; | |
1803 | ||
1804 | /* This function can't use | |
da443c27 | 1805 | if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem) |
98ab9e8f | 1806 | || (MAX (MEM_ALIGN (mem), |
957d0361 | 1807 | MAX (align, get_object_alignment (MEM_EXPR (mem)))) |
ad0a178f | 1808 | < align)) |
1809 | return -1; | |
1810 | else | |
da443c27 | 1811 | return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1); |
ad0a178f | 1812 | for two reasons: |
1813 | - COMPONENT_REFs in MEM_EXPR can have NULL first operand, | |
1814 | for <variable>. get_inner_reference doesn't handle it and | |
1815 | even if it did, the alignment in that case needs to be determined | |
1816 | from DECL_FIELD_CONTEXT's TYPE_ALIGN. | |
1817 | - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR | |
1818 | isn't sufficiently aligned, the object it is in might be. */ | |
1819 | gcc_assert (MEM_P (mem)); | |
1820 | expr = MEM_EXPR (mem); | |
da443c27 | 1821 | if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem)) |
ad0a178f | 1822 | return -1; |
1823 | ||
da443c27 | 1824 | offset = MEM_OFFSET (mem); |
ad0a178f | 1825 | if (DECL_P (expr)) |
1826 | { | |
1827 | if (DECL_ALIGN (expr) < align) | |
1828 | return -1; | |
1829 | } | |
1830 | else if (INDIRECT_REF_P (expr)) | |
1831 | { | |
1832 | if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align) | |
1833 | return -1; | |
1834 | } | |
1835 | else if (TREE_CODE (expr) == COMPONENT_REF) | |
1836 | { | |
1837 | while (1) | |
1838 | { | |
1839 | tree inner = TREE_OPERAND (expr, 0); | |
1840 | tree field = TREE_OPERAND (expr, 1); | |
1841 | tree byte_offset = component_ref_field_offset (expr); | |
1842 | tree bit_offset = DECL_FIELD_BIT_OFFSET (field); | |
1843 | ||
1844 | if (!byte_offset | |
e913b5cd | 1845 | || !tree_fits_uhwi_p (byte_offset) |
1846 | || !tree_fits_uhwi_p (bit_offset)) | |
ad0a178f | 1847 | return -1; |
1848 | ||
e913b5cd | 1849 | offset += tree_to_uhwi (byte_offset); |
1850 | offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT; | |
ad0a178f | 1851 | |
1852 | if (inner == NULL_TREE) | |
1853 | { | |
1854 | if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field)) | |
1855 | < (unsigned int) align) | |
1856 | return -1; | |
1857 | break; | |
1858 | } | |
1859 | else if (DECL_P (inner)) | |
1860 | { | |
1861 | if (DECL_ALIGN (inner) < align) | |
1862 | return -1; | |
1863 | break; | |
1864 | } | |
1865 | else if (TREE_CODE (inner) != COMPONENT_REF) | |
1866 | return -1; | |
1867 | expr = inner; | |
1868 | } | |
1869 | } | |
1870 | else | |
1871 | return -1; | |
1872 | ||
1873 | return offset & ((align / BITS_PER_UNIT) - 1); | |
1874 | } | |
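/* Illustrative sketch (an addition): ask how many bytes MEM's address
   is known to sit past the previous 32-bit boundary; -1 means the
   misalignment is not known.  */

static ATTRIBUTE_UNUSED int
example_misalignment_wrt_si (rtx mem)
{
  return get_mem_align_offset (mem, 32);
}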
1875 | ||
310b57a1 | 1876 | /* Given REF (a MEM) and T, either the type of REF or the expression |
c6259b83 | 1877 | corresponding to REF, set the memory attributes. OBJECTP is nonzero |
6f717f77 | 1878 | if we are making a new object of this type. BITPOS is nonzero if |
1879 | there is an offset outstanding on T that will be applied later. */ | |
c6259b83 | 1880 | |
1881 | void | |
35cb5232 | 1882 | set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, |
1883 | HOST_WIDE_INT bitpos) | |
c6259b83 | 1884 | { |
6f717f77 | 1885 | HOST_WIDE_INT apply_bitpos = 0; |
c6259b83 | 1886 | tree type; |
d72886b5 | 1887 | struct mem_attrs attrs, *defattrs, *refattrs; |
3f06bd1b | 1888 | addr_space_t as; |
c6259b83 | 1889 | |
1890 | /* It can happen that type_for_mode was given a mode for which there | |
1891 | is no language-level type. In which case it returns NULL, which | |
1892 | we can see here. */ | |
1893 | if (t == NULL_TREE) | |
1894 | return; | |
1895 | ||
1896 | type = TYPE_P (t) ? t : TREE_TYPE (t); | |
4ccffa39 | 1897 | if (type == error_mark_node) |
1898 | return; | |
c6259b83 | 1899 | |
c6259b83 | 1900 | /* If we have already set DECL_RTL = ref, get_alias_set will get the |
1901 | wrong answer, as it assumes that DECL_RTL already has the right alias | |
1902 | info. Callers should not set DECL_RTL until after the call to | |
1903 | set_mem_attributes. */ | |
611234b4 | 1904 | gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t)); |
c6259b83 | 1905 | |
d72886b5 | 1906 | memset (&attrs, 0, sizeof (attrs)); |
1907 | ||
96216d37 | 1908 | /* Get the alias set from the expression or type (perhaps using a |
2a631e19 | 1909 | front-end routine) and use it. */ |
d72886b5 | 1910 | attrs.alias = get_alias_set (t); |
c6259b83 | 1911 | |
fbc6244b | 1912 | MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type); |
8d350e69 | 1913 | MEM_POINTER (ref) = POINTER_TYPE_P (type); |
c6259b83 | 1914 | |
d8dccfe9 | 1915 | /* Default values from pre-existing memory attributes if present. */ |
d72886b5 | 1916 | refattrs = MEM_ATTRS (ref); |
1917 | if (refattrs) | |
d8dccfe9 | 1918 | { |
1919 | /* ??? Can this ever happen? Calling this routine on a MEM that | |
1920 | already carries memory attributes should probably be invalid. */ | |
d72886b5 | 1921 | attrs.expr = refattrs->expr; |
6d58bcba | 1922 | attrs.offset_known_p = refattrs->offset_known_p; |
d72886b5 | 1923 | attrs.offset = refattrs->offset; |
6d58bcba | 1924 | attrs.size_known_p = refattrs->size_known_p; |
d72886b5 | 1925 | attrs.size = refattrs->size; |
1926 | attrs.align = refattrs->align; | |
d8dccfe9 | 1927 | } |
1928 | ||
1929 | /* Otherwise, default values from the mode of the MEM reference. */ | |
d72886b5 | 1930 | else |
d8dccfe9 | 1931 | { |
d72886b5 | 1932 | defattrs = mode_mem_attrs[(int) GET_MODE (ref)]; |
1933 | gcc_assert (!defattrs->expr); | |
6d58bcba | 1934 | gcc_assert (!defattrs->offset_known_p); |
d72886b5 | 1935 | |
d8dccfe9 | 1936 | /* Respect mode size. */ |
6d58bcba | 1937 | attrs.size_known_p = defattrs->size_known_p; |
d72886b5 | 1938 | attrs.size = defattrs->size; |
d8dccfe9 | 1939 | /* ??? Is this really necessary? We probably should always get |
1940 | the size from the type below. */ | |
1941 | ||
1942 | /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type; | |
1943 | if T is an object, always compute the object alignment below. */ | |
d72886b5 | 1944 | if (TYPE_P (t)) |
1945 | attrs.align = defattrs->align; | |
1946 | else | |
1947 | attrs.align = BITS_PER_UNIT; | |
d8dccfe9 | 1948 | /* ??? If T is a type, respecting mode alignment may *also* be wrong |
1949 | e.g. if the type carries an alignment attribute. Should we be | |
1950 | able to simply always use TYPE_ALIGN? */ | |
1951 | } | |
1952 | ||
b3b6e4b5 | 1953 | /* We can set the alignment from the type if we are making an object or if |
1954 | this is an INDIRECT_REF. */ | |
1955 | if (objectp || TREE_CODE (t) == INDIRECT_REF) | |
d72886b5 | 1956 | attrs.align = MAX (attrs.align, TYPE_ALIGN (type)); |
679e0056 | 1957 | |
96216d37 | 1958 | /* If the size is known, we can set that. */ |
50ba3acc | 1959 | tree new_size = TYPE_SIZE_UNIT (type); |
96216d37 | 1960 | |
9eec20bf | 1961 | /* The address-space is that of the type. */ |
1962 | as = TYPE_ADDR_SPACE (type); | |
1963 | ||
579bccf9 | 1964 | /* If T is not a type, we may be able to deduce some more information about |
1965 | the expression. */ | |
1966 | if (! TYPE_P (t)) | |
2a631e19 | 1967 | { |
ae2dd339 | 1968 | tree base; |
b04fab2a | 1969 | |
2a631e19 | 1970 | if (TREE_THIS_VOLATILE (t)) |
1971 | MEM_VOLATILE_P (ref) = 1; | |
c6259b83 | 1972 | |
3c00f11c | 1973 | /* Now remove any conversions: they don't change what the underlying |
1974 | object is. Likewise for SAVE_EXPR. */ | |
72dd6141 | 1975 | while (CONVERT_EXPR_P (t) |
3c00f11c | 1976 | || TREE_CODE (t) == VIEW_CONVERT_EXPR |
1977 | || TREE_CODE (t) == SAVE_EXPR) | |
2a631e19 | 1978 | t = TREE_OPERAND (t, 0); |
1979 | ||
73eb0a09 | 1980 | /* Note whether this expression can trap. */ |
1981 | MEM_NOTRAP_P (ref) = !tree_could_trap_p (t); | |
1982 | ||
1983 | base = get_base_address (t); | |
3f06bd1b | 1984 | if (base) |
1985 | { | |
1986 | if (DECL_P (base) | |
1987 | && TREE_READONLY (base) | |
1988 | && (TREE_STATIC (base) || DECL_EXTERNAL (base)) | |
1989 | && !TREE_THIS_VOLATILE (base)) | |
1990 | MEM_READONLY_P (ref) = 1; | |
1991 | ||
1992 | /* Mark static const strings readonly as well. */ | |
1993 | if (TREE_CODE (base) == STRING_CST | |
1994 | && TREE_READONLY (base) | |
1995 | && TREE_STATIC (base)) | |
1996 | MEM_READONLY_P (ref) = 1; | |
1997 | ||
9eec20bf | 1998 | /* Address-space information is on the base object. */ |
3f06bd1b | 1999 | if (TREE_CODE (base) == MEM_REF |
2000 | || TREE_CODE (base) == TARGET_MEM_REF) | |
2001 | as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, | |
2002 | 0)))); | |
2003 | else | |
2004 | as = TYPE_ADDR_SPACE (TREE_TYPE (base)); | |
2005 | } | |
cab98a0d | 2006 | |
2b02580f | 2007 | /* If this expression uses its parent's alias set, mark it such |
2008 | that we won't change it. */ | |
d400f5e1 | 2009 | if (component_uses_parent_alias_set_from (t) != NULL_TREE) |
5cc193e7 | 2010 | MEM_KEEP_ALIAS_SET_P (ref) = 1; |
2011 | ||
2a631e19 | 2012 | /* If this is a decl, set the attributes of the MEM from it. */ |
2013 | if (DECL_P (t)) | |
2014 | { | |
d72886b5 | 2015 | attrs.expr = t; |
6d58bcba | 2016 | attrs.offset_known_p = true; |
2017 | attrs.offset = 0; | |
6f717f77 | 2018 | apply_bitpos = bitpos; |
50ba3acc | 2019 | new_size = DECL_SIZE_UNIT (t); |
2a631e19 | 2020 | } |
2021 | ||
9eec20bf | 2022 | /* ??? If we end up with a constant here do record a MEM_EXPR. */ |
ce45a448 | 2023 | else if (CONSTANT_CLASS_P (t)) |
9eec20bf | 2024 | ; |
b10dbbca | 2025 | |
50ba3acc | 2026 | /* If this is a field reference, record it. */ |
2027 | else if (TREE_CODE (t) == COMPONENT_REF) | |
b10dbbca | 2028 | { |
d72886b5 | 2029 | attrs.expr = t; |
6d58bcba | 2030 | attrs.offset_known_p = true; |
2031 | attrs.offset = 0; | |
6f717f77 | 2032 | apply_bitpos = bitpos; |
50ba3acc | 2033 | if (DECL_BIT_FIELD (TREE_OPERAND (t, 1))) |
2034 | new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1)); | |
b10dbbca | 2035 | } |
2036 | ||
2037 | /* If this is an array reference, look for an outer field reference. */ | |
2038 | else if (TREE_CODE (t) == ARRAY_REF) | |
2039 | { | |
2040 | tree off_tree = size_zero_node; | |
6b039979 | 2041 | /* We can't modify t, because we use it at the end of the |
2042 | function. */ | |
2043 | tree t2 = t; | |
b10dbbca | 2044 | |
2045 | do | |
2046 | { | |
6b039979 | 2047 | tree index = TREE_OPERAND (t2, 1); |
6374121b | 2048 | tree low_bound = array_ref_low_bound (t2); |
2049 | tree unit_size = array_ref_element_size (t2); | |
97f8ce30 | 2050 | |
2051 | /* We assume all arrays have sizes that are a multiple of a byte. | |
2052 | First subtract the lower bound, if any, in the type of the | |
6374121b | 2053 | index, then convert to sizetype and multiply by the size of |
2054 | the array element. */ | |
2055 | if (! integer_zerop (low_bound)) | |
faa43f85 | 2056 | index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), |
2057 | index, low_bound); | |
97f8ce30 | 2058 | |
6374121b | 2059 | off_tree = size_binop (PLUS_EXPR, |
535664e3 | 2060 | size_binop (MULT_EXPR, |
2061 | fold_convert (sizetype, | |
2062 | index), | |
6374121b | 2063 | unit_size), |
2064 | off_tree); | |
6b039979 | 2065 | t2 = TREE_OPERAND (t2, 0); |
b10dbbca | 2066 | } |
6b039979 | 2067 | while (TREE_CODE (t2) == ARRAY_REF); |
b10dbbca | 2068 | |
9eec20bf | 2069 | if (DECL_P (t2) |
6a57a1e8 | 2070 | || (TREE_CODE (t2) == COMPONENT_REF |
2071 | /* For trailing arrays t2 doesn't have a size that | |
2072 | covers all valid accesses. */ | |
07110764 | 2073 | && ! array_at_struct_end_p (t))) |
b10dbbca | 2074 | { |
d72886b5 | 2075 | attrs.expr = t2; |
6d58bcba | 2076 | attrs.offset_known_p = false; |
e913b5cd | 2077 | if (tree_fits_uhwi_p (off_tree)) |
6f717f77 | 2078 | { |
6d58bcba | 2079 | attrs.offset_known_p = true; |
e913b5cd | 2080 | attrs.offset = tree_to_uhwi (off_tree); |
6f717f77 | 2081 | apply_bitpos = bitpos; |
2082 | } | |
b10dbbca | 2083 | } |
9eec20bf | 2084 | /* Else do not record a MEM_EXPR. */ |
2d8fe5d0 | 2085 | } |
2086 | ||
6d72287b | 2087 | /* If this is an indirect reference, record it. */ |
182cf5a9 | 2088 | else if (TREE_CODE (t) == MEM_REF |
5d9de213 | 2089 | || TREE_CODE (t) == TARGET_MEM_REF) |
6d72287b | 2090 | { |
d72886b5 | 2091 | attrs.expr = t; |
6d58bcba | 2092 | attrs.offset_known_p = true; |
2093 | attrs.offset = 0; | |
6d72287b | 2094 | apply_bitpos = bitpos; |
2095 | } | |
2096 | ||
9eec20bf | 2097 | /* Compute the alignment. */ |
2098 | unsigned int obj_align; | |
2099 | unsigned HOST_WIDE_INT obj_bitpos; | |
2100 | get_object_alignment_1 (t, &obj_align, &obj_bitpos); | |
2101 | obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1); | |
2102 | if (obj_bitpos != 0) | |
ac29ece2 | 2103 | obj_align = least_bit_hwi (obj_bitpos); |
9eec20bf | 2104 | attrs.align = MAX (attrs.align, obj_align); |
2a631e19 | 2105 | } |
2106 | ||
e913b5cd | 2107 | if (tree_fits_uhwi_p (new_size)) |
50ba3acc | 2108 | { |
2109 | attrs.size_known_p = true; | |
e913b5cd | 2110 | attrs.size = tree_to_uhwi (new_size); |
50ba3acc | 2111 | } |
2112 | ||
e2e205b3 | 2113 | /* If we modified OFFSET based on T, then subtract the outstanding |
595f1461 | 2114 | bit position offset. Similarly, increase the size of the accessed |
2115 | object to contain the negative offset. */ | |
6f717f77 | 2116 | if (apply_bitpos) |
595f1461 | 2117 | { |
6d58bcba | 2118 | gcc_assert (attrs.offset_known_p); |
2119 | attrs.offset -= apply_bitpos / BITS_PER_UNIT; | |
2120 | if (attrs.size_known_p) | |
2121 | attrs.size += apply_bitpos / BITS_PER_UNIT; | |
595f1461 | 2122 | } |
6f717f77 | 2123 | |
2a631e19 | 2124 | /* Now set the attributes we computed above. */ |
3f06bd1b | 2125 | attrs.addrspace = as; |
d72886b5 | 2126 | set_mem_attrs (ref, &attrs); |
c6259b83 | 2127 | } |
2128 | ||
6f717f77 | 2129 | void |
35cb5232 | 2130 | set_mem_attributes (rtx ref, tree t, int objectp) |
6f717f77 | 2131 | { |
2132 | set_mem_attributes_minus_bitpos (ref, t, objectp, 0); | |
2133 | } | |
2134 | ||
c6259b83 | 2135 | /* Set the alias set of MEM to SET. */ |
2136 | ||
2137 | void | |
32c2fdea | 2138 | set_mem_alias_set (rtx mem, alias_set_type set) |
c6259b83 | 2139 | { |
d72886b5 | 2140 | struct mem_attrs attrs; |
2141 | ||
c6259b83 | 2142 | /* If the new and old alias sets don't conflict, something is wrong. */ |
1b4345f7 | 2143 | gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem))); |
d72886b5 | 2144 | attrs = *get_mem_attrs (mem); |
2145 | attrs.alias = set; | |
2146 | set_mem_attrs (mem, &attrs); | |
bd1a81f7 | 2147 | } |
2148 | ||
2149 | /* Set the address space of MEM to ADDRSPACE (target-defined). */ | |
2150 | ||
2151 | void | |
2152 | set_mem_addr_space (rtx mem, addr_space_t addrspace) | |
2153 | { | |
d72886b5 | 2154 | struct mem_attrs attrs; |
2155 | ||
2156 | attrs = *get_mem_attrs (mem); | |
2157 | attrs.addrspace = addrspace; | |
2158 | set_mem_attrs (mem, &attrs); | |
c6259b83 | 2159 | } |
96216d37 | 2160 | |
1c4512da | 2161 | /* Set the alignment of MEM to ALIGN bits. */ |
96216d37 | 2162 | |
2163 | void | |
35cb5232 | 2164 | set_mem_align (rtx mem, unsigned int align) |
96216d37 | 2165 | { |
d72886b5 | 2166 | struct mem_attrs attrs; |
2167 | ||
2168 | attrs = *get_mem_attrs (mem); | |
2169 | attrs.align = align; | |
2170 | set_mem_attrs (mem, &attrs); | |
96216d37 | 2171 | } |
278fe152 | 2172 | |
b10dbbca | 2173 | /* Set the expr for MEM to EXPR. */ |
278fe152 | 2174 | |
2175 | void | |
35cb5232 | 2176 | set_mem_expr (rtx mem, tree expr) |
278fe152 | 2177 | { |
d72886b5 | 2178 | struct mem_attrs attrs; |
2179 | ||
2180 | attrs = *get_mem_attrs (mem); | |
2181 | attrs.expr = expr; | |
2182 | set_mem_attrs (mem, &attrs); | |
278fe152 | 2183 | } |
b10dbbca | 2184 | |
2185 | /* Set the offset of MEM to OFFSET. */ | |
2186 | ||
2187 | void | |
da443c27 | 2188 | set_mem_offset (rtx mem, HOST_WIDE_INT offset) |
b10dbbca | 2189 | { |
d72886b5 | 2190 | struct mem_attrs attrs; |
2191 | ||
2192 | attrs = *get_mem_attrs (mem); | |
6d58bcba | 2193 | attrs.offset_known_p = true; |
2194 | attrs.offset = offset; | |
da443c27 | 2195 | set_mem_attrs (mem, &attrs); |
2196 | } | |
2197 | ||
2198 | /* Clear the offset of MEM. */ | |
2199 | ||
2200 | void | |
2201 | clear_mem_offset (rtx mem) | |
2202 | { | |
2203 | struct mem_attrs attrs; | |
2204 | ||
2205 | attrs = *get_mem_attrs (mem); | |
6d58bcba | 2206 | attrs.offset_known_p = false; |
d72886b5 | 2207 | set_mem_attrs (mem, &attrs); |
f0500469 | 2208 | } |
2209 | ||
2210 | /* Set the size of MEM to SIZE. */ | |
2211 | ||
2212 | void | |
5b2a69fa | 2213 | set_mem_size (rtx mem, HOST_WIDE_INT size) |
f0500469 | 2214 | { |
d72886b5 | 2215 | struct mem_attrs attrs; |
2216 | ||
2217 | attrs = *get_mem_attrs (mem); | |
6d58bcba | 2218 | attrs.size_known_p = true; |
2219 | attrs.size = size; | |
5b2a69fa | 2220 | set_mem_attrs (mem, &attrs); |
2221 | } | |
2222 | ||
2223 | /* Clear the size of MEM. */ | |
2224 | ||
2225 | void | |
2226 | clear_mem_size (rtx mem) | |
2227 | { | |
2228 | struct mem_attrs attrs; | |
2229 | ||
2230 | attrs = *get_mem_attrs (mem); | |
6d58bcba | 2231 | attrs.size_known_p = false; |
d72886b5 | 2232 | set_mem_attrs (mem, &attrs); |
b10dbbca | 2233 | } |
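/* Illustrative sketch (an addition): stamping attributes onto a
   freshly built MEM by hand.  Most expanders get all of this at once
   from a tree via set_mem_attributes; the piecemeal setters above suit
   cases where only some attributes are known.  EXPR is assumed to be
   the tree object the access refers to.  */

static ATTRIBUTE_UNUSED rtx
example_annotated_mem (rtx addr, tree expr)
{
  rtx mem = gen_rtx_MEM (SImode, addr);
  set_mem_expr (mem, expr);    /* Underlying object.  */
  set_mem_offset (mem, 0);     /* Known offset within EXPR.  */
  set_mem_size (mem, 4);       /* The access covers four bytes.  */
  set_mem_align (mem, 32);     /* The address is 32-bit aligned.  */
  return mem;
}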
c6259b83 | 2234 | \f |
96216d37 | 2235 | /* Return a memory reference like MEMREF, but with its mode changed to MODE |
2236 | and its address changed to ADDR. (VOIDmode means don't change the mode. | |
2237 | NULL for ADDR means don't change the address.) VALIDATE is nonzero if the | |
5cc04e45 | 2238 | returned memory location is required to be valid. INPLACE is true if any |
2239 | changes can be made directly to MEMREF or false if MEMREF must be treated | |
2240 | as immutable. | |
2241 | ||
2242 | The memory attributes are not changed. */ | |
15bbde2b | 2243 | |
96216d37 | 2244 | static rtx |
3754d046 | 2245 | change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate, |
5cc04e45 | 2246 | bool inplace) |
15bbde2b | 2247 | { |
bd1a81f7 | 2248 | addr_space_t as; |
9ce37fa7 | 2249 | rtx new_rtx; |
15bbde2b | 2250 | |
611234b4 | 2251 | gcc_assert (MEM_P (memref)); |
bd1a81f7 | 2252 | as = MEM_ADDR_SPACE (memref); |
15bbde2b | 2253 | if (mode == VOIDmode) |
2254 | mode = GET_MODE (memref); | |
2255 | if (addr == 0) | |
2256 | addr = XEXP (memref, 0); | |
3988ef8b | 2257 | if (mode == GET_MODE (memref) && addr == XEXP (memref, 0) |
bd1a81f7 | 2258 | && (!validate || memory_address_addr_space_p (mode, addr, as))) |
3988ef8b | 2259 | return memref; |
15bbde2b | 2260 | |
73a18f44 | 2261 | /* Don't validate address for LRA. LRA can make the address valid |
2262 | by itself in the most efficient way. */ |
2263 | if (validate && !lra_in_progress) | |
15bbde2b | 2264 | { |
e4e86ec5 | 2265 | if (reload_in_progress || reload_completed) |
bd1a81f7 | 2266 | gcc_assert (memory_address_addr_space_p (mode, addr, as)); |
e4e86ec5 | 2267 | else |
bd1a81f7 | 2268 | addr = memory_address_addr_space (mode, addr, as); |
15bbde2b | 2269 | } |
d823ba47 | 2270 | |
e8976cd7 | 2271 | if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref)) |
2272 | return memref; | |
2273 | ||
5cc04e45 | 2274 | if (inplace) |
2275 | { | |
2276 | XEXP (memref, 0) = addr; | |
2277 | return memref; | |
2278 | } | |
2279 | ||
9ce37fa7 | 2280 | new_rtx = gen_rtx_MEM (mode, addr); |
2281 | MEM_COPY_ATTRIBUTES (new_rtx, memref); | |
2282 | return new_rtx; | |
15bbde2b | 2283 | } |
537ffcfc | 2284 | |
96216d37 | 2285 | /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what |
2286 | way we are changing MEMREF, so we only preserve the alias set. */ | |
e513d163 | 2287 | |
2288 | rtx | |
3754d046 | 2289 | change_address (rtx memref, machine_mode mode, rtx addr) |
e513d163 | 2290 | { |
5cc04e45 | 2291 | rtx new_rtx = change_address_1 (memref, mode, addr, 1, false); |
3754d046 | 2292 | machine_mode mmode = GET_MODE (new_rtx); |
d72886b5 | 2293 | struct mem_attrs attrs, *defattrs; |
0ab96142 | 2294 | |
d72886b5 | 2295 | attrs = *get_mem_attrs (memref); |
2296 | defattrs = mode_mem_attrs[(int) mmode]; | |
6d58bcba | 2297 | attrs.expr = NULL_TREE; |
2298 | attrs.offset_known_p = false; | |
2299 | attrs.size_known_p = defattrs->size_known_p; | |
d72886b5 | 2300 | attrs.size = defattrs->size; |
2301 | attrs.align = defattrs->align; | |
6cc60c4d | 2302 | |
d28edf0d | 2303 | /* If there are no changes, just return the original memory reference. */ |
9ce37fa7 | 2304 | if (new_rtx == memref) |
0ab96142 | 2305 | { |
d72886b5 | 2306 | if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs)) |
9ce37fa7 | 2307 | return new_rtx; |
0ab96142 | 2308 | |
9ce37fa7 | 2309 | new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0)); |
2310 | MEM_COPY_ATTRIBUTES (new_rtx, memref); | |
0ab96142 | 2311 | } |
d28edf0d | 2312 | |
d72886b5 | 2313 | set_mem_attrs (new_rtx, &attrs); |
9ce37fa7 | 2314 | return new_rtx; |
e513d163 | 2315 | } |
537ffcfc | 2316 | |
96216d37 | 2317 | /* Return a memory reference like MEMREF, but with its mode changed |
2318 | to MODE and its address offset by OFFSET bytes. If VALIDATE is | |
bf42c62d | 2319 | nonzero, the memory address is forced to be valid. |
2d0fd66d | 2320 | If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS |
2321 | and the caller is responsible for adjusting MEMREF base register. | |
2322 | If ADJUST_OBJECT is zero, the underlying object associated with the | |
2323 | memory reference is left unchanged and the caller is responsible for | |
2324 | dealing with it. Otherwise, if the new memory reference is outside | |
226c6baf | 2325 | the underlying object, even partially, then the object is dropped. |
2326 | SIZE, if nonzero, is the size of an access in cases where MODE | |
2327 | has no inherent size. */ | |
e4e86ec5 | 2328 | |
2329 | rtx | |
3754d046 | 2330 | adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset, |
226c6baf | 2331 | int validate, int adjust_address, int adjust_object, |
2332 | HOST_WIDE_INT size) | |
e4e86ec5 | 2333 | { |
fb257ae6 | 2334 | rtx addr = XEXP (memref, 0); |
9ce37fa7 | 2335 | rtx new_rtx; |
f77c4496 | 2336 | scalar_int_mode address_mode; |
cfb75cdf | 2337 | int pbits; |
21b8bc7e | 2338 | struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs; |
d72886b5 | 2339 | unsigned HOST_WIDE_INT max_align; |
21b8bc7e | 2340 | #ifdef POINTERS_EXTEND_UNSIGNED |
f77c4496 | 2341 | scalar_int_mode pointer_mode |
21b8bc7e | 2342 | = targetm.addr_space.pointer_mode (attrs.addrspace); |
2343 | #endif | |
fb257ae6 | 2344 | |
4733f549 | 2345 | /* VOIDmode means no mode change for change_address_1. */ |
2346 | if (mode == VOIDmode) | |
2347 | mode = GET_MODE (memref); | |
2348 | ||
226c6baf | 2349 | /* Take the size of non-BLKmode accesses from the mode. */ |
2350 | defattrs = mode_mem_attrs[(int) mode]; | |
2351 | if (defattrs->size_known_p) | |
2352 | size = defattrs->size; | |
2353 | ||
d28edf0d | 2354 | /* If there are no changes, just return the original memory reference. */ |
2355 | if (mode == GET_MODE (memref) && !offset | |
226c6baf | 2356 | && (size == 0 || (attrs.size_known_p && attrs.size == size)) |
d72886b5 | 2357 | && (!validate || memory_address_addr_space_p (mode, addr, |
2358 | attrs.addrspace))) | |
d28edf0d | 2359 | return memref; |
2360 | ||
e36c3d58 | 2361 | /* ??? Prefer to create garbage instead of creating shared rtl. |
6ef828f9 | 2362 | This may happen even if offset is nonzero -- consider |
e36c3d58 | 2363 | (plus (plus reg reg) const_int) -- so do this always. */ |
2364 | addr = copy_rtx (addr); | |
2365 | ||
cfb75cdf | 2366 | /* Convert a possibly large offset to a signed value within the |
2367 | range of the target address space. */ | |
87cf5753 | 2368 | address_mode = get_address_mode (memref); |
98155838 | 2369 | pbits = GET_MODE_BITSIZE (address_mode); |
cfb75cdf | 2370 | if (HOST_BITS_PER_WIDE_INT > pbits) |
2371 | { | |
2372 | int shift = HOST_BITS_PER_WIDE_INT - pbits; | |
2373 | offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift)) | |
2374 | >> shift); | |
2375 | } | |
2376 | ||
2d0fd66d | 2377 | if (adjust_address) |
cd358719 | 2378 | { |
2379 | /* If MEMREF is a LO_SUM and the offset is within the alignment of the | |
2380 | object, we can merge it into the LO_SUM. */ | |
2381 | if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM | |
2382 | && offset >= 0 | |
2383 | && (unsigned HOST_WIDE_INT) offset | |
2384 | < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT) | |
98155838 | 2385 | addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0), |
29c05e22 | 2386 | plus_constant (address_mode, |
2387 | XEXP (addr, 1), offset)); | |
21b8bc7e | 2388 | #ifdef POINTERS_EXTEND_UNSIGNED |
2389 | /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid | |
2390 | in that mode, we merge it into the ZERO_EXTEND. We take advantage of | |
2391 | the fact that pointers are not allowed to overflow. */ | |
2392 | else if (POINTERS_EXTEND_UNSIGNED > 0 | |
2393 | && GET_CODE (addr) == ZERO_EXTEND | |
2394 | && GET_MODE (XEXP (addr, 0)) == pointer_mode | |
2395 | && trunc_int_for_mode (offset, pointer_mode) == offset) | |
2396 | addr = gen_rtx_ZERO_EXTEND (address_mode, | |
2397 | plus_constant (pointer_mode, | |
2398 | XEXP (addr, 0), offset)); | |
2399 | #endif | |
cd358719 | 2400 | else |
29c05e22 | 2401 | addr = plus_constant (address_mode, addr, offset); |
cd358719 | 2402 | } |
fb257ae6 | 2403 | |
5cc04e45 | 2404 | new_rtx = change_address_1 (memref, mode, addr, validate, false); |
96216d37 | 2405 | |
e077413c | 2406 | /* If the address is a REG, change_address_1 rightfully returns memref, |
2407 | but this would destroy memref's MEM_ATTRS. */ | |
2408 | if (new_rtx == memref && offset != 0) | |
2409 | new_rtx = copy_rtx (new_rtx); | |
2410 | ||
2d0fd66d | 2411 | /* Conservatively drop the object if we don't know where we start from. */ |
2412 | if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p)) | |
2413 | { | |
2414 | attrs.expr = NULL_TREE; | |
2415 | attrs.alias = 0; | |
2416 | } | |
2417 | ||
96216d37 | 2418 | /* Compute the new values of the memory attributes due to this adjustment. |
2419 | We add the offsets and update the alignment. */ | |
6d58bcba | 2420 | if (attrs.offset_known_p) |
2d0fd66d | 2421 | { |
2422 | attrs.offset += offset; | |
2423 | ||
2424 | /* Drop the object if the new left end is not within its bounds. */ | |
2425 | if (adjust_object && attrs.offset < 0) | |
2426 | { | |
2427 | attrs.expr = NULL_TREE; | |
2428 | attrs.alias = 0; | |
2429 | } | |
2430 | } | |
96216d37 | 2431 | |
b8098e5b | 2432 | /* Compute the new alignment by taking the MIN of the alignment and the |
2433 | lowest-order set bit in OFFSET, but don't change the alignment if OFFSET | |
2434 | is zero. */ |
2435 | if (offset != 0) | |
d72886b5 | 2436 | { |
ac29ece2 | 2437 | max_align = least_bit_hwi (offset) * BITS_PER_UNIT; |
d72886b5 | 2438 | attrs.align = MIN (attrs.align, max_align); |
2439 | } | |
96216d37 | 2440 | |
226c6baf | 2441 | if (size) |
6d58bcba | 2442 | { |
2d0fd66d | 2443 | /* Drop the object if the new right end is not within its bounds. */ |
226c6baf | 2444 | if (adjust_object && (offset + size) > attrs.size) |
2d0fd66d | 2445 | { |
2446 | attrs.expr = NULL_TREE; | |
2447 | attrs.alias = 0; | |
2448 | } | |
6d58bcba | 2449 | attrs.size_known_p = true; |
226c6baf | 2450 | attrs.size = size; |
6d58bcba | 2451 | } |
2452 | else if (attrs.size_known_p) | |
2d0fd66d | 2453 | { |
226c6baf | 2454 | gcc_assert (!adjust_object); |
2d0fd66d | 2455 | attrs.size -= offset; |
226c6baf | 2456 | /* ??? The store_by_pieces machinery generates negative sizes, |
2457 | so don't assert for that here. */ | |
2d0fd66d | 2458 | } |
5cc193e7 | 2459 | |
d72886b5 | 2460 | set_mem_attrs (new_rtx, &attrs); |
96216d37 | 2461 | |
9ce37fa7 | 2462 | return new_rtx; |
e4e86ec5 | 2463 | } |
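/* Illustrative sketch (an addition): address the second halfword of a
   word in memory.  The adjust_address macro from rtl.h wraps
   adjust_address_1 with VALIDATE and ADJUST_ADDRESS nonzero.  */

static ATTRIBUTE_UNUSED rtx
example_second_halfword (rtx si_mem)
{
  return adjust_address (si_mem, HImode, 2);
}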
2464 | ||
bf42c62d | 2465 | /* Return a memory reference like MEMREF, but with its mode changed |
2466 | to MODE and its address changed to ADDR, which is assumed to be | |
f0b5f617 | 2467 | MEMREF offset by OFFSET bytes. If VALIDATE is |
bf42c62d | 2468 | nonzero, the memory address is forced to be valid. */ |
2469 | ||
2470 | rtx | |
3754d046 | 2471 | adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr, |
35cb5232 | 2472 | HOST_WIDE_INT offset, int validate) |
bf42c62d | 2473 | { |
5cc04e45 | 2474 | memref = change_address_1 (memref, VOIDmode, addr, validate, false); |
226c6baf | 2475 | return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0); |
bf42c62d | 2476 | } |
2477 | ||
2a631e19 | 2478 | /* Return a memory reference like MEMREF, but whose address is changed by |
2479 | adding OFFSET, an RTX, to it. POW2 is the highest power of two factor | |
2480 | known to be in OFFSET (possibly 1). */ | |
fcdc122e | 2481 | |
2482 | rtx | |
35cb5232 | 2483 | offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2) |
fcdc122e | 2484 | { |
9ce37fa7 | 2485 | rtx new_rtx, addr = XEXP (memref, 0); |
3754d046 | 2486 | machine_mode address_mode; |
6d58bcba | 2487 | struct mem_attrs attrs, *defattrs; |
fac6aae6 | 2488 | |
d72886b5 | 2489 | attrs = *get_mem_attrs (memref); |
87cf5753 | 2490 | address_mode = get_address_mode (memref); |
98155838 | 2491 | new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset); |
fac6aae6 | 2492 | |
d4c5e26d | 2493 | /* At this point we don't know _why_ the address is invalid. It |
917bbcab | 2494 | could have secondary memory references, multiplies or anything. |
fac6aae6 | 2495 | |
2496 | However, if we did go and rearrange things, we can wind up not | |
2497 | being able to recognize the magic around pic_offset_table_rtx. | |
2498 | This stuff is fragile, and is yet another example of why it is | |
2499 | bad to expose PIC machinery too early. */ | |
d72886b5 | 2500 | if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, |
2501 | attrs.addrspace) | |
fac6aae6 | 2502 | && GET_CODE (addr) == PLUS |
2503 | && XEXP (addr, 0) == pic_offset_table_rtx) | |
2504 | { | |
2505 | addr = force_reg (GET_MODE (addr), addr); | |
98155838 | 2506 | new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset); |
fac6aae6 | 2507 | } |
2508 | ||
9ce37fa7 | 2509 | update_temp_slot_address (XEXP (memref, 0), new_rtx); |
5cc04e45 | 2510 | new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false); |
fcdc122e | 2511 | |
d28edf0d | 2512 | /* If there are no changes, just return the original memory reference. */ |
9ce37fa7 | 2513 | if (new_rtx == memref) |
2514 | return new_rtx; | |
d28edf0d | 2515 | |
fcdc122e | 2516 | /* Update the alignment to reflect the offset. Reset the offset, which |
2517 | we don't know. */ | |
6d58bcba | 2518 | defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)]; |
2519 | attrs.offset_known_p = false; | |
2520 | attrs.size_known_p = defattrs->size_known_p; | |
2521 | attrs.size = defattrs->size; | |
d72886b5 | 2522 | attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT); |
2523 | set_mem_attrs (new_rtx, &attrs); | |
9ce37fa7 | 2524 | return new_rtx; |
fcdc122e | 2525 | } |
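/* Illustrative sketch (an addition): form a reference at a run-time
   byte offset from MEM, where the offset is known to be a multiple of
   4 (the POW2 argument records that factor).  */

static ATTRIBUTE_UNUSED rtx
example_runtime_indexed_mem (rtx mem, rtx byte_offset)
{
  return offset_address (mem, byte_offset, 4);
}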
d4c5e26d | 2526 | |
537ffcfc | 2527 | /* Return a memory reference like MEMREF, but with its address changed to |
2528 | ADDR. The caller is asserting that the actual piece of memory pointed | |
2529 | to is the same, just the form of the address is being changed, such as | |
5cc04e45 | 2530 | by putting something into a register. INPLACE is true if any changes |
2531 | can be made directly to MEMREF or false if MEMREF must be treated as | |
2532 | immutable. */ | |
537ffcfc | 2533 | |
2534 | rtx | |
5cc04e45 | 2535 | replace_equiv_address (rtx memref, rtx addr, bool inplace) |
537ffcfc | 2536 | { |
96216d37 | 2537 | /* change_address_1 copies the memory attribute structure without change |
2538 | and that's exactly what we want here. */ | |
ecfe4ca9 | 2539 | update_temp_slot_address (XEXP (memref, 0), addr); |
5cc04e45 | 2540 | return change_address_1 (memref, VOIDmode, addr, 1, inplace); |
537ffcfc | 2541 | } |
96216d37 | 2542 | |
e4e86ec5 | 2543 | /* Likewise, but the reference is not required to be valid. */ |
2544 | ||
2545 | rtx | |
5cc04e45 | 2546 | replace_equiv_address_nv (rtx memref, rtx addr, bool inplace) |
e4e86ec5 | 2547 | { |
5cc04e45 | 2548 | return change_address_1 (memref, VOIDmode, addr, 0, inplace); |
e4e86ec5 | 2549 | } |
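/* Illustrative sketch (an addition): canonicalize a MEM's address into
   a register without changing which bytes are referenced.  */

static ATTRIBUTE_UNUSED rtx
example_address_in_reg (rtx mem)
{
  rtx addr = force_reg (Pmode, XEXP (mem, 0));
  return replace_equiv_address (mem, addr, false);
}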
8259ab07 | 2550 | |
2551 | /* Return a memory reference like MEMREF, but with its mode widened to | |
2552 | MODE and offset by OFFSET. This would be used by targets that e.g. | |
2553 | cannot issue QImode memory operations and have to use SImode memory | |
2554 | operations plus masking logic. */ | |
2555 | ||
2556 | rtx | |
3754d046 | 2557 | widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset) |
8259ab07 | 2558 | { |
226c6baf | 2559 | rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0); |
d72886b5 | 2560 | struct mem_attrs attrs; |
8259ab07 | 2561 | unsigned int size = GET_MODE_SIZE (mode); |
2562 | ||
d28edf0d | 2563 | /* If there are no changes, just return the original memory reference. */ |
9ce37fa7 | 2564 | if (new_rtx == memref) |
2565 | return new_rtx; | |
d28edf0d | 2566 | |
d72886b5 | 2567 | attrs = *get_mem_attrs (new_rtx); |
2568 | ||
8259ab07 | 2569 | /* If we don't know what offset we were at within the expression, then |
2570 | we can't know if we've overstepped the bounds. */ | |
6d58bcba | 2571 | if (! attrs.offset_known_p) |
d72886b5 | 2572 | attrs.expr = NULL_TREE; |
8259ab07 | 2573 | |
d72886b5 | 2574 | while (attrs.expr) |
8259ab07 | 2575 | { |
d72886b5 | 2576 | if (TREE_CODE (attrs.expr) == COMPONENT_REF) |
8259ab07 | 2577 | { |
d72886b5 | 2578 | tree field = TREE_OPERAND (attrs.expr, 1); |
2579 | tree offset = component_ref_field_offset (attrs.expr); | |
8259ab07 | 2580 | |
2581 | if (! DECL_SIZE_UNIT (field)) | |
2582 | { | |
d72886b5 | 2583 | attrs.expr = NULL_TREE; |
8259ab07 | 2584 | break; |
2585 | } | |
2586 | ||
2587 | /* Is the field at least as large as the access? If so, ok, | |
2588 | otherwise strip back to the containing structure. */ | |
8359cfb4 | 2589 | if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST |
2590 | && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0 | |
6d58bcba | 2591 | && attrs.offset >= 0) |
8259ab07 | 2592 | break; |
2593 | ||
e913b5cd | 2594 | if (! tree_fits_uhwi_p (offset)) |
8259ab07 | 2595 | { |
d72886b5 | 2596 | attrs.expr = NULL_TREE; |
8259ab07 | 2597 | break; |
2598 | } | |
2599 | ||
d72886b5 | 2600 | attrs.expr = TREE_OPERAND (attrs.expr, 0); |
e913b5cd | 2601 | attrs.offset += tree_to_uhwi (offset); |
2602 | attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) | |
6d58bcba | 2603 | / BITS_PER_UNIT); |
8259ab07 | 2604 | } |
2605 | /* Similarly for the decl. */ | |
d72886b5 | 2606 | else if (DECL_P (attrs.expr) |
2607 | && DECL_SIZE_UNIT (attrs.expr) | |
2608 | && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST | |
2609 | && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0 | |
6d58bcba | 2610 | && (! attrs.offset_known_p || attrs.offset >= 0)) |
8259ab07 | 2611 | break; |
2612 | else | |
2613 | { | |
2614 | /* The widened memory access overflows the expression, which means | |
2615 | that it could alias another expression. Zap it. */ | |
d72886b5 | 2616 | attrs.expr = NULL_TREE; |
8259ab07 | 2617 | break; |
2618 | } | |
2619 | } | |
2620 | ||
d72886b5 | 2621 | if (! attrs.expr) |
6d58bcba | 2622 | attrs.offset_known_p = false; |
8259ab07 | 2623 | |
2624 | /* The widened memory may alias other stuff, so zap the alias set. */ | |
2625 | /* ??? Maybe use get_alias_set on any remaining expression. */ | |
d72886b5 | 2626 | attrs.alias = 0; |
6d58bcba | 2627 | attrs.size_known_p = true; |
2628 | attrs.size = size; | |
d72886b5 | 2629 | set_mem_attrs (new_rtx, &attrs); |
9ce37fa7 | 2630 | return new_rtx; |
8259ab07 | 2631 | } |
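/* Illustrative sketch (an addition): a port that can only issue SImode
   loads might widen a byte reference and extract the byte with shifts
   or masks afterwards.  OFFSET 0 keeps the same base address.  */

static ATTRIBUTE_UNUSED rtx
example_widen_byte_ref (rtx qi_mem)
{
  return widen_memory_access (qi_mem, SImode, 0);
}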
15bbde2b | 2632 | \f |
ac681e84 | 2633 | /* A fake decl that is used as the MEM_EXPR of spill slots. */ |
2634 | static GTY(()) tree spill_slot_decl; | |
2635 | ||
58029e61 | 2636 | tree |
2637 | get_spill_slot_decl (bool force_build_p) | |
ac681e84 | 2638 | { |
2639 | tree d = spill_slot_decl; | |
2640 | rtx rd; | |
d72886b5 | 2641 | struct mem_attrs attrs; |
ac681e84 | 2642 | |
58029e61 | 2643 | if (d || !force_build_p) |
ac681e84 | 2644 | return d; |
2645 | ||
e60a6f7b | 2646 | d = build_decl (DECL_SOURCE_LOCATION (current_function_decl), |
2647 | VAR_DECL, get_identifier ("%sfp"), void_type_node); | |
ac681e84 | 2648 | DECL_ARTIFICIAL (d) = 1; |
2649 | DECL_IGNORED_P (d) = 1; | |
2650 | TREE_USED (d) = 1; | |
ac681e84 | 2651 | spill_slot_decl = d; |
2652 | ||
2653 | rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx); | |
2654 | MEM_NOTRAP_P (rd) = 1; | |
d72886b5 | 2655 | attrs = *mode_mem_attrs[(int) BLKmode]; |
2656 | attrs.alias = new_alias_set (); | |
2657 | attrs.expr = d; | |
2658 | set_mem_attrs (rd, &attrs); | |
ac681e84 | 2659 | SET_DECL_RTL (d, rd); |
2660 | ||
2661 | return d; | |
2662 | } | |
2663 | ||
2664 | /* Given MEM, a result from assign_stack_local, fill in the memory | |
2665 | attributes as appropriate for a register allocator spill slot. | |
2666 | These slots are not aliasable by other memory. We arrange for | |
2667 | them all to use a single MEM_EXPR, so that the aliasing code can | |
2668 | work properly in the case of shared spill slots. */ | |
2669 | ||
2670 | void | |
2671 | set_mem_attrs_for_spill (rtx mem) | |
2672 | { | |
d72886b5 | 2673 | struct mem_attrs attrs; |
2674 | rtx addr; | |
ac681e84 | 2675 | |
d72886b5 | 2676 | attrs = *get_mem_attrs (mem); |
2677 | attrs.expr = get_spill_slot_decl (true); | |
2678 | attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr)); | |
2679 | attrs.addrspace = ADDR_SPACE_GENERIC; | |
ac681e84 | 2680 | |
2681 | /* We expect the incoming memory to be of the form: | |
2682 | (mem:MODE (plus (reg sfp) (const_int offset))) | |
2683 | with perhaps the plus missing for offset = 0. */ | |
2684 | addr = XEXP (mem, 0); | |
6d58bcba | 2685 | attrs.offset_known_p = true; |
2686 | attrs.offset = 0; | |
ac681e84 | 2687 | if (GET_CODE (addr) == PLUS |
971ba038 | 2688 | && CONST_INT_P (XEXP (addr, 1))) |
6d58bcba | 2689 | attrs.offset = INTVAL (XEXP (addr, 1)); |
ac681e84 | 2690 | |
d72886b5 | 2691 | set_mem_attrs (mem, &attrs); |
ac681e84 | 2692 | MEM_NOTRAP_P (mem) = 1; |
2693 | } | |
2694 | \f | |
15bbde2b | 2695 | /* Return a newly created CODE_LABEL rtx with a unique label number. */ |
2696 | ||
be95c7c7 | 2697 | rtx_code_label * |
35cb5232 | 2698 | gen_label_rtx (void) |
15bbde2b | 2699 | { |
be95c7c7 | 2700 | return as_a <rtx_code_label *> ( |
2701 | gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX, | |
2702 | NULL, label_num++, NULL)); | |
15bbde2b | 2703 | } |
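/* Illustrative sketch (an addition): the usual pattern during
   expansion, assuming an insn chain exists: create a fresh label up
   front, branch to it as needed, and emit it once its position in the
   stream is reached.  */

static ATTRIBUTE_UNUSED void
example_fresh_label (void)
{
  rtx_code_label *label = gen_label_rtx ();
  emit_label (label);
}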
2704 | \f | |
2705 | /* For procedure integration. */ | |
2706 | ||
15bbde2b | 2707 | /* Install new pointers to the first and last insns in the chain. |
d4c332ff | 2708 | Also, set cur_insn_uid to one higher than the last in use. |
15bbde2b | 2709 | Used for an inline-procedure after copying the insn chain. */ |
2710 | ||
2711 | void | |
57c26b3a | 2712 | set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last) |
15bbde2b | 2713 | { |
57c26b3a | 2714 | rtx_insn *insn; |
d4c332ff | 2715 | |
06f9d6ef | 2716 | set_first_insn (first); |
2717 | set_last_insn (last); | |
d4c332ff | 2718 | cur_insn_uid = 0; |
2719 | ||
9845d120 | 2720 | if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS) |
2721 | { | |
2722 | int debug_count = 0; | |
2723 | ||
2724 | cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1; | |
2725 | cur_debug_insn_uid = 0; | |
2726 | ||
2727 | for (insn = first; insn; insn = NEXT_INSN (insn)) | |
2728 | if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID) | |
2729 | cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn)); | |
2730 | else | |
2731 | { | |
2732 | cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn)); | |
2733 | if (DEBUG_INSN_P (insn)) | |
2734 | debug_count++; | |
2735 | } | |
2736 | ||
2737 | if (debug_count) | |
2738 | cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count; | |
2739 | else | |
2740 | cur_debug_insn_uid++; | |
2741 | } | |
2742 | else | |
2743 | for (insn = first; insn; insn = NEXT_INSN (insn)) | |
2744 | cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn)); | |
d4c332ff | 2745 | |
2746 | cur_insn_uid++; | |
15bbde2b | 2747 | } |
15bbde2b | 2748 | \f |
d823ba47 | 2749 | /* Go through all the RTL insn bodies and copy any invalid shared |
2d96a59a | 2750 | structure. This routine should only be called once. */ |
15bbde2b | 2751 | |
a40c0eeb | 2752 | static void |
58945f46 | 2753 | unshare_all_rtl_1 (rtx_insn *insn) |
15bbde2b | 2754 | { |
2d96a59a | 2755 | /* Unshare just about everything else. */ |
1cd4cfea | 2756 | unshare_all_rtl_in_chain (insn); |
d823ba47 | 2757 | |
15bbde2b | 2758 | /* Make sure the addresses of stack slots found outside the insn chain |
2759 | (such as, in DECL_RTL of a variable) are not shared | |
2760 | with the insn chain. | |
2761 | ||
2762 | This special care is necessary when the stack slot MEM does not | |
2763 | actually appear in the insn chain. If it does appear, its address | |
2764 | is unshared from all else at that point. */ | |
84f4f7bf | 2765 | unsigned int i; |
2766 | rtx temp; | |
2767 | FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp) | |
2768 | (*stack_slot_list)[i] = copy_rtx_if_shared (temp); | |
15bbde2b | 2769 | } |
2770 | ||
d823ba47 | 2771 | /* Go through all the RTL insn bodies and copy any invalid shared |
2d96a59a | 2772 | structure, again. This is a fairly expensive thing to do so it |
2773 | should be done sparingly. */ | |
2774 | ||
2775 | void | |
58945f46 | 2776 | unshare_all_rtl_again (rtx_insn *insn) |
2d96a59a | 2777 | { |
58945f46 | 2778 | rtx_insn *p; |
5244079b | 2779 | tree decl; |
2780 | ||
2d96a59a | 2781 | for (p = insn; p; p = NEXT_INSN (p)) |
9204e736 | 2782 | if (INSN_P (p)) |
2d96a59a | 2783 | { |
2784 | reset_used_flags (PATTERN (p)); | |
2785 | reset_used_flags (REG_NOTES (p)); | |
6d2a4bac | 2786 | if (CALL_P (p)) |
2787 | reset_used_flags (CALL_INSN_FUNCTION_USAGE (p)); | |
2d96a59a | 2788 | } |
5244079b | 2789 | |
01dc9f0c | 2790 | /* Make sure that virtual stack slots are not shared. */ |
265be050 | 2791 | set_used_decls (DECL_INITIAL (cfun->decl)); |
01dc9f0c | 2792 | |
5244079b | 2793 | /* Make sure that virtual parameters are not shared. */ |
1767a056 | 2794 | for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl)) |
265be050 | 2795 | set_used_flags (DECL_RTL (decl)); |
5244079b | 2796 | |
84f4f7bf | 2797 | rtx temp; |
2798 | unsigned int i; | |
2799 | FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp) | |
2800 | reset_used_flags (temp); | |
5244079b | 2801 | |
df329266 | 2802 | unshare_all_rtl_1 (insn); |
a40c0eeb | 2803 | } |
2804 | ||
2a1990e9 | 2805 | unsigned int |
a40c0eeb | 2806 | unshare_all_rtl (void) |
2807 | { | |
df329266 | 2808 | unshare_all_rtl_1 (get_insns ()); |
607381a9 | 2809 | |
2810 | for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl)) | |
2811 | { | |
2812 | if (DECL_RTL_SET_P (decl)) | |
2813 | SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl))); | |
2814 | DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl)); | |
2815 | } | |
2816 | ||
2a1990e9 | 2817 | return 0; |
2d96a59a | 2818 | } |
2819 | ||
77fce4cd | 2820 | |
1cd4cfea | 2821 | /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2822 | Recursively does the same for subexpressions. */ |
2823 | ||
2824 | static void | |
2825 | verify_rtx_sharing (rtx orig, rtx insn) | |
2826 | { | |
2827 | rtx x = orig; | |
2828 | int i; | |
2829 | enum rtx_code code; | |
2830 | const char *format_ptr; | |
2831 | ||
2832 | if (x == 0) | |
2833 | return; | |
2834 | ||
2835 | code = GET_CODE (x); | |
2836 | ||
2837 | /* These types may be freely shared. */ | |
2838 | ||
2839 | switch (code) | |
2840 | { | |
2841 | case REG: | |
688ff29b | 2842 | case DEBUG_EXPR: |
2843 | case VALUE: | |
0349edce | 2844 | CASE_CONST_ANY: |
1cd4cfea | 2845 | case SYMBOL_REF: |
2846 | case LABEL_REF: | |
2847 | case CODE_LABEL: | |
2848 | case PC: | |
2849 | case CC0: | |
1a860023 | 2850 | case RETURN: |
9cb2517e | 2851 | case SIMPLE_RETURN: |
1cd4cfea | 2852 | case SCRATCH: |
c09425a0 | 2853 | /* SCRATCH expressions must be shared because they represent distinct values. */
b291008a | 2854 | return; |
c09425a0 | 2855 | case CLOBBER: |
b291008a | 2856 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
2857 | clobbers or clobbers of hard registers that originated as pseudos. | |
2858 | This is needed to allow safe register renaming. */ | |
2b5f32ae | 2859 | if (REG_P (XEXP (x, 0)) |
2860 | && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0))) | |
2861 | && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0)))) | |
c09425a0 | 2862 | return; |
2863 | break; | |
1cd4cfea | 2864 | |
2865 | case CONST: | |
3072d30e | 2866 | if (shared_const_p (orig)) |
1cd4cfea | 2867 | return; |
2868 | break; | |
2869 | ||
2870 | case MEM: | |
2871 | /* A MEM is allowed to be shared if its address is constant. */ | |
2872 | if (CONSTANT_ADDRESS_P (XEXP (x, 0)) | |
2873 | || reload_completed || reload_in_progress) | |
2874 | return; | |
2875 | ||
2876 | break; | |
2877 | ||
2878 | default: | |
2879 | break; | |
2880 | } | |
2881 | ||
2882 | /* This rtx may not be shared. If it has already been seen, | |
2883 | replace it with a copy of itself. */ | |
382ecba7 | 2884 | if (flag_checking && RTX_FLAG (x, used)) |
1cd4cfea | 2885 | { |
0a81f5a0 | 2886 | error ("invalid rtl sharing found in the insn"); |
1cd4cfea | 2887 | debug_rtx (insn); |
0a81f5a0 | 2888 | error ("shared rtx"); |
1cd4cfea | 2889 | debug_rtx (x); |
0a81f5a0 | 2890 | internal_error ("internal consistency failure"); |
1cd4cfea | 2891 | } |
9cee7c3f | 2892 | gcc_assert (!RTX_FLAG (x, used)); |
48e1416a | 2893 | |
1cd4cfea | 2894 | RTX_FLAG (x, used) = 1; |
2895 | ||
8b332087 | 2896 | /* Now scan the subexpressions recursively. */ |
1cd4cfea | 2897 | |
2898 | format_ptr = GET_RTX_FORMAT (code); | |
2899 | ||
2900 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
2901 | { | |
2902 | switch (*format_ptr++) | |
2903 | { | |
2904 | case 'e': | |
2905 | verify_rtx_sharing (XEXP (x, i), insn); | |
2906 | break; | |
2907 | ||
2908 | case 'E': | |
2909 | if (XVEC (x, i) != NULL) | |
2910 | { | |
2911 | int j; | |
2912 | int len = XVECLEN (x, i); | |
2913 | ||
2914 | for (j = 0; j < len; j++) | |
2915 | { | |
9cee7c3f | 2916 | /* We allow sharing of ASM_OPERANDS inside a single
2917 | instruction. */ | |
1cd4cfea | 2918 | if (j && GET_CODE (XVECEXP (x, i, j)) == SET |
9cee7c3f | 2919 | && (GET_CODE (SET_SRC (XVECEXP (x, i, j))) |
2920 | == ASM_OPERANDS)) | |
1cd4cfea | 2921 | verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn); |
2922 | else | |
2923 | verify_rtx_sharing (XVECEXP (x, i, j), insn); | |
2924 | } | |
2925 | } | |
2926 | break; | |
2927 | } | |
2928 | } | |
2929 | return; | |
2930 | } | |
2931 | ||
1e9af25c | 2932 | /* Reset used-flags for INSN. */ |
2933 | ||
2934 | static void | |
2935 | reset_insn_used_flags (rtx insn) | |
2936 | { | |
2937 | gcc_assert (INSN_P (insn)); | |
2938 | reset_used_flags (PATTERN (insn)); | |
2939 | reset_used_flags (REG_NOTES (insn)); | |
2940 | if (CALL_P (insn)) | |
2941 | reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn)); | |
2942 | } | |
2943 | ||
7cdd84a2 | 2944 | /* Go through all the RTL insn bodies and clear all the USED bits. */ |
1cd4cfea | 2945 | |
7cdd84a2 | 2946 | static void |
2947 | reset_all_used_flags (void) | |
1cd4cfea | 2948 | { |
4cd001d5 | 2949 | rtx_insn *p; |
1cd4cfea | 2950 | |
2951 | for (p = get_insns (); p; p = NEXT_INSN (p)) | |
2952 | if (INSN_P (p)) | |
2953 | { | |
1e9af25c | 2954 | rtx pat = PATTERN (p); |
2955 | if (GET_CODE (pat) != SEQUENCE) | |
2956 | reset_insn_used_flags (p); | |
2957 | else | |
764f640f | 2958 | { |
1e9af25c | 2959 | gcc_assert (REG_NOTES (p) == NULL); |
2960 | for (int i = 0; i < XVECLEN (pat, 0); i++) | |
11c8949c | 2961 | { |
2962 | rtx insn = XVECEXP (pat, 0, i); | |
2963 | if (INSN_P (insn)) | |
2964 | reset_insn_used_flags (insn); | |
2965 | } | |
764f640f | 2966 | } |
1cd4cfea | 2967 | } |
7cdd84a2 | 2968 | } |
2969 | ||
1e9af25c | 2970 | /* Verify sharing in INSN. */ |
2971 | ||
2972 | static void | |
2973 | verify_insn_sharing (rtx insn) | |
2974 | { | |
2975 | gcc_assert (INSN_P (insn)); | |
44bf3f4e | 2976 | verify_rtx_sharing (PATTERN (insn), insn); |
2977 | verify_rtx_sharing (REG_NOTES (insn), insn); | |
1e9af25c | 2978 | if (CALL_P (insn)) |
44bf3f4e | 2979 | verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn); |
1e9af25c | 2980 | } |
2981 | ||
7cdd84a2 | 2982 | /* Go through all the RTL insn bodies and check that there is no unexpected |
2983 | sharing in between the subexpressions. */ | |
2984 | ||
2985 | DEBUG_FUNCTION void | |
2986 | verify_rtl_sharing (void) | |
2987 | { | |
4cd001d5 | 2988 | rtx_insn *p; |
7cdd84a2 | 2989 | |
2990 | timevar_push (TV_VERIFY_RTL_SHARING); | |
2991 | ||
2992 | reset_all_used_flags (); | |
1cd4cfea | 2993 | |
2994 | for (p = get_insns (); p; p = NEXT_INSN (p)) | |
2995 | if (INSN_P (p)) | |
2996 | { | |
1e9af25c | 2997 | rtx pat = PATTERN (p); |
2998 | if (GET_CODE (pat) != SEQUENCE) | |
2999 | verify_insn_sharing (p); | |
3000 | else | |
3001 | for (int i = 0; i < XVECLEN (pat, 0); i++) | |
11c8949c | 3002 | { |
3003 | rtx insn = XVECEXP (pat, 0, i); | |
3004 | if (INSN_P (insn)) | |
3005 | verify_insn_sharing (insn); | |
3006 | } | |
1cd4cfea | 3007 | } |
4b366dd3 | 3008 | |
7cdd84a2 | 3009 | reset_all_used_flags (); |
3010 | ||
4b366dd3 | 3011 | timevar_pop (TV_VERIFY_RTL_SHARING); |
1cd4cfea | 3012 | } |
3013 | ||
2d96a59a | 3014 | /* Go through all the RTL insn bodies and copy any invalid shared structure. |
3015 | Assumes the mark bits are cleared at entry. */ | |
3016 | ||
1cd4cfea | 3017 | void |
4cd001d5 | 3018 | unshare_all_rtl_in_chain (rtx_insn *insn) |
2d96a59a | 3019 | { |
3020 | for (; insn; insn = NEXT_INSN (insn)) | |
9204e736 | 3021 | if (INSN_P (insn)) |
2d96a59a | 3022 | { |
3023 | PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn)); | |
3024 | REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn)); | |
6d2a4bac | 3025 | if (CALL_P (insn)) |
3026 | CALL_INSN_FUNCTION_USAGE (insn) | |
3027 | = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn)); | |
2d96a59a | 3028 | } |
3029 | } | |
3030 | ||
01dc9f0c | 3031 | /* Go through all virtual stack slots of a function and mark them as |
265be050 | 3032 | shared. We never replace the DECL_RTLs themselves with a copy, |
3033 | but expressions mentioned in a DECL_RTL cannot be shared with |
3034 | expressions in the instruction stream. | |
3035 | ||
3036 | Note that reload may convert pseudo registers into memories in-place. | |
3037 | Pseudo registers are always shared, but MEMs never are. Thus if we | |
3038 | reset the used flags on MEMs in the instruction stream, we must set | |
3039 | them again on MEMs that appear in DECL_RTLs. */ | |
3040 | ||
01dc9f0c | 3041 | static void |
265be050 | 3042 | set_used_decls (tree blk) |
01dc9f0c | 3043 | { |
3044 | tree t; | |
3045 | ||
3046 | /* Mark decls. */ | |
1767a056 | 3047 | for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t)) |
0e8e37b2 | 3048 | if (DECL_RTL_SET_P (t)) |
265be050 | 3049 | set_used_flags (DECL_RTL (t)); |
01dc9f0c | 3050 | |
3051 | /* Now process sub-blocks. */ | |
93110716 | 3052 | for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t)) |
265be050 | 3053 | set_used_decls (t); |
01dc9f0c | 3054 | } |
3055 | ||
15bbde2b | 3056 | /* Mark ORIG as in use, and return a copy of it if it was already in use. |
7ba6ce7a | 3057 | Recursively does the same for subexpressions. Uses |
3058 | copy_rtx_if_shared_1 to reduce stack space. */ | |
15bbde2b | 3059 | |
3060 | rtx | |
35cb5232 | 3061 | copy_rtx_if_shared (rtx orig) |
15bbde2b | 3062 | { |
0e0727c4 | 3063 | copy_rtx_if_shared_1 (&orig); |
3064 | return orig; | |
3065 | } | |
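/* Illustrative sketch, not part of the original file: the usual idiom
   pairs reset_used_flags with copy_rtx_if_shared, exactly as
   unshare_all_rtl_again does above.  EXAMPLE_UNSHARE_PATTERN is a
   hypothetical helper.  */

static void
example_unshare_pattern (rtx_insn *insn)
{
  /* Clear the used bits first; copy_rtx_if_shared then copies any
     subexpression it encounters a second time.  */
  reset_used_flags (PATTERN (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
}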
3066 | ||
7ba6ce7a | 3067 | /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in |
3068 | use. Recursively does the same for subexpressions. */ | |
3069 | ||
0e0727c4 | 3070 | static void |
3071 | copy_rtx_if_shared_1 (rtx *orig1) | |
3072 | { | |
3073 | rtx x; | |
19cb6b50 | 3074 | int i; |
3075 | enum rtx_code code; | |
0e0727c4 | 3076 | rtx *last_ptr; |
19cb6b50 | 3077 | const char *format_ptr; |
15bbde2b | 3078 | int copied = 0; |
0e0727c4 | 3079 | int length; |
3080 | ||
3081 | /* Repeat is used to turn tail-recursion into iteration. */ | |
3082 | repeat: | |
3083 | x = *orig1; | |
15bbde2b | 3084 | |
3085 | if (x == 0) | |
0e0727c4 | 3086 | return; |
15bbde2b | 3087 | |
3088 | code = GET_CODE (x); | |
3089 | ||
3090 | /* These types may be freely shared. */ | |
3091 | ||
3092 | switch (code) | |
3093 | { | |
3094 | case REG: | |
688ff29b | 3095 | case DEBUG_EXPR: |
3096 | case VALUE: | |
0349edce | 3097 | CASE_CONST_ANY: |
15bbde2b | 3098 | case SYMBOL_REF: |
1cd4cfea | 3099 | case LABEL_REF: |
15bbde2b | 3100 | case CODE_LABEL: |
3101 | case PC: | |
3102 | case CC0: | |
e0691b9a | 3103 | case RETURN: |
9cb2517e | 3104 | case SIMPLE_RETURN: |
15bbde2b | 3105 | case SCRATCH: |
a92771b8 | 3106 | /* SCRATCH expressions must be shared because they represent distinct values. */
0e0727c4 | 3107 | return; |
c09425a0 | 3108 | case CLOBBER: |
b291008a | 3109 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
3110 | clobbers or clobbers of hard registers that originated as pseudos. | |
3111 | This is needed to allow safe register renaming. */ | |
2b5f32ae | 3112 | if (REG_P (XEXP (x, 0)) |
3113 | && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0))) | |
3114 | && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0)))) | |
c09425a0 | 3115 | return; |
3116 | break; | |
15bbde2b | 3117 | |
f63d12e3 | 3118 | case CONST: |
3072d30e | 3119 | if (shared_const_p (x)) |
0e0727c4 | 3120 | return; |
f63d12e3 | 3121 | break; |
3122 | ||
9845d120 | 3123 | case DEBUG_INSN: |
15bbde2b | 3124 | case INSN: |
3125 | case JUMP_INSN: | |
3126 | case CALL_INSN: | |
3127 | case NOTE: | |
15bbde2b | 3128 | case BARRIER: |
3129 | /* The chain of insns is not being copied. */ | |
0e0727c4 | 3130 | return; |
15bbde2b | 3131 | |
0dbd1c74 | 3132 | default: |
3133 | break; | |
15bbde2b | 3134 | } |
3135 | ||
3136 | /* This rtx may not be shared. If it has already been seen, | |
3137 | replace it with a copy of itself. */ | |
3138 | ||
7c25cb91 | 3139 | if (RTX_FLAG (x, used)) |
15bbde2b | 3140 | { |
f2d0e9f1 | 3141 | x = shallow_copy_rtx (x); |
15bbde2b | 3142 | copied = 1; |
3143 | } | |
7c25cb91 | 3144 | RTX_FLAG (x, used) = 1; |
15bbde2b | 3145 | |
3146 | /* Now scan the subexpressions recursively. | |
3147 | We can store any replaced subexpressions directly into X | |
3148 | since we know X is not shared! Any vectors in X | |
3149 | must be copied if X was copied. */ | |
3150 | ||
3151 | format_ptr = GET_RTX_FORMAT (code); | |
0e0727c4 | 3152 | length = GET_RTX_LENGTH (code); |
3153 | last_ptr = NULL; | |
48e1416a | 3154 | |
0e0727c4 | 3155 | for (i = 0; i < length; i++) |
15bbde2b | 3156 | { |
3157 | switch (*format_ptr++) | |
3158 | { | |
3159 | case 'e': | |
0e0727c4 | 3160 | if (last_ptr) |
3161 | copy_rtx_if_shared_1 (last_ptr); | |
3162 | last_ptr = &XEXP (x, i); | |
15bbde2b | 3163 | break; |
3164 | ||
3165 | case 'E': | |
3166 | if (XVEC (x, i) != NULL) | |
3167 | { | |
19cb6b50 | 3168 | int j; |
ffe0869b | 3169 | int len = XVECLEN (x, i); |
48e1416a | 3170 | |
8b332087 | 3171 | /* Copy the vector iff I copied the rtx and the length |
3172 | is nonzero. */ | |
ffe0869b | 3173 | if (copied && len > 0) |
a4070a91 | 3174 | XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem); |
48e1416a | 3175 | |
d632b59a | 3176 | /* Call recursively on everything inside the vector. */
ffe0869b | 3177 | for (j = 0; j < len; j++) |
0e0727c4 | 3178 | { |
3179 | if (last_ptr) | |
3180 | copy_rtx_if_shared_1 (last_ptr); | |
3181 | last_ptr = &XVECEXP (x, i, j); | |
3182 | } | |
15bbde2b | 3183 | } |
3184 | break; | |
3185 | } | |
3186 | } | |
0e0727c4 | 3187 | *orig1 = x; |
3188 | if (last_ptr) | |
3189 | { | |
3190 | orig1 = last_ptr; | |
3191 | goto repeat; | |
3192 | } | |
3193 | return; | |
15bbde2b | 3194 | } |
3195 | ||
709947e6 | 3196 | /* Set the USED bit in X and its non-shareable subparts to FLAG. */ |
15bbde2b | 3197 | |
709947e6 | 3198 | static void |
3199 | mark_used_flags (rtx x, int flag) | |
15bbde2b | 3200 | { |
19cb6b50 | 3201 | int i, j; |
3202 | enum rtx_code code; | |
3203 | const char *format_ptr; | |
0e0727c4 | 3204 | int length; |
15bbde2b | 3205 | |
0e0727c4 | 3206 | /* Repeat is used to turn tail-recursion into iteration. */ |
3207 | repeat: | |
15bbde2b | 3208 | if (x == 0) |
3209 | return; | |
3210 | ||
3211 | code = GET_CODE (x); | |
3212 | ||
c3418f42 | 3213 | /* These types may be freely shared so we needn't do any resetting |
15bbde2b | 3214 | for them. */ |
3215 | ||
3216 | switch (code) | |
3217 | { | |
3218 | case REG: | |
688ff29b | 3219 | case DEBUG_EXPR: |
3220 | case VALUE: | |
0349edce | 3221 | CASE_CONST_ANY: |
15bbde2b | 3222 | case SYMBOL_REF: |
3223 | case CODE_LABEL: | |
3224 | case PC: | |
3225 | case CC0: | |
e0691b9a | 3226 | case RETURN: |
9cb2517e | 3227 | case SIMPLE_RETURN: |
15bbde2b | 3228 | return; |
3229 | ||
9845d120 | 3230 | case DEBUG_INSN: |
15bbde2b | 3231 | case INSN: |
3232 | case JUMP_INSN: | |
3233 | case CALL_INSN: | |
3234 | case NOTE: | |
3235 | case LABEL_REF: | |
3236 | case BARRIER: | |
3237 | /* The chain of insns is not being copied. */ | |
3238 | return; | |
d823ba47 | 3239 | |
0dbd1c74 | 3240 | default: |
3241 | break; | |
15bbde2b | 3242 | } |
3243 | ||
709947e6 | 3244 | RTX_FLAG (x, used) = flag; |
15bbde2b | 3245 | |
3246 | format_ptr = GET_RTX_FORMAT (code); | |
0e0727c4 | 3247 | length = GET_RTX_LENGTH (code); |
48e1416a | 3248 | |
0e0727c4 | 3249 | for (i = 0; i < length; i++) |
15bbde2b | 3250 | { |
3251 | switch (*format_ptr++) | |
3252 | { | |
3253 | case 'e': | |
0e0727c4 | 3254 | if (i == length-1) |
3255 | { | |
3256 | x = XEXP (x, i); | |
3257 | goto repeat; | |
3258 | } | |
709947e6 | 3259 | mark_used_flags (XEXP (x, i), flag); |
15bbde2b | 3260 | break; |
3261 | ||
3262 | case 'E': | |
3263 | for (j = 0; j < XVECLEN (x, i); j++) | |
709947e6 | 3264 | mark_used_flags (XVECEXP (x, i, j), flag); |
15bbde2b | 3265 | break; |
3266 | } | |
3267 | } | |
3268 | } | |
1cd4cfea | 3269 | |
709947e6 | 3270 | /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used |
1cd4cfea | 3271 | to look for shared sub-parts. */ |
3272 | ||
3273 | void | |
709947e6 | 3274 | reset_used_flags (rtx x) |
1cd4cfea | 3275 | { |
709947e6 | 3276 | mark_used_flags (x, 0); |
3277 | } | |
1cd4cfea | 3278 | |
709947e6 | 3279 | /* Set all the USED bits in X to allow copy_rtx_if_shared to be used |
3280 | to look for shared sub-parts. */ | |
1cd4cfea | 3281 | |
709947e6 | 3282 | void |
3283 | set_used_flags (rtx x) | |
3284 | { | |
3285 | mark_used_flags (x, 1); | |
1cd4cfea | 3286 | } |
15bbde2b | 3287 | \f |
3288 | /* Copy X if necessary so that it won't be altered by changes in OTHER. | |
3289 | Return X or the rtx for the pseudo reg the value of X was copied into. | |
3290 | OTHER must be valid as a SET_DEST. */ | |
3291 | ||
3292 | rtx | |
35cb5232 | 3293 | make_safe_from (rtx x, rtx other) |
15bbde2b | 3294 | { |
3295 | while (1) | |
3296 | switch (GET_CODE (other)) | |
3297 | { | |
3298 | case SUBREG: | |
3299 | other = SUBREG_REG (other); | |
3300 | break; | |
3301 | case STRICT_LOW_PART: | |
3302 | case SIGN_EXTEND: | |
3303 | case ZERO_EXTEND: | |
3304 | other = XEXP (other, 0); | |
3305 | break; | |
3306 | default: | |
3307 | goto done; | |
3308 | } | |
3309 | done: | |
e16ceb8e | 3310 | if ((MEM_P (other) |
15bbde2b | 3311 | && ! CONSTANT_P (x) |
8ad4c111 | 3312 | && !REG_P (x) |
15bbde2b | 3313 | && GET_CODE (x) != SUBREG) |
8ad4c111 | 3314 | || (REG_P (other) |
15bbde2b | 3315 | && (REGNO (other) < FIRST_PSEUDO_REGISTER |
3316 | || reg_mentioned_p (other, x)))) | |
3317 | { | |
3318 | rtx temp = gen_reg_rtx (GET_MODE (x)); | |
3319 | emit_move_insn (temp, x); | |
3320 | return temp; | |
3321 | } | |
3322 | return x; | |
3323 | } | |
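/* Illustrative sketch, not part of the original file: a hypothetical
   expander fragment that clobbers OTHER while the value of X is still
   needed afterwards.  */

static rtx
example_use_after_clobber (rtx x, rtx other, rtx tmp)
{
  /* OTHER is assigned below before X is consumed, so make sure X does
     not live in OTHER.  */
  x = make_safe_from (x, other);
  emit_move_insn (other, tmp);
  return x;	/* X is still valid here.  */
}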
3324 | \f | |
3325 | /* Emission of insns (adding them to the doubly-linked list). */ | |
3326 | ||
15bbde2b | 3327 | /* Return the last insn emitted, even if it is in a sequence now pushed. */ |
3328 | ||
447ab0fc | 3329 | rtx_insn * |
35cb5232 | 3330 | get_last_insn_anywhere (void) |
15bbde2b | 3331 | { |
c36aa54b | 3332 | struct sequence_stack *seq; |
3333 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
3334 | if (seq->last != 0) | |
3335 | return seq->last; | |
15bbde2b | 3336 | return 0; |
3337 | } | |
3338 | ||
70545de4 | 3339 | /* Return the first nonnote insn emitted in the current sequence or the current
3340 | function. This routine looks inside SEQUENCEs. */ | |
3341 | ||
2eb8c261 | 3342 | rtx_insn * |
35cb5232 | 3343 | get_first_nonnote_insn (void) |
70545de4 | 3344 | { |
4cd001d5 | 3345 | rtx_insn *insn = get_insns (); |
f86e856e | 3346 | |
3347 | if (insn) | |
3348 | { | |
3349 | if (NOTE_P (insn)) | |
3350 | for (insn = next_insn (insn); | |
3351 | insn && NOTE_P (insn); | |
3352 | insn = next_insn (insn)) | |
3353 | continue; | |
3354 | else | |
3355 | { | |
1c14a50e | 3356 | if (NONJUMP_INSN_P (insn) |
f86e856e | 3357 | && GET_CODE (PATTERN (insn)) == SEQUENCE) |
4cd001d5 | 3358 | insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
f86e856e | 3359 | } |
3360 | } | |
70545de4 | 3361 | |
3362 | return insn; | |
3363 | } | |
3364 | ||
3365 | /* Return the last nonnote insn emitted in the current sequence or the current |
3366 | function. This routine looks inside SEQUENCEs. */ | |
3367 | ||
2eb8c261 | 3368 | rtx_insn * |
35cb5232 | 3369 | get_last_nonnote_insn (void) |
70545de4 | 3370 | { |
4cd001d5 | 3371 | rtx_insn *insn = get_last_insn (); |
f86e856e | 3372 | |
3373 | if (insn) | |
3374 | { | |
3375 | if (NOTE_P (insn)) | |
3376 | for (insn = previous_insn (insn); | |
3377 | insn && NOTE_P (insn); | |
3378 | insn = previous_insn (insn)) | |
3379 | continue; | |
3380 | else | |
3381 | { | |
4cd001d5 | 3382 | if (NONJUMP_INSN_P (insn)) |
3383 | if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn))) | |
3384 | insn = seq->insn (seq->len () - 1); | |
f86e856e | 3385 | } |
3386 | } | |
70545de4 | 3387 | |
3388 | return insn; | |
3389 | } | |
3390 | ||
9845d120 | 3391 | /* Return the number of actual (non-debug) insns emitted in this |
3392 | function. */ | |
3393 | ||
3394 | int | |
3395 | get_max_insn_count (void) | |
3396 | { | |
3397 | int n = cur_insn_uid; | |
3398 | ||
3399 | /* The table size must be stable across -g, to avoid codegen | |
3400 | differences due to debug insns, and not be affected by | |
3401 | -fmin-insn-uid, to avoid excessive table size and to simplify | |
3402 | debugging of -fcompare-debug failures. */ | |
3403 | if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID) | |
3404 | n -= cur_debug_insn_uid; | |
3405 | else | |
3406 | n -= MIN_NONDEBUG_INSN_UID; | |
3407 | ||
3408 | return n; | |
3409 | } | |
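/* For example (hypothetical numbers): with MIN_NONDEBUG_INSN_UID == 20,
   cur_insn_uid == 100 and cur_debug_insn_uid == 30, the debug UIDs have
   spilled past their reserved range, so the count is 100 - 30 = 70.
   Had cur_debug_insn_uid stayed at or below 20, it would be
   100 - 20 = 80.  */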
3410 | ||
15bbde2b | 3411 | \f |
3412 | /* Return the next insn. If it is a SEQUENCE, return the first insn | |
3413 | of the sequence. */ | |
3414 | ||
7bac25b3 | 3415 | rtx_insn * |
50895eab | 3416 | next_insn (rtx_insn *insn) |
15bbde2b | 3417 | { |
ce4469fa | 3418 | if (insn) |
3419 | { | |
3420 | insn = NEXT_INSN (insn); | |
3421 | if (insn && NONJUMP_INSN_P (insn) | |
3422 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
4cd001d5 | 3423 | insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
ce4469fa | 3424 | } |
15bbde2b | 3425 | |
4cd001d5 | 3426 | return insn; |
15bbde2b | 3427 | } |
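/* Illustrative sketch, not part of the original file: stepping with
   next_insn descends into delay-slot SEQUENCEs, unlike raw NEXT_INSN.
   EXAMPLE_COUNT_INSNS is a hypothetical helper.  */

static int
example_count_insns (void)
{
  int count = 0;
  for (rtx_insn *insn = get_insns (); insn; insn = next_insn (insn))
    count++;
  return count;
}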
3428 | ||
3429 | /* Return the previous insn. If it is a SEQUENCE, return the last insn | |
3430 | of the sequence. */ | |
3431 | ||
7bac25b3 | 3432 | rtx_insn * |
50895eab | 3433 | previous_insn (rtx_insn *insn) |
15bbde2b | 3434 | { |
ce4469fa | 3435 | if (insn) |
3436 | { | |
3437 | insn = PREV_INSN (insn); | |
4cd001d5 | 3438 | if (insn && NONJUMP_INSN_P (insn)) |
3439 | if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn))) | |
3440 | insn = seq->insn (seq->len () - 1); | |
ce4469fa | 3441 | } |
15bbde2b | 3442 | |
4cd001d5 | 3443 | return insn; |
15bbde2b | 3444 | } |
3445 | ||
3446 | /* Return the next insn after INSN that is not a NOTE. This routine does not | |
3447 | look inside SEQUENCEs. */ | |
3448 | ||
7bac25b3 | 3449 | rtx_insn * |
4066f31e | 3450 | next_nonnote_insn (rtx_insn *insn) |
15bbde2b | 3451 | { |
ce4469fa | 3452 | while (insn) |
3453 | { | |
3454 | insn = NEXT_INSN (insn); | |
3455 | if (insn == 0 || !NOTE_P (insn)) | |
3456 | break; | |
3457 | } | |
15bbde2b | 3458 | |
4cd001d5 | 3459 | return insn; |
15bbde2b | 3460 | } |
3461 | ||
18fc6357 | 3462 | /* Return the next insn after INSN that is not a DEBUG_INSN. This |
3463 | routine does not look inside SEQUENCEs. */ | |
c4d13c5c | 3464 | |
7bac25b3 | 3465 | rtx_insn * |
18fc6357 | 3466 | next_nondebug_insn (rtx_insn *insn) |
c4d13c5c | 3467 | { |
3468 | while (insn) | |
3469 | { | |
3470 | insn = NEXT_INSN (insn); | |
18fc6357 | 3471 | if (insn == 0 || !DEBUG_INSN_P (insn)) |
c4d13c5c | 3472 | break; |
c4d13c5c | 3473 | } |
3474 | ||
4cd001d5 | 3475 | return insn; |
c4d13c5c | 3476 | } |
3477 | ||
15bbde2b | 3478 | /* Return the previous insn before INSN that is not a NOTE. This routine does |
3479 | not look inside SEQUENCEs. */ | |
3480 | ||
7bac25b3 | 3481 | rtx_insn * |
4066f31e | 3482 | prev_nonnote_insn (rtx_insn *insn) |
15bbde2b | 3483 | { |
ce4469fa | 3484 | while (insn) |
3485 | { | |
3486 | insn = PREV_INSN (insn); | |
3487 | if (insn == 0 || !NOTE_P (insn)) | |
3488 | break; | |
3489 | } | |
15bbde2b | 3490 | |
4cd001d5 | 3491 | return insn; |
15bbde2b | 3492 | } |
3493 | ||
18fc6357 | 3494 | /* Return the previous insn before INSN that is not a DEBUG_INSN. |
3495 | This routine does not look inside SEQUENCEs. */ | |
bcc66782 | 3496 | |
7bac25b3 | 3497 | rtx_insn * |
18fc6357 | 3498 | prev_nondebug_insn (rtx_insn *insn) |
bcc66782 | 3499 | { |
3500 | while (insn) | |
3501 | { | |
3502 | insn = PREV_INSN (insn); | |
18fc6357 | 3503 | if (insn == 0 || !DEBUG_INSN_P (insn)) |
bcc66782 | 3504 | break; |
bcc66782 | 3505 | } |
3506 | ||
4cd001d5 | 3507 | return insn; |
bcc66782 | 3508 | } |
3509 | ||
18fc6357 | 3510 | /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN. |
3511 | This routine does not look inside SEQUENCEs. */ | |
9845d120 | 3512 | |
7bac25b3 | 3513 | rtx_insn * |
18fc6357 | 3514 | next_nonnote_nondebug_insn (rtx_insn *insn) |
9845d120 | 3515 | { |
3516 | while (insn) | |
3517 | { | |
3518 | insn = NEXT_INSN (insn); | |
18fc6357 | 3519 | if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) |
9845d120 | 3520 | break; |
3521 | } | |
3522 | ||
4cd001d5 | 3523 | return insn; |
9845d120 | 3524 | } |
3525 | ||
18fc6357 | 3526 | /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN, |
3527 | but stop the search before we enter another basic block. This | |
3528 | routine does not look inside SEQUENCEs. */ | |
9845d120 | 3529 | |
7bac25b3 | 3530 | rtx_insn * |
18fc6357 | 3531 | next_nonnote_nondebug_insn_bb (rtx_insn *insn) |
9845d120 | 3532 | { |
3533 | while (insn) | |
3534 | { | |
18fc6357 | 3535 | insn = NEXT_INSN (insn); |
3536 | if (insn == 0) | |
3537 | break; | |
3538 | if (DEBUG_INSN_P (insn)) | |
3539 | continue; | |
3540 | if (!NOTE_P (insn)) | |
9845d120 | 3541 | break; |
18fc6357 | 3542 | if (NOTE_INSN_BASIC_BLOCK_P (insn)) |
3543 | return NULL; | |
9845d120 | 3544 | } |
3545 | ||
4cd001d5 | 3546 | return insn; |
9845d120 | 3547 | } |
3548 | ||
18fc6357 | 3549 | /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN. |
5b8537a8 | 3550 | This routine does not look inside SEQUENCEs. */ |
3551 | ||
7bac25b3 | 3552 | rtx_insn * |
18fc6357 | 3553 | prev_nonnote_nondebug_insn (rtx_insn *insn) |
5b8537a8 | 3554 | { |
3555 | while (insn) | |
3556 | { | |
18fc6357 | 3557 | insn = PREV_INSN (insn); |
5b8537a8 | 3558 | if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) |
3559 | break; | |
3560 | } | |
3561 | ||
4cd001d5 | 3562 | return insn; |
5b8537a8 | 3563 | } |
3564 | ||
18fc6357 | 3565 | /* Return the previous insn before INSN that is not a NOTE nor |
3566 | DEBUG_INSN, but stop the search before we enter another basic | |
3567 | block. This routine does not look inside SEQUENCEs. */ | |
5b8537a8 | 3568 | |
7bac25b3 | 3569 | rtx_insn * |
18fc6357 | 3570 | prev_nonnote_nondebug_insn_bb (rtx_insn *insn) |
5b8537a8 | 3571 | { |
3572 | while (insn) | |
3573 | { | |
3574 | insn = PREV_INSN (insn); | |
18fc6357 | 3575 | if (insn == 0) |
5b8537a8 | 3576 | break; |
18fc6357 | 3577 | if (DEBUG_INSN_P (insn)) |
3578 | continue; | |
3579 | if (!NOTE_P (insn)) | |
3580 | break; | |
3581 | if (NOTE_INSN_BASIC_BLOCK_P (insn)) | |
3582 | return NULL; | |
5b8537a8 | 3583 | } |
3584 | ||
4cd001d5 | 3585 | return insn; |
5b8537a8 | 3586 | } |
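/* Illustrative sketch, not part of the original file: the _bb variants
   return NULL rather than crossing a basic block boundary, so the
   result distinguishes "no earlier real insn in this block" from merely
   reaching a note.  EXAMPLE_FIRST_REAL_IN_BB_P is hypothetical.  */

static bool
example_first_real_in_bb_p (rtx_insn *insn)
{
  return prev_nonnote_nondebug_insn_bb (insn) == NULL;
}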
3587 | ||
15bbde2b | 3588 | /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN; |
3589 | or 0, if there is none. This routine does not look inside | |
a92771b8 | 3590 | SEQUENCEs. */ |
15bbde2b | 3591 | |
7bac25b3 | 3592 | rtx_insn * |
4cd001d5 | 3593 | next_real_insn (rtx uncast_insn) |
15bbde2b | 3594 | { |
4cd001d5 | 3595 | rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn); |
3596 | ||
ce4469fa | 3597 | while (insn) |
3598 | { | |
3599 | insn = NEXT_INSN (insn); | |
3600 | if (insn == 0 || INSN_P (insn)) | |
3601 | break; | |
3602 | } | |
15bbde2b | 3603 | |
4cd001d5 | 3604 | return insn; |
15bbde2b | 3605 | } |
3606 | ||
3607 | /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN; | |
3608 | or 0, if there is none. This routine does not look inside | |
3609 | SEQUENCEs. */ | |
3610 | ||
7bac25b3 | 3611 | rtx_insn * |
4067fcc6 | 3612 | prev_real_insn (rtx_insn *insn) |
15bbde2b | 3613 | { |
ce4469fa | 3614 | while (insn) |
3615 | { | |
3616 | insn = PREV_INSN (insn); | |
3617 | if (insn == 0 || INSN_P (insn)) | |
3618 | break; | |
3619 | } | |
15bbde2b | 3620 | |
4cd001d5 | 3621 | return insn; |
15bbde2b | 3622 | } |
3623 | ||
d5f9786f | 3624 | /* Return the last CALL_INSN in the current list, or 0 if there is none. |
3625 | This routine does not look inside SEQUENCEs. */ | |
3626 | ||
ec22da62 | 3627 | rtx_call_insn * |
35cb5232 | 3628 | last_call_insn (void) |
d5f9786f | 3629 | { |
ec22da62 | 3630 | rtx_insn *insn; |
d5f9786f | 3631 | |
3632 | for (insn = get_last_insn (); | |
6d7dc5b9 | 3633 | insn && !CALL_P (insn); |
d5f9786f | 3634 | insn = PREV_INSN (insn)) |
3635 | ; | |
3636 | ||
ec22da62 | 3637 | return safe_as_a <rtx_call_insn *> (insn); |
d5f9786f | 3638 | } |
3639 | ||
15bbde2b | 3640 | /* Find the next insn after INSN that really does something. This routine |
084950ee | 3641 | does not look inside SEQUENCEs. After reload this also skips over |
3642 | standalone USE and CLOBBER insns. */ |
15bbde2b | 3643 | |
2215ca0d | 3644 | int |
41503955 | 3645 | active_insn_p (const rtx_insn *insn) |
2215ca0d | 3646 | { |
6d7dc5b9 | 3647 | return (CALL_P (insn) || JUMP_P (insn) |
91f71fa3 | 3648 | || JUMP_TABLE_DATA_P (insn) /* FIXME */ |
6d7dc5b9 | 3649 | || (NONJUMP_INSN_P (insn) |
3a66feab | 3650 | && (! reload_completed |
3651 | || (GET_CODE (PATTERN (insn)) != USE | |
3652 | && GET_CODE (PATTERN (insn)) != CLOBBER)))); | |
2215ca0d | 3653 | } |
3654 | ||
7bac25b3 | 3655 | rtx_insn * |
41503955 | 3656 | next_active_insn (rtx_insn *insn) |
15bbde2b | 3657 | { |
ce4469fa | 3658 | while (insn) |
3659 | { | |
3660 | insn = NEXT_INSN (insn); | |
3661 | if (insn == 0 || active_insn_p (insn)) | |
3662 | break; | |
3663 | } | |
15bbde2b | 3664 | |
4cd001d5 | 3665 | return insn; |
15bbde2b | 3666 | } |
3667 | ||
3668 | /* Find the last insn before INSN that really does something. This routine | |
084950ee | 3669 | does not look inside SEQUENCEs. After reload this also skips over |
3670 | standalone USE and CLOBBER insns. */ |
15bbde2b | 3671 | |
7bac25b3 | 3672 | rtx_insn * |
41503955 | 3673 | prev_active_insn (rtx_insn *insn) |
15bbde2b | 3674 | { |
ce4469fa | 3675 | while (insn) |
3676 | { | |
3677 | insn = PREV_INSN (insn); | |
3678 | if (insn == 0 || active_insn_p (insn)) | |
3679 | break; | |
3680 | } | |
15bbde2b | 3681 | |
4cd001d5 | 3682 | return insn; |
15bbde2b | 3683 | } |
15bbde2b | 3684 | \f |
15bbde2b | 3685 | /* Return the next insn that uses CC0 after INSN, which is assumed to |
3686 | set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter | |
3687 | applied to the result of this function should yield INSN). | |
3688 | ||
3689 | Normally, this is simply the next insn. However, if a REG_CC_USER note | |
3690 | is present, it contains the insn that uses CC0. | |
3691 | ||
3692 | Return 0 if we can't find the insn. */ | |
3693 | ||
0be88abd | 3694 | rtx_insn * |
924a5cee | 3695 | next_cc0_user (rtx_insn *insn) |
15bbde2b | 3696 | { |
b572011e | 3697 | rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX); |
15bbde2b | 3698 | |
3699 | if (note) | |
0be88abd | 3700 | return safe_as_a <rtx_insn *> (XEXP (note, 0)); |
15bbde2b | 3701 | |
3702 | insn = next_nonnote_insn (insn); | |
6d7dc5b9 | 3703 | if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
4cd001d5 | 3704 | insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
15bbde2b | 3705 | |
9204e736 | 3706 | if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn))) |
4cd001d5 | 3707 | return insn; |
15bbde2b | 3708 | |
3709 | return 0; | |
3710 | } | |
3711 | ||
3712 | /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER | |
3713 | note, it is the previous insn. */ | |
3714 | ||
0be88abd | 3715 | rtx_insn * |
fd8b0a1a | 3716 | prev_cc0_setter (rtx_insn *insn) |
15bbde2b | 3717 | { |
b572011e | 3718 | rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX); |
15bbde2b | 3719 | |
3720 | if (note) | |
0be88abd | 3721 | return safe_as_a <rtx_insn *> (XEXP (note, 0)); |
15bbde2b | 3722 | |
3723 | insn = prev_nonnote_insn (insn); | |
611234b4 | 3724 | gcc_assert (sets_cc0_p (PATTERN (insn))); |
15bbde2b | 3725 | |
4cd001d5 | 3726 | return insn; |
15bbde2b | 3727 | } |
344dc2fa | 3728 | |
698ff1f0 | 3729 | /* Return true if X contains an RTX_AUTOINC expression whose operand is REG. */ |
3730 | ||
3731 | static int | |
4073adaa | 3732 | find_auto_inc (const_rtx x, const_rtx reg) |
698ff1f0 | 3733 | { |
4073adaa | 3734 | subrtx_iterator::array_type array; |
3735 | FOR_EACH_SUBRTX (iter, array, x, NONCONST) | |
698ff1f0 | 3736 | { |
4073adaa | 3737 | const_rtx x = *iter; |
3738 | if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC | |
3739 | && rtx_equal_p (reg, XEXP (x, 0))) | |
3740 | return true; | |
698ff1f0 | 3741 | } |
4073adaa | 3742 | return false; |
698ff1f0 | 3743 | } |
698ff1f0 | 3744 | |
344dc2fa | 3745 | /* Increment the label uses for all labels present in X. */
3746 | ||
3747 | static void | |
35cb5232 | 3748 | mark_label_nuses (rtx x) |
344dc2fa | 3749 | { |
19cb6b50 | 3750 | enum rtx_code code; |
3751 | int i, j; | |
3752 | const char *fmt; | |
344dc2fa | 3753 | |
3754 | code = GET_CODE (x); | |
c7799456 | 3755 | if (code == LABEL_REF && LABEL_P (label_ref_label (x))) |
3756 | LABEL_NUSES (label_ref_label (x))++; | |
344dc2fa | 3757 | |
3758 | fmt = GET_RTX_FORMAT (code); | |
3759 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3760 | { | |
3761 | if (fmt[i] == 'e') | |
ff385626 | 3762 | mark_label_nuses (XEXP (x, i)); |
344dc2fa | 3763 | else if (fmt[i] == 'E') |
ff385626 | 3764 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
344dc2fa | 3765 | mark_label_nuses (XVECEXP (x, i, j)); |
3766 | } | |
3767 | } | |
3768 | ||
15bbde2b | 3769 | \f |
3770 | /* Try splitting insns that can be split for better scheduling. | |
3771 | PAT is the pattern which might split. | |
3772 | TRIAL is the insn providing PAT. | |
6ef828f9 | 3773 | LAST is nonzero if we should return the last insn of the sequence produced. |
15bbde2b | 3774 | |
3775 | If this routine succeeds in splitting, it returns the first or last | |
0e69a50a | 3776 | replacement insn depending on the value of LAST. Otherwise, it |
15bbde2b | 3777 | returns TRIAL. If the insn to be returned can be split, it will be. */ |
3778 | ||
bffa1357 | 3779 | rtx_insn * |
58a87a29 | 3780 | try_split (rtx pat, rtx_insn *trial, int last) |
15bbde2b | 3781 | { |
3b50f202 | 3782 | rtx_insn *before, *after; |
4cd001d5 | 3783 | rtx note; |
3784 | rtx_insn *seq, *tem; | |
61cb1816 | 3785 | profile_probability probability; |
4cd001d5 | 3786 | rtx_insn *insn_last, *insn; |
e13693ec | 3787 | int njumps = 0; |
9ed997be | 3788 | rtx_insn *call_insn = NULL; |
3cd757b1 | 3789 | |
25e880b1 | 3790 | /* We're not good at redistributing frame information. */ |
3791 | if (RTX_FRAME_RELATED_P (trial)) | |
4cd001d5 | 3792 | return trial; |
25e880b1 | 3793 | |
3cd757b1 | 3794 | if (any_condjump_p (trial) |
3795 | && (note = find_reg_note (trial, REG_BR_PROB, 0))) | |
61cb1816 | 3796 | split_branch_probability |
3797 | = profile_probability::from_reg_br_prob_note (XINT (note, 0)); | |
3798 | else | |
3799 | split_branch_probability = profile_probability::uninitialized (); | |
3800 | ||
3cd757b1 | 3801 | probability = split_branch_probability; |
3802 | ||
58a87a29 | 3803 | seq = split_insns (pat, trial); |
3cd757b1 | 3804 | |
61cb1816 | 3805 | split_branch_probability = profile_probability::uninitialized (); |
15bbde2b | 3806 | |
e13693ec | 3807 | if (!seq) |
4cd001d5 | 3808 | return trial; |
e13693ec | 3809 | |
3810 | /* Avoid infinite loop if any insn of the result matches | |
3811 | the original pattern. */ | |
3812 | insn_last = seq; | |
3813 | while (1) | |
15bbde2b | 3814 | { |
e13693ec | 3815 | if (INSN_P (insn_last) |
3816 | && rtx_equal_p (PATTERN (insn_last), pat)) | |
4cd001d5 | 3817 | return trial; |
e13693ec | 3818 | if (!NEXT_INSN (insn_last)) |
3819 | break; | |
3820 | insn_last = NEXT_INSN (insn_last); | |
3821 | } | |
d823ba47 | 3822 | |
3072d30e | 3823 | /* We will be adding the new sequence to the function. The splitters |
3824 | may have introduced invalid RTL sharing, so unshare the sequence now. */ | |
3825 | unshare_all_rtl_in_chain (seq); | |
3826 | ||
8f869004 | 3827 | /* Mark labels and copy flags. */ |
e13693ec | 3828 | for (insn = insn_last; insn ; insn = PREV_INSN (insn)) |
3829 | { | |
6d7dc5b9 | 3830 | if (JUMP_P (insn)) |
e13693ec | 3831 | { |
8f869004 | 3832 | if (JUMP_P (trial)) |
3833 | CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial); | |
e13693ec | 3834 | mark_jump_label (PATTERN (insn), insn, 0); |
3835 | njumps++; | |
61cb1816 | 3836 | if (probability.initialized_p () |
e13693ec | 3837 | && any_condjump_p (insn) |
3838 | && !find_reg_note (insn, REG_BR_PROB, 0)) | |
31d3e01c | 3839 | { |
e13693ec | 3840 | /* We can preserve the REG_BR_PROB notes only if exactly |
3841 | one jump is created, otherwise the machine description | |
3842 | is responsible for this step using the |
3843 | split_branch_probability variable. */ | |
611234b4 | 3844 | gcc_assert (njumps == 1); |
61cb1816 | 3845 | add_reg_br_prob_note (insn, probability); |
31d3e01c | 3846 | } |
e13693ec | 3847 | } |
3848 | } | |
3849 | ||
3850 | /* If we are splitting a CALL_INSN, look for the CALL_INSN | |
b0bd0491 | 3851 | in SEQ and copy any additional information across. */ |
6d7dc5b9 | 3852 | if (CALL_P (trial)) |
e13693ec | 3853 | { |
3854 | for (insn = insn_last; insn ; insn = PREV_INSN (insn)) | |
6d7dc5b9 | 3855 | if (CALL_P (insn)) |
e13693ec | 3856 | { |
4cd001d5 | 3857 | rtx_insn *next; |
3858 | rtx *p; | |
b0bd0491 | 3859 | |
2e3b0d0f | 3860 | gcc_assert (call_insn == NULL_RTX); |
3861 | call_insn = insn; | |
3862 | ||
b0bd0491 | 3863 | /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the |
3864 | target may have explicitly specified. */ | |
3865 | p = &CALL_INSN_FUNCTION_USAGE (insn); | |
0bb5a6cd | 3866 | while (*p) |
3867 | p = &XEXP (*p, 1); | |
3868 | *p = CALL_INSN_FUNCTION_USAGE (trial); | |
b0bd0491 | 3869 | |
3870 | /* If the old call was a sibling call, the new one must | |
3871 | be too. */ | |
e13693ec | 3872 | SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial); |
b0bd0491 | 3873 | |
3874 | /* If the new call is the last instruction in the sequence, | |
3875 | it will effectively replace the old call in-situ. Otherwise | |
3876 | we must move any following NOTE_INSN_CALL_ARG_LOCATION note | |
3877 | so that it comes immediately after the new call. */ | |
3878 | if (NEXT_INSN (insn)) | |
47e1410d | 3879 | for (next = NEXT_INSN (trial); |
3880 | next && NOTE_P (next); | |
3881 | next = NEXT_INSN (next)) | |
3882 | if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION) | |
b0bd0491 | 3883 | { |
3884 | remove_insn (next); | |
3885 | add_insn_after (next, insn, NULL); | |
47e1410d | 3886 | break; |
b0bd0491 | 3887 | } |
e13693ec | 3888 | } |
3889 | } | |
5262c253 | 3890 | |
e13693ec | 3891 | /* Copy notes, particularly those related to the CFG. */ |
3892 | for (note = REG_NOTES (trial); note; note = XEXP (note, 1)) | |
3893 | { | |
3894 | switch (REG_NOTE_KIND (note)) | |
3895 | { | |
3896 | case REG_EH_REGION: | |
e38def9c | 3897 | copy_reg_eh_region_note_backward (note, insn_last, NULL); |
e13693ec | 3898 | break; |
381eb1e7 | 3899 | |
e13693ec | 3900 | case REG_NORETURN: |
3901 | case REG_SETJMP: | |
4c0315d0 | 3902 | case REG_TM: |
3c0f15b4 | 3903 | case REG_CALL_NOCF_CHECK: |
698ff1f0 | 3904 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
381eb1e7 | 3905 | { |
6d7dc5b9 | 3906 | if (CALL_P (insn)) |
a1ddb869 | 3907 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
381eb1e7 | 3908 | } |
e13693ec | 3909 | break; |
5bb27a4b | 3910 | |
e13693ec | 3911 | case REG_NON_LOCAL_GOTO: |
698ff1f0 | 3912 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
31d3e01c | 3913 | { |
6d7dc5b9 | 3914 | if (JUMP_P (insn)) |
a1ddb869 | 3915 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
31d3e01c | 3916 | } |
e13693ec | 3917 | break; |
344dc2fa | 3918 | |
698ff1f0 | 3919 | case REG_INC: |
32aa77d9 | 3920 | if (!AUTO_INC_DEC) |
3921 | break; | |
3922 | ||
698ff1f0 | 3923 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
3924 | { | |
3925 | rtx reg = XEXP (note, 0); | |
3926 | if (!FIND_REG_INC_NOTE (insn, reg) | |
4073adaa | 3927 | && find_auto_inc (PATTERN (insn), reg)) |
a1ddb869 | 3928 | add_reg_note (insn, REG_INC, reg); |
698ff1f0 | 3929 | } |
3930 | break; | |
698ff1f0 | 3931 | |
dfe00a8f | 3932 | case REG_ARGS_SIZE: |
32f1a0c8 | 3933 | fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0))); |
dfe00a8f | 3934 | break; |
3935 | ||
2e3b0d0f | 3936 | case REG_CALL_DECL: |
3937 | gcc_assert (call_insn != NULL_RTX); | |
3938 | add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0)); | |
3939 | break; | |
3940 | ||
e13693ec | 3941 | default: |
3942 | break; | |
15bbde2b | 3943 | } |
e13693ec | 3944 | } |
3945 | ||
3946 | /* If there are LABELS inside the split insns increment the | |
3947 | usage count so we don't delete the label. */ | |
19d2fe05 | 3948 | if (INSN_P (trial)) |
e13693ec | 3949 | { |
3950 | insn = insn_last; | |
3951 | while (insn != NULL_RTX) | |
15bbde2b | 3952 | { |
19d2fe05 | 3953 | /* JUMP_P insns have already been "marked" above. */ |
6d7dc5b9 | 3954 | if (NONJUMP_INSN_P (insn)) |
e13693ec | 3955 | mark_label_nuses (PATTERN (insn)); |
15bbde2b | 3956 | |
e13693ec | 3957 | insn = PREV_INSN (insn); |
3958 | } | |
15bbde2b | 3959 | } |
3960 | ||
3b50f202 | 3961 | before = PREV_INSN (trial); |
3962 | after = NEXT_INSN (trial); | |
3963 | ||
5169661d | 3964 | tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial)); |
e13693ec | 3965 | |
3966 | delete_insn (trial); | |
e13693ec | 3967 | |
3968 | /* Recursively call try_split for each new insn created; by the | |
3969 | time control returns here that insn will be fully split, so | |
3970 | set LAST and continue from the insn after the one returned. | |
3971 | We can't use next_active_insn here since AFTER may be a note. | |
3972 | Ignore deleted insns, which can occur if not optimizing. */ |
3973 | for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem)) | |
dd1286fb | 3974 | if (! tem->deleted () && INSN_P (tem)) |
e13693ec | 3975 | tem = try_split (PATTERN (tem), tem, 1); |
3976 | ||
3977 | /* Return either the first or the last insn, depending on which was | |
3978 | requested. */ | |
3979 | return last | |
06f9d6ef | 3980 | ? (after ? PREV_INSN (after) : get_last_insn ()) |
e13693ec | 3981 | : NEXT_INSN (before); |
15bbde2b | 3982 | } |
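/* Illustrative sketch, not part of the original file: a hypothetical
   caller in a splitting pass, asking for the last insn of the
   replacement sequence.  */

static rtx_insn *
example_split_insn (rtx_insn *insn)
{
  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
  /* If nothing could be split, try_split returns INSN itself.  */
  return last;
}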
3983 | \f | |
3984 | /* Make and return an INSN rtx, initializing all its slots. | |
6a84e367 | 3985 | Store PATTERN in the pattern slot. */ |
15bbde2b | 3986 | |
2c57d586 | 3987 | rtx_insn * |
35cb5232 | 3988 | make_insn_raw (rtx pattern) |
15bbde2b | 3989 | { |
2c57d586 | 3990 | rtx_insn *insn; |
15bbde2b | 3991 | |
2c57d586 | 3992 | insn = as_a <rtx_insn *> (rtx_alloc (INSN)); |
15bbde2b | 3993 | |
575333f9 | 3994 | INSN_UID (insn) = cur_insn_uid++; |
15bbde2b | 3995 | PATTERN (insn) = pattern; |
3996 | INSN_CODE (insn) = -1; | |
fc92fa61 | 3997 | REG_NOTES (insn) = NULL; |
5169661d | 3998 | INSN_LOCATION (insn) = curr_insn_location (); |
ab87d1bc | 3999 | BLOCK_FOR_INSN (insn) = NULL; |
15bbde2b | 4000 | |
fe7f701d | 4001 | #ifdef ENABLE_RTL_CHECKING |
4002 | if (insn | |
9204e736 | 4003 | && INSN_P (insn) |
fe7f701d | 4004 | && (returnjump_p (insn) |
4005 | || (GET_CODE (insn) == SET | |
4006 | && SET_DEST (insn) == pc_rtx))) | |
4007 | { | |
c3ceba8e | 4008 | warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n"); |
fe7f701d | 4009 | debug_rtx (insn); |
4010 | } | |
4011 | #endif | |
d823ba47 | 4012 | |
15bbde2b | 4013 | return insn; |
4014 | } | |
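/* Illustrative sketch, not part of the original file: make_insn_raw
   only builds the insn; emit_insn is the usual entry point, which also
   links the insn into the chain via add_insn.  DEST and SRC are
   hypothetical operands.  */

static rtx_insn *
example_emit_set (rtx dest, rtx src)
{
  rtx_insn *insn = make_insn_raw (gen_rtx_SET (dest, src));
  add_insn (insn);
  return insn;
}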
4015 | ||
9845d120 | 4016 | /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */ |
4017 | ||
2c57d586 | 4018 | static rtx_insn * |
9845d120 | 4019 | make_debug_insn_raw (rtx pattern) |
4020 | { | |
2c57d586 | 4021 | rtx_debug_insn *insn; |
9845d120 | 4022 | |
2c57d586 | 4023 | insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN)); |
9845d120 | 4024 | INSN_UID (insn) = cur_debug_insn_uid++; |
4025 | if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID) | |
4026 | INSN_UID (insn) = cur_insn_uid++; | |
4027 | ||
4028 | PATTERN (insn) = pattern; | |
4029 | INSN_CODE (insn) = -1; | |
4030 | REG_NOTES (insn) = NULL; | |
5169661d | 4031 | INSN_LOCATION (insn) = curr_insn_location (); |
9845d120 | 4032 | BLOCK_FOR_INSN (insn) = NULL; |
4033 | ||
4034 | return insn; | |
4035 | } | |
4036 | ||
31d3e01c | 4037 | /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */ |
15bbde2b | 4038 | |
2c57d586 | 4039 | static rtx_insn * |
35cb5232 | 4040 | make_jump_insn_raw (rtx pattern) |
15bbde2b | 4041 | { |
2c57d586 | 4042 | rtx_jump_insn *insn; |
15bbde2b | 4043 | |
2c57d586 | 4044 | insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN)); |
fc92fa61 | 4045 | INSN_UID (insn) = cur_insn_uid++; |
15bbde2b | 4046 | |
4047 | PATTERN (insn) = pattern; | |
4048 | INSN_CODE (insn) = -1; | |
fc92fa61 | 4049 | REG_NOTES (insn) = NULL; |
4050 | JUMP_LABEL (insn) = NULL; | |
5169661d | 4051 | INSN_LOCATION (insn) = curr_insn_location (); |
ab87d1bc | 4052 | BLOCK_FOR_INSN (insn) = NULL; |
15bbde2b | 4053 | |
4054 | return insn; | |
4055 | } | |
6e911104 | 4056 | |
31d3e01c | 4057 | /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */ |
6e911104 | 4058 | |
2c57d586 | 4059 | static rtx_insn * |
35cb5232 | 4060 | make_call_insn_raw (rtx pattern) |
6e911104 | 4061 | { |
2c57d586 | 4062 | rtx_call_insn *insn; |
6e911104 | 4063 | |
2c57d586 | 4064 | insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN)); |
6e911104 | 4065 | INSN_UID (insn) = cur_insn_uid++; |
4066 | ||
4067 | PATTERN (insn) = pattern; | |
4068 | INSN_CODE (insn) = -1; | |
6e911104 | 4069 | REG_NOTES (insn) = NULL; |
4070 | CALL_INSN_FUNCTION_USAGE (insn) = NULL; | |
5169661d | 4071 | INSN_LOCATION (insn) = curr_insn_location (); |
ab87d1bc | 4072 | BLOCK_FOR_INSN (insn) = NULL; |
6e911104 | 4073 | |
4074 | return insn; | |
4075 | } | |
35f3420b | 4076 | |
4077 | /* Like `make_insn_raw' but make a NOTE instead of an insn. */ | |
4078 | ||
cef3d8ad | 4079 | static rtx_note * |
35f3420b | 4080 | make_note_raw (enum insn_note subtype) |
4081 | { | |
4082 | /* Some notes are never created this way at all. These notes are | |
4083 | only created by patching out insns. */ | |
4084 | gcc_assert (subtype != NOTE_INSN_DELETED_LABEL | |
4085 | && subtype != NOTE_INSN_DELETED_DEBUG_LABEL); | |
4086 | ||
cef3d8ad | 4087 | rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE)); |
35f3420b | 4088 | INSN_UID (note) = cur_insn_uid++; |
4089 | NOTE_KIND (note) = subtype; | |
4090 | BLOCK_FOR_INSN (note) = NULL; | |
4091 | memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note))); | |
4092 | return note; | |
4093 | } | |
15bbde2b | 4094 | \f |
35f3420b | 4095 | /* Add INSN to the end of the doubly-linked list, between PREV and NEXT. |
4096 | INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects, | |
4097 | but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */ | |
4098 | ||
4099 | static inline void | |
3e75e92b | 4100 | link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next) |
35f3420b | 4101 | { |
4a57a2e8 | 4102 | SET_PREV_INSN (insn) = prev; |
4103 | SET_NEXT_INSN (insn) = next; | |
35f3420b | 4104 | if (prev != NULL) |
4105 | { | |
4a57a2e8 | 4106 | SET_NEXT_INSN (prev) = insn; |
35f3420b | 4107 | if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE) |
4108 | { | |
f17e3fff | 4109 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev)); |
4110 | SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn; | |
35f3420b | 4111 | } |
4112 | } | |
4113 | if (next != NULL) | |
4114 | { | |
4a57a2e8 | 4115 | SET_PREV_INSN (next) = insn; |
35f3420b | 4116 | if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) |
f17e3fff | 4117 | { |
4118 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next)); | |
4119 | SET_PREV_INSN (sequence->insn (0)) = insn; | |
4120 | } | |
35f3420b | 4121 | } |
34f5b9ac | 4122 | |
4123 | if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
4124 | { | |
f17e3fff | 4125 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn)); |
4126 | SET_PREV_INSN (sequence->insn (0)) = prev; | |
4127 | SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next; | |
34f5b9ac | 4128 | } |
35f3420b | 4129 | } |
4130 | ||
15bbde2b | 4131 | /* Add INSN to the end of the doubly-linked list. |
4132 | INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */ | |
4133 | ||
4134 | void | |
3e75e92b | 4135 | add_insn (rtx_insn *insn) |
15bbde2b | 4136 | { |
3e75e92b | 4137 | rtx_insn *prev = get_last_insn (); |
35f3420b | 4138 | link_insn_into_chain (insn, prev, NULL); |
c9281ef8 | 4139 | if (get_insns () == NULL) |
06f9d6ef | 4140 | set_first_insn (insn); |
06f9d6ef | 4141 | set_last_insn (insn); |
15bbde2b | 4142 | } |
4143 | ||
35f3420b | 4144 | /* Add INSN into the doubly-linked list after insn AFTER. */ |
15bbde2b | 4145 | |
35f3420b | 4146 | static void |
3e75e92b | 4147 | add_insn_after_nobb (rtx_insn *insn, rtx_insn *after) |
15bbde2b | 4148 | { |
3e75e92b | 4149 | rtx_insn *next = NEXT_INSN (after); |
15bbde2b | 4150 | |
dd1286fb | 4151 | gcc_assert (!optimize || !after->deleted ()); |
f65c10c0 | 4152 | |
35f3420b | 4153 | link_insn_into_chain (insn, after, next); |
15bbde2b | 4154 | |
35f3420b | 4155 | if (next == NULL) |
15bbde2b | 4156 | { |
c36aa54b | 4157 | struct sequence_stack *seq; |
4158 | ||
4159 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
4160 | if (after == seq->last) | |
4161 | { | |
4162 | seq->last = insn; | |
4163 | break; | |
4164 | } | |
15bbde2b | 4165 | } |
35f3420b | 4166 | } |
4167 | ||
4168 | /* Add INSN into the doubly-linked list before insn BEFORE. */ | |
4169 | ||
4170 | static void | |
3e75e92b | 4171 | add_insn_before_nobb (rtx_insn *insn, rtx_insn *before) |
35f3420b | 4172 | { |
3e75e92b | 4173 | rtx_insn *prev = PREV_INSN (before); |
35f3420b | 4174 | |
dd1286fb | 4175 | gcc_assert (!optimize || !before->deleted ()); |
35f3420b | 4176 | |
4177 | link_insn_into_chain (insn, prev, before); | |
4178 | ||
4179 | if (prev == NULL) | |
15bbde2b | 4180 | { |
c36aa54b | 4181 | struct sequence_stack *seq; |
312de84d | 4182 | |
c36aa54b | 4183 | for (seq = get_current_sequence (); seq; seq = seq->next) |
4184 | if (before == seq->first) | |
4185 | { | |
4186 | seq->first = insn; | |
4187 | break; | |
4188 | } | |
4189 | ||
4190 | gcc_assert (seq); | |
15bbde2b | 4191 | } |
35f3420b | 4192 | } |
4193 | ||
4194 | /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN. | |
4195 | If BB is NULL, an attempt is made to infer the bb from AFTER. |
4196 | ||
4197 | This and the next function should be the only functions called | |
4198 | to insert an insn once delay slots have been filled since only | |
4199 | they know how to update a SEQUENCE. */ | |
15bbde2b | 4200 | |
35f3420b | 4201 | void |
3e75e92b | 4202 | add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb) |
35f3420b | 4203 | { |
26bb3cb2 | 4204 | rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); |
3e75e92b | 4205 | rtx_insn *after = as_a <rtx_insn *> (uncast_after); |
35f3420b | 4206 | add_insn_after_nobb (insn, after); |
6d7dc5b9 | 4207 | if (!BARRIER_P (after) |
4208 | && !BARRIER_P (insn) | |
9dda7915 | 4209 | && (bb = BLOCK_FOR_INSN (after))) |
4210 | { | |
4211 | set_block_for_insn (insn, bb); | |
308f9b79 | 4212 | if (INSN_P (insn)) |
3072d30e | 4213 | df_insn_rescan (insn); |
9dda7915 | 4214 | /* Should not happen as the first insn in the BB is always
3fb1e43b | 4215 | either a NOTE or a LABEL. */ |
5496dbfc | 4216 | if (BB_END (bb) == after |
9dda7915 | 4217 | /* Avoid clobbering of structure when creating new BB. */ |
6d7dc5b9 | 4218 | && !BARRIER_P (insn) |
ad4583d9 | 4219 | && !NOTE_INSN_BASIC_BLOCK_P (insn)) |
26bb3cb2 | 4220 | BB_END (bb) = insn; |
9dda7915 | 4221 | } |
15bbde2b | 4222 | } |
4223 | ||
35f3420b | 4224 | /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN. |
4225 | If BB is NULL, an attempt is made to infer the bb from BEFORE. |
4226 | ||
4227 | This and the previous function should be the only functions called | |
4228 | to insert an insn once delay slots have been filled since only | |
4229 | they know how to update a SEQUENCE. */ | |
312de84d | 4230 | |
4231 | void | |
3e75e92b | 4232 | add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb) |
312de84d | 4233 | { |
3e75e92b | 4234 | rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); |
4235 | rtx_insn *before = as_a <rtx_insn *> (uncast_before); | |
35f3420b | 4236 | add_insn_before_nobb (insn, before); |
312de84d | 4237 | |
48e1416a | 4238 | if (!bb |
3072d30e | 4239 | && !BARRIER_P (before) |
4240 | && !BARRIER_P (insn)) | |
4241 | bb = BLOCK_FOR_INSN (before); | |
4242 | ||
4243 | if (bb) | |
9dda7915 | 4244 | { |
4245 | set_block_for_insn (insn, bb); | |
308f9b79 | 4246 | if (INSN_P (insn)) |
3072d30e | 4247 | df_insn_rescan (insn); |
611234b4 | 4248 | /* Should not happen as the first insn in the BB is always either a NOTE
ba821eb1 | 4249 | or a LABEL. */ |
611234b4 | 4250 | gcc_assert (BB_HEAD (bb) != insn |
4251 | /* Avoid clobbering of structure when creating new BB. */ | |
4252 | || BARRIER_P (insn) | |
ad4583d9 | 4253 | || NOTE_INSN_BASIC_BLOCK_P (insn)); |
9dda7915 | 4254 | } |
312de84d | 4255 | } |
4256 | ||
3072d30e | 4257 | /* Replace INSN with a deleted instruction note. */ |
4258 | ||
fc3d1695 | 4259 | void |
4260 | set_insn_deleted (rtx insn) | |
3072d30e | 4261 | { |
91f71fa3 | 4262 | if (INSN_P (insn)) |
e149ca56 | 4263 | df_insn_delete (as_a <rtx_insn *> (insn)); |
3072d30e | 4264 | PUT_CODE (insn, NOTE); |
4265 | NOTE_KIND (insn) = NOTE_INSN_DELETED; | |
4266 | } | |
4267 | ||
4268 | ||
93ff53d3 | 4269 | /* Unlink INSN from the insn chain. |
4270 | ||
4271 | This function knows how to handle sequences. | |
4272 | ||
4273 | This function does not invalidate data flow information associated with | |
4274 | INSN (i.e. does not call df_insn_delete). That makes this function | |
4275 | usable for only disconnecting an insn from the chain, and re-emit it | |
4276 | elsewhere later. | |
4277 | ||
4278 | To later insert INSN elsewhere in the insn chain via add_insn and | |
4279 | similar functions, PREV_INSN and NEXT_INSN must be nullified by | |
4280 | the caller. Nullifying them here breaks many insn chain walks. | |
4281 | ||
4282 | To really delete an insn and related DF information, use delete_insn. */ | |
4283 | ||
7ddcf2bf | 4284 | void |
4cd001d5 | 4285 | remove_insn (rtx uncast_insn) |
7ddcf2bf | 4286 | { |
4cd001d5 | 4287 | rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); |
26bb3cb2 | 4288 | rtx_insn *next = NEXT_INSN (insn); |
4289 | rtx_insn *prev = PREV_INSN (insn); | |
e4bf866d | 4290 | basic_block bb; |
4291 | ||
7ddcf2bf | 4292 | if (prev) |
4293 | { | |
4a57a2e8 | 4294 | SET_NEXT_INSN (prev) = next; |
6d7dc5b9 | 4295 | if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE) |
7ddcf2bf | 4296 | { |
f17e3fff | 4297 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev)); |
4298 | SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next; | |
7ddcf2bf | 4299 | } |
4300 | } | |
7ddcf2bf | 4301 | else |
4302 | { | |
c36aa54b | 4303 | struct sequence_stack *seq; |
4304 | ||
4305 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
4306 | if (insn == seq->first) | |
7ddcf2bf | 4307 | { |
c36aa54b | 4308 | seq->first = next; |
7ddcf2bf | 4309 | break; |
4310 | } | |
4311 | ||
c36aa54b | 4312 | gcc_assert (seq); |
7ddcf2bf | 4313 | } |
4314 | ||
4315 | if (next) | |
4316 | { | |
4a57a2e8 | 4317 | SET_PREV_INSN (next) = prev; |
6d7dc5b9 | 4318 | if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) |
f17e3fff | 4319 | { |
4320 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next)); | |
4321 | SET_PREV_INSN (sequence->insn (0)) = prev; | |
4322 | } | |
7ddcf2bf | 4323 | } |
7ddcf2bf | 4324 | else |
4325 | { | |
c36aa54b | 4326 | struct sequence_stack *seq; |
4327 | ||
4328 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
4329 | if (insn == seq->last) | |
7ddcf2bf | 4330 | { |
c36aa54b | 4331 | seq->last = prev; |
7ddcf2bf | 4332 | break; |
4333 | } | |
4334 | ||
c36aa54b | 4335 | gcc_assert (seq); |
7ddcf2bf | 4336 | } |
b983ea33 | 4337 | |
b983ea33 | 4338 | /* Fix up basic block boundaries, if necessary. */ |
6d7dc5b9 | 4339 | if (!BARRIER_P (insn) |
e4bf866d | 4340 | && (bb = BLOCK_FOR_INSN (insn))) |
4341 | { | |
5496dbfc | 4342 | if (BB_HEAD (bb) == insn) |
e4bf866d | 4343 | { |
f4aee538 | 4344 | /* Never ever delete the basic block note without deleting whole |
4345 | basic block. */ | |
611234b4 | 4346 | gcc_assert (!NOTE_P (insn)); |
26bb3cb2 | 4347 | BB_HEAD (bb) = next; |
e4bf866d | 4348 | } |
5496dbfc | 4349 | if (BB_END (bb) == insn) |
26bb3cb2 | 4350 | BB_END (bb) = prev; |
e4bf866d | 4351 | } |
7ddcf2bf | 4352 | } |
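
/* A hedged usage sketch, not from the original source (INSN and
   OTHER_INSN are hypothetical): detach an insn and re-emit it
   elsewhere in the chain.

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, other_insn, NULL);

   The two SET_*_INSN stores perform the nullification that the
   comment above requires before re-insertion; passing NULL for the
   basic block lets add_insn_after infer it from OTHER_INSN.  */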
4353 | ||
d5f9786f | 4354 | /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */ |
4355 | ||
4356 | void | |
35cb5232 | 4357 | add_function_usage_to (rtx call_insn, rtx call_fusage) |
d5f9786f | 4358 | { |
611234b4 | 4359 | gcc_assert (call_insn && CALL_P (call_insn)); |
d5f9786f | 4360 | |
4361 | /* Put the register usage information on the CALL. If there is already | |
4362 | some usage information, put ours at the end. */ | |
4363 | if (CALL_INSN_FUNCTION_USAGE (call_insn)) | |
4364 | { | |
4365 | rtx link; | |
4366 | ||
4367 | for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0; | |
4368 | link = XEXP (link, 1)) | |
4369 | ; | |
4370 | ||
4371 | XEXP (link, 1) = call_fusage; | |
4372 | } | |
4373 | else | |
4374 | CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage; | |
4375 | } | |
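
/* A hedged example, not from the original source (CALL_INSN and the
   register number are hypothetical): CALL_FUSAGE is typically built
   up with use_reg and friends, then attached here.

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (Pmode, 3));
     add_function_usage_to (call_insn, call_fusage);

   This records that the call uses the given hard register.  */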
4376 | ||
15bbde2b | 4377 | /* Delete all insns made since FROM. |
4378 | FROM becomes the new last instruction. */ | |
4379 | ||
4380 | void | |
57c26b3a | 4381 | delete_insns_since (rtx_insn *from) |
15bbde2b | 4382 | { |
4383 | if (from == 0) | |
06f9d6ef | 4384 | set_first_insn (0); |
15bbde2b | 4385 | else |
4a57a2e8 | 4386 | SET_NEXT_INSN (from) = 0; |
06f9d6ef | 4387 | set_last_insn (from); |
15bbde2b | 4388 | } |
4389 | ||
34e2ddcd | 4390 | /* This function is deprecated; please use sequences instead. | |
4391 | ||
4392 | Move a consecutive bunch of insns to a different place in the chain. | |
15bbde2b | 4393 | The insns to be moved are those between FROM and TO. |
4394 | They are moved to a new position after the insn AFTER. | |
4395 | AFTER must not be FROM or TO or any insn in between. | |
4396 | ||
4397 | This function does not know about SEQUENCEs and hence should not be | |
4398 | called after delay-slot filling has been done. */ | |
4399 | ||
4400 | void | |
57c26b3a | 4401 | reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after) |
15bbde2b | 4402 | { |
382ecba7 | 4403 | if (flag_checking) |
4404 | { | |
4405 | for (rtx_insn *x = from; x != to; x = NEXT_INSN (x)) | |
4406 | gcc_assert (after != x); | |
4407 | gcc_assert (after != to); | |
4408 | } | |
7f6ca11f | 4409 | |
15bbde2b | 4410 | /* Splice this bunch out of where it is now. */ |
4411 | if (PREV_INSN (from)) | |
4a57a2e8 | 4412 | SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to); |
15bbde2b | 4413 | if (NEXT_INSN (to)) |
4a57a2e8 | 4414 | SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from); |
06f9d6ef | 4415 | if (get_last_insn () == to) |
4416 | set_last_insn (PREV_INSN (from)); | |
4417 | if (get_insns () == from) | |
4418 | set_first_insn (NEXT_INSN (to)); | |
15bbde2b | 4419 | |
4420 | /* Make the new neighbors point to it and it to them. */ | |
4421 | if (NEXT_INSN (after)) | |
4a57a2e8 | 4422 | SET_PREV_INSN (NEXT_INSN (after)) = to; |
15bbde2b | 4423 | |
4a57a2e8 | 4424 | SET_NEXT_INSN (to) = NEXT_INSN (after); |
4425 | SET_PREV_INSN (from) = after; | |
4426 | SET_NEXT_INSN (after) = from; | |
9af5ce0c | 4427 | if (after == get_last_insn ()) |
06f9d6ef | 4428 | set_last_insn (to); |
15bbde2b | 4429 | } |
4430 | ||
9dda7915 | 4431 | /* Same as function above, but take care to update BB boundaries. */ |
4432 | void | |
4a3fb716 | 4433 | reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after) |
9dda7915 | 4434 | { |
4a3fb716 | 4435 | rtx_insn *prev = PREV_INSN (from); |
9dda7915 | 4436 | basic_block bb, bb2; |
4437 | ||
4438 | reorder_insns_nobb (from, to, after); | |
4439 | ||
6d7dc5b9 | 4440 | if (!BARRIER_P (after) |
9dda7915 | 4441 | && (bb = BLOCK_FOR_INSN (after))) |
4442 | { | |
e149ca56 | 4443 | rtx_insn *x; |
3072d30e | 4444 | df_set_bb_dirty (bb); |
d4c5e26d | 4445 | |
6d7dc5b9 | 4446 | if (!BARRIER_P (from) |
9dda7915 | 4447 | && (bb2 = BLOCK_FOR_INSN (from))) |
4448 | { | |
5496dbfc | 4449 | if (BB_END (bb2) == to) |
26bb3cb2 | 4450 | BB_END (bb2) = prev; |
3072d30e | 4451 | df_set_bb_dirty (bb2); |
9dda7915 | 4452 | } |
4453 | ||
5496dbfc | 4454 | if (BB_END (bb) == after) |
26bb3cb2 | 4455 | BB_END (bb) = to; |
9dda7915 | 4456 | |
4457 | for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x)) | |
7097dd0c | 4458 | if (!BARRIER_P (x)) |
a2bdd643 | 4459 | df_insn_change_bb (x, bb); |
9dda7915 | 4460 | } |
4461 | } | |
4462 | ||
15bbde2b | 4463 | \f |
31d3e01c | 4464 | /* Emit insn(s) of given code and pattern |
4465 | at a specified place within the doubly-linked list. | |
15bbde2b | 4466 | |
31d3e01c | 4467 | All of the emit_foo global entry points accept an object |
4468 | X which is either an insn list or a PATTERN of a single | |
4469 | instruction. | |
15bbde2b | 4470 | |
31d3e01c | 4471 | There are thus a few canonical ways to generate code and |
4472 | emit it at a specific place in the instruction stream. For | |
4473 | example, consider the instruction named SPOT and the fact that | |
4474 | we would like to emit some instructions before SPOT. We might | |
4475 | do it like this: | |
15bbde2b | 4476 | |
31d3e01c | 4477 | start_sequence (); |
4478 | ... emit the new instructions ... | |
4479 | insns_head = get_insns (); | |
4480 | end_sequence (); | |
15bbde2b | 4481 | |
31d3e01c | 4482 | emit_insn_before (insns_head, SPOT); |
15bbde2b | 4483 | |
31d3e01c | 4484 | It used to be common to generate SEQUENCE rtl instead, but that |
4485 | is a relic of the past which no longer occurs. The reason is that | |
4486 | SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE | |
4487 | generated would almost certainly die right after it was created. */ | |
15bbde2b | 4488 | |
722334ea | 4489 | static rtx_insn * |
5f7c5ddd | 4490 | emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb, |
2c57d586 | 4491 | rtx_insn *(*make_raw) (rtx)) |
15bbde2b | 4492 | { |
2c57d586 | 4493 | rtx_insn *insn; |
15bbde2b | 4494 | |
611234b4 | 4495 | gcc_assert (before); |
31d3e01c | 4496 | |
4497 | if (x == NULL_RTX) | |
722334ea | 4498 | return safe_as_a <rtx_insn *> (last); |
31d3e01c | 4499 | |
4500 | switch (GET_CODE (x)) | |
15bbde2b | 4501 | { |
9845d120 | 4502 | case DEBUG_INSN: |
31d3e01c | 4503 | case INSN: |
4504 | case JUMP_INSN: | |
4505 | case CALL_INSN: | |
4506 | case CODE_LABEL: | |
4507 | case BARRIER: | |
4508 | case NOTE: | |
2c57d586 | 4509 | insn = as_a <rtx_insn *> (x); |
31d3e01c | 4510 | while (insn) |
4511 | { | |
2c57d586 | 4512 | rtx_insn *next = NEXT_INSN (insn); |
3072d30e | 4513 | add_insn_before (insn, before, bb); |
31d3e01c | 4514 | last = insn; |
4515 | insn = next; | |
4516 | } | |
4517 | break; | |
4518 | ||
4519 | #ifdef ENABLE_RTL_CHECKING | |
4520 | case SEQUENCE: | |
611234b4 | 4521 | gcc_unreachable (); |
31d3e01c | 4522 | break; |
4523 | #endif | |
4524 | ||
4525 | default: | |
5f7c5ddd | 4526 | last = (*make_raw) (x); |
3072d30e | 4527 | add_insn_before (last, before, bb); |
31d3e01c | 4528 | break; |
15bbde2b | 4529 | } |
4530 | ||
722334ea | 4531 | return safe_as_a <rtx_insn *> (last); |
15bbde2b | 4532 | } |
4533 | ||
5f7c5ddd | 4534 | /* Make X be output before the instruction BEFORE. */ |
4535 | ||
722334ea | 4536 | rtx_insn * |
c9a09955 | 4537 | emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb) |
5f7c5ddd | 4538 | { |
4539 | return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw); | |
4540 | } | |
4541 | ||
31d3e01c | 4542 | /* Make an instruction with body X and code JUMP_INSN |
15bbde2b | 4543 | and output it before the instruction BEFORE. */ |
4544 | ||
f9a00e9e | 4545 | rtx_jump_insn * |
c9a09955 | 4546 | emit_jump_insn_before_noloc (rtx x, rtx_insn *before) |
15bbde2b | 4547 | { |
f9a00e9e | 4548 | return as_a <rtx_jump_insn *> ( |
4549 | emit_pattern_before_noloc (x, before, NULL_RTX, NULL, | |
4550 | make_jump_insn_raw)); | |
15bbde2b | 4551 | } |
4552 | ||
31d3e01c | 4553 | /* Make an instruction with body X and code CALL_INSN |
cd0fe062 | 4554 | and output it before the instruction BEFORE. */ |
4555 | ||
722334ea | 4556 | rtx_insn * |
c9a09955 | 4557 | emit_call_insn_before_noloc (rtx x, rtx_insn *before) |
cd0fe062 | 4558 | { |
5f7c5ddd | 4559 | return emit_pattern_before_noloc (x, before, NULL_RTX, NULL, |
4560 | make_call_insn_raw); | |
cd0fe062 | 4561 | } |
4562 | ||
9845d120 | 4563 | /* Make an instruction with body X and code DEBUG_INSN |
4564 | and output it before the instruction BEFORE. */ | |
4565 | ||
722334ea | 4566 | rtx_insn * |
9845d120 | 4567 | emit_debug_insn_before_noloc (rtx x, rtx before) |
4568 | { | |
5f7c5ddd | 4569 | return emit_pattern_before_noloc (x, before, NULL_RTX, NULL, |
4570 | make_debug_insn_raw); | |
9845d120 | 4571 | } |
4572 | ||
15bbde2b | 4573 | /* Make an insn of code BARRIER |
71caadc0 | 4574 | and output it before the insn BEFORE. */ |
15bbde2b | 4575 | |
722334ea | 4576 | rtx_barrier * |
35cb5232 | 4577 | emit_barrier_before (rtx before) |
15bbde2b | 4578 | { |
722334ea | 4579 | rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
15bbde2b | 4580 | |
4581 | INSN_UID (insn) = cur_insn_uid++; | |
4582 | ||
3072d30e | 4583 | add_insn_before (insn, before, NULL); |
15bbde2b | 4584 | return insn; |
4585 | } | |
4586 | ||
71caadc0 | 4587 | /* Emit the label LABEL before the insn BEFORE. */ |
4588 | ||
f9a00e9e | 4589 | rtx_code_label * |
c9a09955 | 4590 | emit_label_before (rtx label, rtx_insn *before) |
71caadc0 | 4591 | { |
596ef494 | 4592 | gcc_checking_assert (INSN_UID (label) == 0); |
4593 | INSN_UID (label) = cur_insn_uid++; | |
4594 | add_insn_before (label, before, NULL); | |
f9a00e9e | 4595 | return as_a <rtx_code_label *> (label); |
71caadc0 | 4596 | } |
15bbde2b | 4597 | \f |
31d3e01c | 4598 | /* Helper for emit_insn_after, handles lists of instructions |
4599 | efficiently. */ | |
15bbde2b | 4600 | |
f17e3fff | 4601 | static rtx_insn * |
4602 | emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb) | |
15bbde2b | 4603 | { |
f17e3fff | 4604 | rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); |
26bb3cb2 | 4605 | rtx_insn *last; |
4606 | rtx_insn *after_after; | |
3072d30e | 4607 | if (!bb && !BARRIER_P (after)) |
4608 | bb = BLOCK_FOR_INSN (after); | |
15bbde2b | 4609 | |
3072d30e | 4610 | if (bb) |
15bbde2b | 4611 | { |
3072d30e | 4612 | df_set_bb_dirty (bb); |
31d3e01c | 4613 | for (last = first; NEXT_INSN (last); last = NEXT_INSN (last)) |
6d7dc5b9 | 4614 | if (!BARRIER_P (last)) |
3072d30e | 4615 | { |
4616 | set_block_for_insn (last, bb); | |
4617 | df_insn_rescan (last); | |
4618 | } | |
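/* The loop above handled every insn but the final one; set its
   block and rescan it here.  */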
6d7dc5b9 | 4619 | if (!BARRIER_P (last)) |
3072d30e | 4620 | { |
4621 | set_block_for_insn (last, bb); | |
4622 | df_insn_rescan (last); | |
4623 | } | |
5496dbfc | 4624 | if (BB_END (bb) == after) |
26bb3cb2 | 4625 | BB_END (bb) = last; |
15bbde2b | 4626 | } |
4627 | else | |
31d3e01c | 4628 | for (last = first; NEXT_INSN (last); last = NEXT_INSN (last)) |
4629 | continue; | |
4630 | ||
4631 | after_after = NEXT_INSN (after); | |
4632 | ||
4a57a2e8 | 4633 | SET_NEXT_INSN (after) = first; |
4634 | SET_PREV_INSN (first) = after; | |
4635 | SET_NEXT_INSN (last) = after_after; | |
31d3e01c | 4636 | if (after_after) |
4a57a2e8 | 4637 | SET_PREV_INSN (after_after) = last; |
31d3e01c | 4638 | |
9af5ce0c | 4639 | if (after == get_last_insn ()) |
06f9d6ef | 4640 | set_last_insn (last); |
e1ab7874 | 4641 | |
31d3e01c | 4642 | return last; |
4643 | } | |
4644 | ||
722334ea | 4645 | static rtx_insn * |
f17e3fff | 4646 | emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb, |
2c57d586 | 4647 | rtx_insn *(*make_raw)(rtx)) |
31d3e01c | 4648 | { |
f17e3fff | 4649 | rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); |
4650 | rtx_insn *last = after; | |
31d3e01c | 4651 | |
611234b4 | 4652 | gcc_assert (after); |
31d3e01c | 4653 | |
4654 | if (x == NULL_RTX) | |
f17e3fff | 4655 | return last; |
31d3e01c | 4656 | |
4657 | switch (GET_CODE (x)) | |
15bbde2b | 4658 | { |
9845d120 | 4659 | case DEBUG_INSN: |
31d3e01c | 4660 | case INSN: |
4661 | case JUMP_INSN: | |
4662 | case CALL_INSN: | |
4663 | case CODE_LABEL: | |
4664 | case BARRIER: | |
4665 | case NOTE: | |
26bb3cb2 | 4666 | last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb); |
31d3e01c | 4667 | break; |
4668 | ||
4669 | #ifdef ENABLE_RTL_CHECKING | |
4670 | case SEQUENCE: | |
611234b4 | 4671 | gcc_unreachable (); |
31d3e01c | 4672 | break; |
4673 | #endif | |
4674 | ||
4675 | default: | |
5f7c5ddd | 4676 | last = (*make_raw) (x); |
3072d30e | 4677 | add_insn_after (last, after, bb); |
31d3e01c | 4678 | break; |
15bbde2b | 4679 | } |
4680 | ||
f17e3fff | 4681 | return last; |
15bbde2b | 4682 | } |
4683 | ||
5f7c5ddd | 4684 | /* Make X be output after the insn AFTER and set its basic block to BB. | |
4685 | If BB is NULL, an attempt is made to infer the BB from AFTER. */ | |
4686 | ||
722334ea | 4687 | rtx_insn * |
5f7c5ddd | 4688 | emit_insn_after_noloc (rtx x, rtx after, basic_block bb) |
4689 | { | |
4690 | return emit_pattern_after_noloc (x, after, bb, make_insn_raw); | |
4691 | } | |
4692 | ||
1bea98fb | 4693 | |
31d3e01c | 4694 | /* Make an insn of code JUMP_INSN with body X |
15bbde2b | 4695 | and output it after the insn AFTER. */ |
4696 | ||
f9a00e9e | 4697 | rtx_jump_insn * |
0891f67c | 4698 | emit_jump_insn_after_noloc (rtx x, rtx after) |
15bbde2b | 4699 | { |
f9a00e9e | 4700 | return as_a <rtx_jump_insn *> ( |
4701 | emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw)); | |
31d3e01c | 4702 | } |
4703 | ||
4704 | /* Make an instruction with body X and code CALL_INSN | |
4705 | and output it after the instruction AFTER. */ | |
4706 | ||
722334ea | 4707 | rtx_insn * |
0891f67c | 4708 | emit_call_insn_after_noloc (rtx x, rtx after) |
31d3e01c | 4709 | { |
5f7c5ddd | 4710 | return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw); |
15bbde2b | 4711 | } |
4712 | ||
9845d120 | 4713 | /* Make an instruction with body X and code DEBUG_INSN | |
4714 | and output it after the instruction AFTER. */ | |
4715 | ||
722334ea | 4716 | rtx_insn * |
9845d120 | 4717 | emit_debug_insn_after_noloc (rtx x, rtx after) |
4718 | { | |
5f7c5ddd | 4719 | return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw); |
9845d120 | 4720 | } |
4721 | ||
15bbde2b | 4722 | /* Make an insn of code BARRIER |
4723 | and output it after the insn AFTER. */ | |
4724 | ||
722334ea | 4725 | rtx_barrier * |
35cb5232 | 4726 | emit_barrier_after (rtx after) |
15bbde2b | 4727 | { |
722334ea | 4728 | rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
15bbde2b | 4729 | |
4730 | INSN_UID (insn) = cur_insn_uid++; | |
4731 | ||
3072d30e | 4732 | add_insn_after (insn, after, NULL); |
15bbde2b | 4733 | return insn; |
4734 | } | |
4735 | ||
4736 | /* Emit the label LABEL after the insn AFTER. */ | |
4737 | ||
722334ea | 4738 | rtx_insn * |
c9a09955 | 4739 | emit_label_after (rtx label, rtx_insn *after) |
15bbde2b | 4740 | { |
596ef494 | 4741 | gcc_checking_assert (INSN_UID (label) == 0); |
4742 | INSN_UID (label) = cur_insn_uid++; | |
4743 | add_insn_after (label, after, NULL); | |
722334ea | 4744 | return as_a <rtx_insn *> (label); |
15bbde2b | 4745 | } |
35f3420b | 4746 | \f |
4747 | /* Notes require a bit of special handling: Some notes need to have their | |
4748 | BLOCK_FOR_INSN set, others should never have it set, and some should | |
4749 | have it set or clear depending on the context. */ | |
4750 | ||
4751 | /* Return true iff a note of kind SUBTYPE should be emitted with routines | |
4752 | that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the | |
4753 | caller is asked to emit a note before BB_HEAD, or after BB_END. */ | |
4754 | ||
4755 | static bool | |
4756 | note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p) | |
4757 | { | |
4758 | switch (subtype) | |
4759 | { | |
4760 | /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */ | |
4761 | case NOTE_INSN_SWITCH_TEXT_SECTIONS: | |
4762 | return true; | |
4763 | ||
4764 | /* Notes for var tracking and EH region markers can appear between or | |
4765 | inside basic blocks. If the caller is emitting on the basic block | |
4766 | boundary, do not set BLOCK_FOR_INSN on the new note. */ | |
4767 | case NOTE_INSN_VAR_LOCATION: | |
4768 | case NOTE_INSN_CALL_ARG_LOCATION: | |
4769 | case NOTE_INSN_EH_REGION_BEG: | |
4770 | case NOTE_INSN_EH_REGION_END: | |
4771 | return on_bb_boundary_p; | |
4772 | ||
4773 | /* Otherwise, BLOCK_FOR_INSN must be set. */ | |
4774 | default: | |
4775 | return false; | |
4776 | } | |
4777 | } | |
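
/* For example (hypothetical call sites): emitting a
   NOTE_INSN_VAR_LOCATION after BB_END (bb) leaves BLOCK_FOR_INSN
   clear, because the note sits on the block boundary, whereas the
   same note emitted in the middle of the block has BLOCK_FOR_INSN
   set by add_insn_after.  */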
15bbde2b | 4778 | |
4779 | /* Emit a note of subtype SUBTYPE after the insn AFTER. */ | |
4780 | ||
cef3d8ad | 4781 | rtx_note * |
4d86329d | 4782 | emit_note_after (enum insn_note subtype, rtx_insn *after) |
15bbde2b | 4783 | { |
cef3d8ad | 4784 | rtx_note *note = make_note_raw (subtype); |
35f3420b | 4785 | basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after); |
4786 | bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after); | |
4787 | ||
4788 | if (note_outside_basic_block_p (subtype, on_bb_boundary_p)) | |
4789 | add_insn_after_nobb (note, after); | |
4790 | else | |
4791 | add_insn_after (note, after, bb); | |
4792 | return note; | |
4793 | } | |
4794 | ||
4795 | /* Emit a note of subtype SUBTYPE before the insn BEFORE. */ | |
4796 | ||
cef3d8ad | 4797 | rtx_note * |
1dc26636 | 4798 | emit_note_before (enum insn_note subtype, rtx_insn *before) |
35f3420b | 4799 | { |
cef3d8ad | 4800 | rtx_note *note = make_note_raw (subtype); |
35f3420b | 4801 | basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before); |
4802 | bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before); | |
4803 | ||
4804 | if (note_outside_basic_block_p (subtype, on_bb_boundary_p)) | |
4805 | add_insn_before_nobb (note, before); | |
4806 | else | |
4807 | add_insn_before (note, before, bb); | |
15bbde2b | 4808 | return note; |
4809 | } | |
15bbde2b | 4810 | \f |
ede4ebcb | 4811 | /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC. |
4812 | MAKE_RAW indicates how to turn PATTERN into a real insn. */ | |
4813 | ||
722334ea | 4814 | static rtx_insn * |
4cd001d5 | 4815 | emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc, |
2c57d586 | 4816 | rtx_insn *(*make_raw) (rtx)) |
d321a68b | 4817 | { |
4cd001d5 | 4818 | rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); |
9ed997be | 4819 | rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw); |
d321a68b | 4820 | |
0891f67c | 4821 | if (pattern == NULL_RTX || !loc) |
9ed997be | 4822 | return last; |
ca154f3f | 4823 | |
31d3e01c | 4824 | after = NEXT_INSN (after); |
4825 | while (1) | |
4826 | { | |
57e999d9 | 4827 | if (active_insn_p (after) |
4828 | && !JUMP_TABLE_DATA_P (after) /* FIXME */ | |
4829 | && !INSN_LOCATION (after)) | |
5169661d | 4830 | INSN_LOCATION (after) = loc; |
31d3e01c | 4831 | if (after == last) |
4832 | break; | |
4833 | after = NEXT_INSN (after); | |
4834 | } | |
9ed997be | 4835 | return last; |
d321a68b | 4836 | } |
4837 | ||
ede4ebcb | 4838 | /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN |
4839 | into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after | |
4840 | any DEBUG_INSNs. */ | |
4841 | ||
722334ea | 4842 | static rtx_insn * |
4cd001d5 | 4843 | emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns, |
2c57d586 | 4844 | rtx_insn *(*make_raw) (rtx)) |
0891f67c | 4845 | { |
4cd001d5 | 4846 | rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); |
4847 | rtx_insn *prev = after; | |
9845d120 | 4848 | |
ede4ebcb | 4849 | if (skip_debug_insns) |
4850 | while (DEBUG_INSN_P (prev)) | |
4851 | prev = PREV_INSN (prev); | |
9845d120 | 4852 | |
4853 | if (INSN_P (prev)) | |
5169661d | 4854 | return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev), |
ede4ebcb | 4855 | make_raw); |
0891f67c | 4856 | else |
ede4ebcb | 4857 | return emit_pattern_after_noloc (pattern, after, NULL, make_raw); |
0891f67c | 4858 | } |
4859 | ||
5169661d | 4860 | /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4861 | rtx_insn * |
ede4ebcb | 4862 | emit_insn_after_setloc (rtx pattern, rtx after, int loc) |
d321a68b | 4863 | { |
ede4ebcb | 4864 | return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw); |
4865 | } | |
31d3e01c | 4866 | |
5169661d | 4867 | /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
722334ea | 4868 | rtx_insn * |
ede4ebcb | 4869 | emit_insn_after (rtx pattern, rtx after) |
4870 | { | |
4871 | return emit_pattern_after (pattern, after, true, make_insn_raw); | |
4872 | } | |
ca154f3f | 4873 | |
5169661d | 4874 | /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
f9a00e9e | 4875 | rtx_jump_insn * |
ede4ebcb | 4876 | emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc) |
4877 | { | |
f9a00e9e | 4878 | return as_a <rtx_jump_insn *> ( |
4879 | emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw)); | |
d321a68b | 4880 | } |
4881 | ||
5169661d | 4882 | /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
f9a00e9e | 4883 | rtx_jump_insn * |
0891f67c | 4884 | emit_jump_insn_after (rtx pattern, rtx after) |
4885 | { | |
f9a00e9e | 4886 | return as_a <rtx_jump_insn *> ( |
4887 | emit_pattern_after (pattern, after, true, make_jump_insn_raw)); | |
0891f67c | 4888 | } |
4889 | ||
5169661d | 4890 | /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4891 | rtx_insn * |
35cb5232 | 4892 | emit_call_insn_after_setloc (rtx pattern, rtx after, int loc) |
d321a68b | 4893 | { |
ede4ebcb | 4894 | return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw); |
d321a68b | 4895 | } |
4896 | ||
5169661d | 4897 | /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
722334ea | 4898 | rtx_insn * |
0891f67c | 4899 | emit_call_insn_after (rtx pattern, rtx after) |
4900 | { | |
ede4ebcb | 4901 | return emit_pattern_after (pattern, after, true, make_call_insn_raw); |
0891f67c | 4902 | } |
4903 | ||
5169661d | 4904 | /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4905 | rtx_insn * |
9845d120 | 4906 | emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc) |
4907 | { | |
ede4ebcb | 4908 | return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw); |
9845d120 | 4909 | } |
4910 | ||
5169661d | 4911 | /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
722334ea | 4912 | rtx_insn * |
9845d120 | 4913 | emit_debug_insn_after (rtx pattern, rtx after) |
4914 | { | |
ede4ebcb | 4915 | return emit_pattern_after (pattern, after, false, make_debug_insn_raw); |
9845d120 | 4916 | } |
4917 | ||
ede4ebcb | 4918 | /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC. |
4919 | MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP | |
4920 | indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN, | |
4921 | CALL_INSN, etc. */ | |
4922 | ||
722334ea | 4923 | static rtx_insn * |
4cd001d5 | 4924 | emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp, |
2c57d586 | 4925 | rtx_insn *(*make_raw) (rtx)) |
d321a68b | 4926 | { |
4cd001d5 | 4927 | rtx_insn *before = as_a <rtx_insn *> (uncast_before); |
4928 | rtx_insn *first = PREV_INSN (before); | |
4929 | rtx_insn *last = emit_pattern_before_noloc (pattern, before, | |
4930 | insnp ? before : NULL_RTX, | |
4931 | NULL, make_raw); | |
0891f67c | 4932 | |
4933 | if (pattern == NULL_RTX || !loc) | |
4cd001d5 | 4934 | return last; |
0891f67c | 4935 | |
4486418e | 4936 | if (!first) |
4937 | first = get_insns (); | |
4938 | else | |
4939 | first = NEXT_INSN (first); | |
0891f67c | 4940 | while (1) |
4941 | { | |
57e999d9 | 4942 | if (active_insn_p (first) |
4943 | && !JUMP_TABLE_DATA_P (first) /* FIXME */ | |
4944 | && !INSN_LOCATION (first)) | |
5169661d | 4945 | INSN_LOCATION (first) = loc; |
0891f67c | 4946 | if (first == last) |
4947 | break; | |
4948 | first = NEXT_INSN (first); | |
4949 | } | |
4cd001d5 | 4950 | return last; |
0891f67c | 4951 | } |
4952 | ||
ede4ebcb | 4953 | /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN |
4954 | into a real insn. SKIP_DEBUG_INSNS indicates whether to insert | |
4955 | before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an | |
4956 | INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */ | |
4957 | ||
722334ea | 4958 | static rtx_insn * |
4cd001d5 | 4959 | emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns, |
2c57d586 | 4960 | bool insnp, rtx_insn *(*make_raw) (rtx)) |
0891f67c | 4961 | { |
4cd001d5 | 4962 | rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before); |
4963 | rtx_insn *next = before; | |
9845d120 | 4964 | |
ede4ebcb | 4965 | if (skip_debug_insns) |
4966 | while (DEBUG_INSN_P (next)) | |
4967 | next = PREV_INSN (next); | |
9845d120 | 4968 | |
4969 | if (INSN_P (next)) | |
5169661d | 4970 | return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next), |
ede4ebcb | 4971 | insnp, make_raw); |
0891f67c | 4972 | else |
ede4ebcb | 4973 | return emit_pattern_before_noloc (pattern, before, |
db7dd023 | 4974 | insnp ? before : NULL_RTX, |
ede4ebcb | 4975 | NULL, make_raw); |
0891f67c | 4976 | } |
4977 | ||
5169661d | 4978 | /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4979 | rtx_insn * |
c9a09955 | 4980 | emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc) |
0891f67c | 4981 | { |
ede4ebcb | 4982 | return emit_pattern_before_setloc (pattern, before, loc, true, |
4983 | make_insn_raw); | |
4984 | } | |
0891f67c | 4985 | |
5169661d | 4986 | /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ |
722334ea | 4987 | rtx_insn * |
ede4ebcb | 4988 | emit_insn_before (rtx pattern, rtx before) |
4989 | { | |
4990 | return emit_pattern_before (pattern, before, true, true, make_insn_raw); | |
4991 | } | |
0891f67c | 4992 | |
5169661d | 4993 | /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */ | |
f9a00e9e | 4994 | rtx_jump_insn * |
c9a09955 | 4995 | emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc) |
ede4ebcb | 4996 | { |
f9a00e9e | 4997 | return as_a <rtx_jump_insn *> ( |
4998 | emit_pattern_before_setloc (pattern, before, loc, false, | |
4999 | make_jump_insn_raw)); | |
0891f67c | 5000 | } |
5001 | ||
5169661d | 5002 | /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ |
f9a00e9e | 5003 | rtx_jump_insn * |
0891f67c | 5004 | emit_jump_insn_before (rtx pattern, rtx before) |
5005 | { | |
f9a00e9e | 5006 | return as_a <rtx_jump_insn *> ( |
5007 | emit_pattern_before (pattern, before, true, false, | |
5008 | make_jump_insn_raw)); | |
0891f67c | 5009 | } |
5010 | ||
5169661d | 5011 | /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */ | |
722334ea | 5012 | rtx_insn * |
c9a09955 | 5013 | emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc) |
0891f67c | 5014 | { |
ede4ebcb | 5015 | return emit_pattern_before_setloc (pattern, before, loc, false, |
5016 | make_call_insn_raw); | |
d321a68b | 5017 | } |
0891f67c | 5018 | |
ede4ebcb | 5019 | /* Like emit_call_insn_before_noloc, |
5169661d | 5020 | but set INSN_LOCATION according to BEFORE. */ | |
722334ea | 5021 | rtx_insn * |
c9a09955 | 5022 | emit_call_insn_before (rtx pattern, rtx_insn *before) |
0891f67c | 5023 | { |
ede4ebcb | 5024 | return emit_pattern_before (pattern, before, true, false, |
5025 | make_call_insn_raw); | |
0891f67c | 5026 | } |
9845d120 | 5027 | |
5169661d | 5028 | /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */ | |
722334ea | 5029 | rtx_insn * |
9845d120 | 5030 | emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc) |
5031 | { | |
ede4ebcb | 5032 | return emit_pattern_before_setloc (pattern, before, loc, false, |
5033 | make_debug_insn_raw); | |
9845d120 | 5034 | } |
5035 | ||
ede4ebcb | 5036 | /* Like emit_debug_insn_before_noloc, |
5169661d | 5037 | but set INSN_LOCATION according to BEFORE. */ | |
722334ea | 5038 | rtx_insn * |
5518cf83 | 5039 | emit_debug_insn_before (rtx pattern, rtx_insn *before) |
9845d120 | 5040 | { |
ede4ebcb | 5041 | return emit_pattern_before (pattern, before, false, false, |
5042 | make_debug_insn_raw); | |
9845d120 | 5043 | } |
d321a68b | 5044 | \f |
31d3e01c | 5045 | /* Take X and emit it at the end of the doubly-linked |
5046 | INSN list. | |
15bbde2b | 5047 | |
5048 | Returns the last insn emitted. */ | |
5049 | ||
722334ea | 5050 | rtx_insn * |
35cb5232 | 5051 | emit_insn (rtx x) |
15bbde2b | 5052 | { |
722334ea | 5053 | rtx_insn *last = get_last_insn (); |
5054 | rtx_insn *insn; | |
15bbde2b | 5055 | |
31d3e01c | 5056 | if (x == NULL_RTX) |
5057 | return last; | |
15bbde2b | 5058 | |
31d3e01c | 5059 | switch (GET_CODE (x)) |
5060 | { | |
9845d120 | 5061 | case DEBUG_INSN: |
31d3e01c | 5062 | case INSN: |
5063 | case JUMP_INSN: | |
5064 | case CALL_INSN: | |
5065 | case CODE_LABEL: | |
5066 | case BARRIER: | |
5067 | case NOTE: | |
722334ea | 5068 | insn = as_a <rtx_insn *> (x); |
31d3e01c | 5069 | while (insn) |
15bbde2b | 5070 | { |
722334ea | 5071 | rtx_insn *next = NEXT_INSN (insn); |
15bbde2b | 5072 | add_insn (insn); |
31d3e01c | 5073 | last = insn; |
5074 | insn = next; | |
15bbde2b | 5075 | } |
31d3e01c | 5076 | break; |
15bbde2b | 5077 | |
31d3e01c | 5078 | #ifdef ENABLE_RTL_CHECKING |
91f71fa3 | 5079 | case JUMP_TABLE_DATA: |
31d3e01c | 5080 | case SEQUENCE: |
611234b4 | 5081 | gcc_unreachable (); |
31d3e01c | 5082 | break; |
5083 | #endif | |
15bbde2b | 5084 | |
31d3e01c | 5085 | default: |
5086 | last = make_insn_raw (x); | |
5087 | add_insn (last); | |
5088 | break; | |
15bbde2b | 5089 | } |
5090 | ||
5091 | return last; | |
5092 | } | |
5093 | ||
9845d120 | 5094 | /* Make an insn of code DEBUG_INSN with pattern X |
5095 | and add it to the end of the doubly-linked list. */ | |
5096 | ||
722334ea | 5097 | rtx_insn * |
9845d120 | 5098 | emit_debug_insn (rtx x) |
5099 | { | |
722334ea | 5100 | rtx_insn *last = get_last_insn (); |
5101 | rtx_insn *insn; | |
9845d120 | 5102 | |
5103 | if (x == NULL_RTX) | |
5104 | return last; | |
5105 | ||
5106 | switch (GET_CODE (x)) | |
5107 | { | |
5108 | case DEBUG_INSN: | |
5109 | case INSN: | |
5110 | case JUMP_INSN: | |
5111 | case CALL_INSN: | |
5112 | case CODE_LABEL: | |
5113 | case BARRIER: | |
5114 | case NOTE: | |
722334ea | 5115 | insn = as_a <rtx_insn *> (x); |
9845d120 | 5116 | while (insn) |
5117 | { | |
722334ea | 5118 | rtx_insn *next = NEXT_INSN (insn); |
9845d120 | 5119 | add_insn (insn); |
5120 | last = insn; | |
5121 | insn = next; | |
5122 | } | |
5123 | break; | |
5124 | ||
5125 | #ifdef ENABLE_RTL_CHECKING | |
91f71fa3 | 5126 | case JUMP_TABLE_DATA: |
9845d120 | 5127 | case SEQUENCE: |
5128 | gcc_unreachable (); | |
5129 | break; | |
5130 | #endif | |
5131 | ||
5132 | default: | |
5133 | last = make_debug_insn_raw (x); | |
5134 | add_insn (last); | |
5135 | break; | |
5136 | } | |
5137 | ||
5138 | return last; | |
5139 | } | |
5140 | ||
31d3e01c | 5141 | /* Make an insn of code JUMP_INSN with pattern X |
5142 | and add it to the end of the doubly-linked list. */ | |
15bbde2b | 5143 | |
722334ea | 5144 | rtx_insn * |
35cb5232 | 5145 | emit_jump_insn (rtx x) |
15bbde2b | 5146 | { |
722334ea | 5147 | rtx_insn *last = NULL; |
5148 | rtx_insn *insn; | |
15bbde2b | 5149 | |
31d3e01c | 5150 | switch (GET_CODE (x)) |
15bbde2b | 5151 | { |
9845d120 | 5152 | case DEBUG_INSN: |
31d3e01c | 5153 | case INSN: |
5154 | case JUMP_INSN: | |
5155 | case CALL_INSN: | |
5156 | case CODE_LABEL: | |
5157 | case BARRIER: | |
5158 | case NOTE: | |
722334ea | 5159 | insn = as_a <rtx_insn *> (x); |
31d3e01c | 5160 | while (insn) |
5161 | { | |
722334ea | 5162 | rtx_insn *next = NEXT_INSN (insn); |
31d3e01c | 5163 | add_insn (insn); |
5164 | last = insn; | |
5165 | insn = next; | |
5166 | } | |
5167 | break; | |
b36b07d8 | 5168 | |
31d3e01c | 5169 | #ifdef ENABLE_RTL_CHECKING |
91f71fa3 | 5170 | case JUMP_TABLE_DATA: |
31d3e01c | 5171 | case SEQUENCE: |
611234b4 | 5172 | gcc_unreachable (); |
31d3e01c | 5173 | break; |
5174 | #endif | |
b36b07d8 | 5175 | |
31d3e01c | 5176 | default: |
5177 | last = make_jump_insn_raw (x); | |
5178 | add_insn (last); | |
5179 | break; | |
9dda7915 | 5180 | } |
b36b07d8 | 5181 | |
5182 | return last; | |
5183 | } | |
5184 | ||
31d3e01c | 5185 | /* Make an insn of code CALL_INSN with pattern X |
15bbde2b | 5186 | and add it to the end of the doubly-linked list. */ |
5187 | ||
722334ea | 5188 | rtx_insn * |
35cb5232 | 5189 | emit_call_insn (rtx x) |
15bbde2b | 5190 | { |
722334ea | 5191 | rtx_insn *insn; |
31d3e01c | 5192 | |
5193 | switch (GET_CODE (x)) | |
15bbde2b | 5194 | { |
9845d120 | 5195 | case DEBUG_INSN: |
31d3e01c | 5196 | case INSN: |
5197 | case JUMP_INSN: | |
5198 | case CALL_INSN: | |
5199 | case CODE_LABEL: | |
5200 | case BARRIER: | |
5201 | case NOTE: | |
5202 | insn = emit_insn (x); | |
5203 | break; | |
15bbde2b | 5204 | |
31d3e01c | 5205 | #ifdef ENABLE_RTL_CHECKING |
5206 | case SEQUENCE: | |
91f71fa3 | 5207 | case JUMP_TABLE_DATA: |
611234b4 | 5208 | gcc_unreachable (); |
31d3e01c | 5209 | break; |
5210 | #endif | |
15bbde2b | 5211 | |
31d3e01c | 5212 | default: |
5213 | insn = make_call_insn_raw (x); | |
15bbde2b | 5214 | add_insn (insn); |
31d3e01c | 5215 | break; |
15bbde2b | 5216 | } |
31d3e01c | 5217 | |
5218 | return insn; | |
15bbde2b | 5219 | } |
5220 | ||
5221 | /* Add the label LABEL to the end of the doubly-linked list. */ | |
5222 | ||
f9a00e9e | 5223 | rtx_code_label * |
5224 | emit_label (rtx uncast_label) | |
15bbde2b | 5225 | { |
f9a00e9e | 5226 | rtx_code_label *label = as_a <rtx_code_label *> (uncast_label); |
5227 | ||
596ef494 | 5228 | gcc_checking_assert (INSN_UID (label) == 0); |
5229 | INSN_UID (label) = cur_insn_uid++; | |
f9a00e9e | 5230 | add_insn (label); |
5231 | return label; | |
15bbde2b | 5232 | } |
5233 | ||
91f71fa3 | 5234 | /* Make an insn of code JUMP_TABLE_DATA |
5235 | and add it to the end of the doubly-linked list. */ | |
5236 | ||
e41badc0 | 5237 | rtx_jump_table_data * |
91f71fa3 | 5238 | emit_jump_table_data (rtx table) |
5239 | { | |
e41badc0 | 5240 | rtx_jump_table_data *jump_table_data = |
5241 | as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA)); | |
91f71fa3 | 5242 | INSN_UID (jump_table_data) = cur_insn_uid++; |
5243 | PATTERN (jump_table_data) = table; | |
5244 | BLOCK_FOR_INSN (jump_table_data) = NULL; | |
5245 | add_insn (jump_table_data); | |
5246 | return jump_table_data; | |
5247 | } | |
5248 | ||
15bbde2b | 5249 | /* Make an insn of code BARRIER |
5250 | and add it to the end of the doubly-linked list. */ | |
5251 | ||
722334ea | 5252 | rtx_barrier * |
35cb5232 | 5253 | emit_barrier (void) |
15bbde2b | 5254 | { |
722334ea | 5255 | rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
15bbde2b | 5256 | INSN_UID (barrier) = cur_insn_uid++; |
5257 | add_insn (barrier); | |
5258 | return barrier; | |
5259 | } | |
5260 | ||
2f57e3d9 | 5261 | /* Emit a copy of note ORIG. */ |
35cb5232 | 5262 | |
cef3d8ad | 5263 | rtx_note * |
5264 | emit_note_copy (rtx_note *orig) | |
2f57e3d9 | 5265 | { |
35f3420b | 5266 | enum insn_note kind = (enum insn_note) NOTE_KIND (orig); |
cef3d8ad | 5267 | rtx_note *note = make_note_raw (kind); |
2f57e3d9 | 5268 | NOTE_DATA (note) = NOTE_DATA (orig); |
2f57e3d9 | 5269 | add_insn (note); |
31b97e8f | 5270 | return note; |
15bbde2b | 5271 | } |
5272 | ||
31b97e8f | 5273 | /* Make an insn of code NOTE with kind KIND | |
5274 | and add it to the end of the doubly-linked list. */ | |
15bbde2b | 5275 | |
cef3d8ad | 5276 | rtx_note * |
ad4583d9 | 5277 | emit_note (enum insn_note kind) |
15bbde2b | 5278 | { |
cef3d8ad | 5279 | rtx_note *note = make_note_raw (kind); |
15bbde2b | 5280 | add_insn (note); |
5281 | return note; | |
5282 | } | |
5283 | ||
18b42941 | 5284 | /* Emit a clobber of lvalue X. */ |
5285 | ||
722334ea | 5286 | rtx_insn * |
18b42941 | 5287 | emit_clobber (rtx x) |
5288 | { | |
5289 | /* CONCATs should not appear in the insn stream. */ | |
5290 | if (GET_CODE (x) == CONCAT) | |
5291 | { | |
5292 | emit_clobber (XEXP (x, 0)); | |
5293 | return emit_clobber (XEXP (x, 1)); | |
5294 | } | |
5295 | return emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); | |
5296 | } | |
5297 | ||
5298 | /* Return a sequence of insns to clobber lvalue X. */ | |
5299 | ||
722334ea | 5300 | rtx_insn * |
18b42941 | 5301 | gen_clobber (rtx x) |
5302 | { | |
722334ea | 5303 | rtx_insn *seq; |
18b42941 | 5304 | |
5305 | start_sequence (); | |
5306 | emit_clobber (x); | |
5307 | seq = get_insns (); | |
5308 | end_sequence (); | |
5309 | return seq; | |
5310 | } | |
5311 | ||
5312 | /* Emit a use of rvalue X. */ | |
5313 | ||
722334ea | 5314 | rtx_insn * |
18b42941 | 5315 | emit_use (rtx x) |
5316 | { | |
5317 | /* CONCATs should not appear in the insn stream. */ | |
5318 | if (GET_CODE (x) == CONCAT) | |
5319 | { | |
5320 | emit_use (XEXP (x, 0)); | |
5321 | return emit_use (XEXP (x, 1)); | |
5322 | } | |
5323 | return emit_insn (gen_rtx_USE (VOIDmode, x)); | |
5324 | } | |
5325 | ||
5326 | /* Return a sequence of insns to use rvalue X. */ | |
5327 | ||
722334ea | 5328 | rtx_insn * |
18b42941 | 5329 | gen_use (rtx x) |
5330 | { | |
722334ea | 5331 | rtx_insn *seq; |
18b42941 | 5332 | |
5333 | start_sequence (); | |
5334 | emit_use (x); | |
5335 | seq = get_insns (); | |
5336 | end_sequence (); | |
5337 | return seq; | |
5338 | } | |
5339 | ||
3a286419 | 5340 | /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction. |
5341 | Return the set in INSN that such notes describe, or NULL if the notes | |
5342 | have no meaning for INSN. */ | |
5343 | ||
5344 | rtx | |
5345 | set_for_reg_notes (rtx insn) | |
5346 | { | |
5347 | rtx pat, reg; | |
5348 | ||
5349 | if (!INSN_P (insn)) | |
5350 | return NULL_RTX; | |
5351 | ||
5352 | pat = PATTERN (insn); | |
5353 | if (GET_CODE (pat) == PARALLEL) | |
5354 | { | |
5355 | /* We do not use single_set because that ignores SETs of unused | |
5356 | registers. REG_EQUAL and REG_EQUIV notes really do require the | |
5357 | PARALLEL to have a single SET. */ | |
5358 | if (multiple_sets (insn)) | |
5359 | return NULL_RTX; | |
5360 | pat = XVECEXP (pat, 0, 0); | |
5361 | } | |
5362 | ||
5363 | if (GET_CODE (pat) != SET) | |
5364 | return NULL_RTX; | |
5365 | ||
5366 | reg = SET_DEST (pat); | |
5367 | ||
5368 | /* Notes apply to the contents of a STRICT_LOW_PART. */ | |
f2c7e335 | 5369 | if (GET_CODE (reg) == STRICT_LOW_PART |
5370 | || GET_CODE (reg) == ZERO_EXTRACT) | |
3a286419 | 5371 | reg = XEXP (reg, 0); |
5372 | ||
5373 | /* Check that we have a register. */ | |
5374 | if (!(REG_P (reg) || GET_CODE (reg) == SUBREG)) | |
5375 | return NULL_RTX; | |
5376 | ||
5377 | return pat; | |
5378 | } | |
5379 | ||
f1934a33 | 5380 | /* Place a note of KIND on insn INSN with DATUM as the datum. If a |
6312a35e | 5381 | note of this type already exists, remove it first. */ |
f1934a33 | 5382 | |
c080d8f0 | 5383 | rtx |
35cb5232 | 5384 | set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum) |
f1934a33 | 5385 | { |
5386 | rtx note = find_reg_note (insn, kind, NULL_RTX); | |
5387 | ||
7e6224ab | 5388 | switch (kind) |
5389 | { | |
5390 | case REG_EQUAL: | |
5391 | case REG_EQUIV: | |
7b0b2add | 5392 | /* We need to support the REG_EQUAL on USE trick of find_reloads. */ |
5393 | if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE) | |
3a286419 | 5394 | return NULL_RTX; |
7e6224ab | 5395 | |
5396 | /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes. | |
5397 | It serves no useful purpose and breaks eliminate_regs. */ | |
5398 | if (GET_CODE (datum) == ASM_OPERANDS) | |
5399 | return NULL_RTX; | |
2f8cf22c | 5400 | |
5401 | /* Notes with side effects are dangerous. Even if the side-effect | |
5402 | initially mirrors one in PATTERN (INSN), later optimizations | |
5403 | might alter the way that the final register value is calculated | |
5404 | and so move or alter the side-effect in some way. The note would | |
5405 | then no longer be a valid substitution for SET_SRC. */ | |
5406 | if (side_effects_p (datum)) | |
5407 | return NULL_RTX; | |
7e6224ab | 5408 | break; |
5409 | ||
5410 | default: | |
5411 | break; | |
5412 | } | |
c080d8f0 | 5413 | |
3a286419 | 5414 | if (note) |
5415 | XEXP (note, 0) = datum; | |
5416 | else | |
5417 | { | |
5418 | add_reg_note (insn, kind, datum); | |
5419 | note = REG_NOTES (insn); | |
5420 | } | |
3072d30e | 5421 | |
5422 | switch (kind) | |
c080d8f0 | 5423 | { |
3072d30e | 5424 | case REG_EQUAL: |
5425 | case REG_EQUIV: | |
e149ca56 | 5426 | df_notes_rescan (as_a <rtx_insn *> (insn)); |
3072d30e | 5427 | break; |
5428 | default: | |
5429 | break; | |
c080d8f0 | 5430 | } |
f1934a33 | 5431 | |
3a286419 | 5432 | return note; |
f1934a33 | 5433 | } |
41cf444a | 5434 | |
5435 | /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */ | |
5436 | rtx | |
5437 | set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst) | |
5438 | { | |
3a286419 | 5439 | rtx set = set_for_reg_notes (insn); |
41cf444a | 5440 | |
5441 | if (set && SET_DEST (set) == dst) | |
5442 | return set_unique_reg_note (insn, kind, datum); | |
5443 | return NULL_RTX; | |
5444 | } | |
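
/* For example (hypothetical operands), recording that INSN computes a
   known constant:

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   If INSN already carries a REG_EQUAL note, its datum is replaced;
   otherwise a fresh note is added, and for REG_EQUAL/REG_EQUIV the
   DF notes are rescanned.  */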
15bbde2b | 5445 | \f |
16d83c02 | 5446 | /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a |
5447 | following barrier if the instruction needs one and if ALLOW_BARRIER_P | |
5448 | is true. | |
5449 | ||
15bbde2b | 5450 | If X is a label, it is simply added into the insn chain. */ |
5451 | ||
722334ea | 5452 | rtx_insn * |
16d83c02 | 5453 | emit (rtx x, bool allow_barrier_p) |
15bbde2b | 5454 | { |
5455 | enum rtx_code code = classify_insn (x); | |
5456 | ||
611234b4 | 5457 | switch (code) |
15bbde2b | 5458 | { |
611234b4 | 5459 | case CODE_LABEL: |
5460 | return emit_label (x); | |
5461 | case INSN: | |
5462 | return emit_insn (x); | |
5463 | case JUMP_INSN: | |
5464 | { | |
722334ea | 5465 | rtx_insn *insn = emit_jump_insn (x); |
16d83c02 | 5466 | if (allow_barrier_p |
5467 | && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)) | |
611234b4 | 5468 | return emit_barrier (); |
5469 | return insn; | |
5470 | } | |
5471 | case CALL_INSN: | |
5472 | return emit_call_insn (x); | |
9845d120 | 5473 | case DEBUG_INSN: |
5474 | return emit_debug_insn (x); | |
611234b4 | 5475 | default: |
5476 | gcc_unreachable (); | |
15bbde2b | 5477 | } |
15bbde2b | 5478 | } |
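
/* For instance (hypothetical pattern):

     emit (gen_rtx_SET (target, source), true);

   classifies the SET as an ordinary INSN and routes it to emit_insn;
   an unconditional jump pattern would instead reach emit_jump_insn
   and, because ALLOW_BARRIER_P is true, be followed by a barrier.  */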
5479 | \f | |
1f3233d1 | 5480 | /* Space for free sequence stack entries. */ |
7035b2ab | 5481 | static GTY ((deletable)) struct sequence_stack *free_sequence_stack; |
1f3233d1 | 5482 | |
735f4358 | 5483 | /* Begin emitting insns to a sequence. If this sequence will contain |
5484 | something that might cause the compiler to pop arguments to function | |
5485 | calls (because those pops have previously been deferred; see | |
5486 | INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust | |
5487 | before calling this function. That will ensure that the deferred | |
5488 | pops are not accidentally emitted in the middle of this sequence. */ | |
15bbde2b | 5489 | |
5490 | void | |
35cb5232 | 5491 | start_sequence (void) |
15bbde2b | 5492 | { |
5493 | struct sequence_stack *tem; | |
5494 | ||
1f3233d1 | 5495 | if (free_sequence_stack != NULL) |
5496 | { | |
5497 | tem = free_sequence_stack; | |
5498 | free_sequence_stack = tem->next; | |
5499 | } | |
5500 | else | |
25a27413 | 5501 | tem = ggc_alloc<sequence_stack> (); |
15bbde2b | 5502 | |
c36aa54b | 5503 | tem->next = get_current_sequence ()->next; |
06f9d6ef | 5504 | tem->first = get_insns (); |
5505 | tem->last = get_last_insn (); | |
c36aa54b | 5506 | get_current_sequence ()->next = tem; |
15bbde2b | 5507 | |
06f9d6ef | 5508 | set_first_insn (0); |
5509 | set_last_insn (0); | |
15bbde2b | 5510 | } |
5511 | ||
b49854c6 | 5512 | /* Set up the insn chain starting with FIRST as the current sequence, |
5513 | saving the previously current one. See the documentation for | |
5514 | start_sequence for more information about how to use this function. */ | |
15bbde2b | 5515 | |
5516 | void | |
57c26b3a | 5517 | push_to_sequence (rtx_insn *first) |
15bbde2b | 5518 | { |
57c26b3a | 5519 | rtx_insn *last; |
15bbde2b | 5520 | |
5521 | start_sequence (); | |
5522 | ||
3c802a1e | 5523 | for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last)) |
5524 | ; | |
15bbde2b | 5525 | |
06f9d6ef | 5526 | set_first_insn (first); |
5527 | set_last_insn (last); | |
15bbde2b | 5528 | } |
5529 | ||
28bf151d | 5530 | /* Like push_to_sequence, but take the last insn as an argument to avoid |
5531 | looping through the list. */ | |
5532 | ||
5533 | void | |
57c26b3a | 5534 | push_to_sequence2 (rtx_insn *first, rtx_insn *last) |
28bf151d | 5535 | { |
5536 | start_sequence (); | |
5537 | ||
06f9d6ef | 5538 | set_first_insn (first); |
5539 | set_last_insn (last); | |
28bf151d | 5540 | } |
5541 | ||
ab74c92f | 5542 | /* Set up the outer-level insn chain |
5543 | as the current sequence, saving the previously current one. */ | |
5544 | ||
5545 | void | |
35cb5232 | 5546 | push_topmost_sequence (void) |
ab74c92f | 5547 | { |
c36aa54b | 5548 | struct sequence_stack *top; |
ab74c92f | 5549 | |
5550 | start_sequence (); | |
5551 | ||
c36aa54b | 5552 | top = get_topmost_sequence (); |
06f9d6ef | 5553 | set_first_insn (top->first); |
5554 | set_last_insn (top->last); | |
ab74c92f | 5555 | } |
5556 | ||
5557 | /* After emitting to the outer-level insn chain, update the outer-level | |
5558 | insn chain, and restore the previous saved state. */ | |
5559 | ||
5560 | void | |
35cb5232 | 5561 | pop_topmost_sequence (void) |
ab74c92f | 5562 | { |
c36aa54b | 5563 | struct sequence_stack *top; |
ab74c92f | 5564 | |
c36aa54b | 5565 | top = get_topmost_sequence (); |
06f9d6ef | 5566 | top->first = get_insns (); |
5567 | top->last = get_last_insn (); | |
ab74c92f | 5568 | |
5569 | end_sequence (); | |
5570 | } | |
5571 | ||
15bbde2b | 5572 | /* After emitting to a sequence, restore previous saved state. |
5573 | ||
b49854c6 | 5574 | To get the contents of the sequence just made, you must call |
31d3e01c | 5575 | `get_insns' *before* calling here. |
b49854c6 | 5576 | |
5577 | If the compiler might have deferred popping arguments while | |
5578 | generating this sequence, and this sequence will not be immediately | |
5579 | inserted into the instruction stream, use do_pending_stack_adjust | |
31d3e01c | 5580 | before calling get_insns. That will ensure that the deferred |
b49854c6 | 5581 | pops are inserted into this sequence, and not into some random |
5582 | location in the instruction stream. See INHIBIT_DEFER_POP for more | |
5583 | information about deferred popping of arguments. */ | |
15bbde2b | 5584 | |
5585 | void | |
35cb5232 | 5586 | end_sequence (void) |
15bbde2b | 5587 | { |
c36aa54b | 5588 | struct sequence_stack *tem = get_current_sequence ()->next; |
15bbde2b | 5589 | |
06f9d6ef | 5590 | set_first_insn (tem->first); |
5591 | set_last_insn (tem->last); | |
c36aa54b | 5592 | get_current_sequence ()->next = tem->next; |
15bbde2b | 5593 | |
1f3233d1 | 5594 | memset (tem, 0, sizeof (*tem)); |
5595 | tem->next = free_sequence_stack; | |
5596 | free_sequence_stack = tem; | |
15bbde2b | 5597 | } |
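
/* A hedged sketch tying the two caveats above together (the emitted
   insns are hypothetical):

     do_pending_stack_adjust ();   /\* flush deferred pops first \*\/
     start_sequence ();
     ... emit the new insns ...
     rtx_insn *seq = get_insns (); /\* must precede end_sequence \*\/
     end_sequence ();
     emit_insn (seq);

   get_insns must be called before end_sequence, since end_sequence
   restores the previously saved insn chain.  */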
5598 | ||
5599 | /* Return 1 if currently emitting into a sequence. */ | |
5600 | ||
5601 | int | |
35cb5232 | 5602 | in_sequence_p (void) |
15bbde2b | 5603 | { |
c36aa54b | 5604 | return get_current_sequence ()->next != 0; |
15bbde2b | 5605 | } |
15bbde2b | 5606 | \f |
02ebfa52 | 5607 | /* Put the various virtual registers into REGNO_REG_RTX. */ |
5608 | ||
2f3874ce | 5609 | static void |
b079a207 | 5610 | init_virtual_regs (void) |
02ebfa52 | 5611 | { |
b079a207 | 5612 | regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx; |
5613 | regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx; | |
5614 | regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx; | |
5615 | regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx; | |
5616 | regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx; | |
60778e62 | 5617 | regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM] |
5618 | = virtual_preferred_stack_boundary_rtx; | |
0a893c29 | 5619 | } |
5620 | ||
928d57e3 | 5621 | \f |
5622 | /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */ | |
5623 | static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS]; | |
5624 | static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS]; | |
5625 | static int copy_insn_n_scratches; | |
5626 | ||
5627 | /* When an insn is being copied by copy_insn_1, this is nonzero if we have | |
5628 | copied an ASM_OPERANDS. | |
5629 | In that case, it is the original input-operand vector. */ | |
5630 | static rtvec orig_asm_operands_vector; | |
5631 | ||
5632 | /* When an insn is being copied by copy_insn_1, this is nonzero if we have | |
5633 | copied an ASM_OPERANDS. | |
5634 | In that case, it is the copied input-operand vector. */ | |
5635 | static rtvec copy_asm_operands_vector; | |
5636 | ||
5637 | /* Likewise for the constraints vector. */ | |
5638 | static rtvec orig_asm_constraints_vector; | |
5639 | static rtvec copy_asm_constraints_vector; | |
5640 | ||
5641 | /* Recursively create a new copy of an rtx for copy_insn. | |
5642 | This function differs from copy_rtx in that it handles SCRATCHes and | |
5643 | ASM_OPERANDs properly. | |
5644 | Normally, this function is not used directly; use copy_insn as front end. | |
5645 | However, you could first copy an insn pattern with copy_insn and then use | |
5646 | this function afterwards to properly copy any REG_NOTEs containing | |
5647 | SCRATCHes. */ | |
5648 | ||
5649 | rtx | |
35cb5232 | 5650 | copy_insn_1 (rtx orig) |
928d57e3 | 5651 | { |
19cb6b50 | 5652 | rtx copy; |
5653 | int i, j; | |
5654 | RTX_CODE code; | |
5655 | const char *format_ptr; | |
928d57e3 | 5656 | |
25e880b1 | 5657 | if (orig == NULL) |
5658 | return NULL; | |
5659 | ||
928d57e3 | 5660 | code = GET_CODE (orig); |
5661 | ||
5662 | switch (code) | |
5663 | { | |
5664 | case REG: | |
d7fce3c8 | 5665 | case DEBUG_EXPR: |
0349edce | 5666 | CASE_CONST_ANY: |
928d57e3 | 5667 | case SYMBOL_REF: |
5668 | case CODE_LABEL: | |
5669 | case PC: | |
5670 | case CC0: | |
e0691b9a | 5671 | case RETURN: |
9cb2517e | 5672 | case SIMPLE_RETURN: |
928d57e3 | 5673 | return orig; |
c09425a0 | 5674 | case CLOBBER: |
b291008a | 5675 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
5676 | clobbers or clobbers of hard registers that originated as pseudos. | |
5677 | This is needed to allow safe register renaming. */ | |
2b5f32ae | 5678 | if (REG_P (XEXP (orig, 0)) |
5679 | && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))) | |
5680 | && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0)))) | |
c09425a0 | 5681 | return orig; |
5682 | break; | |
928d57e3 | 5683 | |
5684 | case SCRATCH: | |
5685 | for (i = 0; i < copy_insn_n_scratches; i++) | |
5686 | if (copy_insn_scratch_in[i] == orig) | |
5687 | return copy_insn_scratch_out[i]; | |
5688 | break; | |
5689 | ||
5690 | case CONST: | |
3072d30e | 5691 | if (shared_const_p (orig)) |
928d57e3 | 5692 | return orig; |
5693 | break; | |
d823ba47 | 5694 | |
928d57e3 | 5695 | /* A MEM with a constant address is not sharable. The problem is that |
5696 | the constant address may need to be reloaded. If the mem is shared, | |
5697 | then reloading one copy of this mem will cause all copies to appear | |
5698 | to have been reloaded. */ | |
5699 | ||
5700 | default: | |
5701 | break; | |
5702 | } | |
5703 | ||
f2d0e9f1 | 5704 | /* Copy the various flags, fields, and other information. We assume |
5705 | that all fields need copying, and then clear the fields that should | |
928d57e3 | 5706 | not be copied. That is the sensible default behavior, and forces |
5707 | us to explicitly document why we are *not* copying a flag. */ | |
f2d0e9f1 | 5708 | copy = shallow_copy_rtx (orig); |
928d57e3 | 5709 | |
928d57e3 | 5710 | /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */ |
6720e96c | 5711 | if (INSN_P (orig)) |
928d57e3 | 5712 | { |
7c25cb91 | 5713 | RTX_FLAG (copy, jump) = 0; |
5714 | RTX_FLAG (copy, call) = 0; | |
5715 | RTX_FLAG (copy, frame_related) = 0; | |
928d57e3 | 5716 | } |
d823ba47 | 5717 | |
928d57e3 | 5718 | format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); |
5719 | ||
5720 | for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++) | |
f2d0e9f1 | 5721 | switch (*format_ptr++) |
5722 | { | |
5723 | case 'e': | |
5724 | if (XEXP (orig, i) != NULL) | |
5725 | XEXP (copy, i) = copy_insn_1 (XEXP (orig, i)); | |
5726 | break; | |
928d57e3 | 5727 | |
f2d0e9f1 | 5728 | case 'E': |
5729 | case 'V': | |
5730 | if (XVEC (orig, i) == orig_asm_constraints_vector) | |
5731 | XVEC (copy, i) = copy_asm_constraints_vector; | |
5732 | else if (XVEC (orig, i) == orig_asm_operands_vector) | |
5733 | XVEC (copy, i) = copy_asm_operands_vector; | |
5734 | else if (XVEC (orig, i) != NULL) | |
5735 | { | |
5736 | XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); | |
5737 | for (j = 0; j < XVECLEN (copy, i); j++) | |
5738 | XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j)); | |
5739 | } | |
5740 | break; | |
928d57e3 | 5741 | |
f2d0e9f1 | 5742 | case 't': |
5743 | case 'w': | |
5744 | case 'i': | |
5745 | case 's': | |
5746 | case 'S': | |
5747 | case 'u': | |
5748 | case '0': | |
5749 | /* These are left unchanged. */ | |
5750 | break; | |
928d57e3 | 5751 | |
f2d0e9f1 | 5752 | default: |
5753 | gcc_unreachable (); | |
5754 | } | |
928d57e3 | 5755 | |
5756 | if (code == SCRATCH) | |
5757 | { | |
5758 | i = copy_insn_n_scratches++; | |
611234b4 | 5759 | gcc_assert (i < MAX_RECOG_OPERANDS); |
928d57e3 | 5760 | copy_insn_scratch_in[i] = orig; |
5761 | copy_insn_scratch_out[i] = copy; | |
5762 | } | |
5763 | else if (code == ASM_OPERANDS) | |
5764 | { | |
d91f2122 | 5765 | orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig); |
5766 | copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy); | |
5767 | orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig); | |
5768 | copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy); | |
928d57e3 | 5769 | } |
5770 | ||
5771 | return copy; | |
5772 | } | |
5773 | ||
5774 | /* Create a new copy of an rtx. | |
5775 | This function differs from copy_rtx in that it handles SCRATCHes and | |
5776 | ASM_OPERANDs properly. | |
5777 | INSN doesn't really have to be a full INSN; it could be just the | |
5778 | pattern. */ | |
5779 | rtx | |
35cb5232 | 5780 | copy_insn (rtx insn) |
928d57e3 | 5781 | { |
5782 | copy_insn_n_scratches = 0; | |
5783 | orig_asm_operands_vector = 0; | |
5784 | orig_asm_constraints_vector = 0; | |
5785 | copy_asm_operands_vector = 0; | |
5786 | copy_asm_constraints_vector = 0; | |
5787 | return copy_insn_1 (insn); | |
5788 | } | |
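
/* A sketch of the REG_NOTES use case described before copy_insn_1
   (INSN and NOTE are hypothetical):

     rtx new_pat = copy_insn (PATTERN (insn));
     rtx new_note_datum = copy_insn_1 (XEXP (note, 0));

   The copy_insn call resets the SCRATCH/ASM_OPERANDS bookkeeping, so
   the follow-up copy_insn_1 call shares SCRATCHes consistently
   between the copied pattern and the copied note.  */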
02ebfa52 | 5789 | |
a9abe1f1 | 5790 | /* Return a copy of INSN that can be used in a SEQUENCE delay slot, |
5791 | on the assumption that INSN itself remains in its original place. */ | |
5792 | ||
575a12f2 | 5793 | rtx_insn * |
5794 | copy_delay_slot_insn (rtx_insn *insn) | |
a9abe1f1 | 5795 | { |
5796 | /* Copy INSN with its rtx_code, all its notes, location etc. */ | |
575a12f2 | 5797 | insn = as_a <rtx_insn *> (copy_rtx (insn)); |
a9abe1f1 | 5798 | INSN_UID (insn) = cur_insn_uid++; |
5799 | return insn; | |
5800 | } | |
5801 | ||
15bbde2b | 5802 | /* Initialize data structures and variables in this file |
5803 | before generating rtl for each function. */ | |
5804 | ||
5805 | void | |
35cb5232 | 5806 | init_emit (void) |
15bbde2b | 5807 | { |
06f9d6ef | 5808 | set_first_insn (NULL); |
5809 | set_last_insn (NULL); | |
9845d120 | 5810 | if (MIN_NONDEBUG_INSN_UID) |
5811 | cur_insn_uid = MIN_NONDEBUG_INSN_UID; | |
5812 | else | |
5813 | cur_insn_uid = 1; | |
5814 | cur_debug_insn_uid = 1; | |
15bbde2b | 5815 | reg_rtx_no = LAST_VIRTUAL_REGISTER + 1; |
15bbde2b | 5816 | first_label_num = label_num; |
c36aa54b | 5817 | get_current_sequence ()->next = NULL; |
15bbde2b | 5818 | |
15bbde2b | 5819 | /* Init the tables that describe all the pseudo regs. */ |
5820 | ||
fd6ffb7c | 5821 | crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101; |
15bbde2b | 5822 | |
fd6ffb7c | 5823 | crtl->emit.regno_pointer_align |
2457c754 | 5824 | = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length); |
d4c332ff | 5825 | |
cd769037 | 5826 | regno_reg_rtx |
5827 | = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length); | |
fcdc122e | 5828 | |
936082bb | 5829 | /* Put copies of all the hard registers into regno_reg_rtx. */ |
90295bd2 | 5830 | memcpy (regno_reg_rtx, |
679bcc8d | 5831 | initial_regno_reg_rtx, |
90295bd2 | 5832 | FIRST_PSEUDO_REGISTER * sizeof (rtx)); |
936082bb | 5833 | |
15bbde2b | 5834 | /* Put copies of all the virtual register rtx into regno_reg_rtx. */ |
b079a207 | 5835 | init_virtual_regs (); |
888e0d33 | 5836 | |
5837 | /* Indicate that the virtual registers and stack locations are | |
5838 | all pointers. */ | |
e61a0a7f | 5839 | REG_POINTER (stack_pointer_rtx) = 1; |
5840 | REG_POINTER (frame_pointer_rtx) = 1; | |
5841 | REG_POINTER (hard_frame_pointer_rtx) = 1; | |
5842 | REG_POINTER (arg_pointer_rtx) = 1; | |
888e0d33 | 5843 | |
e61a0a7f | 5844 | REG_POINTER (virtual_incoming_args_rtx) = 1; |
5845 | REG_POINTER (virtual_stack_vars_rtx) = 1; | |
5846 | REG_POINTER (virtual_stack_dynamic_rtx) = 1; | |
5847 | REG_POINTER (virtual_outgoing_args_rtx) = 1; | |
5848 | REG_POINTER (virtual_cfa_rtx) = 1; | |
89525da0 | 5849 | |
d4c332ff | 5850 | #ifdef STACK_BOUNDARY |
80909c64 | 5851 | REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY; |
5852 | REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY; | |
5853 | REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY; | |
5854 | REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY; | |
5855 | ||
5856 | REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY; | |
5857 | REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY; | |
5858 | REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY; | |
5859 | REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY; | |
213d1448 | 5860 | |
80909c64 | 5861 | REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD; |
d4c332ff | 5862 | #endif |
5863 | ||
89525da0 | 5864 | #ifdef INIT_EXPANDERS |
5865 | INIT_EXPANDERS; | |
5866 | #endif | |
15bbde2b | 5867 | } |
5868 | ||
67c52133 | 5869 | /* Return true if X is a valid element for a duplicated vector constant |
5870 | of the given mode. */ | |
5871 | ||
5872 | bool | |
5873 | valid_for_const_vec_duplicate_p (machine_mode, rtx x) | |
5874 | { | |
5875 | return (CONST_SCALAR_INT_P (x) | |
5876 | || CONST_DOUBLE_AS_FLOAT_P (x) | |
5877 | || CONST_FIXED_P (x)); | |
5878 | } | |
5879 | ||
0b51f5ce | 5880 | /* Like gen_const_vec_duplicate, but ignore const_tiny_rtx. */ |
886cfd4f | 5881 | |
5882 | static rtx | |
0b51f5ce | 5883 | gen_const_vec_duplicate_1 (machine_mode mode, rtx el) |
886cfd4f | 5884 | { |
0b51f5ce | 5885 | int nunits = GET_MODE_NUNITS (mode); |
5886 | rtvec v = rtvec_alloc (nunits); | |
5887 | for (int i = 0; i < nunits; ++i) | |
5888 | RTVEC_ELT (v, i) = el; | |
5889 | return gen_rtx_raw_CONST_VECTOR (mode, v); | |
5890 | } | |
886cfd4f | 5891 | |
0b51f5ce | 5892 | /* Generate a vector constant of mode MODE in which every element has |
5893 | value ELT. */ | |
886cfd4f | 5894 | |
0b51f5ce | 5895 | rtx |
5896 | gen_const_vec_duplicate (machine_mode mode, rtx elt) | |
5897 | { | |
5898 | scalar_mode inner_mode = GET_MODE_INNER (mode); | |
5899 | if (elt == CONST0_RTX (inner_mode)) | |
5900 | return CONST0_RTX (mode); | |
5901 | else if (elt == CONST1_RTX (inner_mode)) | |
5902 | return CONST1_RTX (mode); | |
5903 | else if (elt == CONSTM1_RTX (inner_mode)) | |
5904 | return CONSTM1_RTX (mode); | |
5905 | ||
5906 | return gen_const_vec_duplicate_1 (mode, elt); | |
5907 | } | |
5908 | ||
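/* Example, added for illustration; not part of the original source.
   Duplicating one of the shared tiny constants falls through to the
   cached vector, while any other element builds a fresh CONST_VECTOR
   (V4SImode is assumed to exist on the target):

     rtx x = gen_const_vec_duplicate (V4SImode, const0_rtx);
       -- x == CONST0_RTX (V4SImode), no new rtx allocated
     rtx y = gen_const_vec_duplicate (V4SImode, GEN_INT (5));
       -- y is a fresh (const_vector:V4SI [5 5 5 5])  */
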
5909 | /* Return a vector rtx of mode MODE in which every element has value X. | |
5910 | The result will be a constant if X is constant. */ | |
5911 | ||
5912 | rtx | |
5913 | gen_vec_duplicate (machine_mode mode, rtx x) | |
5914 | { | |
67c52133 | 5915 | if (valid_for_const_vec_duplicate_p (mode, x)) |
0b51f5ce | 5916 | return gen_const_vec_duplicate (mode, x); |
5917 | return gen_rtx_VEC_DUPLICATE (mode, x); | |
5918 | } | |
069b07bf | 5919 | |
ccc2ef18 | 5920 | /* A subroutine of const_vec_series_p that handles the case in which |
5921 | X is known to be an integer CONST_VECTOR. */ | |
5922 | ||
5923 | bool | |
5924 | const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out) | |
5925 | { | |
5926 | unsigned int nelts = CONST_VECTOR_NUNITS (x); | |
5927 | if (nelts < 2) | |
5928 | return false; | |
5929 | ||
5930 | scalar_mode inner = GET_MODE_INNER (GET_MODE (x)); | |
5931 | rtx base = CONST_VECTOR_ELT (x, 0); | |
5932 | rtx step = simplify_binary_operation (MINUS, inner, | |
5933 | CONST_VECTOR_ELT (x, 1), base); | |
5934 | if (rtx_equal_p (step, CONST0_RTX (inner))) | |
5935 | return false; | |
5936 | ||
5937 | for (unsigned int i = 2; i < nelts; ++i) | |
5938 | { | |
5939 | rtx diff = simplify_binary_operation (MINUS, inner, | |
5940 | CONST_VECTOR_ELT (x, i), | |
5941 | CONST_VECTOR_ELT (x, i - 1)); | |
5942 | if (!rtx_equal_p (step, diff)) | |
5943 | return false; | |
5944 | } | |
5945 | ||
5946 | *base_out = base; | |
5947 | *step_out = step; | |
5948 | return true; | |
5949 | } | |
5950 | ||
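/* Worked example, added for illustration; not part of the original
   source.  For an integer CONST_VECTOR X holding [3 5 7 9]:

     rtx base, step;
     bool ok = const_vec_series_p_1 (x, &base, &step);

   ok is true with base == (const_int 3) and step == (const_int 2),
   because every adjacent difference simplifies to 2.  For [3 5 8 9]
   the i == 2 comparison fails and the function returns false.  */
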
5951 | /* Generate a vector constant of mode MODE in which element I has | |
5952 | the value BASE + I * STEP. */ | |
5953 | ||
5954 | rtx | |
5955 | gen_const_vec_series (machine_mode mode, rtx base, rtx step) | |
5956 | { | |
5957 | gcc_assert (CONSTANT_P (base) && CONSTANT_P (step)); | |
5958 | ||
5959 | int nunits = GET_MODE_NUNITS (mode); | |
5960 | rtvec v = rtvec_alloc (nunits); | |
5961 | scalar_mode inner_mode = GET_MODE_INNER (mode); | |
5962 | RTVEC_ELT (v, 0) = base; | |
5963 | for (int i = 1; i < nunits; ++i) | |
5964 | RTVEC_ELT (v, i) = simplify_gen_binary (PLUS, inner_mode, | |
5965 | RTVEC_ELT (v, i - 1), step); | |
5966 | return gen_rtx_raw_CONST_VECTOR (mode, v); | |
5967 | } | |
5968 | ||
5969 | /* Generate a vector of mode MODE in which element I has the value | |
5970 | BASE + I * STEP. The result will be a constant if BASE and STEP | |
5971 | are both constants. */ | |
5972 | ||
5973 | rtx | |
5974 | gen_vec_series (machine_mode mode, rtx base, rtx step) | |
5975 | { | |
5976 | if (step == const0_rtx) | |
5977 | return gen_vec_duplicate (mode, base); | |
5978 | if (CONSTANT_P (base) && CONSTANT_P (step)) | |
5979 | return gen_const_vec_series (mode, base, step); | |
5980 | return gen_rtx_VEC_SERIES (mode, base, step); | |
5981 | } | |
5982 | ||
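/* Example, added for illustration; not part of the original source
   (V4SImode is assumed to exist on the target):

     rtx v = gen_const_vec_series (V4SImode, GEN_INT (3), GEN_INT (2));
       -- v is (const_vector:V4SI [3 5 7 9])

   gen_vec_series with step == const0_rtx reduces to a duplicate, and
   with non-constant operands it emits a VEC_SERIES rtx instead.  */
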
0b51f5ce | 5983 | /* Generate a new vector constant for mode MODE and constant value |
5984 | CONSTANT. */ | |
886cfd4f | 5985 | |
0b51f5ce | 5986 | static rtx |
5987 | gen_const_vector (machine_mode mode, int constant) | |
5988 | { | |
5989 | machine_mode inner = GET_MODE_INNER (mode); | |
886cfd4f | 5990 | |
0b51f5ce | 5991 | gcc_assert (!DECIMAL_FLOAT_MODE_P (inner)); |
5992 | ||
5993 | rtx el = const_tiny_rtx[constant][(int) inner]; | |
5994 | gcc_assert (el); | |
886cfd4f | 5995 | |
0b51f5ce | 5996 | return gen_const_vec_duplicate_1 (mode, el); |
886cfd4f | 5997 | } |
5998 | ||
9426b612 | 5999 | /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector |
6e68dcb2 | 6000 | when all elements are zero, and the one vector when all elements are one. */ |
9426b612 | 6001 | rtx |
3754d046 | 6002 | gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v) |
9426b612 | 6003 | { |
0b51f5ce | 6004 | gcc_assert (GET_MODE_NUNITS (mode) == GET_NUM_ELEM (v)); |
6e68dcb2 | 6005 | |
6006 | /* If the values are all the same, check to see if we can use one of the | |
6007 | standard constant vectors. */ | |
0b51f5ce | 6008 | if (rtvec_all_equal_p (v)) |
6009 | return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0)); | |
6e68dcb2 | 6010 | |
6011 | return gen_rtx_raw_CONST_VECTOR (mode, v); | |
9426b612 | 6012 | } |
6013 | ||
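/* Example, added for illustration; not part of the original source.
   A vector whose elements are all equal collapses to the shared
   duplicate form (again assuming V4SImode exists):

     rtvec v = rtvec_alloc (4);
     for (int i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const1_rtx;
     rtx x = gen_rtx_CONST_VECTOR (V4SImode, v);
       -- x == CONST1_RTX (V4SImode); the fresh rtvec goes unused  */
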
6d8b68a3 | 6014 | /* Initialize global register information required by all functions. */ |
6015 | ||
6016 | void | |
6017 | init_emit_regs (void) | |
6018 | { | |
6019 | int i; | |
3754d046 | 6020 | machine_mode mode; |
d83fcaa1 | 6021 | mem_attrs *attrs; |
6d8b68a3 | 6022 | |
6023 | /* Reset register attributes. */ | |
f863a586 | 6024 | reg_attrs_htab->empty (); |
6d8b68a3 | 6025 | |
6026 | /* We need reg_raw_mode, so initialize the modes now. */ | |
6027 | init_reg_modes_target (); | |
6028 | ||
6029 | /* Assign register numbers to the globally defined register rtx. */ | |
6d8b68a3 | 6030 | stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM); |
6031 | frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM); | |
6032 | hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM); | |
6033 | arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM); | |
6034 | virtual_incoming_args_rtx = | |
6035 | gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM); | |
6036 | virtual_stack_vars_rtx = | |
6037 | gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM); | |
6038 | virtual_stack_dynamic_rtx = | |
6039 | gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM); | |
6040 | virtual_outgoing_args_rtx = | |
6041 | gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM); | |
6042 | virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM); | |
60778e62 | 6043 | virtual_preferred_stack_boundary_rtx = |
6044 | gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM); | |
6d8b68a3 | 6045 | |
6046 | /* Initialize RTL for commonly used hard registers. These are | |
6047 | copied into regno_reg_rtx as we begin to compile each function. */ | |
6048 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
679bcc8d | 6049 | initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i); |
6d8b68a3 | 6050 | |
6051 | #ifdef RETURN_ADDRESS_POINTER_REGNUM | |
6052 | return_address_pointer_rtx | |
6053 | = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM); | |
6054 | #endif | |
6055 | ||
639f32a2 | 6056 | pic_offset_table_rtx = NULL_RTX; |
6d8b68a3 | 6057 | if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM) |
6058 | pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM); | |
d83fcaa1 | 6059 | |
6060 | for (i = 0; i < (int) MAX_MACHINE_MODE; i++) | |
6061 | { | |
3754d046 | 6062 | mode = (machine_mode) i; |
25a27413 | 6063 | attrs = ggc_cleared_alloc<mem_attrs> (); |
d83fcaa1 | 6064 | attrs->align = BITS_PER_UNIT; |
6065 | attrs->addrspace = ADDR_SPACE_GENERIC; | |
6066 | if (mode != BLKmode) | |
6067 | { | |
6d58bcba | 6068 | attrs->size_known_p = true; |
6069 | attrs->size = GET_MODE_SIZE (mode); | |
d83fcaa1 | 6070 | if (STRICT_ALIGNMENT) |
6071 | attrs->align = GET_MODE_ALIGNMENT (mode); | |
6072 | } | |
6073 | mode_mem_attrs[i] = attrs; | |
6074 | } | |
15b08c01 | 6075 | |
6076 | split_branch_probability = profile_probability::uninitialized (); | |
6d8b68a3 | 6077 | } |
6078 | ||
8059b95a | 6079 | /* Initialize global machine_mode variables. */ |
6080 | ||
6081 | void | |
6082 | init_derived_machine_modes (void) | |
6083 | { | |
af8303fa | 6084 | opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode; |
6085 | FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT) | |
8059b95a | 6086 | { |
af8303fa | 6087 | scalar_int_mode mode = mode_iter.require (); |
6088 | ||
8059b95a | 6089 | if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT |
af8303fa | 6090 | && !opt_byte_mode.exists ()) |
6091 | opt_byte_mode = mode; | |
8059b95a | 6092 | |
6093 | if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD | |
af8303fa | 6094 | && !opt_word_mode.exists ()) |
6095 | opt_word_mode = mode; | |
8059b95a | 6096 | } |
6097 | ||
af8303fa | 6098 | byte_mode = opt_byte_mode.require (); |
6099 | word_mode = opt_word_mode.require (); | |
db22dc71 | 6100 | ptr_mode = as_a <scalar_int_mode> |
6101 | (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ()); | |
8059b95a | 6102 | } |
6103 | ||
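/* Added note, for illustration only: on a typical LP64 target with
   8-bit units and 64-bit words, the loop above resolves byte_mode to
   QImode and word_mode to DImode, and ptr_mode becomes the integer
   mode whose width matches POINTER_SIZE.  Each .require () triggers
   an internal error if no MODE_INT mode of the needed width exists.  */
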
01703575 | 6104 | /* Create some permanent unique rtl objects shared between all functions. */ |
15bbde2b | 6105 | |
6106 | void | |
01703575 | 6107 | init_emit_once (void) |
15bbde2b | 6108 | { |
6109 | int i; | |
3754d046 | 6110 | machine_mode mode; |
99d671f4 | 6111 | scalar_float_mode double_mode; |
2b8f5b8a | 6112 | opt_scalar_mode smode_iter; |
15bbde2b | 6113 | |
e913b5cd | 6114 | /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE, |
6115 | CONST_FIXED, and memory attribute hash tables. */ | |
f863a586 | 6116 | const_int_htab = hash_table<const_int_hasher>::create_ggc (37); |
c6259b83 | 6117 | |
e913b5cd | 6118 | #if TARGET_SUPPORTS_WIDE_INT |
f863a586 | 6119 | const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37); |
e913b5cd | 6120 | #endif |
f863a586 | 6121 | const_double_htab = hash_table<const_double_hasher>::create_ggc (37); |
2ff23ed0 | 6122 | |
bbad7cd0 | 6123 | if (NUM_POLY_INT_COEFFS > 1) |
6124 | const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37); | |
6125 | ||
f863a586 | 6126 | const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37); |
e397ad8e | 6127 | |
f863a586 | 6128 | reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37); |
77695070 | 6129 | |
57c097d5 | 6130 | #ifdef INIT_EXPANDERS |
ab5beff9 | 6131 | /* This is to initialize {init|mark|free}_machine_status before the first |
6132 | call to push_function_context_to. This is needed by the Chill front | |
3fb1e43b | 6133 | end, which calls push_function_context_to before the first call to |
57c097d5 | 6134 | init_function_start. */ |
6135 | INIT_EXPANDERS; | |
6136 | #endif | |
6137 | ||
15bbde2b | 6138 | /* Create the unique rtx's for certain rtx codes and operand values. */ |
6139 | ||
48a7e3d1 | 6140 | /* Process stack-limiting command-line options. */ |
6141 | if (opt_fstack_limit_symbol_arg != NULL) | |
6142 | stack_limit_rtx | |
6143 | = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg)); | |
6144 | if (opt_fstack_limit_register_no >= 0) | |
6145 | stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no); | |
6146 | ||
8fd5918e | 6147 | /* Don't use gen_rtx_CONST_INT here, since in this case gen_rtx_CONST_INT |
7014838c | 6148 | itself tries to use these variables. */ |
15bbde2b | 6149 | for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++) |
d823ba47 | 6150 | const_int_rtx[i + MAX_SAVED_CONST_INT] = |
a717d5b4 | 6151 | gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i); |
15bbde2b | 6152 | |
1a60f06a | 6153 | if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT |
6154 | && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT) | |
57c097d5 | 6155 | const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT]; |
1a60f06a | 6156 | else |
3ad7bb1c | 6157 | const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE); |
15bbde2b | 6158 | |
99d671f4 | 6159 | double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require (); |
8059b95a | 6160 | |
cc69d08a | 6161 | real_from_integer (&dconst0, double_mode, 0, SIGNED); |
6162 | real_from_integer (&dconst1, double_mode, 1, SIGNED); | |
6163 | real_from_integer (&dconst2, double_mode, 2, SIGNED); | |
3fa759a9 | 6164 | |
6165 | dconstm1 = dconst1; | |
6166 | dconstm1.sign = 1; | |
77e89269 | 6167 | |
6168 | dconsthalf = dconst1; | |
9d96125b | 6169 | SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1); |
15bbde2b | 6170 | |
ba8dfb08 | 6171 | for (i = 0; i < 3; i++) |
15bbde2b | 6172 | { |
3fa759a9 | 6173 | const REAL_VALUE_TYPE *const r = |
badfe841 | 6174 | (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2); |
6175 | ||
19a4dce4 | 6176 | FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT) |
069b07bf | 6177 | const_tiny_rtx[i][(int) mode] = |
d5f9611d | 6178 | const_double_from_real_value (*r, mode); |
069b07bf | 6179 | |
19a4dce4 | 6180 | FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT) |
2ff23ed0 | 6181 | const_tiny_rtx[i][(int) mode] = |
d5f9611d | 6182 | const_double_from_real_value (*r, mode); |
15bbde2b | 6183 | |
b572011e | 6184 | const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i); |
15bbde2b | 6185 | |
19a4dce4 | 6186 | FOR_EACH_MODE_IN_CLASS (mode, MODE_INT) |
b572011e | 6187 | const_tiny_rtx[i][(int) mode] = GEN_INT (i); |
7540dcc4 | 6188 | |
8c20007a | 6189 | for (mode = MIN_MODE_PARTIAL_INT; |
6190 | mode <= MAX_MODE_PARTIAL_INT; | |
3754d046 | 6191 | mode = (machine_mode)((int)(mode) + 1)) |
7540dcc4 | 6192 | const_tiny_rtx[i][(int) mode] = GEN_INT (i); |
15bbde2b | 6193 | } |
6194 | ||
ba8dfb08 | 6195 | const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx; |
6196 | ||
19a4dce4 | 6197 | FOR_EACH_MODE_IN_CLASS (mode, MODE_INT) |
ba8dfb08 | 6198 | const_tiny_rtx[3][(int) mode] = constm1_rtx; |
6199 | ||
8c20007a | 6200 | for (mode = MIN_MODE_PARTIAL_INT; |
6201 | mode <= MAX_MODE_PARTIAL_INT; | |
3754d046 | 6202 | mode = (machine_mode)((int)(mode) + 1)) |
dd276d20 | 6203 | const_tiny_rtx[3][(int) mode] = constm1_rtx; |
19a4dce4 | 6204 | |
6205 | FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT) | |
4248fc32 | 6206 | { |
6207 | rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)]; | |
6208 | const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner); | |
6209 | } | |
6210 | ||
19a4dce4 | 6211 | FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT) |
4248fc32 | 6212 | { |
6213 | rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)]; | |
6214 | const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner); | |
6215 | } | |
6216 | ||
19a4dce4 | 6217 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT) |
6e68dcb2 | 6218 | { |
6219 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6220 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
ba8dfb08 | 6221 | const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3); |
6e68dcb2 | 6222 | } |
886cfd4f | 6223 | |
19a4dce4 | 6224 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT) |
6e68dcb2 | 6225 | { |
6226 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6227 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
6228 | } | |
886cfd4f | 6229 | |
2b8f5b8a | 6230 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT) |
06f0b99c | 6231 | { |
2b8f5b8a | 6232 | scalar_mode smode = smode_iter.require (); |
6233 | FCONST0 (smode).data.high = 0; | |
6234 | FCONST0 (smode).data.low = 0; | |
6235 | FCONST0 (smode).mode = smode; | |
6236 | const_tiny_rtx[0][(int) smode] | |
6237 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
06f0b99c | 6238 | } |
6239 | ||
2b8f5b8a | 6240 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT) |
06f0b99c | 6241 | { |
2b8f5b8a | 6242 | scalar_mode smode = smode_iter.require (); |
6243 | FCONST0 (smode).data.high = 0; | |
6244 | FCONST0 (smode).data.low = 0; | |
6245 | FCONST0 (smode).mode = smode; | |
6246 | const_tiny_rtx[0][(int) smode] | |
6247 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
06f0b99c | 6248 | } |
6249 | ||
2b8f5b8a | 6250 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM) |
06f0b99c | 6251 | { |
2b8f5b8a | 6252 | scalar_mode smode = smode_iter.require (); |
6253 | FCONST0 (smode).data.high = 0; | |
6254 | FCONST0 (smode).data.low = 0; | |
6255 | FCONST0 (smode).mode = smode; | |
6256 | const_tiny_rtx[0][(int) smode] | |
6257 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
06f0b99c | 6258 | |
6259 | /* We store the value 1. */ | |
2b8f5b8a | 6260 | FCONST1 (smode).data.high = 0; |
6261 | FCONST1 (smode).data.low = 0; | |
6262 | FCONST1 (smode).mode = smode; | |
6263 | FCONST1 (smode).data | |
6264 | = double_int_one.lshift (GET_MODE_FBIT (smode), | |
d67b7119 | 6265 | HOST_BITS_PER_DOUBLE_INT, |
2b8f5b8a | 6266 | SIGNED_FIXED_POINT_MODE_P (smode)); |
6267 | const_tiny_rtx[1][(int) smode] | |
6268 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode); | |
06f0b99c | 6269 | } |
6270 | ||
2b8f5b8a | 6271 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM) |
06f0b99c | 6272 | { |
2b8f5b8a | 6273 | scalar_mode smode = smode_iter.require (); |
6274 | FCONST0 (smode).data.high = 0; | |
6275 | FCONST0 (smode).data.low = 0; | |
6276 | FCONST0 (smode).mode = smode; | |
6277 | const_tiny_rtx[0][(int) smode] | |
6278 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
06f0b99c | 6279 | |
6280 | /* We store the value 1. */ | |
2b8f5b8a | 6281 | FCONST1 (smode).data.high = 0; |
6282 | FCONST1 (smode).data.low = 0; | |
6283 | FCONST1 (smode).mode = smode; | |
6284 | FCONST1 (smode).data | |
6285 | = double_int_one.lshift (GET_MODE_FBIT (smode), | |
d67b7119 | 6286 | HOST_BITS_PER_DOUBLE_INT, |
2b8f5b8a | 6287 | SIGNED_FIXED_POINT_MODE_P (smode)); |
6288 | const_tiny_rtx[1][(int) smode] | |
6289 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode); | |
e397ad8e | 6290 | } |
6291 | ||
19a4dce4 | 6292 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT) |
e397ad8e | 6293 | { |
6294 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6295 | } | |
6296 | ||
19a4dce4 | 6297 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT) |
e397ad8e | 6298 | { |
6299 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6300 | } | |
6301 | ||
19a4dce4 | 6302 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM) |
e397ad8e | 6303 | { |
6304 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6305 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
6306 | } | |
6307 | ||
19a4dce4 | 6308 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM) |
e397ad8e | 6309 | { |
6310 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6311 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
06f0b99c | 6312 | } |
6313 | ||
0fd4500a | 6314 | for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i) |
3754d046 | 6315 | if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC) |
0fd4500a | 6316 | const_tiny_rtx[0][i] = const0_rtx; |
15bbde2b | 6317 | |
065336b4 | 6318 | const_tiny_rtx[0][(int) BImode] = const0_rtx; |
6319 | if (STORE_FLAG_VALUE == 1) | |
6320 | const_tiny_rtx[1][(int) BImode] = const1_rtx; | |
7d7b0bac | 6321 | |
2b8f5b8a | 6322 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS) |
058a1b7a | 6323 | { |
2b8f5b8a | 6324 | scalar_mode smode = smode_iter.require (); |
6325 | wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode)); | |
6326 | const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode); | |
058a1b7a | 6327 | } |
6328 | ||
7d7b0bac | 6329 | pc_rtx = gen_rtx_fmt_ (PC, VOIDmode); |
6330 | ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode); | |
6331 | simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode); | |
6332 | cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode); | |
f9a00e9e | 6333 | invalid_insn_rtx = gen_rtx_INSN (VOIDmode, |
6334 | /*prev_insn=*/NULL, | |
6335 | /*next_insn=*/NULL, | |
6336 | /*bb=*/NULL, | |
6337 | /*pattern=*/NULL_RTX, | |
6338 | /*location=*/-1, | |
6339 | CODE_FOR_nothing, | |
6340 | /*reg_notes=*/NULL_RTX); | |
15bbde2b | 6341 | } |
ac6c481d | 6342 | \f |
cd0fe062 | 6343 | /* Produce an exact duplicate of insn INSN after AFTER. |
6344 | Take care to update libcall regions if present. */ | |
6345 | ||
722334ea | 6346 | rtx_insn * |
5e9c670f | 6347 | emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after) |
cd0fe062 | 6348 | { |
722334ea | 6349 | rtx_insn *new_rtx; |
6350 | rtx link; | |
cd0fe062 | 6351 | |
6352 | switch (GET_CODE (insn)) | |
6353 | { | |
6354 | case INSN: | |
9ce37fa7 | 6355 | new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after); |
cd0fe062 | 6356 | break; |
6357 | ||
6358 | case JUMP_INSN: | |
9ce37fa7 | 6359 | new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after); |
01762951 | 6360 | CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn); |
cd0fe062 | 6361 | break; |
6362 | ||
9845d120 | 6363 | case DEBUG_INSN: |
6364 | new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after); | |
6365 | break; | |
6366 | ||
cd0fe062 | 6367 | case CALL_INSN: |
9ce37fa7 | 6368 | new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after); |
cd0fe062 | 6369 | if (CALL_INSN_FUNCTION_USAGE (insn)) |
9ce37fa7 | 6370 | CALL_INSN_FUNCTION_USAGE (new_rtx) |
cd0fe062 | 6371 | = copy_insn (CALL_INSN_FUNCTION_USAGE (insn)); |
9ce37fa7 | 6372 | SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn); |
6373 | RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn); | |
6374 | RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn); | |
48e1416a | 6375 | RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx) |
9c2a0c05 | 6376 | = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn); |
cd0fe062 | 6377 | break; |
6378 | ||
6379 | default: | |
611234b4 | 6380 | gcc_unreachable (); |
cd0fe062 | 6381 | } |
6382 | ||
6383 | /* Update LABEL_NUSES. */ | |
9ce37fa7 | 6384 | mark_jump_label (PATTERN (new_rtx), new_rtx, 0); |
cd0fe062 | 6385 | |
5169661d | 6386 | INSN_LOCATION (new_rtx) = INSN_LOCATION (insn); |
ab87d1bc | 6387 | |
98116afd | 6388 | /* If the old insn is frame related, then so is the new one. This is |
6389 | primarily needed for IA-64 unwind info which marks epilogue insns, | |
6390 | which may be duplicated by the basic block reordering code. */ | |
9ce37fa7 | 6391 | RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn); |
98116afd | 6392 | |
bb99ba64 | 6393 | /* Locate the end of existing REG_NOTES in NEW_RTX. */ |
6394 | rtx *ptail = ®_NOTES (new_rtx); | |
6395 | while (*ptail != NULL_RTX) | |
6396 | ptail = &XEXP (*ptail, 1); | |
6397 | ||
19d2fe05 | 6398 | /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label |
6399 | will make them. REG_LABEL_TARGETs are created there too, but are | |
6400 | supposed to be sticky, so we copy them. */ | |
cd0fe062 | 6401 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
19d2fe05 | 6402 | if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND) |
cd0fe062 | 6403 | { |
bb99ba64 | 6404 | *ptail = duplicate_reg_note (link); |
6405 | ptail = &XEXP (*ptail, 1); | |
cd0fe062 | 6406 | } |
6407 | ||
9ce37fa7 | 6408 | INSN_CODE (new_rtx) = INSN_CODE (insn); |
6409 | return new_rtx; | |
cd0fe062 | 6410 | } |
1f3233d1 | 6411 | |
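/* Usage sketch, added for illustration; not part of the original
   source:

     rtx_insn *copy = emit_copy_of_insn_after (insn, after);

   COPY gets a fresh UID but the same pattern (via copy_insn), the
   same location and frame-relatedness, and all REG_NOTES except
   REG_LABEL_OPERAND, which mark_jump_label recreates.  */
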
7035b2ab | 6412 | static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER]; |
c09425a0 | 6413 | rtx |
3754d046 | 6414 | gen_hard_reg_clobber (machine_mode mode, unsigned int regno) |
c09425a0 | 6415 | { |
6416 | if (hard_reg_clobbers[mode][regno]) | |
6417 | return hard_reg_clobbers[mode][regno]; | |
6418 | else | |
6419 | return (hard_reg_clobbers[mode][regno] = | |
6420 | gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno))); | |
6421 | } | |
6422 | ||
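/* Example, added for illustration; not part of the original source.
   Repeated requests for the same (mode, regno) pair return the one
   cached rtx, so pointer equality holds (register number 0 here is
   just a placeholder):

     rtx c1 = gen_hard_reg_clobber (SImode, 0);
     rtx c2 = gen_hard_reg_clobber (SImode, 0);
     gcc_assert (c1 == c2);  -- the (clobber (reg:SI 0)) is shared  */
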
5169661d | 6423 | location_t prologue_location; |
6424 | location_t epilogue_location; | |
23a070f3 | 6425 | |
6426 | /* Hold current location information and last location information, so the | |
6427 | datastructures are built lazily only when some instructions in given | |
6428 | place are needed. */ | |
c7abeac5 | 6429 | static location_t curr_location; |
23a070f3 | 6430 | |
5169661d | 6431 | /* Allocate the insn location data structure. */ |
23a070f3 | 6432 | void |
5169661d | 6433 | insn_locations_init (void) |
23a070f3 | 6434 | { |
5169661d | 6435 | prologue_location = epilogue_location = 0; |
23a070f3 | 6436 | curr_location = UNKNOWN_LOCATION; |
23a070f3 | 6437 | } |
6438 | ||
6439 | /* At the end of the emit stage, clear the current location. */ | |
6440 | void | |
5169661d | 6441 | insn_locations_finalize (void) |
23a070f3 | 6442 | { |
5169661d | 6443 | epilogue_location = curr_location; |
6444 | curr_location = UNKNOWN_LOCATION; | |
23a070f3 | 6445 | } |
6446 | ||
6447 | /* Set current location. */ | |
6448 | void | |
5169661d | 6449 | set_curr_insn_location (location_t location) |
23a070f3 | 6450 | { |
23a070f3 | 6451 | curr_location = location; |
6452 | } | |
6453 | ||
6454 | /* Get current location. */ | |
6455 | location_t | |
5169661d | 6456 | curr_insn_location (void) |
23a070f3 | 6457 | { |
6458 | return curr_location; | |
6459 | } | |
6460 | ||
23a070f3 | 6461 | /* Return the lexical scope block that INSN belongs to. */ |
6462 | tree | |
5e9c670f | 6463 | insn_scope (const rtx_insn *insn) |
23a070f3 | 6464 | { |
5169661d | 6465 | return LOCATION_BLOCK (INSN_LOCATION (insn)); |
23a070f3 | 6466 | } |
6467 | ||
6468 | /* Return line number of the statement that produced this insn. */ | |
6469 | int | |
5e9c670f | 6470 | insn_line (const rtx_insn *insn) |
23a070f3 | 6471 | { |
5169661d | 6472 | return LOCATION_LINE (INSN_LOCATION (insn)); |
23a070f3 | 6473 | } |
6474 | ||
6475 | /* Return source file of the statement that produced this insn. */ | |
6476 | const char * | |
5e9c670f | 6477 | insn_file (const rtx_insn *insn) |
23a070f3 | 6478 | { |
5169661d | 6479 | return LOCATION_FILE (INSN_LOCATION (insn)); |
23a070f3 | 6480 | } |
30c3c442 | 6481 | |
0e7ae557 | 6482 | /* Return expanded location of the statement that produced this insn. */ |
6483 | expanded_location | |
5e9c670f | 6484 | insn_location (const rtx_insn *insn) |
0e7ae557 | 6485 | { |
6486 | return expand_location (INSN_LOCATION (insn)); | |
6487 | } | |
6488 | ||
30c3c442 | 6489 | /* Return true if memory model MODEL requires a pre-operation (release-style) |
6490 | barrier or a post-operation (acquire-style) barrier. While not universal, | |
6491 | this function matches the behavior of several targets. */ | |
6492 | ||
6493 | bool | |
6494 | need_atomic_barrier_p (enum memmodel model, bool pre) | |
6495 | { | |
e205c62d | 6496 | switch (model & MEMMODEL_BASE_MASK) |
30c3c442 | 6497 | { |
6498 | case MEMMODEL_RELAXED: | |
6499 | case MEMMODEL_CONSUME: | |
6500 | return false; | |
6501 | case MEMMODEL_RELEASE: | |
6502 | return pre; | |
6503 | case MEMMODEL_ACQUIRE: | |
6504 | return !pre; | |
6505 | case MEMMODEL_ACQ_REL: | |
6506 | case MEMMODEL_SEQ_CST: | |
6507 | return true; | |
6508 | default: | |
6509 | gcc_unreachable (); | |
6510 | } | |
6511 | } | |
2add0b64 | 6512 | |
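/* Example, added for illustration of the mapping above:

     need_atomic_barrier_p (MEMMODEL_RELEASE, true)   -- true
     need_atomic_barrier_p (MEMMODEL_RELEASE, false)  -- false
     need_atomic_barrier_p (MEMMODEL_ACQUIRE, true)   -- false
     need_atomic_barrier_p (MEMMODEL_SEQ_CST, false)  -- true

   Release orderings fence before the operation, acquire orderings
   fence after it, and ACQ_REL/SEQ_CST fence on both sides.  */
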
bd39703a | 6513 | /* Return a constant shift amount for shifting a value of mode MODE |
6514 | by VALUE bits. */ | |
6515 | ||
6516 | rtx | |
bbad7cd0 | 6517 | gen_int_shift_amount (machine_mode, poly_int64 value) |
bd39703a | 6518 | { |
6519 | /* Use a 64-bit mode, to avoid any truncation. | |
6520 | ||
6521 | ??? Perhaps this should be automatically derived from the .md files | |
6522 | instead, or perhaps have a target hook. */ | |
6523 | scalar_int_mode shift_mode = (BITS_PER_UNIT == 8 | |
6524 | ? DImode | |
6525 | : int_mode_for_size (64, 0).require ()); | |
6526 | return gen_int_mode (value, shift_mode); | |
6527 | } | |
6528 | ||
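/* Example, added for illustration; not part of the original source.
   The shift amount is rendered in a 64-bit integer mode regardless of
   the mode being shifted, so no count is ever truncated:

     rtx amt = gen_int_shift_amount (SImode, 5);
       -- amt is (const_int 5); the unused mode argument reflects that
          the choice does not yet depend on the shifted mode  */
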
2add0b64 | 6529 | /* Initialize fields of rtl_data related to stack alignment. */ |
6530 | ||
6531 | void | |
6532 | rtl_data::init_stack_alignment () | |
6533 | { | |
6534 | stack_alignment_needed = STACK_BOUNDARY; | |
6535 | max_used_stack_slot_alignment = STACK_BOUNDARY; | |
6536 | stack_alignment_estimated = 0; | |
6537 | preferred_stack_boundary = STACK_BOUNDARY; | |
6538 | } | |
6539 | ||
30c3c442 | 6540 | \f |
1f3233d1 | 6541 | #include "gt-emit-rtl.h" |