Commit | Line | Data |
---|---|---|
5e6908ea | 1 | /* Emit RTL for the GCC expander. |
cbe34bb5 | 2 | Copyright (C) 1987-2017 Free Software Foundation, Inc. |
23b2ce53 | 3 | |
1322177d | 4 | This file is part of GCC. |
23b2ce53 | 5 | |
1322177d LB |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 8 | Software Foundation; either version 3, or (at your option) any later |
1322177d | 9 | version. |
23b2ce53 | 10 | |
1322177d LB |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
23b2ce53 RS |
15 | |
16 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
23b2ce53 RS |
19 | |
20 | ||
21 | /* Middle-to-low level generation of rtx code and insns. | |
22 | ||
f822fcf7 KH |
23 | This file contains support functions for creating rtl expressions |
24 | and manipulating them in the doubly-linked chain of insns. | |
23b2ce53 RS |
25 | |
26 | The patterns of the insns are created by machine-dependent | |
27 | routines in insn-emit.c, which is generated automatically from | |
f822fcf7 KH |
28 | the machine description. These routines make the individual rtx's |
29 | of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch], | |
30 | which are automatically generated from rtl.def; what is machine | |
a2a8cc44 KH |
31 | dependent is the kind of rtx's they make and what arguments they |
32 | use. */ | |
23b2ce53 RS |
33 | |
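As the comment above notes, the pattern constructors are thin wrappers over the format-specific `gen_rtx_fmt_*` routines generated from rtl.def. A minimal illustrative sketch (not part of emit-rtl.c; it assumes GCC's internal rtl.h environment and the helper name is hypothetical) of how client code builds a pattern with them:

```c
/* Illustration only: build (set DEST (plus:SI SRC (const_int 4))).
   gen_rtx_PLUS is the machine-generated macro that expands to
   gen_rtx_fmt_ee (PLUS, SImode, ...), as described above.  */
static rtx
sketch_build_add_pattern (rtx dest, rtx src)
{
  rtx sum = gen_rtx_PLUS (SImode, src, gen_int_mode (4, SImode));
  return gen_rtx_SET (dest, sum);
}
```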
34 | #include "config.h" | |
670ee920 | 35 | #include "system.h" |
4977bab6 | 36 | #include "coretypes.h" |
4d0cdd0c | 37 | #include "memmodel.h" |
c7131fb2 | 38 | #include "backend.h" |
957060b5 | 39 | #include "target.h" |
23b2ce53 | 40 | #include "rtl.h" |
957060b5 | 41 | #include "tree.h" |
c7131fb2 | 42 | #include "df.h" |
957060b5 AM |
43 | #include "tm_p.h" |
44 | #include "stringpool.h" | |
957060b5 AM |
45 | #include "insn-config.h" |
46 | #include "regs.h" | |
47 | #include "emit-rtl.h" | |
48 | #include "recog.h" | |
c7131fb2 | 49 | #include "diagnostic-core.h" |
40e23961 | 50 | #include "alias.h" |
40e23961 | 51 | #include "fold-const.h" |
d8a2d370 | 52 | #include "varasm.h" |
60393bbc | 53 | #include "cfgrtl.h" |
60393bbc | 54 | #include "tree-eh.h" |
36566b39 | 55 | #include "explow.h" |
23b2ce53 | 56 | #include "expr.h" |
b5b8b0ac | 57 | #include "params.h" |
9b2b7279 | 58 | #include "builtins.h" |
9021b8ec | 59 | #include "rtl-iter.h" |
1f9ceff1 | 60 | #include "stor-layout.h" |
ecf835e9 | 61 | #include "opts.h" |
5fa396ad | 62 | #include "predict.h" |
ca695ac9 | 63 | |
5fb0e246 RS |
64 | struct target_rtl default_target_rtl; |
65 | #if SWITCHABLE_TARGET | |
66 | struct target_rtl *this_target_rtl = &default_target_rtl; | |
67 | #endif | |
68 | ||
69 | #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx) | |
70 | ||
1d445e9e ILT |
71 | /* Commonly used modes. */ |
72 | ||
501623d4 RS |
73 | scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */ |
74 | scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */ | |
75 | scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */ | |
1d445e9e | 76 | |
bd60bab2 JH |
77 | /* Datastructures maintained for currently processed function in RTL form. */ |
78 | ||
3e029763 | 79 | struct rtl_data x_rtl; |
bd60bab2 JH |
80 | |
81 | /* Indexed by pseudo register number, gives the rtx for that pseudo. | |
b8698a0f | 82 | Allocated in parallel with regno_pointer_align. |
bd60bab2 JH |
83 | FIXME: We could put it into emit_status struct, but gengtype is not able to deal |
84 | with length attribute nested in top level structures. */ | |
85 | ||
86 | rtx * regno_reg_rtx; | |
23b2ce53 RS |
87 | |
88 | /* This is *not* reset after each function. It gives each CODE_LABEL | |
89 | in the entire compilation a unique label number. */ | |
90 | ||
044b4de3 | 91 | static GTY(()) int label_num = 1; |
23b2ce53 | 92 | |
23b2ce53 RS |
93 | /* We record floating-point CONST_DOUBLEs in each floating-point mode for |
94 | the values of 0, 1, and 2. For the integer entries and VOIDmode, we | |
e7c82a99 JJ |
95 | record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX |
96 | is set only for MODE_INT and MODE_VECTOR_INT modes. */ | |
23b2ce53 | 97 | |
e7c82a99 | 98 | rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE]; |
23b2ce53 | 99 | |
68d75312 JC |
100 | rtx const_true_rtx; |
101 | ||
23b2ce53 RS |
102 | REAL_VALUE_TYPE dconst0; |
103 | REAL_VALUE_TYPE dconst1; | |
104 | REAL_VALUE_TYPE dconst2; | |
105 | REAL_VALUE_TYPE dconstm1; | |
03f2ea93 | 106 | REAL_VALUE_TYPE dconsthalf; |
23b2ce53 | 107 | |
325217ed CF |
108 | /* Record fixed-point constant 0 and 1. */ |
109 | FIXED_VALUE_TYPE fconst0[MAX_FCONST0]; | |
110 | FIXED_VALUE_TYPE fconst1[MAX_FCONST1]; | |
111 | ||
23b2ce53 RS |
112 | /* We make one copy of (const_int C) where C is in |
113 | [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] | |
114 | to save space during the compilation and simplify comparisons of | |
115 | integers. */ | |
116 | ||
5da077de | 117 | rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1]; |
23b2ce53 | 118 | |
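Because the constants in this range are pre-allocated and shared, pointer comparison is the idiomatic test for small CONST_INTs. A sketch of that idiom (the helper name is hypothetical, not part of this file):

```c
/* Illustration only: const0_rtx and const1_rtx index into const_int_rtx,
   and gen_rtx_CONST_INT hands back those same objects, so == suffices.  */
static bool
sketch_is_zero_or_one (rtx x)
{
  return x == const0_rtx || x == const1_rtx;
}
```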
ca4adc91 RS |
119 | /* Standard pieces of rtx, to be substituted directly into things. */ |
120 | rtx pc_rtx; | |
121 | rtx ret_rtx; | |
122 | rtx simple_return_rtx; | |
123 | rtx cc0_rtx; | |
124 | ||
1476d1bd MM |
125 | /* Marker used for denoting an INSN, which should never be accessed (i.e., |
126 | this pointer should normally never be dereferenced), but is required to be | |
127 | distinct from NULL_RTX. Currently used by peephole2 pass. */ | |
128 | rtx_insn *invalid_insn_rtx; | |
129 | ||
c13e8210 MM |
130 | /* A hash table storing CONST_INTs whose absolute value is greater |
131 | than MAX_SAVED_CONST_INT. */ | |
132 | ||
6c907cff | 133 | struct const_int_hasher : ggc_cache_ptr_hash<rtx_def> |
aebf76a2 TS |
134 | { |
135 | typedef HOST_WIDE_INT compare_type; | |
136 | ||
137 | static hashval_t hash (rtx i); | |
138 | static bool equal (rtx i, HOST_WIDE_INT h); | |
139 | }; | |
c13e8210 | 140 | |
aebf76a2 TS |
141 | static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab; |
142 | ||
6c907cff | 143 | struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def> |
aebf76a2 TS |
144 | { |
145 | static hashval_t hash (rtx x); | |
146 | static bool equal (rtx x, rtx y); | |
147 | }; | |
148 | ||
149 | static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab; | |
807e902e | 150 | |
0c12fc9b RS |
151 | struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def> |
152 | { | |
153 | typedef std::pair<machine_mode, poly_wide_int_ref> compare_type; | |
154 | ||
155 | static hashval_t hash (rtx x); | |
156 | static bool equal (rtx x, const compare_type &y); | |
157 | }; | |
158 | ||
159 | static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab; | |
160 | ||
a560d4d4 | 161 | /* A hash table storing register attribute structures. */ |
6c907cff | 162 | struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs> |
aebf76a2 TS |
163 | { |
164 | static hashval_t hash (reg_attrs *x); | |
165 | static bool equal (reg_attrs *a, reg_attrs *b); | |
166 | }; | |
167 | ||
168 | static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab; | |
a560d4d4 | 169 | |
5692c7bc | 170 | /* A hash table storing all CONST_DOUBLEs. */ |
6c907cff | 171 | struct const_double_hasher : ggc_cache_ptr_hash<rtx_def> |
aebf76a2 TS |
172 | { |
173 | static hashval_t hash (rtx x); | |
174 | static bool equal (rtx x, rtx y); | |
175 | }; | |
176 | ||
177 | static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab; | |
5692c7bc | 178 | |
091a3ac7 | 179 | /* A hash table storing all CONST_FIXEDs. */ |
6c907cff | 180 | struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def> |
aebf76a2 TS |
181 | { |
182 | static hashval_t hash (rtx x); | |
183 | static bool equal (rtx x, rtx y); | |
184 | }; | |
185 | ||
186 | static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab; | |
091a3ac7 | 187 | |
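These GTY ((cache)) tables are allocated once during RTL initialization (init_emit_once, later in this file) and then queried with the find_slot family, as gen_rtx_CONST_INT does below. A rough sketch of the creation call; the initial size shown here is only indicative:

```c
/* Illustration only: creating one of the garbage-collected cache tables.  */
const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
```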
3e029763 | 188 | #define cur_insn_uid (crtl->emit.x_cur_insn_uid) |
b5b8b0ac | 189 | #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid) |
3e029763 | 190 | #define first_label_num (crtl->emit.x_first_label_num) |
23b2ce53 | 191 | |
5eb2a9f2 | 192 | static void set_used_decls (tree); |
502b8322 | 193 | static void mark_label_nuses (rtx); |
807e902e | 194 | #if TARGET_SUPPORTS_WIDE_INT |
807e902e KZ |
195 | static rtx lookup_const_wide_int (rtx); |
196 | #endif | |
502b8322 | 197 | static rtx lookup_const_double (rtx); |
091a3ac7 | 198 | static rtx lookup_const_fixed (rtx); |
ef4bddc2 | 199 | static rtx gen_const_vector (machine_mode, int); |
32b32b16 | 200 | static void copy_rtx_if_shared_1 (rtx *orig); |
c13e8210 | 201 | |
5fa396ad JH |
202 | /* Probability of the conditional branch currently processed by try_split. */
203 | profile_probability split_branch_probability; | |
ca695ac9 | 204 | \f |
c13e8210 MM |
205 | /* Returns a hash code for X (which is really a CONST_INT). */
206 | ||
aebf76a2 TS |
207 | hashval_t |
208 | const_int_hasher::hash (rtx x) | |
c13e8210 | 209 | { |
aebf76a2 | 210 | return (hashval_t) INTVAL (x); |
c13e8210 MM |
211 | } |
212 | ||
cc2902df | 213 | /* Returns nonzero if the value represented by X (which is really a |
c13e8210 MM |
214 | CONST_INT) is the same as that given by Y (which is really a |
215 | HOST_WIDE_INT *). */ | |
216 | ||
aebf76a2 TS |
217 | bool |
218 | const_int_hasher::equal (rtx x, HOST_WIDE_INT y) | |
c13e8210 | 219 | { |
aebf76a2 | 220 | return (INTVAL (x) == y); |
5692c7bc ZW |
221 | } |
222 | ||
807e902e KZ |
223 | #if TARGET_SUPPORTS_WIDE_INT |
224 | /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
225 | ||
aebf76a2 TS |
226 | hashval_t |
227 | const_wide_int_hasher::hash (rtx x) | |
807e902e KZ |
228 | { |
229 | int i; | |
d7ca26e4 | 230 | unsigned HOST_WIDE_INT hash = 0; |
aebf76a2 | 231 | const_rtx xr = x; |
807e902e KZ |
232 | |
233 | for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++) | |
234 | hash += CONST_WIDE_INT_ELT (xr, i); | |
235 | ||
236 | return (hashval_t) hash; | |
237 | } | |
238 | ||
239 | /* Returns nonzero if the value represented by X (which is really a | |
240 | CONST_WIDE_INT) is the same as that given by Y (which is really a | |
241 | CONST_WIDE_INT). */ | |
242 | ||
aebf76a2 TS |
243 | bool |
244 | const_wide_int_hasher::equal (rtx x, rtx y) | |
807e902e KZ |
245 | { |
246 | int i; | |
aebf76a2 TS |
247 | const_rtx xr = x; |
248 | const_rtx yr = y; | |
807e902e | 249 | if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr)) |
aebf76a2 | 250 | return false; |
807e902e KZ |
251 | |
252 | for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++) | |
253 | if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i)) | |
aebf76a2 | 254 | return false; |
807e902e | 255 | |
aebf76a2 | 256 | return true; |
807e902e KZ |
257 | } |
258 | #endif | |
259 | ||
0c12fc9b RS |
260 | /* Returns a hash code for CONST_POLY_INT X. */ |
261 | ||
262 | hashval_t | |
263 | const_poly_int_hasher::hash (rtx x) | |
264 | { | |
265 | inchash::hash h; | |
266 | h.add_int (GET_MODE (x)); | |
267 | for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
268 | h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]); | |
269 | return h.end (); | |
270 | } | |
271 | ||
272 | /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */ | |
273 | ||
274 | bool | |
275 | const_poly_int_hasher::equal (rtx x, const compare_type &y) | |
276 | { | |
277 | if (GET_MODE (x) != y.first) | |
278 | return false; | |
279 | for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
280 | if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i]) | |
281 | return false; | |
282 | return true; | |
283 | } | |
284 | ||
5692c7bc | 285 | /* Returns a hash code for X (which is really a CONST_DOUBLE). */ |
aebf76a2 TS |
286 | hashval_t |
287 | const_double_hasher::hash (rtx x) | |
5692c7bc | 288 | { |
aebf76a2 | 289 | const_rtx const value = x; |
46b33600 | 290 | hashval_t h; |
5692c7bc | 291 | |
807e902e | 292 | if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode) |
46b33600 RH |
293 | h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value); |
294 | else | |
fe352c29 | 295 | { |
15c812e3 | 296 | h = real_hash (CONST_DOUBLE_REAL_VALUE (value)); |
fe352c29 DJ |
297 | /* MODE is used in the comparison, so it should be in the hash. */ |
298 | h ^= GET_MODE (value); | |
299 | } | |
5692c7bc ZW |
300 | return h; |
301 | } | |
302 | ||
cc2902df | 303 | /* Returns nonzero if the value represented by X (really a ...) |
5692c7bc | 304 | is the same as that represented by Y (really a ...) */ |
aebf76a2 TS |
305 | bool |
306 | const_double_hasher::equal (rtx x, rtx y) | |
5692c7bc | 307 | { |
aebf76a2 | 308 | const_rtx const a = x, b = y; |
5692c7bc ZW |
309 | |
310 | if (GET_MODE (a) != GET_MODE (b)) | |
311 | return 0; | |
807e902e | 312 | if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode) |
8580f7a0 RH |
313 | return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b) |
314 | && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b)); | |
315 | else | |
316 | return real_identical (CONST_DOUBLE_REAL_VALUE (a), | |
317 | CONST_DOUBLE_REAL_VALUE (b)); | |
c13e8210 MM |
318 | } |
319 | ||
091a3ac7 CF |
320 | /* Returns a hash code for X (which is really a CONST_FIXED). */ |
321 | ||
aebf76a2 TS |
322 | hashval_t |
323 | const_fixed_hasher::hash (rtx x) | |
091a3ac7 | 324 | { |
aebf76a2 | 325 | const_rtx const value = x; |
091a3ac7 CF |
326 | hashval_t h; |
327 | ||
328 | h = fixed_hash (CONST_FIXED_VALUE (value)); | |
329 | /* MODE is used in the comparison, so it should be in the hash. */ | |
330 | h ^= GET_MODE (value); | |
331 | return h; | |
332 | } | |
333 | ||
aebf76a2 TS |
334 | /* Returns nonzero if the value represented by X is the same as that |
335 | represented by Y. */ | |
091a3ac7 | 336 | |
aebf76a2 TS |
337 | bool |
338 | const_fixed_hasher::equal (rtx x, rtx y) | |
091a3ac7 | 339 | { |
aebf76a2 | 340 | const_rtx const a = x, b = y; |
091a3ac7 CF |
341 | |
342 | if (GET_MODE (a) != GET_MODE (b)) | |
343 | return 0; | |
344 | return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b)); | |
345 | } | |
346 | ||
f12144dd | 347 | /* Return true if the given memory attributes are equal. */ |
c13e8210 | 348 | |
96b3c03f | 349 | bool |
f12144dd | 350 | mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q) |
c13e8210 | 351 | { |
96b3c03f RB |
352 | if (p == q) |
353 | return true; | |
354 | if (!p || !q) | |
355 | return false; | |
754c3d5d RS |
356 | return (p->alias == q->alias |
357 | && p->offset_known_p == q->offset_known_p | |
d05d7551 | 358 | && (!p->offset_known_p || known_eq (p->offset, q->offset)) |
754c3d5d | 359 | && p->size_known_p == q->size_known_p |
d05d7551 | 360 | && (!p->size_known_p || known_eq (p->size, q->size)) |
754c3d5d | 361 | && p->align == q->align |
09e881c9 | 362 | && p->addrspace == q->addrspace |
78b76d08 SB |
363 | && (p->expr == q->expr |
364 | || (p->expr != NULL_TREE && q->expr != NULL_TREE | |
365 | && operand_equal_p (p->expr, q->expr, 0)))); | |
c13e8210 MM |
366 | } |
367 | ||
f12144dd | 368 | /* Set MEM's memory attributes so that they are the same as ATTRS. */ |
10b76d73 | 369 | |
f12144dd RS |
370 | static void |
371 | set_mem_attrs (rtx mem, mem_attrs *attrs) | |
372 | { | |
f12144dd RS |
373 | /* If everything is the default, we can just clear the attributes. */ |
374 | if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)])) | |
375 | { | |
376 | MEM_ATTRS (mem) = 0; | |
377 | return; | |
378 | } | |
173b24b9 | 379 | |
84053e02 RB |
380 | if (!MEM_ATTRS (mem) |
381 | || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem))) | |
173b24b9 | 382 | { |
766090c2 | 383 | MEM_ATTRS (mem) = ggc_alloc<mem_attrs> (); |
84053e02 | 384 | memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs)); |
173b24b9 | 385 | } |
c13e8210 MM |
386 | } |
387 | ||
a560d4d4 JH |
388 | /* Returns a hash code for X (which is really a reg_attrs *). */
389 | ||
aebf76a2 TS |
390 | hashval_t |
391 | reg_attr_hasher::hash (reg_attrs *x) | |
a560d4d4 | 392 | { |
aebf76a2 | 393 | const reg_attrs *const p = x; |
a560d4d4 | 394 | |
84bc717b RS |
395 | inchash::hash h; |
396 | h.add_ptr (p->decl); | |
397 | h.add_poly_hwi (p->offset); | |
398 | return h.end (); | |
a560d4d4 JH |
399 | } |
400 | ||
aebf76a2 TS |
401 | /* Returns nonzero if the value represented by X is the same as that given by |
402 | Y. */ | |
a560d4d4 | 403 | |
aebf76a2 TS |
404 | bool |
405 | reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y) | |
a560d4d4 | 406 | { |
aebf76a2 TS |
407 | const reg_attrs *const p = x; |
408 | const reg_attrs *const q = y; | |
a560d4d4 | 409 | |
84bc717b | 410 | return (p->decl == q->decl && known_eq (p->offset, q->offset)); |
a560d4d4 JH |
411 | } |
412 | /* Allocate a new reg_attrs structure and insert it into the hash table if | |
413 | one identical to it is not already in the table. The attributes record
414 | the DECL and byte OFFSET that a register refers to. */
415 | ||
416 | static reg_attrs * | |
84bc717b | 417 | get_reg_attrs (tree decl, poly_int64 offset) |
a560d4d4 JH |
418 | { |
419 | reg_attrs attrs; | |
a560d4d4 JH |
420 | |
421 | /* If everything is the default, we can just return zero. */ | |
84bc717b | 422 | if (decl == 0 && known_eq (offset, 0)) |
a560d4d4 JH |
423 | return 0; |
424 | ||
425 | attrs.decl = decl; | |
426 | attrs.offset = offset; | |
427 | ||
aebf76a2 | 428 | reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT); |
a560d4d4 JH |
429 | if (*slot == 0) |
430 | { | |
766090c2 | 431 | *slot = ggc_alloc<reg_attrs> (); |
a560d4d4 JH |
432 | memcpy (*slot, &attrs, sizeof (reg_attrs)); |
433 | } | |
434 | ||
aebf76a2 | 435 | return *slot; |
a560d4d4 JH |
436 | } |
437 | ||
6fb5fa3c DB |
438 | |
439 | #if !HAVE_blockage | |
adddc347 HPN |
440 | /* Generate an empty ASM_INPUT, which is used to block attempts to schedule, |
441 | and to block register equivalences from being seen across this insn. */
6fb5fa3c DB |
442 | |
443 | rtx | |
444 | gen_blockage (void) | |
445 | { | |
446 | rtx x = gen_rtx_ASM_INPUT (VOIDmode, ""); | |
447 | MEM_VOLATILE_P (x) = true; | |
448 | return x; | |
449 | } | |
450 | #endif | |
451 | ||
452 | ||
8deccbb7 RS |
453 | /* Set the mode and register number of X to MODE and REGNO. */ |
454 | ||
455 | void | |
456 | set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno) | |
457 | { | |
9188b286 | 458 | unsigned int nregs = (HARD_REGISTER_NUM_P (regno) |
ad474626 | 459 | ? hard_regno_nregs (regno, mode) |
9188b286 | 460 | : 1); |
8deccbb7 | 461 | PUT_MODE_RAW (x, mode); |
9188b286 | 462 | set_regno_raw (x, regno, nregs); |
8deccbb7 RS |
463 | } |
464 | ||
08394eef BS |
465 | /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and |
466 | don't attempt to share with the various global pieces of rtl (such as | |
467 | frame_pointer_rtx). */ | |
468 | ||
469 | rtx | |
8deccbb7 | 470 | gen_raw_REG (machine_mode mode, unsigned int regno) |
08394eef | 471 | { |
84c2ad23 | 472 | rtx x = rtx_alloc (REG MEM_STAT_INFO); |
8deccbb7 | 473 | set_mode_and_regno (x, mode, regno); |
9fccb335 | 474 | REG_ATTRS (x) = NULL; |
08394eef BS |
475 | ORIGINAL_REGNO (x) = regno; |
476 | return x; | |
477 | } | |
478 | ||
c5c76735 JL |
479 | /* There are some RTL codes that require special attention; the generation |
480 | functions do the raw handling. If you add to this list, modify | |
481 | special_rtx in gengenrtl.c as well. */ | |
482 | ||
38e60c55 | 483 | rtx_expr_list * |
ef4bddc2 | 484 | gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list) |
38e60c55 DM |
485 | { |
486 | return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr, | |
487 | expr_list)); | |
488 | } | |
489 | ||
a756c6be | 490 | rtx_insn_list * |
ef4bddc2 | 491 | gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list) |
a756c6be DM |
492 | { |
493 | return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn, | |
494 | insn_list)); | |
495 | } | |
496 | ||
d6e1e8b8 | 497 | rtx_insn * |
ef4bddc2 | 498 | gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn, |
d6e1e8b8 DM |
499 | basic_block bb, rtx pattern, int location, int code, |
500 | rtx reg_notes) | |
501 | { | |
502 | return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode, | |
503 | prev_insn, next_insn, | |
504 | bb, pattern, location, code, | |
505 | reg_notes)); | |
506 | } | |
507 | ||
3b80f6ca | 508 | rtx |
ef4bddc2 | 509 | gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg) |
3b80f6ca RH |
510 | { |
511 | if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT) | |
5da077de | 512 | return const_int_rtx[arg + MAX_SAVED_CONST_INT]; |
3b80f6ca RH |
513 | |
514 | #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1 | |
515 | if (const_true_rtx && arg == STORE_FLAG_VALUE) | |
516 | return const_true_rtx; | |
517 | #endif | |
518 | ||
c13e8210 | 519 | /* Look up the CONST_INT in the hash table. */ |
aebf76a2 TS |
520 | rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg, |
521 | INSERT); | |
29105cea | 522 | if (*slot == 0) |
1f8f4a0b | 523 | *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg); |
c13e8210 | 524 | |
aebf76a2 | 525 | return *slot; |
3b80f6ca RH |
526 | } |
527 | ||
2496c7bd | 528 | rtx |
0c12fc9b | 529 | gen_int_mode (poly_int64 c, machine_mode mode) |
2496c7bd | 530 | { |
0c12fc9b RS |
531 | c = trunc_int_for_mode (c, mode); |
532 | if (c.is_constant ()) | |
533 | return GEN_INT (c.coeffs[0]); | |
534 | unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode)); | |
535 | return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode); | |
2496c7bd LB |
536 | } |
537 | ||
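gen_int_mode differs from a bare GEN_INT in that it first truncates the value to MODE, so out-of-range inputs collapse onto the shared canonical constants. A small illustrative check (hypothetical helper, assuming the usual rtl.h environment):

```c
/* Illustration only: 0x1ff does not fit in QImode; trunc_int_for_mode
   sign-extends the low 8 bits, yielding the shared CONST_INT -1.  */
static rtx
sketch_all_ones_byte (void)
{
  rtx x = gen_int_mode (0x1ff, QImode);
  gcc_assert (x == constm1_rtx);
  return x;
}
```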
5692c7bc ZW |
538 | /* CONST_DOUBLEs might be created from pairs of integers, or from |
539 | REAL_VALUE_TYPEs. Also, their length is known only at run time, | |
540 | so we cannot use gen_rtx_raw_CONST_DOUBLE. */ | |
541 | ||
542 | /* Determine whether REAL, a CONST_DOUBLE, already exists in the | |
543 | hash table. If so, return its counterpart; otherwise add it | |
544 | to the hash table and return it. */ | |
545 | static rtx | |
502b8322 | 546 | lookup_const_double (rtx real) |
5692c7bc | 547 | { |
aebf76a2 | 548 | rtx *slot = const_double_htab->find_slot (real, INSERT); |
5692c7bc ZW |
549 | if (*slot == 0) |
550 | *slot = real; | |
551 | ||
aebf76a2 | 552 | return *slot; |
5692c7bc | 553 | } |
29105cea | 554 | |
5692c7bc ZW |
555 | /* Return a CONST_DOUBLE rtx for a floating-point value specified by |
556 | VALUE in mode MODE. */ | |
0133b7d9 | 557 | rtx |
ef4bddc2 | 558 | const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode) |
0133b7d9 | 559 | { |
5692c7bc ZW |
560 | rtx real = rtx_alloc (CONST_DOUBLE); |
561 | PUT_MODE (real, mode); | |
562 | ||
9e254451 | 563 | real->u.rv = value; |
5692c7bc ZW |
564 | |
565 | return lookup_const_double (real); | |
566 | } | |
567 | ||
091a3ac7 CF |
568 | /* Determine whether FIXED, a CONST_FIXED, already exists in the |
569 | hash table. If so, return its counterpart; otherwise add it | |
570 | to the hash table and return it. */ | |
571 | ||
572 | static rtx | |
573 | lookup_const_fixed (rtx fixed) | |
574 | { | |
aebf76a2 | 575 | rtx *slot = const_fixed_htab->find_slot (fixed, INSERT); |
091a3ac7 CF |
576 | if (*slot == 0) |
577 | *slot = fixed; | |
578 | ||
aebf76a2 | 579 | return *slot; |
091a3ac7 CF |
580 | } |
581 | ||
582 | /* Return a CONST_FIXED rtx for a fixed-point value specified by | |
583 | VALUE in mode MODE. */ | |
584 | ||
585 | rtx | |
ef4bddc2 | 586 | const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode) |
091a3ac7 CF |
587 | { |
588 | rtx fixed = rtx_alloc (CONST_FIXED); | |
589 | PUT_MODE (fixed, mode); | |
590 | ||
591 | fixed->u.fv = value; | |
592 | ||
593 | return lookup_const_fixed (fixed); | |
594 | } | |
595 | ||
807e902e | 596 | #if TARGET_SUPPORTS_WIDE_INT == 0 |
3e93ff81 AS |
597 | /* Constructs double_int from rtx CST. */ |
598 | ||
599 | double_int | |
600 | rtx_to_double_int (const_rtx cst) | |
601 | { | |
602 | double_int r; | |
603 | ||
604 | if (CONST_INT_P (cst)) | |
27bcd47c | 605 | r = double_int::from_shwi (INTVAL (cst)); |
48175537 | 606 | else if (CONST_DOUBLE_AS_INT_P (cst)) |
3e93ff81 AS |
607 | { |
608 | r.low = CONST_DOUBLE_LOW (cst); | |
609 | r.high = CONST_DOUBLE_HIGH (cst); | |
610 | } | |
611 | else | |
612 | gcc_unreachable (); | |
613 | ||
614 | return r; | |
615 | } | |
807e902e | 616 | #endif |
3e93ff81 | 617 | |
807e902e KZ |
618 | #if TARGET_SUPPORTS_WIDE_INT |
619 | /* Determine whether CONST_WIDE_INT WINT already exists in the hash table. | |
620 | If so, return its counterpart; otherwise add it to the hash table and | |
621 | return it. */ | |
3e93ff81 | 622 | |
807e902e KZ |
623 | static rtx |
624 | lookup_const_wide_int (rtx wint) | |
625 | { | |
aebf76a2 | 626 | rtx *slot = const_wide_int_htab->find_slot (wint, INSERT); |
807e902e KZ |
627 | if (*slot == 0) |
628 | *slot = wint; | |
629 | ||
aebf76a2 | 630 | return *slot; |
807e902e KZ |
631 | } |
632 | #endif | |
633 | ||
634 | /* Return an rtx constant for V, given that the constant has mode MODE. | |
635 | The returned rtx will be a CONST_INT if V fits, otherwise it will be | |
636 | a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT | |
637 | (if TARGET_SUPPORTS_WIDE_INT). */ | |
54fb1ae0 | 638 | |
0c12fc9b RS |
639 | static rtx |
640 | immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode) | |
54fb1ae0 | 641 | { |
807e902e | 642 | unsigned int len = v.get_len (); |
db61b7f9 RS |
643 | /* Not scalar_int_mode because we also allow pointer bound modes. */ |
644 | unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode)); | |
807e902e KZ |
645 | |
646 | /* Allow truncation but not extension since we do not know if the | |
647 | number is signed or unsigned. */ | |
648 | gcc_assert (prec <= v.get_precision ()); | |
649 | ||
650 | if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT) | |
651 | return gen_int_mode (v.elt (0), mode); | |
652 | ||
653 | #if TARGET_SUPPORTS_WIDE_INT | |
654 | { | |
655 | unsigned int i; | |
656 | rtx value; | |
657 | unsigned int blocks_needed | |
658 | = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT; | |
659 | ||
660 | if (len > blocks_needed) | |
661 | len = blocks_needed; | |
662 | ||
663 | value = const_wide_int_alloc (len); | |
664 | ||
665 | /* It is so tempting to just put the mode in here. Must control | |
666 | myself ... */ | |
667 | PUT_MODE (value, VOIDmode); | |
668 | CWI_PUT_NUM_ELEM (value, len); | |
669 | ||
670 | for (i = 0; i < len; i++) | |
671 | CONST_WIDE_INT_ELT (value, i) = v.elt (i); | |
672 | ||
673 | return lookup_const_wide_int (value); | |
674 | } | |
675 | #else | |
676 | return immed_double_const (v.elt (0), v.elt (1), mode); | |
677 | #endif | |
54fb1ae0 AS |
678 | } |
679 | ||
807e902e | 680 | #if TARGET_SUPPORTS_WIDE_INT == 0 |
5692c7bc ZW |
681 | /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair |
682 | of ints: I0 is the low-order word and I1 is the high-order word. | |
49ab6098 | 683 | For values that are larger than HOST_BITS_PER_DOUBLE_INT, the |
929e10f4 MS |
684 | implied upper bits are copies of the high bit of i1. The value |
685 | itself is neither signed nor unsigned. Do not use this routine for | |
686 | non-integer modes; convert to REAL_VALUE_TYPE and use | |
555affd7 | 687 | const_double_from_real_value. */ |
5692c7bc ZW |
688 | |
689 | rtx | |
ef4bddc2 | 690 | immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode) |
5692c7bc ZW |
691 | { |
692 | rtx value; | |
693 | unsigned int i; | |
694 | ||
65acccdd | 695 | /* There are the following cases (note that there are no modes with |
49ab6098 | 696 | HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT): |
65acccdd ZD |
697 | |
698 | 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use | |
699 | gen_int_mode. | |
929e10f4 MS |
700 | 2) If the value of the integer fits into HOST_WIDE_INT anyway |
701 | (i.e., i1 consists only of copies of the sign bit, and the signs
702 | of i0 and i1 are the same), then we return a CONST_INT for i0.
65acccdd | 703 | 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */ |
db61b7f9 RS |
704 | scalar_mode smode; |
705 | if (is_a <scalar_mode> (mode, &smode) | |
706 | && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT) | |
707 | return gen_int_mode (i0, mode); | |
5692c7bc ZW |
708 | |
709 | /* If this integer fits in one word, return a CONST_INT. */ | |
710 | if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0)) | |
711 | return GEN_INT (i0); | |
712 | ||
713 | /* We use VOIDmode for integers. */ | |
714 | value = rtx_alloc (CONST_DOUBLE); | |
715 | PUT_MODE (value, VOIDmode); | |
716 | ||
717 | CONST_DOUBLE_LOW (value) = i0; | |
718 | CONST_DOUBLE_HIGH (value) = i1; | |
719 | ||
720 | for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++) | |
721 | XWINT (value, i) = 0; | |
722 | ||
723 | return lookup_const_double (value); | |
0133b7d9 | 724 | } |
807e902e | 725 | #endif |
0133b7d9 | 726 | |
0c12fc9b RS |
727 | /* Return an rtx representation of C in mode MODE. */ |
728 | ||
729 | rtx | |
730 | immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode) | |
731 | { | |
732 | if (c.is_constant ()) | |
733 | return immed_wide_int_const_1 (c.coeffs[0], mode); | |
734 | ||
735 | /* Not scalar_int_mode because we also allow pointer bound modes. */ | |
736 | unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode)); | |
737 | ||
738 | /* Allow truncation but not extension since we do not know if the | |
739 | number is signed or unsigned. */ | |
740 | gcc_assert (prec <= c.coeffs[0].get_precision ()); | |
741 | poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED); | |
742 | ||
743 | /* See whether we already have an rtx for this constant. */ | |
744 | inchash::hash h; | |
745 | h.add_int (mode); | |
746 | for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
747 | h.add_wide_int (newc.coeffs[i]); | |
748 | const_poly_int_hasher::compare_type typed_value (mode, newc); | |
749 | rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value, | |
750 | h.end (), INSERT); | |
751 | rtx x = *slot; | |
752 | if (x) | |
753 | return x; | |
754 | ||
755 | /* Create a new rtx. There's a choice to be made here between installing | |
756 | the actual mode of the rtx or leaving it as VOIDmode (for consistency | |
757 | with CONST_INT). In practice the handling of the codes is different | |
758 | enough that we get no benefit from using VOIDmode, and various places | |
759 | assume that VOIDmode implies CONST_INT. Using the real mode seems like | |
760 | the right long-term direction anyway. */ | |
761 | typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi; | |
762 | size_t extra_size = twi::extra_size (prec); | |
763 | x = rtx_alloc_v (CONST_POLY_INT, | |
764 | sizeof (struct const_poly_int_def) + extra_size); | |
765 | PUT_MODE (x, mode); | |
766 | CONST_POLY_INT_COEFFS (x).set_precision (prec); | |
767 | for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
768 | CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i]; | |
769 | ||
770 | *slot = x; | |
771 | return x; | |
772 | } | |
773 | ||
3b80f6ca | 774 | rtx |
ef4bddc2 | 775 | gen_rtx_REG (machine_mode mode, unsigned int regno) |
3b80f6ca RH |
776 | { |
777 | /* In case the MD file explicitly references the frame pointer, have | |
778 | all such references point to the same frame pointer. This is | |
779 | used during frame pointer elimination to distinguish the explicit | |
780 | references to these registers from pseudos that happened to be | |
781 | assigned to them. | |
782 | ||
783 | If we have eliminated the frame pointer or arg pointer, we will | |
784 | be using it as a normal register, for example as a spill | |
785 | register. In such cases, we might be accessing it in a mode that | |
786 | is not Pmode and therefore cannot use the pre-allocated rtx. | |
787 | ||
788 | Also don't do this when we are making new REGs in reload, since | |
789 | we don't want to get confused with the real pointers. */ | |
790 | ||
55a2c322 | 791 | if (mode == Pmode && !reload_in_progress && !lra_in_progress) |
3b80f6ca | 792 | { |
e10c79fe LB |
793 | if (regno == FRAME_POINTER_REGNUM |
794 | && (!reload_completed || frame_pointer_needed)) | |
3b80f6ca | 795 | return frame_pointer_rtx; |
c3e08036 TS |
796 | |
797 | if (!HARD_FRAME_POINTER_IS_FRAME_POINTER | |
798 | && regno == HARD_FRAME_POINTER_REGNUM | |
e10c79fe | 799 | && (!reload_completed || frame_pointer_needed)) |
3b80f6ca | 800 | return hard_frame_pointer_rtx; |
3f393fc6 TS |
801 | #if !HARD_FRAME_POINTER_IS_ARG_POINTER |
802 | if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
803 | && regno == ARG_POINTER_REGNUM) | |
3b80f6ca RH |
804 | return arg_pointer_rtx; |
805 | #endif | |
806 | #ifdef RETURN_ADDRESS_POINTER_REGNUM | |
bcb33994 | 807 | if (regno == RETURN_ADDRESS_POINTER_REGNUM) |
3b80f6ca RH |
808 | return return_address_pointer_rtx; |
809 | #endif | |
fc555370 | 810 | if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM |
bf9412cd | 811 | && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM |
2d67bd7b | 812 | && fixed_regs[PIC_OFFSET_TABLE_REGNUM]) |
68252e27 | 813 | return pic_offset_table_rtx; |
bcb33994 | 814 | if (regno == STACK_POINTER_REGNUM) |
3b80f6ca RH |
815 | return stack_pointer_rtx; |
816 | } | |
817 | ||
006a94b0 | 818 | #if 0 |
6cde4876 | 819 | /* If the per-function register table has been set up, try to re-use |
006a94b0 JL |
820 | an existing entry in that table to avoid useless generation of RTL. |
821 | ||
822 | This code is disabled for now until we can fix the various backends | |
823 | which depend on having non-shared hard registers in some cases. Long | |
824 | term we want to re-enable this code as it can significantly cut down | |
e10c79fe LB |
825 | on the amount of useless RTL that gets generated. |
826 | ||
827 | We'll also need to fix some code that runs after reload that wants to | |
828 | set ORIGINAL_REGNO. */ | |
829 | ||
6cde4876 JL |
830 | if (cfun |
831 | && cfun->emit | |
832 | && regno_reg_rtx | |
833 | && regno < FIRST_PSEUDO_REGISTER | |
834 | && reg_raw_mode[regno] == mode) | |
835 | return regno_reg_rtx[regno]; | |
006a94b0 | 836 | #endif |
6cde4876 | 837 | |
08394eef | 838 | return gen_raw_REG (mode, regno); |
3b80f6ca RH |
839 | } |
840 | ||
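For the special hard registers handled above, gen_rtx_REG hands back the single pre-allocated rtx rather than a fresh one, so pointer identity against the globals is reliable. A sketch (hypothetical helper, illustration only):

```c
/* Illustration only: outside of reload/LRA, gen_rtx_REG (Pmode,
   STACK_POINTER_REGNUM) returns this same shared object.  */
static bool
sketch_is_stack_pointer (const_rtx x)
{
  return x == stack_pointer_rtx;
}
```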
41472af8 | 841 | rtx |
ef4bddc2 | 842 | gen_rtx_MEM (machine_mode mode, rtx addr) |
41472af8 MM |
843 | { |
844 | rtx rt = gen_rtx_raw_MEM (mode, addr); | |
845 | ||
846 | /* This field is not cleared by the mere allocation of the rtx, so | |
847 | we clear it here. */ | |
173b24b9 | 848 | MEM_ATTRS (rt) = 0; |
41472af8 MM |
849 | |
850 | return rt; | |
851 | } | |
ddef6bc7 | 852 | |
542a8afa RH |
853 | /* Generate a memory referring to non-trapping constant memory. */ |
854 | ||
855 | rtx | |
ef4bddc2 | 856 | gen_const_mem (machine_mode mode, rtx addr) |
542a8afa RH |
857 | { |
858 | rtx mem = gen_rtx_MEM (mode, addr); | |
859 | MEM_READONLY_P (mem) = 1; | |
860 | MEM_NOTRAP_P (mem) = 1; | |
861 | return mem; | |
862 | } | |
863 | ||
bf877a76 R |
864 | /* Generate a MEM referring to fixed portions of the frame, e.g., register |
865 | save areas. */ | |
866 | ||
867 | rtx | |
ef4bddc2 | 868 | gen_frame_mem (machine_mode mode, rtx addr) |
bf877a76 R |
869 | { |
870 | rtx mem = gen_rtx_MEM (mode, addr); | |
871 | MEM_NOTRAP_P (mem) = 1; | |
872 | set_mem_alias_set (mem, get_frame_alias_set ()); | |
873 | return mem; | |
874 | } | |
875 | ||
876 | /* Generate a MEM referring to a temporary use of the stack, not part | |
877 | of the fixed stack frame. For example, something which is pushed | |
878 | by a target splitter. */ | |
879 | rtx | |
ef4bddc2 | 880 | gen_tmp_stack_mem (machine_mode mode, rtx addr) |
bf877a76 R |
881 | { |
882 | rtx mem = gen_rtx_MEM (mode, addr); | |
883 | MEM_NOTRAP_P (mem) = 1; | |
e3b5732b | 884 | if (!cfun->calls_alloca) |
bf877a76 R |
885 | set_mem_alias_set (mem, get_frame_alias_set ()); |
886 | return mem; | |
887 | } | |
888 | ||
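The MEM constructors above differ only in which attribute bits they pre-set. A sketch of a typical frame access (hypothetical helper; plus_constant, hard_frame_pointer_rtx and gen_frame_mem are the real GCC entities):

```c
/* Illustration only: gen_frame_mem presets MEM_NOTRAP_P and the frame
   alias set, which a bare gen_rtx_MEM would leave to the caller.  */
static rtx
sketch_frame_word_ref (HOST_WIDE_INT offset)
{
  rtx addr = plus_constant (Pmode, hard_frame_pointer_rtx, offset);
  return gen_frame_mem (word_mode, addr);
}
```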
beb72684 RH |
889 | /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if |
890 | this construct would be valid, and false otherwise. */ | |
891 | ||
892 | bool | |
ef4bddc2 | 893 | validate_subreg (machine_mode omode, machine_mode imode, |
91914e56 | 894 | const_rtx reg, poly_uint64 offset) |
ddef6bc7 | 895 | { |
beb72684 RH |
896 | unsigned int isize = GET_MODE_SIZE (imode); |
897 | unsigned int osize = GET_MODE_SIZE (omode); | |
898 | ||
899 | /* All subregs must be aligned. */ | |
91914e56 | 900 | if (!multiple_p (offset, osize)) |
beb72684 RH |
901 | return false; |
902 | ||
903 | /* The subreg offset cannot be outside the inner object. */ | |
91914e56 | 904 | if (maybe_ge (offset, isize)) |
beb72684 RH |
905 | return false; |
906 | ||
1eae67f8 RS |
907 | unsigned int regsize = REGMODE_NATURAL_SIZE (imode); |
908 | ||
beb72684 RH |
909 | /* ??? This should not be here. Temporarily continue to allow word_mode |
910 | subregs of anything. The most common offender is (subreg:SI (reg:DF)). | |
911 | Generally, backends are doing something sketchy but it'll take time to | |
912 | fix them all. */ | |
913 | if (omode == word_mode) | |
914 | ; | |
915 | /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field | |
916 | is the culprit here, and not the backends. */ | |
1eae67f8 | 917 | else if (osize >= regsize && isize >= osize) |
beb72684 RH |
918 | ; |
919 | /* Allow component subregs of complex and vector. Though given the below | |
920 | extraction rules, it's not always clear what that means. */ | |
921 | else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode)) | |
922 | && GET_MODE_INNER (imode) == omode) | |
923 | ; | |
924 | /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs, | |
925 | i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to | |
926 | represent this. It's questionable if this ought to be represented at | |
927 | all -- why can't this all be hidden in post-reload splitters that make | |
928 | arbitrarily mode changes to the registers themselves. */ | |
929 | else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode) | |
930 | ; | |
931 | /* Subregs involving floating point modes are not allowed to | |
932 | change size. Therefore (subreg:DI (reg:DF) 0) is fine, but | |
933 | (subreg:SI (reg:DF) 0) isn't. */ | |
934 | else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode)) | |
935 | { | |
55a2c322 VM |
936 | if (! (isize == osize |
937 | /* LRA can use subreg to store a floating point value in | |
938 | an integer mode. Although the floating point and the | |
939 | integer modes need the same number of hard registers, | |
940 | the size of floating point mode can be less than the | |
941 | integer mode. LRA also uses subregs when a register
942 | should be used in a different mode in one insn. */
943 | || lra_in_progress)) | |
beb72684 RH |
944 | return false; |
945 | } | |
ddef6bc7 | 946 | |
beb72684 RH |
947 | /* Paradoxical subregs must have offset zero. */ |
948 | if (osize > isize) | |
91914e56 | 949 | return known_eq (offset, 0U); |
beb72684 RH |
950 | |
951 | /* This is a normal subreg. Verify that the offset is representable. */ | |
952 | ||
953 | /* For hard registers, we already have most of these rules collected in | |
954 | subreg_offset_representable_p. */ | |
955 | if (reg && REG_P (reg) && HARD_REGISTER_P (reg)) | |
956 | { | |
957 | unsigned int regno = REGNO (reg); | |
958 | ||
beb72684 RH |
959 | if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode)) |
960 | && GET_MODE_INNER (imode) == omode) | |
961 | ; | |
0d803030 | 962 | else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode)) |
beb72684 | 963 | return false; |
beb72684 RH |
964 | |
965 | return subreg_offset_representable_p (regno, imode, offset, omode); | |
966 | } | |
967 | ||
968 | /* For pseudo registers, we want most of the same checks. Namely: | |
1eae67f8 RS |
969 | |
970 | Assume that the pseudo register will be allocated to hard registers | |
971 | that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE, | |
972 | the remainder must correspond to the lowpart of the containing hard | |
973 | register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset, | |
974 | otherwise it is at the lowest offset. | |
975 | ||
976 | Given that we've already checked the mode and offset alignment, | |
977 | we only have to check subblock subregs here. */ | |
978 | if (osize < regsize | |
55a2c322 | 979 | && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode)))) |
beb72684 | 980 | { |
91914e56 RS |
981 | poly_uint64 block_size = MIN (isize, regsize); |
982 | unsigned int start_reg; | |
983 | poly_uint64 offset_within_reg; | |
984 | if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg) | |
985 | || (BYTES_BIG_ENDIAN | |
986 | ? maybe_ne (offset_within_reg, block_size - osize) | |
987 | : maybe_ne (offset_within_reg, 0U))) | |
beb72684 RH |
988 | return false; |
989 | } | |
990 | return true; | |
991 | } | |
992 | ||
993 | rtx | |
91914e56 | 994 | gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset) |
beb72684 RH |
995 | { |
996 | gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset)); | |
5692c7bc | 997 | return gen_rtx_raw_SUBREG (mode, reg, offset); |
ddef6bc7 JJ |
998 | } |
999 | ||
173b24b9 RK |
1000 | /* Generate a SUBREG representing the least-significant part of REG if MODE |
1001 | is smaller than mode of REG, otherwise paradoxical SUBREG. */ | |
1002 | ||
ddef6bc7 | 1003 | rtx |
ef4bddc2 | 1004 | gen_lowpart_SUBREG (machine_mode mode, rtx reg) |
ddef6bc7 | 1005 | { |
ef4bddc2 | 1006 | machine_mode inmode; |
ddef6bc7 JJ |
1007 | |
1008 | inmode = GET_MODE (reg); | |
1009 | if (inmode == VOIDmode) | |
1010 | inmode = mode; | |
e0e08ac2 JH |
1011 | return gen_rtx_SUBREG (mode, reg, |
1012 | subreg_lowpart_offset (mode, inmode)); | |
ddef6bc7 | 1013 | } |
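gen_lowpart_SUBREG computes the SUBREG_BYTE for the caller, and the result satisfies the validate_subreg rules above. A worked sketch (hypothetical helper): assuming 8-byte DImode and 4-byte SImode, the offset is 0 on little-endian targets and 4 on big-endian ones.

```c
/* Illustration only: take the low 32 bits of a DImode register.  */
static rtx
sketch_low_half (rtx di_reg)
{
  gcc_assert (GET_MODE (di_reg) == DImode);
  return gen_lowpart_SUBREG (SImode, di_reg);
}
```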
fcc74520 RS |
1014 | |
1015 | rtx | |
ef4bddc2 | 1016 | gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc, |
fcc74520 RS |
1017 | enum var_init_status status) |
1018 | { | |
1019 | rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc); | |
1020 | PAT_VAR_LOCATION_STATUS (x) = status; | |
1021 | return x; | |
1022 | } | |
c5c76735 | 1023 | \f |
23b2ce53 | 1024 | |
80379f51 PB |
1025 | /* Create an rtvec and stores within it the RTXen passed in the arguments. */ |
1026 | ||
23b2ce53 | 1027 | rtvec |
e34d07f2 | 1028 | gen_rtvec (int n, ...) |
23b2ce53 | 1029 | { |
80379f51 PB |
1030 | int i; |
1031 | rtvec rt_val; | |
e34d07f2 | 1032 | va_list p; |
23b2ce53 | 1033 | |
e34d07f2 | 1034 | va_start (p, n); |
23b2ce53 | 1035 | |
80379f51 | 1036 | /* Don't allocate an empty rtvec... */ |
23b2ce53 | 1037 | if (n == 0) |
0edf1bb2 JL |
1038 | { |
1039 | va_end (p); | |
1040 | return NULL_RTVEC; | |
1041 | } | |
23b2ce53 | 1042 | |
80379f51 | 1043 | rt_val = rtvec_alloc (n); |
4f90e4a0 | 1044 | |
23b2ce53 | 1045 | for (i = 0; i < n; i++) |
80379f51 | 1046 | rt_val->elem[i] = va_arg (p, rtx); |
6268b922 | 1047 | |
e34d07f2 | 1048 | va_end (p); |
80379f51 | 1049 | return rt_val; |
23b2ce53 RS |
1050 | } |
1051 | ||
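The usual consumer of gen_rtvec wraps the vector in a PARALLEL, for example a SET paired with a CLOBBER of a scratch register. A sketch (hypothetical helper, illustration only):

```c
/* Illustration only: (parallel [(set DEST SRC) (clobber SCRATCH)]).  */
static rtx
sketch_set_with_clobber (rtx dest, rtx src, rtx scratch)
{
  return gen_rtx_PARALLEL (VOIDmode,
			   gen_rtvec (2,
				      gen_rtx_SET (dest, src),
				      gen_rtx_CLOBBER (VOIDmode, scratch)));
}
```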
1052 | rtvec | |
502b8322 | 1053 | gen_rtvec_v (int n, rtx *argp) |
23b2ce53 | 1054 | { |
b3694847 SS |
1055 | int i; |
1056 | rtvec rt_val; | |
23b2ce53 | 1057 | |
80379f51 | 1058 | /* Don't allocate an empty rtvec... */ |
23b2ce53 | 1059 | if (n == 0) |
80379f51 | 1060 | return NULL_RTVEC; |
23b2ce53 | 1061 | |
80379f51 | 1062 | rt_val = rtvec_alloc (n); |
23b2ce53 RS |
1063 | |
1064 | for (i = 0; i < n; i++) | |
8f985ec4 | 1065 | rt_val->elem[i] = *argp++; |
23b2ce53 RS |
1066 | |
1067 | return rt_val; | |
1068 | } | |
e6eda746 DM |
1069 | |
1070 | rtvec | |
1071 | gen_rtvec_v (int n, rtx_insn **argp) | |
1072 | { | |
1073 | int i; | |
1074 | rtvec rt_val; | |
1075 | ||
1076 | /* Don't allocate an empty rtvec... */ | |
1077 | if (n == 0) | |
1078 | return NULL_RTVEC; | |
1079 | ||
1080 | rt_val = rtvec_alloc (n); | |
1081 | ||
1082 | for (i = 0; i < n; i++) | |
1083 | rt_val->elem[i] = *argp++; | |
1084 | ||
1085 | return rt_val; | |
1086 | } | |
1087 | ||
23b2ce53 | 1088 | \f |
38ae7651 RS |
1089 | /* Return the number of bytes between the start of an OUTER_MODE |
1090 | in-memory value and the start of an INNER_MODE in-memory value, | |
1091 | given that the former is a lowpart of the latter. It may be a | |
1092 | paradoxical lowpart, in which case the offset will be negative | |
1093 | on big-endian targets. */ | |
1094 | ||
91914e56 | 1095 | poly_int64 |
ef4bddc2 RS |
1096 | byte_lowpart_offset (machine_mode outer_mode, |
1097 | machine_mode inner_mode) | |
38ae7651 | 1098 | { |
03a95621 | 1099 | if (paradoxical_subreg_p (outer_mode, inner_mode)) |
38ae7651 | 1100 | return -subreg_lowpart_offset (inner_mode, outer_mode); |
03a95621 RS |
1101 | else |
1102 | return subreg_lowpart_offset (outer_mode, inner_mode); | |
38ae7651 | 1103 | } |
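Worked numbers for the function above, assuming 4-byte SImode inside 8-byte DImode (illustration only, not part of the file):

```c
/* byte_lowpart_offset (SImode, DImode) ==  0 on little-endian,  4 on big-endian;
   byte_lowpart_offset (DImode, SImode) ==  0 on little-endian, -4 on big-endian
   (the second case is the paradoxical one handled by the negation above).  */
```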
3d09ba95 RS |
1104 | |
1105 | /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET) | |
1106 | from address X. For paradoxical big-endian subregs this is a | |
1107 | negative value, otherwise it's the same as OFFSET. */ | |
1108 | ||
91914e56 | 1109 | poly_int64 |
3d09ba95 | 1110 | subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode, |
91914e56 | 1111 | poly_uint64 offset) |
3d09ba95 RS |
1112 | { |
1113 | if (paradoxical_subreg_p (outer_mode, inner_mode)) | |
1114 | { | |
91914e56 | 1115 | gcc_assert (known_eq (offset, 0U)); |
3d09ba95 RS |
1116 | return -subreg_lowpart_offset (inner_mode, outer_mode); |
1117 | } | |
1118 | return offset; | |
1119 | } | |
1120 | ||
1121 | /* As above, but return the offset that existing subreg X would have | |
1122 | if SUBREG_REG (X) were stored in memory. The only significant thing | |
1123 | about the current SUBREG_REG is its mode. */ | |
1124 | ||
91914e56 | 1125 | poly_int64 |
3d09ba95 RS |
1126 | subreg_memory_offset (const_rtx x) |
1127 | { | |
1128 | return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)), | |
1129 | SUBREG_BYTE (x)); | |
1130 | } | |
38ae7651 | 1131 | \f |
23b2ce53 RS |
1132 | /* Generate a REG rtx for a new pseudo register of mode MODE. |
1133 | This pseudo is assigned the next sequential register number. */ | |
1134 | ||
1135 | rtx | |
ef4bddc2 | 1136 | gen_reg_rtx (machine_mode mode) |
23b2ce53 | 1137 | { |
b3694847 | 1138 | rtx val; |
2e3f842f | 1139 | unsigned int align = GET_MODE_ALIGNMENT (mode); |
23b2ce53 | 1140 | |
f8335a4f | 1141 | gcc_assert (can_create_pseudo_p ()); |
23b2ce53 | 1142 | |
2e3f842f L |
1143 | /* If a virtual register with bigger mode alignment is generated, |
1144 | increase stack alignment estimation because it might be spilled | |
1145 | to stack later. */ | |
b8698a0f | 1146 | if (SUPPORTS_STACK_ALIGNMENT |
2e3f842f L |
1147 | && crtl->stack_alignment_estimated < align |
1148 | && !crtl->stack_realign_processed) | |
ae58e548 JJ |
1149 | { |
1150 | unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align); | |
1151 | if (crtl->stack_alignment_estimated < min_align) | |
1152 | crtl->stack_alignment_estimated = min_align; | |
1153 | } | |
2e3f842f | 1154 | |
1b3d8f8a GK |
1155 | if (generating_concat_p |
1156 | && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT | |
1157 | || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)) | |
fc84e8a8 RS |
1158 | { |
1159 | /* For complex modes, don't make a single pseudo. | |
1160 | Instead, make a CONCAT of two pseudos. | |
1161 | This allows noncontiguous allocation of the real and imaginary parts, | |
1162 | which makes much better code. Besides, allocating DCmode | |
1163 | pseudos overstrains reload on some machines like the 386. */ | |
1164 | rtx realpart, imagpart; | |
ef4bddc2 | 1165 | machine_mode partmode = GET_MODE_INNER (mode); |
fc84e8a8 RS |
1166 | |
1167 | realpart = gen_reg_rtx (partmode); | |
1168 | imagpart = gen_reg_rtx (partmode); | |
3b80f6ca | 1169 | return gen_rtx_CONCAT (mode, realpart, imagpart); |
fc84e8a8 RS |
1170 | } |
1171 | ||
004a7e45 UB |
1172 | /* Do not call gen_reg_rtx with uninitialized crtl. */ |
1173 | gcc_assert (crtl->emit.regno_pointer_align_length); | |
1174 | ||
f44986d7 DM |
1175 | crtl->emit.ensure_regno_capacity (); |
1176 | gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length); | |
23b2ce53 | 1177 | |
f44986d7 DM |
1178 | val = gen_raw_REG (mode, reg_rtx_no); |
1179 | regno_reg_rtx[reg_rtx_no++] = val; | |
1180 | return val; | |
1181 | } | |
0d4903b8 | 1182 | |
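During expansion, fresh pseudos come from gen_reg_rtx; this is only legal while can_create_pseudo_p (), as the assert above enforces, and complex modes come back as a CONCAT of two component pseudos. A sketch (hypothetical helper, illustration only):

```c
/* Illustration only: allocate a scratch SImode pseudo during expansion.  */
static rtx
sketch_fresh_si_temp (void)
{
  gcc_assert (can_create_pseudo_p ());
  return gen_reg_rtx (SImode);
}
```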
f44986d7 DM |
1183 | /* Make sure m_regno_pointer_align, and regno_reg_rtx are large |
1184 | enough to have elements in the range 0 <= idx <= reg_rtx_no. */ | |
49ad7cfa | 1185 | |
f44986d7 DM |
1186 | void |
1187 | emit_status::ensure_regno_capacity () | |
1188 | { | |
1189 | int old_size = regno_pointer_align_length; | |
23b2ce53 | 1190 | |
f44986d7 DM |
1191 | if (reg_rtx_no < old_size) |
1192 | return; | |
23b2ce53 | 1193 | |
f44986d7 DM |
1194 | int new_size = old_size * 2; |
1195 | while (reg_rtx_no >= new_size) | |
1196 | new_size *= 2; | |
1197 | ||
1198 | char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size); | |
1199 | memset (tmp + old_size, 0, new_size - old_size); | |
1200 | regno_pointer_align = (unsigned char *) tmp; | |
1201 | ||
1202 | rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size); | |
1203 | memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx)); | |
1204 | regno_reg_rtx = new1; | |
1205 | ||
1206 | crtl->emit.regno_pointer_align_length = new_size; | |
23b2ce53 RS |
1207 | } |
1208 | ||
a698cc03 JL |
1209 | /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */ |
1210 | ||
1211 | bool | |
1212 | reg_is_parm_p (rtx reg) | |
1213 | { | |
1214 | tree decl; | |
1215 | ||
1216 | gcc_assert (REG_P (reg)); | |
1217 | decl = REG_EXPR (reg); | |
1218 | return (decl && TREE_CODE (decl) == PARM_DECL); | |
1219 | } | |
1220 | ||
38ae7651 RS |
1221 | /* Update NEW with the same attributes as REG, but with OFFSET added |
1222 | to the REG_OFFSET. */ | |
a560d4d4 | 1223 | |
e53a16e7 | 1224 | static void |
84bc717b | 1225 | update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset) |
a560d4d4 | 1226 | { |
60564289 | 1227 | REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg), |
84bc717b | 1228 | REG_OFFSET (reg) + offset); |
e53a16e7 ILT |
1229 | } |
1230 | ||
38ae7651 RS |
1231 | /* Generate a register with same attributes as REG, but with OFFSET |
1232 | added to the REG_OFFSET. */ | |
e53a16e7 ILT |
1233 | |
1234 | rtx | |
ef4bddc2 | 1235 | gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno, |
84bc717b | 1236 | poly_int64 offset) |
e53a16e7 | 1237 | { |
60564289 | 1238 | rtx new_rtx = gen_rtx_REG (mode, regno); |
e53a16e7 | 1239 | |
60564289 KG |
1240 | update_reg_offset (new_rtx, reg, offset); |
1241 | return new_rtx; | |
e53a16e7 ILT |
1242 | } |
1243 | ||
1244 | /* Generate a new pseudo-register with the same attributes as REG, but | |
38ae7651 | 1245 | with OFFSET added to the REG_OFFSET. */ |
e53a16e7 ILT |
1246 | |
1247 | rtx | |
ef4bddc2 | 1248 | gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset) |
e53a16e7 | 1249 | { |
60564289 | 1250 | rtx new_rtx = gen_reg_rtx (mode); |
e53a16e7 | 1251 | |
60564289 KG |
1252 | update_reg_offset (new_rtx, reg, offset); |
1253 | return new_rtx; | |
a560d4d4 JH |
1254 | } |
1255 | ||
38ae7651 RS |
1256 | /* Adjust REG in-place so that it has mode MODE. It is assumed that the |
1257 | new register is a (possibly paradoxical) lowpart of the old one. */ | |
a560d4d4 JH |
1258 | |
1259 | void | |
ef4bddc2 | 1260 | adjust_reg_mode (rtx reg, machine_mode mode) |
a560d4d4 | 1261 | { |
38ae7651 RS |
1262 | update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg))); |
1263 | PUT_MODE (reg, mode); | |
1264 | } | |
1265 | ||
1266 | /* Copy REG's attributes from X, if X has any attributes. If REG and X | |
1267 | have different modes, REG is a (possibly paradoxical) lowpart of X. */ | |
1268 | ||
1269 | void | |
1270 | set_reg_attrs_from_value (rtx reg, rtx x) | |
1271 | { | |
84bc717b | 1272 | poly_int64 offset; |
de6f3f7a L |
1273 | bool can_be_reg_pointer = true; |
1274 | ||
1275 | /* Don't call mark_reg_pointer for incompatible pointer sign | |
1276 | extension. */ | |
1277 | while (GET_CODE (x) == SIGN_EXTEND | |
1278 | || GET_CODE (x) == ZERO_EXTEND | |
1279 | || GET_CODE (x) == TRUNCATE | |
1280 | || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x))) | |
1281 | { | |
2a870875 RS |
1282 | #if defined(POINTERS_EXTEND_UNSIGNED) |
1283 | if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED) | |
8d8e740c BE |
1284 | || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED) |
1285 | || (paradoxical_subreg_p (x) | |
1286 | && ! (SUBREG_PROMOTED_VAR_P (x) | |
1287 | && SUBREG_CHECK_PROMOTED_SIGN (x, | |
1288 | POINTERS_EXTEND_UNSIGNED)))) | |
2a870875 | 1289 | && !targetm.have_ptr_extend ()) |
de6f3f7a L |
1290 | can_be_reg_pointer = false; |
1291 | #endif | |
1292 | x = XEXP (x, 0); | |
1293 | } | |
38ae7651 | 1294 | |
923ba36f JJ |
1295 | /* Hard registers can be reused for multiple purposes within the same |
1296 | function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN | |
1297 | on them is wrong. */ | |
1298 | if (HARD_REGISTER_P (reg)) | |
1299 | return; | |
1300 | ||
38ae7651 | 1301 | offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x)); |
46b71b03 PB |
1302 | if (MEM_P (x)) |
1303 | { | |
527210c4 RS |
1304 | if (MEM_OFFSET_KNOWN_P (x)) |
1305 | REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x), | |
1306 | MEM_OFFSET (x) + offset); | |
de6f3f7a | 1307 | if (can_be_reg_pointer && MEM_POINTER (x)) |
0a317111 | 1308 | mark_reg_pointer (reg, 0); |
46b71b03 PB |
1309 | } |
1310 | else if (REG_P (x)) | |
1311 | { | |
1312 | if (REG_ATTRS (x)) | |
1313 | update_reg_offset (reg, x, offset); | |
de6f3f7a | 1314 | if (can_be_reg_pointer && REG_POINTER (x)) |
46b71b03 PB |
1315 | mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x))); |
1316 | } | |
1317 | } | |
1318 | ||
1319 | /* Generate a REG rtx for a new pseudo register, copying the mode | |
1320 | and attributes from X. */ | |
1321 | ||
1322 | rtx | |
1323 | gen_reg_rtx_and_attrs (rtx x) | |
1324 | { | |
1325 | rtx reg = gen_reg_rtx (GET_MODE (x)); | |
1326 | set_reg_attrs_from_value (reg, x); | |
1327 | return reg; | |
a560d4d4 JH |
1328 | } |
1329 | ||
9d18e06b JZ |
1330 | /* Set the register attributes for registers contained in PARM_RTX. |
1331 | Use needed values from memory attributes of MEM. */ | |
1332 | ||
1333 | void | |
502b8322 | 1334 | set_reg_attrs_for_parm (rtx parm_rtx, rtx mem) |
9d18e06b | 1335 | { |
f8cfc6aa | 1336 | if (REG_P (parm_rtx)) |
38ae7651 | 1337 | set_reg_attrs_from_value (parm_rtx, mem); |
9d18e06b JZ |
1338 | else if (GET_CODE (parm_rtx) == PARALLEL) |
1339 | { | |
1340 | /* Check for a NULL entry in the first slot, used to indicate that the | |
1341 | parameter goes both on the stack and in registers. */ | |
1342 | int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1; | |
1343 | for (; i < XVECLEN (parm_rtx, 0); i++) | |
1344 | { | |
1345 | rtx x = XVECEXP (parm_rtx, 0, i); | |
f8cfc6aa | 1346 | if (REG_P (XEXP (x, 0))) |
9d18e06b JZ |
1347 | REG_ATTRS (XEXP (x, 0)) |
1348 | = get_reg_attrs (MEM_EXPR (mem), | |
1349 | INTVAL (XEXP (x, 1))); | |
1350 | } | |
1351 | } | |
1352 | } | |
1353 | ||
38ae7651 RS |
1354 | /* Set the REG_ATTRS for registers in value X, given that X represents |
1355 | decl T. */ | |
a560d4d4 | 1356 | |
4e3825db | 1357 | void |
38ae7651 RS |
1358 | set_reg_attrs_for_decl_rtl (tree t, rtx x) |
1359 | { | |
1f9ceff1 AO |
1360 | if (!t) |
1361 | return; | |
1362 | tree tdecl = t; | |
38ae7651 | 1363 | if (GET_CODE (x) == SUBREG) |
fbe6ec81 | 1364 | { |
38ae7651 RS |
1365 | gcc_assert (subreg_lowpart_p (x)); |
1366 | x = SUBREG_REG (x); | |
fbe6ec81 | 1367 | } |
f8cfc6aa | 1368 | if (REG_P (x)) |
38ae7651 RS |
1369 | REG_ATTRS (x) |
1370 | = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x), | |
1f9ceff1 AO |
1371 | DECL_P (tdecl) |
1372 | ? DECL_MODE (tdecl) | |
1373 | : TYPE_MODE (TREE_TYPE (tdecl)))); | |
a560d4d4 JH |
1374 | if (GET_CODE (x) == CONCAT) |
1375 | { | |
1376 | if (REG_P (XEXP (x, 0))) | |
1377 | REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0); | |
1378 | if (REG_P (XEXP (x, 1))) | |
1379 | REG_ATTRS (XEXP (x, 1)) | |
1380 | = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0)))); | |
1381 | } | |
1382 | if (GET_CODE (x) == PARALLEL) | |
1383 | { | |
d4afac5b JZ |
1384 | int i, start; |
1385 | ||
1386 | /* Check for a NULL entry, used to indicate that the parameter goes | |
1387 | both on the stack and in registers. */ | |
1388 | if (XEXP (XVECEXP (x, 0, 0), 0)) | |
1389 | start = 0; | |
1390 | else | |
1391 | start = 1; | |
1392 | ||
1393 | for (i = start; i < XVECLEN (x, 0); i++) | |
a560d4d4 JH |
1394 | { |
1395 | rtx y = XVECEXP (x, 0, i); | |
1396 | if (REG_P (XEXP (y, 0))) | |
1397 | REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1))); | |
1398 | } | |
1399 | } | |
1400 | } | |
1401 | ||
38ae7651 RS |
1402 | /* Assign the RTX X to declaration T. */ |
1403 | ||
1404 | void | |
1405 | set_decl_rtl (tree t, rtx x) | |
1406 | { | |
1407 | DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x; | |
1408 | if (x) | |
1409 | set_reg_attrs_for_decl_rtl (t, x); | |
1410 | } | |
1411 | ||
5141868d RS |
1412 | /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true |
1413 | if the ABI requires the parameter to be passed by reference. */ | |
38ae7651 RS |
1414 | |
1415 | void | |
5141868d | 1416 | set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p) |
38ae7651 RS |
1417 | { |
1418 | DECL_INCOMING_RTL (t) = x; | |
5141868d | 1419 | if (x && !by_reference_p) |
38ae7651 RS |
1420 | set_reg_attrs_for_decl_rtl (t, x); |
1421 | } | |
1422 | ||
754fdcca RK |
1423 | /* Identify REG (which may be a CONCAT) as a user register. */ |
1424 | ||
1425 | void | |
502b8322 | 1426 | mark_user_reg (rtx reg) |
754fdcca RK |
1427 | { |
1428 | if (GET_CODE (reg) == CONCAT) | |
1429 | { | |
1430 | REG_USERVAR_P (XEXP (reg, 0)) = 1; | |
1431 | REG_USERVAR_P (XEXP (reg, 1)) = 1; | |
1432 | } | |
754fdcca | 1433 | else |
5b0264cb NS |
1434 | { |
1435 | gcc_assert (REG_P (reg)); | |
1436 | REG_USERVAR_P (reg) = 1; | |
1437 | } | |
754fdcca RK |
1438 | } |
1439 | ||
86fe05e0 RK |
1440 | /* Identify REG as a probable pointer register and show its alignment |
1441 | as ALIGN, if nonzero. */ | |
23b2ce53 RS |
1442 | |
1443 | void | |
502b8322 | 1444 | mark_reg_pointer (rtx reg, int align) |
23b2ce53 | 1445 | { |
3502dc9c | 1446 | if (! REG_POINTER (reg)) |
00995e78 | 1447 | { |
3502dc9c | 1448 | REG_POINTER (reg) = 1; |
86fe05e0 | 1449 | |
00995e78 RE |
1450 | if (align) |
1451 | REGNO_POINTER_ALIGN (REGNO (reg)) = align; | |
1452 | } | |
1453 | else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg))) | |
6614fd40 | 1454 | /* We can no longer be sure just how aligned this pointer is. */
86fe05e0 | 1455 | REGNO_POINTER_ALIGN (REGNO (reg)) = align; |
23b2ce53 RS |
1456 | } |
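A small usage sketch, assuming GCC's usual convention that REGNO_POINTER_ALIGN is measured in bits; the helper and its argument are hypothetical.

/* Hypothetical example: create a pseudo that will hold the address of
   an object of POINTEE_TYPE and record its known alignment.  */
static rtx
make_pointer_pseudo (tree pointee_type)
{
  rtx ptr = gen_reg_rtx (Pmode);
  mark_reg_pointer (ptr, TYPE_ALIGN (pointee_type));  /* alignment in bits  */
  return ptr;
}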
1457 | ||
1458 | /* Return 1 plus the largest pseudo reg number used in the current function. */
1459 | ||
1460 | int | |
502b8322 | 1461 | max_reg_num (void) |
23b2ce53 RS |
1462 | { |
1463 | return reg_rtx_no; | |
1464 | } | |
1465 | ||
1466 | /* Return 1 + the largest label number used so far in the current function. */ | |
1467 | ||
1468 | int | |
502b8322 | 1469 | max_label_num (void) |
23b2ce53 | 1470 | { |
23b2ce53 RS |
1471 | return label_num; |
1472 | } | |
1473 | ||
1474 | /* Return first label number used in this function (if any were used). */ | |
1475 | ||
1476 | int | |
502b8322 | 1477 | get_first_label_num (void) |
23b2ce53 RS |
1478 | { |
1479 | return first_label_num; | |
1480 | } | |
6de9cd9a DN |
1481 | |
1482 | /* If the rtx for a label was created during the expansion of a nested
1483 | function, then first_label_num won't include this label number. | |
fa10beec | 1484 | Fix this now so that array indices work later. */ |
6de9cd9a DN |
1485 | |
1486 | void | |
9aa50db7 | 1487 | maybe_set_first_label_num (rtx_code_label *x) |
6de9cd9a DN |
1488 | { |
1489 | if (CODE_LABEL_NUMBER (x) < first_label_num) | |
1490 | first_label_num = CODE_LABEL_NUMBER (x); | |
1491 | } | |
51b86113 DM |
1492 | |
1493 | /* For use by the RTL function loader, when mingling with normal | |
1494 | functions. | |
1495 | Ensure that label_num is greater than the label num of X, to avoid | |
1496 | duplicate labels in the generated assembler. */ | |
1497 | ||
1498 | void | |
1499 | maybe_set_max_label_num (rtx_code_label *x) | |
1500 | { | |
1501 | if (CODE_LABEL_NUMBER (x) >= label_num) | |
1502 | label_num = CODE_LABEL_NUMBER (x) + 1; | |
1503 | } | |
1504 | ||
23b2ce53 RS |
1505 | \f |
1506 | /* Return a value representing some low-order bits of X, where the number | |
1507 | of low-order bits is given by MODE. Note that no conversion is done | |
750c9258 | 1508 | between floating-point and fixed-point values; rather, the bit
23b2ce53 RS |
1509 | representation is returned. |
1510 | ||
1511 | This function handles the cases in common between gen_lowpart, below, | |
1512 | and two variants in cse.c and combine.c. These are the cases that can | |
1513 | be safely handled at all points in the compilation. | |
1514 | ||
1515 | If this is not a case we can handle, return 0. */ | |
1516 | ||
1517 | rtx | |
ef4bddc2 | 1518 | gen_lowpart_common (machine_mode mode, rtx x) |
23b2ce53 | 1519 | { |
ddef6bc7 | 1520 | int msize = GET_MODE_SIZE (mode); |
550d1387 | 1521 | int xsize; |
ef4bddc2 | 1522 | machine_mode innermode; |
550d1387 GK |
1523 | |
1524 | /* Unfortunately, this routine doesn't take a parameter for the mode of X, | |
1525 | so we have to make one up. Yuk. */ | |
1526 | innermode = GET_MODE (x); | |
481683e1 | 1527 | if (CONST_INT_P (x) |
db487452 | 1528 | && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT) |
f4b31647 | 1529 | innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require (); |
550d1387 | 1530 | else if (innermode == VOIDmode) |
f4b31647 | 1531 | innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require (); |
b8698a0f | 1532 | |
550d1387 GK |
1533 | xsize = GET_MODE_SIZE (innermode); |
1534 | ||
5b0264cb | 1535 | gcc_assert (innermode != VOIDmode && innermode != BLKmode); |
23b2ce53 | 1536 | |
550d1387 | 1537 | if (innermode == mode) |
23b2ce53 RS |
1538 | return x; |
1539 | ||
1eae67f8 RS |
1540 | if (SCALAR_FLOAT_MODE_P (mode)) |
1541 | { | |
1542 | /* Don't allow paradoxical FLOAT_MODE subregs. */ | |
1543 | if (msize > xsize) | |
1544 | return 0; | |
1545 | } | |
1546 | else | |
1547 | { | |
1548 | /* MODE must occupy no more of the underlying registers than X. */ | |
1549 | unsigned int regsize = REGMODE_NATURAL_SIZE (innermode); | |
1550 | unsigned int mregs = CEIL (msize, regsize); | |
1551 | unsigned int xregs = CEIL (xsize, regsize); | |
1552 | if (mregs > xregs) | |
1553 | return 0; | |
1554 | } | |
53501a19 | 1555 | |
54651377 | 1556 | scalar_int_mode int_mode, int_innermode, from_mode; |
23b2ce53 | 1557 | if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND) |
54651377 RS |
1558 | && is_a <scalar_int_mode> (mode, &int_mode) |
1559 | && is_a <scalar_int_mode> (innermode, &int_innermode) | |
1560 | && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode)) | |
23b2ce53 RS |
1561 | { |
1562 | /* If we are getting the low-order part of something that has been | |
1563 | sign- or zero-extended, we can either just use the object being | |
1564 | extended or make a narrower extension. If we want an even smaller | |
1565 | piece than the size of the object being extended, call ourselves | |
1566 | recursively. | |
1567 | ||
1568 | This case is used mostly by combine and cse. */ | |
1569 | ||
54651377 | 1570 | if (from_mode == int_mode) |
23b2ce53 | 1571 | return XEXP (x, 0); |
54651377 RS |
1572 | else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode)) |
1573 | return gen_lowpart_common (int_mode, XEXP (x, 0)); | |
1574 | else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode)) | |
1575 | return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0)); | |
23b2ce53 | 1576 | } |
f8cfc6aa | 1577 | else if (GET_CODE (x) == SUBREG || REG_P (x) |
06ec586d | 1578 | || GET_CODE (x) == CONCAT || const_vec_p (x) |
0c12fc9b RS |
1579 | || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x) |
1580 | || CONST_POLY_INT_P (x)) | |
3403a1a9 | 1581 | return lowpart_subreg (mode, x, innermode); |
8aada4ad | 1582 | |
23b2ce53 RS |
1583 | /* Otherwise, we can't do this. */ |
1584 | return 0; | |
1585 | } | |
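A hedged sketch of the calling convention: gen_lowpart_common can return 0, so combine/cse-style callers must be prepared to bail out. The fragment below is illustrative, not taken from a real caller.

/* Hypothetical fragment: try to take the SImode lowpart of SRC and
   give up on the simplification if that is not safely possible.  */
rtx lo = gen_lowpart_common (SImode, src);
if (lo == NULL_RTX)
  return NULL_RTX;   /* not a case gen_lowpart_common handles  */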
1586 | \f | |
ccba022b | 1587 | rtx |
ef4bddc2 | 1588 | gen_highpart (machine_mode mode, rtx x) |
ccba022b | 1589 | { |
ddef6bc7 | 1590 | unsigned int msize = GET_MODE_SIZE (mode); |
e0e08ac2 | 1591 | rtx result; |
ddef6bc7 | 1592 | |
ccba022b RS |
1593 | /* This case loses if X is a subreg. To catch bugs early, |
1594 | complain if an invalid MODE is used even in other cases. */ | |
5b0264cb NS |
1595 | gcc_assert (msize <= UNITS_PER_WORD |
1596 | || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x))); | |
ddef6bc7 | 1597 | |
e0e08ac2 JH |
1598 | result = simplify_gen_subreg (mode, x, GET_MODE (x), |
1599 | subreg_highpart_offset (mode, GET_MODE (x))); | |
5b0264cb | 1600 | gcc_assert (result); |
b8698a0f | 1601 | |
09482e0d JW |
1602 | /* simplify_gen_subreg is not guaranteed to return a valid operand for |
1603 | the target if we have a MEM. gen_highpart must return a valid operand, | |
1604 | emitting code if necessary to do so. */ | |
5b0264cb NS |
1605 | if (MEM_P (result)) |
1606 | { | |
1607 | result = validize_mem (result); | |
1608 | gcc_assert (result); | |
1609 | } | |
b8698a0f | 1610 | |
e0e08ac2 JH |
1611 | return result; |
1612 | } | |
5222e470 | 1613 | |
26d249eb | 1614 | /* Like gen_highpart, but accept mode of EXP operand in case EXP can |
5222e470 JH |
1615 | be VOIDmode constant. */ |
1616 | rtx | |
ef4bddc2 | 1617 | gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp) |
5222e470 JH |
1618 | { |
1619 | if (GET_MODE (exp) != VOIDmode) | |
1620 | { | |
5b0264cb | 1621 | gcc_assert (GET_MODE (exp) == innermode); |
5222e470 JH |
1622 | return gen_highpart (outermode, exp); |
1623 | } | |
1624 | return simplify_gen_subreg (outermode, exp, innermode, | |
1625 | subreg_highpart_offset (outermode, innermode)); | |
1626 | } | |
68252e27 | 1627 | |
33951763 RS |
1628 | /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has |
1629 | OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */ | |
8698cce3 | 1630 | |
91914e56 RS |
1631 | poly_uint64 |
1632 | subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes) | |
e0e08ac2 | 1633 | { |
91914e56 RS |
1634 | gcc_checking_assert (ordered_p (outer_bytes, inner_bytes)); |
1635 | if (maybe_gt (outer_bytes, inner_bytes)) | |
33951763 RS |
1636 | /* Paradoxical subregs always have a SUBREG_BYTE of 0. */ |
1637 | return 0; | |
ddef6bc7 | 1638 | |
33951763 RS |
1639 | if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN) |
1640 | return inner_bytes - outer_bytes; | |
1641 | else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN) | |
1642 | return 0; | |
1643 | else | |
1644 | return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0); | |
ccba022b | 1645 | } |
eea50aa0 | 1646 | |
33951763 RS |
1647 | /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has |
1648 | OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */ | |
1649 | ||
91914e56 RS |
1650 | poly_uint64 |
1651 | subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes) | |
eea50aa0 | 1652 | { |
91914e56 | 1653 | gcc_assert (known_ge (inner_bytes, outer_bytes)); |
eea50aa0 | 1654 | |
33951763 RS |
1655 | if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN) |
1656 | return 0; | |
1657 | else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN) | |
1658 | return inner_bytes - outer_bytes; | |
1659 | else | |
1660 | return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, | |
1661 | (inner_bytes - outer_bytes) | |
1662 | * BITS_PER_UNIT); | |
eea50aa0 | 1663 | } |
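A worked example, assuming a target where neither BYTES_BIG_ENDIAN nor WORDS_BIG_ENDIAN is set and SImode/DImode occupy 4 and 8 bytes.

/* Hypothetical check: SImode (4 bytes) inside DImode (8 bytes).
   On such a little-endian target the lowpart starts at byte 0 and
   the highpart at byte 4; big-endian targets swap the two.  */
gcc_checking_assert (known_eq (subreg_size_lowpart_offset (4, 8), 0));
gcc_checking_assert (known_eq (subreg_size_highpart_offset (4, 8), 4));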
ccba022b | 1664 | |
23b2ce53 RS |
1665 | /* Return 1 iff X, assumed to be a SUBREG, |
1666 | refers to the least significant part of its containing reg. | |
1667 | If X is not a SUBREG, always return 1 (it is its own low part!). */ | |
1668 | ||
1669 | int | |
fa233e34 | 1670 | subreg_lowpart_p (const_rtx x) |
23b2ce53 RS |
1671 | { |
1672 | if (GET_CODE (x) != SUBREG) | |
1673 | return 1; | |
a3a03040 RK |
1674 | else if (GET_MODE (SUBREG_REG (x)) == VOIDmode) |
1675 | return 0; | |
23b2ce53 | 1676 | |
91914e56 RS |
1677 | return known_eq (subreg_lowpart_offset (GET_MODE (x), |
1678 | GET_MODE (SUBREG_REG (x))), | |
1679 | SUBREG_BYTE (x)); | |
23b2ce53 RS |
1680 | } |
1681 | \f | |
ddef6bc7 JJ |
1682 | /* Return subword OFFSET of operand OP. |
1683 | The word number, OFFSET, is interpreted as the word number starting | |
1684 | at the low-order address. OFFSET 0 is the low-order word if not | |
1685 | WORDS_BIG_ENDIAN, otherwise it is the high-order word. | |
1686 | ||
1687 | If we cannot extract the required word, we return zero. Otherwise, | |
1688 | an rtx corresponding to the requested word will be returned. | |
1689 | ||
1690 | VALIDATE_ADDRESS is nonzero if the address should be validated. Before | |
1691 | reload has completed, a valid address will always be returned. After | |
1692 | reload, if a valid address cannot be returned, we return zero. | |
1693 | ||
1694 | If VALIDATE_ADDRESS is zero, we simply form the required address; validating | |
1695 | it is the responsibility of the caller. | |
1696 | ||
1697 | MODE is the mode of OP in case it is a CONST_INT. | |
1698 | ||
1699 | ??? This is still rather broken for some cases. The problem for the | |
1700 | moment is that all callers of this thing provide no 'goal mode' to | |
1701 | tell us to work with. This exists because all callers were written | |
0631e0bf JH |
1702 | in a word-based SUBREG world.
1703 | Nowadays use of this function can be replaced by simplify_subreg in most
1704 | cases.
1705 | */ | |
ddef6bc7 JJ |
1706 | |
1707 | rtx | |
fdbfe4e5 RS |
1708 | operand_subword (rtx op, poly_uint64 offset, int validate_address, |
1709 | machine_mode mode) | |
ddef6bc7 JJ |
1710 | { |
1711 | if (mode == VOIDmode) | |
1712 | mode = GET_MODE (op); | |
1713 | ||
5b0264cb | 1714 | gcc_assert (mode != VOIDmode); |
ddef6bc7 | 1715 | |
30f7a378 | 1716 | /* If OP is narrower than a word, fail. */ |
ddef6bc7 | 1717 | if (mode != BLKmode |
fdbfe4e5 | 1718 | && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD)) |
ddef6bc7 JJ |
1719 | return 0; |
1720 | ||
30f7a378 | 1721 | /* If we want a word outside OP, return zero. */ |
ddef6bc7 | 1722 | if (mode != BLKmode |
fdbfe4e5 | 1723 | && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode))) |
ddef6bc7 JJ |
1724 | return const0_rtx; |
1725 | ||
ddef6bc7 | 1726 | /* Form a new MEM at the requested address. */ |
3c0cb5de | 1727 | if (MEM_P (op)) |
ddef6bc7 | 1728 | { |
60564289 | 1729 | rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD); |
ddef6bc7 | 1730 | |
f1ec5147 | 1731 | if (! validate_address) |
60564289 | 1732 | return new_rtx; |
f1ec5147 RK |
1733 | |
1734 | else if (reload_completed) | |
ddef6bc7 | 1735 | { |
09e881c9 BE |
1736 | if (! strict_memory_address_addr_space_p (word_mode, |
1737 | XEXP (new_rtx, 0), | |
1738 | MEM_ADDR_SPACE (op))) | |
f1ec5147 | 1739 | return 0; |
ddef6bc7 | 1740 | } |
f1ec5147 | 1741 | else |
60564289 | 1742 | return replace_equiv_address (new_rtx, XEXP (new_rtx, 0)); |
ddef6bc7 JJ |
1743 | } |
1744 | ||
0631e0bf JH |
1745 | /* Rest can be handled by simplify_subreg. */ |
1746 | return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD)); | |
ddef6bc7 JJ |
1747 | } |
1748 | ||
535a42b1 NS |
1749 | /* Similar to `operand_subword', but never return 0. If we can't |
1750 | extract the required subword, put OP into a register and try again. | |
1751 | The second attempt must succeed. We always validate the address in | |
1752 | this case. | |
23b2ce53 RS |
1753 | |
1754 | MODE is the mode of OP, in case it is CONST_INT. */ | |
1755 | ||
1756 | rtx | |
fdbfe4e5 | 1757 | operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode) |
23b2ce53 | 1758 | { |
ddef6bc7 | 1759 | rtx result = operand_subword (op, offset, 1, mode); |
23b2ce53 RS |
1760 | |
1761 | if (result) | |
1762 | return result; | |
1763 | ||
1764 | if (mode != BLKmode && mode != VOIDmode) | |
77e6b0eb JC |
1765 | { |
1766 | /* If this is a register that cannot be accessed by words, copy it
1767 | to a pseudo register. */ | |
f8cfc6aa | 1768 | if (REG_P (op)) |
77e6b0eb JC |
1769 | op = copy_to_reg (op); |
1770 | else | |
1771 | op = force_reg (mode, op); | |
1772 | } | |
23b2ce53 | 1773 | |
ddef6bc7 | 1774 | result = operand_subword (op, offset, 1, mode); |
5b0264cb | 1775 | gcc_assert (result); |
23b2ce53 RS |
1776 | |
1777 | return result; | |
1778 | } | |
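A usage sketch, assuming a 32-bit-word target where DImode spans two words; DI_VAL is a hypothetical DImode rtx.

/* Hypothetical example: split DI_VAL into its two word-sized halves.
   Word 0 is the low-order word unless WORDS_BIG_ENDIAN.  */
rtx w0 = operand_subword_force (di_val, 0, DImode);
rtx w1 = operand_subword_force (di_val, 1, DImode);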
1779 | \f | |
d05d7551 RS |
1780 | mem_attrs::mem_attrs () |
1781 | : expr (NULL_TREE), | |
1782 | offset (0), | |
1783 | size (0), | |
1784 | alias (0), | |
1785 | align (0), | |
1786 | addrspace (ADDR_SPACE_GENERIC), | |
1787 | offset_known_p (false), | |
1788 | size_known_p (false) | |
1789 | {} | |
1790 | ||
2b3493c8 AK |
1791 | /* Return 1 if EXPR1 and EXPR2 (the two MEM_EXPRs) can be considered equal,
1792 | and 0 otherwise. */
1793 | ||
1794 | int | |
4f588890 | 1795 | mem_expr_equal_p (const_tree expr1, const_tree expr2) |
2b3493c8 AK |
1796 | { |
1797 | if (expr1 == expr2) | |
1798 | return 1; | |
1799 | ||
1800 | if (! expr1 || ! expr2) | |
1801 | return 0; | |
1802 | ||
1803 | if (TREE_CODE (expr1) != TREE_CODE (expr2)) | |
1804 | return 0; | |
1805 | ||
55b34b5f | 1806 | return operand_equal_p (expr1, expr2, 0); |
2b3493c8 AK |
1807 | } |
1808 | ||
805903b5 JJ |
1809 | /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN |
1810 | bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or | |
1811 | -1 if not known. */ | |
1812 | ||
1813 | int | |
d9223014 | 1814 | get_mem_align_offset (rtx mem, unsigned int align) |
805903b5 JJ |
1815 | { |
1816 | tree expr; | |
d05d7551 | 1817 | poly_uint64 offset; |
805903b5 JJ |
1818 | |
1819 | /* This function can't use | |
527210c4 | 1820 | if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem) |
e80c2726 | 1821 | || (MAX (MEM_ALIGN (mem), |
0eb77834 | 1822 | MAX (align, get_object_alignment (MEM_EXPR (mem)))) |
805903b5 JJ |
1823 | < align)) |
1824 | return -1; | |
1825 | else | |
527210c4 | 1826 | return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1); |
805903b5 JJ |
1827 | for two reasons: |
1828 | - COMPONENT_REFs in MEM_EXPR can have NULL first operand, | |
1829 | for <variable>. get_inner_reference doesn't handle it and | |
1830 | even if it did, the alignment in that case needs to be determined | |
1831 | from DECL_FIELD_CONTEXT's TYPE_ALIGN. | |
1832 | - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR | |
1833 | isn't sufficiently aligned, the object it is in might be. */ | |
1834 | gcc_assert (MEM_P (mem)); | |
1835 | expr = MEM_EXPR (mem); | |
527210c4 | 1836 | if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem)) |
805903b5 JJ |
1837 | return -1; |
1838 | ||
527210c4 | 1839 | offset = MEM_OFFSET (mem); |
805903b5 JJ |
1840 | if (DECL_P (expr)) |
1841 | { | |
1842 | if (DECL_ALIGN (expr) < align) | |
1843 | return -1; | |
1844 | } | |
1845 | else if (INDIRECT_REF_P (expr)) | |
1846 | { | |
1847 | if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align) | |
1848 | return -1; | |
1849 | } | |
1850 | else if (TREE_CODE (expr) == COMPONENT_REF) | |
1851 | { | |
1852 | while (1) | |
1853 | { | |
1854 | tree inner = TREE_OPERAND (expr, 0); | |
1855 | tree field = TREE_OPERAND (expr, 1); | |
1856 | tree byte_offset = component_ref_field_offset (expr); | |
1857 | tree bit_offset = DECL_FIELD_BIT_OFFSET (field); | |
1858 | ||
d05d7551 | 1859 | poly_uint64 suboffset; |
805903b5 | 1860 | if (!byte_offset |
d05d7551 | 1861 | || !poly_int_tree_p (byte_offset, &suboffset) |
cc269bb6 | 1862 | || !tree_fits_uhwi_p (bit_offset)) |
805903b5 JJ |
1863 | return -1; |
1864 | ||
d05d7551 | 1865 | offset += suboffset; |
ae7e9ddd | 1866 | offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT; |
805903b5 JJ |
1867 | |
1868 | if (inner == NULL_TREE) | |
1869 | { | |
1870 | if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field)) | |
1871 | < (unsigned int) align) | |
1872 | return -1; | |
1873 | break; | |
1874 | } | |
1875 | else if (DECL_P (inner)) | |
1876 | { | |
1877 | if (DECL_ALIGN (inner) < align) | |
1878 | return -1; | |
1879 | break; | |
1880 | } | |
1881 | else if (TREE_CODE (inner) != COMPONENT_REF) | |
1882 | return -1; | |
1883 | expr = inner; | |
1884 | } | |
1885 | } | |
1886 | else | |
1887 | return -1; | |
1888 | ||
d05d7551 RS |
1889 | HOST_WIDE_INT misalign; |
1890 | if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign)) | |
1891 | return -1; | |
1892 | return misalign; | |
805903b5 JJ |
1893 | } |
1894 | ||
6926c713 | 1895 | /* Given REF (a MEM) and T, either the type of X or the expression |
173b24b9 | 1896 | corresponding to REF, set the memory attributes. OBJECTP is nonzero |
6f1087be RH |
1897 | if we are making a new object of this type. BITPOS is nonzero if |
1898 | there is an offset outstanding on T that will be applied later. */ | |
173b24b9 RK |
1899 | |
1900 | void | |
502b8322 | 1901 | set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, |
d05d7551 | 1902 | poly_int64 bitpos) |
173b24b9 | 1903 | { |
d05d7551 | 1904 | poly_int64 apply_bitpos = 0; |
173b24b9 | 1905 | tree type; |
f12144dd | 1906 | struct mem_attrs attrs, *defattrs, *refattrs; |
f18a7b25 | 1907 | addr_space_t as; |
173b24b9 RK |
1908 | |
1909 | /* It can happen that type_for_mode was given a mode for which there | |
1910 | is no language-level type. In which case it returns NULL, which | |
1911 | we can see here. */ | |
1912 | if (t == NULL_TREE) | |
1913 | return; | |
1914 | ||
1915 | type = TYPE_P (t) ? t : TREE_TYPE (t); | |
eeb23c11 MM |
1916 | if (type == error_mark_node) |
1917 | return; | |
173b24b9 | 1918 | |
173b24b9 RK |
1919 | /* If we have already set DECL_RTL = ref, get_alias_set will get the |
1920 | wrong answer, as it assumes that DECL_RTL already has the right alias | |
1921 | info. Callers should not set DECL_RTL until after the call to | |
1922 | set_mem_attributes. */ | |
5b0264cb | 1923 | gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t)); |
173b24b9 | 1924 | |
738cc472 | 1925 | /* Get the alias set from the expression or type (perhaps using a |
8ac61af7 | 1926 | front-end routine) and use it. */ |
f12144dd | 1927 | attrs.alias = get_alias_set (t); |
173b24b9 | 1928 | |
a5e9c810 | 1929 | MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type); |
f8ad8d7c | 1930 | MEM_POINTER (ref) = POINTER_TYPE_P (type); |
173b24b9 | 1931 | |
268f7033 | 1932 | /* Default values from pre-existing memory attributes if present. */ |
f12144dd RS |
1933 | refattrs = MEM_ATTRS (ref); |
1934 | if (refattrs) | |
268f7033 UW |
1935 | { |
1936 | /* ??? Can this ever happen? Calling this routine on a MEM that | |
1937 | already carries memory attributes should probably be invalid. */ | |
f12144dd | 1938 | attrs.expr = refattrs->expr; |
754c3d5d | 1939 | attrs.offset_known_p = refattrs->offset_known_p; |
f12144dd | 1940 | attrs.offset = refattrs->offset; |
754c3d5d | 1941 | attrs.size_known_p = refattrs->size_known_p; |
f12144dd RS |
1942 | attrs.size = refattrs->size; |
1943 | attrs.align = refattrs->align; | |
268f7033 UW |
1944 | } |
1945 | ||
1946 | /* Otherwise, default values from the mode of the MEM reference. */ | |
f12144dd | 1947 | else |
268f7033 | 1948 | { |
f12144dd RS |
1949 | defattrs = mode_mem_attrs[(int) GET_MODE (ref)]; |
1950 | gcc_assert (!defattrs->expr); | |
754c3d5d | 1951 | gcc_assert (!defattrs->offset_known_p); |
f12144dd | 1952 | |
268f7033 | 1953 | /* Respect mode size. */ |
754c3d5d | 1954 | attrs.size_known_p = defattrs->size_known_p; |
f12144dd | 1955 | attrs.size = defattrs->size; |
268f7033 UW |
1956 | /* ??? Is this really necessary? We probably should always get |
1957 | the size from the type below. */ | |
1958 | ||
1959 | /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type; | |
1960 | if T is an object, always compute the object alignment below. */ | |
f12144dd RS |
1961 | if (TYPE_P (t)) |
1962 | attrs.align = defattrs->align; | |
1963 | else | |
1964 | attrs.align = BITS_PER_UNIT; | |
268f7033 UW |
1965 | /* ??? If T is a type, respecting mode alignment may *also* be wrong |
1966 | e.g. if the type carries an alignment attribute. Should we be | |
1967 | able to simply always use TYPE_ALIGN? */ | |
1968 | } | |
1969 | ||
25b75a48 BE |
1970 | /* We can set the alignment from the type if we are making an object or if |
1971 | this is an INDIRECT_REF. */ | |
1972 | if (objectp || TREE_CODE (t) == INDIRECT_REF) | |
f12144dd | 1973 | attrs.align = MAX (attrs.align, TYPE_ALIGN (type)); |
a80903ff | 1974 | |
738cc472 | 1975 | /* If the size is known, we can set that. */ |
a787ccc3 | 1976 | tree new_size = TYPE_SIZE_UNIT (type); |
738cc472 | 1977 | |
30b0317c RB |
1978 | /* The address-space is that of the type. */ |
1979 | as = TYPE_ADDR_SPACE (type); | |
1980 | ||
80965c18 RK |
1981 | /* If T is not a type, we may be able to deduce some more information about |
1982 | the expression. */ | |
1983 | if (! TYPE_P (t)) | |
8ac61af7 | 1984 | { |
8476af98 | 1985 | tree base; |
389fdba0 | 1986 | |
8ac61af7 RK |
1987 | if (TREE_THIS_VOLATILE (t)) |
1988 | MEM_VOLATILE_P (ref) = 1; | |
173b24b9 | 1989 | |
c56e3582 RK |
1990 | /* Now remove any conversions: they don't change what the underlying |
1991 | object is. Likewise for SAVE_EXPR. */ | |
1043771b | 1992 | while (CONVERT_EXPR_P (t) |
c56e3582 RK |
1993 | || TREE_CODE (t) == VIEW_CONVERT_EXPR |
1994 | || TREE_CODE (t) == SAVE_EXPR) | |
8ac61af7 RK |
1995 | t = TREE_OPERAND (t, 0); |
1996 | ||
4994da65 RG |
1997 | /* Note whether this expression can trap. */ |
1998 | MEM_NOTRAP_P (ref) = !tree_could_trap_p (t); | |
1999 | ||
2000 | base = get_base_address (t); | |
f18a7b25 MJ |
2001 | if (base) |
2002 | { | |
2003 | if (DECL_P (base) | |
2004 | && TREE_READONLY (base) | |
2005 | && (TREE_STATIC (base) || DECL_EXTERNAL (base)) | |
2006 | && !TREE_THIS_VOLATILE (base)) | |
2007 | MEM_READONLY_P (ref) = 1; | |
2008 | ||
2009 | /* Mark static const strings readonly as well. */ | |
2010 | if (TREE_CODE (base) == STRING_CST | |
2011 | && TREE_READONLY (base) | |
2012 | && TREE_STATIC (base)) | |
2013 | MEM_READONLY_P (ref) = 1; | |
2014 | ||
30b0317c | 2015 | /* Address-space information is on the base object. */ |
f18a7b25 MJ |
2016 | if (TREE_CODE (base) == MEM_REF |
2017 | || TREE_CODE (base) == TARGET_MEM_REF) | |
2018 | as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, | |
2019 | 0)))); | |
2020 | else | |
2021 | as = TYPE_ADDR_SPACE (TREE_TYPE (base)); | |
2022 | } | |
ba30e50d | 2023 | |
2039d7aa RH |
2024 | /* If this expression uses its parent's alias set, mark it such
2025 | that we won't change it. */ | |
b4ada065 | 2026 | if (component_uses_parent_alias_set_from (t) != NULL_TREE) |
10b76d73 RK |
2027 | MEM_KEEP_ALIAS_SET_P (ref) = 1; |
2028 | ||
8ac61af7 RK |
2029 | /* If this is a decl, set the attributes of the MEM from it. */ |
2030 | if (DECL_P (t)) | |
2031 | { | |
f12144dd | 2032 | attrs.expr = t; |
754c3d5d RS |
2033 | attrs.offset_known_p = true; |
2034 | attrs.offset = 0; | |
6f1087be | 2035 | apply_bitpos = bitpos; |
a787ccc3 | 2036 | new_size = DECL_SIZE_UNIT (t); |
8ac61af7 RK |
2037 | } |
2038 | ||
30b0317c | 2039 | /* ??? If we end up with a constant here do record a MEM_EXPR. */ |
6615c446 | 2040 | else if (CONSTANT_CLASS_P (t)) |
30b0317c | 2041 | ; |
998d7deb | 2042 | |
a787ccc3 RS |
2043 | /* If this is a field reference, record it. */ |
2044 | else if (TREE_CODE (t) == COMPONENT_REF) | |
998d7deb | 2045 | { |
f12144dd | 2046 | attrs.expr = t; |
754c3d5d RS |
2047 | attrs.offset_known_p = true; |
2048 | attrs.offset = 0; | |
6f1087be | 2049 | apply_bitpos = bitpos; |
a787ccc3 RS |
2050 | if (DECL_BIT_FIELD (TREE_OPERAND (t, 1))) |
2051 | new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1)); | |
998d7deb RH |
2052 | } |
2053 | ||
2054 | /* If this is an array reference, look for an outer field reference. */ | |
2055 | else if (TREE_CODE (t) == ARRAY_REF) | |
2056 | { | |
2057 | tree off_tree = size_zero_node; | |
1b1838b6 JW |
2058 | /* We can't modify t, because we use it at the end of the |
2059 | function. */ | |
2060 | tree t2 = t; | |
998d7deb RH |
2061 | |
2062 | do | |
2063 | { | |
1b1838b6 | 2064 | tree index = TREE_OPERAND (t2, 1); |
44de5aeb RK |
2065 | tree low_bound = array_ref_low_bound (t2); |
2066 | tree unit_size = array_ref_element_size (t2); | |
2567406a JH |
2067 | |
2068 | /* We assume all arrays have sizes that are a multiple of a byte. | |
2069 | First subtract the lower bound, if any, in the type of the | |
44de5aeb RK |
2070 | index, then convert to sizetype and multiply by the size of |
2071 | the array element. */ | |
2072 | if (! integer_zerop (low_bound)) | |
4845b383 KH |
2073 | index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), |
2074 | index, low_bound); | |
2567406a | 2075 | |
44de5aeb | 2076 | off_tree = size_binop (PLUS_EXPR, |
b6f65e3c RS |
2077 | size_binop (MULT_EXPR, |
2078 | fold_convert (sizetype, | |
2079 | index), | |
44de5aeb RK |
2080 | unit_size), |
2081 | off_tree); | |
1b1838b6 | 2082 | t2 = TREE_OPERAND (t2, 0); |
998d7deb | 2083 | } |
1b1838b6 | 2084 | while (TREE_CODE (t2) == ARRAY_REF); |
998d7deb | 2085 | |
30b0317c | 2086 | if (DECL_P (t2) |
12ead254 RB |
2087 | || (TREE_CODE (t2) == COMPONENT_REF |
2088 | /* For trailing arrays t2 doesn't have a size that | |
2089 | covers all valid accesses. */ | |
c3e46927 | 2090 | && ! array_at_struct_end_p (t))) |
998d7deb | 2091 | { |
f12144dd | 2092 | attrs.expr = t2; |
754c3d5d | 2093 | attrs.offset_known_p = false; |
d05d7551 | 2094 | if (poly_int_tree_p (off_tree, &attrs.offset)) |
6f1087be | 2095 | { |
754c3d5d | 2096 | attrs.offset_known_p = true; |
6f1087be RH |
2097 | apply_bitpos = bitpos; |
2098 | } | |
998d7deb | 2099 | } |
30b0317c | 2100 | /* Else do not record a MEM_EXPR. */ |
c67a1cf6 RH |
2101 | } |
2102 | ||
56c47f22 | 2103 | /* If this is an indirect reference, record it. */ |
70f34814 | 2104 | else if (TREE_CODE (t) == MEM_REF |
be1ac4ec | 2105 | || TREE_CODE (t) == TARGET_MEM_REF) |
56c47f22 | 2106 | { |
f12144dd | 2107 | attrs.expr = t; |
754c3d5d RS |
2108 | attrs.offset_known_p = true; |
2109 | attrs.offset = 0; | |
56c47f22 RG |
2110 | apply_bitpos = bitpos; |
2111 | } | |
2112 | ||
30b0317c RB |
2113 | /* Compute the alignment. */ |
2114 | unsigned int obj_align; | |
2115 | unsigned HOST_WIDE_INT obj_bitpos; | |
2116 | get_object_alignment_1 (t, &obj_align, &obj_bitpos); | |
d05d7551 RS |
2117 | unsigned int diff_align = known_alignment (obj_bitpos - bitpos); |
2118 | if (diff_align != 0) | |
2119 | obj_align = MIN (obj_align, diff_align); | |
30b0317c | 2120 | attrs.align = MAX (attrs.align, obj_align); |
8ac61af7 RK |
2121 | } |
2122 | ||
d05d7551 RS |
2123 | poly_uint64 const_size; |
2124 | if (poly_int_tree_p (new_size, &const_size)) | |
a787ccc3 RS |
2125 | { |
2126 | attrs.size_known_p = true; | |
d05d7551 | 2127 | attrs.size = const_size; |
a787ccc3 RS |
2128 | } |
2129 | ||
15c812e3 | 2130 | /* If we modified OFFSET based on T, then subtract the outstanding |
8c317c5f RH |
2131 | bit position offset. Similarly, increase the size of the accessed |
2132 | object to contain the negative offset. */ | |
d05d7551 | 2133 | if (maybe_ne (apply_bitpos, 0)) |
8c317c5f | 2134 | { |
754c3d5d | 2135 | gcc_assert (attrs.offset_known_p); |
d05d7551 RS |
2136 | poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos); |
2137 | attrs.offset -= bytepos; | |
754c3d5d | 2138 | if (attrs.size_known_p) |
d05d7551 | 2139 | attrs.size += bytepos; |
8c317c5f | 2140 | } |
6f1087be | 2141 | |
8ac61af7 | 2142 | /* Now set the attributes we computed above. */ |
f18a7b25 | 2143 | attrs.addrspace = as; |
f12144dd | 2144 | set_mem_attrs (ref, &attrs); |
173b24b9 RK |
2145 | } |
2146 | ||
6f1087be | 2147 | void |
502b8322 | 2148 | set_mem_attributes (rtx ref, tree t, int objectp) |
6f1087be RH |
2149 | { |
2150 | set_mem_attributes_minus_bitpos (ref, t, objectp, 0); | |
2151 | } | |
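A sketch of the usual expander pattern around set_mem_attributes; EXP and ADDR are hypothetical stand-ins for a decl/expression and its already-expanded address.

/* Hypothetical example: build a MEM for EXP and let set_mem_attributes
   derive MEM_EXPR, offset, size, alignment and the alias set from it.  */
rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
set_mem_attributes (mem, exp, /*objectp=*/1);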
2152 | ||
173b24b9 RK |
2153 | /* Set the alias set of MEM to SET. */ |
2154 | ||
2155 | void | |
4862826d | 2156 | set_mem_alias_set (rtx mem, alias_set_type set) |
173b24b9 | 2157 | { |
173b24b9 | 2158 | /* If the new and old alias sets don't conflict, something is wrong. */ |
77a74ed7 | 2159 | gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem))); |
d05d7551 | 2160 | mem_attrs attrs (*get_mem_attrs (mem)); |
f12144dd RS |
2161 | attrs.alias = set; |
2162 | set_mem_attrs (mem, &attrs); | |
09e881c9 BE |
2163 | } |
2164 | ||
2165 | /* Set the address space of MEM to ADDRSPACE (target-defined). */ | |
2166 | ||
2167 | void | |
2168 | set_mem_addr_space (rtx mem, addr_space_t addrspace) | |
2169 | { | |
d05d7551 | 2170 | mem_attrs attrs (*get_mem_attrs (mem)); |
f12144dd RS |
2171 | attrs.addrspace = addrspace; |
2172 | set_mem_attrs (mem, &attrs); | |
173b24b9 | 2173 | } |
738cc472 | 2174 | |
d022d93e | 2175 | /* Set the alignment of MEM to ALIGN bits. */ |
738cc472 RK |
2176 | |
2177 | void | |
502b8322 | 2178 | set_mem_align (rtx mem, unsigned int align) |
738cc472 | 2179 | { |
d05d7551 | 2180 | mem_attrs attrs (*get_mem_attrs (mem)); |
f12144dd RS |
2181 | attrs.align = align; |
2182 | set_mem_attrs (mem, &attrs); | |
738cc472 | 2183 | } |
1285011e | 2184 | |
998d7deb | 2185 | /* Set the expr for MEM to EXPR. */ |
1285011e RK |
2186 | |
2187 | void | |
502b8322 | 2188 | set_mem_expr (rtx mem, tree expr) |
1285011e | 2189 | { |
d05d7551 | 2190 | mem_attrs attrs (*get_mem_attrs (mem)); |
f12144dd RS |
2191 | attrs.expr = expr; |
2192 | set_mem_attrs (mem, &attrs); | |
1285011e | 2193 | } |
998d7deb RH |
2194 | |
2195 | /* Set the offset of MEM to OFFSET. */ | |
2196 | ||
2197 | void | |
d05d7551 | 2198 | set_mem_offset (rtx mem, poly_int64 offset) |
998d7deb | 2199 | { |
d05d7551 | 2200 | mem_attrs attrs (*get_mem_attrs (mem)); |
754c3d5d RS |
2201 | attrs.offset_known_p = true; |
2202 | attrs.offset = offset; | |
527210c4 RS |
2203 | set_mem_attrs (mem, &attrs); |
2204 | } | |
2205 | ||
2206 | /* Clear the offset of MEM. */ | |
2207 | ||
2208 | void | |
2209 | clear_mem_offset (rtx mem) | |
2210 | { | |
d05d7551 | 2211 | mem_attrs attrs (*get_mem_attrs (mem)); |
754c3d5d | 2212 | attrs.offset_known_p = false; |
f12144dd | 2213 | set_mem_attrs (mem, &attrs); |
35aff10b AM |
2214 | } |
2215 | ||
2216 | /* Set the size of MEM to SIZE. */ | |
2217 | ||
2218 | void | |
d05d7551 | 2219 | set_mem_size (rtx mem, poly_int64 size) |
35aff10b | 2220 | { |
d05d7551 | 2221 | mem_attrs attrs (*get_mem_attrs (mem)); |
754c3d5d RS |
2222 | attrs.size_known_p = true; |
2223 | attrs.size = size; | |
f5541398 RS |
2224 | set_mem_attrs (mem, &attrs); |
2225 | } | |
2226 | ||
2227 | /* Clear the size of MEM. */ | |
2228 | ||
2229 | void | |
2230 | clear_mem_size (rtx mem) | |
2231 | { | |
d05d7551 | 2232 | mem_attrs attrs (*get_mem_attrs (mem)); |
754c3d5d | 2233 | attrs.size_known_p = false; |
f12144dd | 2234 | set_mem_attrs (mem, &attrs); |
998d7deb | 2235 | } |
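A short sketch showing the individual attribute setters above on a manually built MEM; the values are illustrative (alignment is in bits, size in bytes) and the slot itself is hypothetical.

/* Hypothetical example: stamp attributes onto a MEM describing an
   8-byte, 64-bit-aligned slot at SP+16.  */
rtx slot = gen_rtx_MEM (DImode,
                        plus_constant (Pmode, stack_pointer_rtx, 16));
set_mem_align (slot, 64);   /* bits  */
set_mem_size (slot, 8);     /* bytes  */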
173b24b9 | 2236 | \f |
738cc472 RK |
2237 | /* Return a memory reference like MEMREF, but with its mode changed to MODE |
2238 | and its address changed to ADDR. (VOIDmode means don't change the mode. | |
2239 | NULL for ADDR means don't change the address.) VALIDATE is nonzero if the | |
23b33725 RS |
2240 | returned memory location is required to be valid. INPLACE is true if any |
2241 | changes can be made directly to MEMREF or false if MEMREF must be treated | |
2242 | as immutable. | |
2243 | ||
2244 | The memory attributes are not changed. */ | |
23b2ce53 | 2245 | |
738cc472 | 2246 | static rtx |
ef4bddc2 | 2247 | change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate, |
23b33725 | 2248 | bool inplace) |
23b2ce53 | 2249 | { |
09e881c9 | 2250 | addr_space_t as; |
60564289 | 2251 | rtx new_rtx; |
23b2ce53 | 2252 | |
5b0264cb | 2253 | gcc_assert (MEM_P (memref)); |
09e881c9 | 2254 | as = MEM_ADDR_SPACE (memref); |
23b2ce53 RS |
2255 | if (mode == VOIDmode) |
2256 | mode = GET_MODE (memref); | |
2257 | if (addr == 0) | |
2258 | addr = XEXP (memref, 0); | |
a74ff877 | 2259 | if (mode == GET_MODE (memref) && addr == XEXP (memref, 0) |
09e881c9 | 2260 | && (!validate || memory_address_addr_space_p (mode, addr, as))) |
a74ff877 | 2261 | return memref; |
23b2ce53 | 2262 | |
91c5ee5b VM |
2263 | /* Don't validate address for LRA. LRA can make the address valid |
2264 | by itself in the most efficient way. */
2265 | if (validate && !lra_in_progress) | |
23b2ce53 | 2266 | { |
f1ec5147 | 2267 | if (reload_in_progress || reload_completed) |
09e881c9 | 2268 | gcc_assert (memory_address_addr_space_p (mode, addr, as)); |
f1ec5147 | 2269 | else |
09e881c9 | 2270 | addr = memory_address_addr_space (mode, addr, as); |
23b2ce53 | 2271 | } |
750c9258 | 2272 | |
9b04c6a8 RK |
2273 | if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref)) |
2274 | return memref; | |
2275 | ||
23b33725 RS |
2276 | if (inplace) |
2277 | { | |
2278 | XEXP (memref, 0) = addr; | |
2279 | return memref; | |
2280 | } | |
2281 | ||
60564289 KG |
2282 | new_rtx = gen_rtx_MEM (mode, addr); |
2283 | MEM_COPY_ATTRIBUTES (new_rtx, memref); | |
2284 | return new_rtx; | |
23b2ce53 | 2285 | } |
792760b9 | 2286 | |
738cc472 RK |
2287 | /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what |
2288 | way we are changing MEMREF, so we only preserve the alias set. */ | |
f4ef873c RK |
2289 | |
2290 | rtx | |
ef4bddc2 | 2291 | change_address (rtx memref, machine_mode mode, rtx addr) |
f4ef873c | 2292 | { |
23b33725 | 2293 | rtx new_rtx = change_address_1 (memref, mode, addr, 1, false); |
ef4bddc2 | 2294 | machine_mode mmode = GET_MODE (new_rtx); |
d05d7551 | 2295 | struct mem_attrs *defattrs; |
4e44c1ef | 2296 | |
d05d7551 | 2297 | mem_attrs attrs (*get_mem_attrs (memref)); |
f12144dd | 2298 | defattrs = mode_mem_attrs[(int) mmode]; |
754c3d5d RS |
2299 | attrs.expr = NULL_TREE; |
2300 | attrs.offset_known_p = false; | |
2301 | attrs.size_known_p = defattrs->size_known_p; | |
f12144dd RS |
2302 | attrs.size = defattrs->size; |
2303 | attrs.align = defattrs->align; | |
c2f7bcc3 | 2304 | |
fdb1c7b3 | 2305 | /* If there are no changes, just return the original memory reference. */ |
60564289 | 2306 | if (new_rtx == memref) |
4e44c1ef | 2307 | { |
f12144dd | 2308 | if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs)) |
60564289 | 2309 | return new_rtx; |
4e44c1ef | 2310 | |
60564289 KG |
2311 | new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0)); |
2312 | MEM_COPY_ATTRIBUTES (new_rtx, memref); | |
4e44c1ef | 2313 | } |
fdb1c7b3 | 2314 | |
f12144dd | 2315 | set_mem_attrs (new_rtx, &attrs); |
60564289 | 2316 | return new_rtx; |
f4ef873c | 2317 | } |
792760b9 | 2318 | |
738cc472 RK |
2319 | /* Return a memory reference like MEMREF, but with its mode changed |
2320 | to MODE and its address offset by OFFSET bytes. If VALIDATE is | |
630036c6 | 2321 | nonzero, the memory address is forced to be valid. |
5ef0b50d EB |
2322 | If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS |
2323 | and the caller is responsible for adjusting MEMREF's base register.
2324 | If ADJUST_OBJECT is zero, the underlying object associated with the | |
2325 | memory reference is left unchanged and the caller is responsible for | |
2326 | dealing with it. Otherwise, if the new memory reference is outside | |
5f2cbd0d RS |
2327 | the underlying object, even partially, then the object is dropped. |
2328 | SIZE, if nonzero, is the size of an access in cases where MODE | |
2329 | has no inherent size. */ | |
f1ec5147 RK |
2330 | |
2331 | rtx | |
d05d7551 | 2332 | adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset, |
5f2cbd0d | 2333 | int validate, int adjust_address, int adjust_object, |
d05d7551 | 2334 | poly_int64 size) |
f1ec5147 | 2335 | { |
823e3574 | 2336 | rtx addr = XEXP (memref, 0); |
60564289 | 2337 | rtx new_rtx; |
095a2d76 | 2338 | scalar_int_mode address_mode; |
d05d7551 | 2339 | struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs; |
f12144dd | 2340 | unsigned HOST_WIDE_INT max_align; |
0207fa90 | 2341 | #ifdef POINTERS_EXTEND_UNSIGNED |
095a2d76 | 2342 | scalar_int_mode pointer_mode |
0207fa90 EB |
2343 | = targetm.addr_space.pointer_mode (attrs.addrspace); |
2344 | #endif | |
823e3574 | 2345 | |
ee88e690 EB |
2346 | /* VOIDmode means no mode change for change_address_1. */ |
2347 | if (mode == VOIDmode) | |
2348 | mode = GET_MODE (memref); | |
2349 | ||
5f2cbd0d RS |
2350 | /* Take the size of non-BLKmode accesses from the mode. */ |
2351 | defattrs = mode_mem_attrs[(int) mode]; | |
2352 | if (defattrs->size_known_p) | |
2353 | size = defattrs->size; | |
2354 | ||
fdb1c7b3 | 2355 | /* If there are no changes, just return the original memory reference. */ |
d05d7551 RS |
2356 | if (mode == GET_MODE (memref) |
2357 | && known_eq (offset, 0) | |
2358 | && (known_eq (size, 0) | |
2359 | || (attrs.size_known_p && known_eq (attrs.size, size))) | |
f12144dd RS |
2360 | && (!validate || memory_address_addr_space_p (mode, addr, |
2361 | attrs.addrspace))) | |
fdb1c7b3 JH |
2362 | return memref; |
2363 | ||
d14419e4 | 2364 | /* ??? Prefer to create garbage instead of creating shared rtl. |
cc2902df | 2365 | This may happen even if offset is nonzero -- consider |
d14419e4 RH |
2366 | (plus (plus reg reg) const_int) -- so do this always. */ |
2367 | addr = copy_rtx (addr); | |
2368 | ||
a6fe9ed4 JM |
2369 | /* Convert a possibly large offset to a signed value within the |
2370 | range of the target address space. */ | |
372d6395 | 2371 | address_mode = get_address_mode (memref); |
d05d7551 | 2372 | offset = trunc_int_for_mode (offset, address_mode); |
a6fe9ed4 | 2373 | |
5ef0b50d | 2374 | if (adjust_address) |
4a78c787 RH |
2375 | { |
2376 | /* If MEMREF is a LO_SUM and the offset is within the alignment of the | |
2377 | object, we can merge it into the LO_SUM. */ | |
d05d7551 RS |
2378 | if (GET_MODE (memref) != BLKmode |
2379 | && GET_CODE (addr) == LO_SUM | |
2380 | && known_in_range_p (offset, | |
2381 | 0, (GET_MODE_ALIGNMENT (GET_MODE (memref)) | |
2382 | / BITS_PER_UNIT))) | |
d4ebfa65 | 2383 | addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0), |
0a81f074 RS |
2384 | plus_constant (address_mode, |
2385 | XEXP (addr, 1), offset)); | |
0207fa90 EB |
2386 | #ifdef POINTERS_EXTEND_UNSIGNED |
2387 | /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid | |
2388 | in that mode, we merge it into the ZERO_EXTEND. We take advantage of | |
2389 | the fact that pointers are not allowed to overflow. */ | |
2390 | else if (POINTERS_EXTEND_UNSIGNED > 0 | |
2391 | && GET_CODE (addr) == ZERO_EXTEND | |
2392 | && GET_MODE (XEXP (addr, 0)) == pointer_mode | |
d05d7551 | 2393 | && known_eq (trunc_int_for_mode (offset, pointer_mode), offset)) |
0207fa90 EB |
2394 | addr = gen_rtx_ZERO_EXTEND (address_mode, |
2395 | plus_constant (pointer_mode, | |
2396 | XEXP (addr, 0), offset)); | |
2397 | #endif | |
4a78c787 | 2398 | else |
0a81f074 | 2399 | addr = plus_constant (address_mode, addr, offset); |
4a78c787 | 2400 | } |
823e3574 | 2401 | |
23b33725 | 2402 | new_rtx = change_address_1 (memref, mode, addr, validate, false); |
738cc472 | 2403 | |
09efeca1 PB |
2404 | /* If the address is a REG, change_address_1 rightfully returns memref, |
2405 | but this would destroy memref's MEM_ATTRS. */ | |
d05d7551 | 2406 | if (new_rtx == memref && maybe_ne (offset, 0)) |
09efeca1 PB |
2407 | new_rtx = copy_rtx (new_rtx); |
2408 | ||
5ef0b50d EB |
2409 | /* Conservatively drop the object if we don't know where we start from. */ |
2410 | if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p)) | |
2411 | { | |
2412 | attrs.expr = NULL_TREE; | |
2413 | attrs.alias = 0; | |
2414 | } | |
2415 | ||
738cc472 RK |
2416 | /* Compute the new values of the memory attributes due to this adjustment. |
2417 | We add the offsets and update the alignment. */ | |
754c3d5d | 2418 | if (attrs.offset_known_p) |
5ef0b50d EB |
2419 | { |
2420 | attrs.offset += offset; | |
2421 | ||
2422 | /* Drop the object if the new left end is not within its bounds. */ | |
d05d7551 | 2423 | if (adjust_object && maybe_lt (attrs.offset, 0)) |
5ef0b50d EB |
2424 | { |
2425 | attrs.expr = NULL_TREE; | |
2426 | attrs.alias = 0; | |
2427 | } | |
2428 | } | |
738cc472 | 2429 | |
03bf2c23 RK |
2430 | /* Compute the new alignment by taking the MIN of the alignment and the |
2431 | lowest-order set bit in OFFSET, but don't change the alignment if OFFSET | |
2432 | is zero. */
d05d7551 | 2433 | if (maybe_ne (offset, 0)) |
f12144dd | 2434 | { |
d05d7551 | 2435 | max_align = known_alignment (offset) * BITS_PER_UNIT; |
f12144dd RS |
2436 | attrs.align = MIN (attrs.align, max_align); |
2437 | } | |
738cc472 | 2438 | |
d05d7551 | 2439 | if (maybe_ne (size, 0)) |
754c3d5d | 2440 | { |
5ef0b50d | 2441 | /* Drop the object if the new right end is not within its bounds. */ |
d05d7551 | 2442 | if (adjust_object && maybe_gt (offset + size, attrs.size)) |
5ef0b50d EB |
2443 | { |
2444 | attrs.expr = NULL_TREE; | |
2445 | attrs.alias = 0; | |
2446 | } | |
754c3d5d | 2447 | attrs.size_known_p = true; |
5f2cbd0d | 2448 | attrs.size = size; |
754c3d5d RS |
2449 | } |
2450 | else if (attrs.size_known_p) | |
5ef0b50d | 2451 | { |
5f2cbd0d | 2452 | gcc_assert (!adjust_object); |
5ef0b50d | 2453 | attrs.size -= offset; |
5f2cbd0d RS |
2454 | /* ??? The store_by_pieces machinery generates negative sizes, |
2455 | so don't assert for that here. */ | |
5ef0b50d | 2456 | } |
10b76d73 | 2457 | |
f12144dd | 2458 | set_mem_attrs (new_rtx, &attrs); |
738cc472 | 2459 | |
60564289 | 2460 | return new_rtx; |
f1ec5147 RK |
2461 | } |
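Callers normally reach adjust_address_1 through the adjust_address/adjust_address_nv wrappers declared elsewhere; a hedged sketch of the typical use, with DI_MEM a hypothetical DImode memory reference.

/* Hypothetical example: on a little-endian target, refer to the high
   SImode half of DI_MEM by offsetting the reference by 4 bytes.  */
rtx hi_mem = adjust_address (di_mem, SImode, 4);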
2462 | ||
630036c6 JJ |
2463 | /* Return a memory reference like MEMREF, but with its mode changed |
2464 | to MODE and its address changed to ADDR, which is assumed to be | |
fa10beec | 2465 | MEMREF offset by OFFSET bytes. If VALIDATE is |
630036c6 JJ |
2466 | nonzero, the memory address is forced to be valid. */ |
2467 | ||
2468 | rtx | |
ef4bddc2 | 2469 | adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr, |
d05d7551 | 2470 | poly_int64 offset, int validate) |
630036c6 | 2471 | { |
23b33725 | 2472 | memref = change_address_1 (memref, VOIDmode, addr, validate, false); |
5f2cbd0d | 2473 | return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0); |
630036c6 JJ |
2474 | } |
2475 | ||
8ac61af7 RK |
2476 | /* Return a memory reference like MEMREF, but whose address is changed by |
2477 | adding OFFSET, an RTX, to it. POW2 is the highest power of two factor | |
2478 | known to be in OFFSET (possibly 1). */ | |
0d4903b8 RK |
2479 | |
2480 | rtx | |
502b8322 | 2481 | offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2) |
0d4903b8 | 2482 | { |
60564289 | 2483 | rtx new_rtx, addr = XEXP (memref, 0); |
ef4bddc2 | 2484 | machine_mode address_mode; |
d05d7551 | 2485 | struct mem_attrs *defattrs; |
e3c8ea67 | 2486 | |
d05d7551 | 2487 | mem_attrs attrs (*get_mem_attrs (memref)); |
372d6395 | 2488 | address_mode = get_address_mode (memref); |
d4ebfa65 | 2489 | new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset); |
e3c8ea67 | 2490 | |
68252e27 | 2491 | /* At this point we don't know _why_ the address is invalid. It |
4d6922ee | 2492 | could have secondary memory references, multiplies or anything. |
e3c8ea67 RH |
2493 | |
2494 | However, if we did go and rearrange things, we can wind up not | |
2495 | being able to recognize the magic around pic_offset_table_rtx. | |
2496 | This stuff is fragile, and is yet another example of why it is | |
2497 | bad to expose PIC machinery too early. */ | |
f12144dd RS |
2498 | if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, |
2499 | attrs.addrspace) | |
e3c8ea67 RH |
2500 | && GET_CODE (addr) == PLUS |
2501 | && XEXP (addr, 0) == pic_offset_table_rtx) | |
2502 | { | |
2503 | addr = force_reg (GET_MODE (addr), addr); | |
d4ebfa65 | 2504 | new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset); |
e3c8ea67 RH |
2505 | } |
2506 | ||
60564289 | 2507 | update_temp_slot_address (XEXP (memref, 0), new_rtx); |
23b33725 | 2508 | new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false); |
0d4903b8 | 2509 | |
fdb1c7b3 | 2510 | /* If there are no changes, just return the original memory reference. */ |
60564289 KG |
2511 | if (new_rtx == memref) |
2512 | return new_rtx; | |
fdb1c7b3 | 2513 | |
0d4903b8 RK |
2514 | /* Update the alignment to reflect the offset. Reset the offset, which |
2515 | we don't know. */ | |
754c3d5d RS |
2516 | defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)]; |
2517 | attrs.offset_known_p = false; | |
2518 | attrs.size_known_p = defattrs->size_known_p; | |
2519 | attrs.size = defattrs->size; | |
f12144dd RS |
2520 | attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT); |
2521 | set_mem_attrs (new_rtx, &attrs); | |
60564289 | 2522 | return new_rtx; |
0d4903b8 | 2523 | } |
68252e27 | 2524 | |
792760b9 RK |
2525 | /* Return a memory reference like MEMREF, but with its address changed to |
2526 | ADDR. The caller is asserting that the actual piece of memory pointed | |
2527 | to is the same, just the form of the address is being changed, such as | |
23b33725 RS |
2528 | by putting something into a register. INPLACE is true if any changes |
2529 | can be made directly to MEMREF or false if MEMREF must be treated as | |
2530 | immutable. */ | |
792760b9 RK |
2531 | |
2532 | rtx | |
23b33725 | 2533 | replace_equiv_address (rtx memref, rtx addr, bool inplace) |
792760b9 | 2534 | { |
738cc472 RK |
2535 | /* change_address_1 copies the memory attribute structure without change |
2536 | and that's exactly what we want here. */ | |
40c0668b | 2537 | update_temp_slot_address (XEXP (memref, 0), addr); |
23b33725 | 2538 | return change_address_1 (memref, VOIDmode, addr, 1, inplace); |
792760b9 | 2539 | } |
738cc472 | 2540 | |
f1ec5147 RK |
2541 | /* Likewise, but the reference is not required to be valid. */ |
2542 | ||
2543 | rtx | |
23b33725 | 2544 | replace_equiv_address_nv (rtx memref, rtx addr, bool inplace) |
f1ec5147 | 2545 | { |
23b33725 | 2546 | return change_address_1 (memref, VOIDmode, addr, 0, inplace); |
f1ec5147 | 2547 | } |
e7dfe4bb RH |
2548 | |
2549 | /* Return a memory reference like MEMREF, but with its mode widened to | |
2550 | MODE and offset by OFFSET. This would be used by targets that e.g. | |
2551 | cannot issue QImode memory operations and have to use SImode memory | |
2552 | operations plus masking logic. */ | |
2553 | ||
2554 | rtx | |
d05d7551 | 2555 | widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset) |
e7dfe4bb | 2556 | { |
5f2cbd0d | 2557 | rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0); |
e7dfe4bb RH |
2558 | unsigned int size = GET_MODE_SIZE (mode); |
2559 | ||
fdb1c7b3 | 2560 | /* If there are no changes, just return the original memory reference. */ |
60564289 KG |
2561 | if (new_rtx == memref) |
2562 | return new_rtx; | |
fdb1c7b3 | 2563 | |
d05d7551 | 2564 | mem_attrs attrs (*get_mem_attrs (new_rtx)); |
f12144dd | 2565 | |
e7dfe4bb RH |
2566 | /* If we don't know what offset we were at within the expression, then |
2567 | we can't know if we've overstepped the bounds. */ | |
754c3d5d | 2568 | if (! attrs.offset_known_p) |
f12144dd | 2569 | attrs.expr = NULL_TREE; |
e7dfe4bb | 2570 | |
f12144dd | 2571 | while (attrs.expr) |
e7dfe4bb | 2572 | { |
f12144dd | 2573 | if (TREE_CODE (attrs.expr) == COMPONENT_REF) |
e7dfe4bb | 2574 | { |
f12144dd RS |
2575 | tree field = TREE_OPERAND (attrs.expr, 1); |
2576 | tree offset = component_ref_field_offset (attrs.expr); | |
e7dfe4bb RH |
2577 | |
2578 | if (! DECL_SIZE_UNIT (field)) | |
2579 | { | |
f12144dd | 2580 | attrs.expr = NULL_TREE; |
e7dfe4bb RH |
2581 | break; |
2582 | } | |
2583 | ||
2584 | /* Is the field at least as large as the access? If so, ok, | |
2585 | otherwise strip back to the containing structure. */ | |
d05d7551 RS |
2586 | if (poly_int_tree_p (DECL_SIZE_UNIT (field)) |
2587 | && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size) | |
2588 | && known_ge (attrs.offset, 0)) | |
e7dfe4bb RH |
2589 | break; |
2590 | ||
d05d7551 RS |
2591 | poly_uint64 suboffset; |
2592 | if (!poly_int_tree_p (offset, &suboffset)) | |
e7dfe4bb | 2593 | { |
f12144dd | 2594 | attrs.expr = NULL_TREE; |
e7dfe4bb RH |
2595 | break; |
2596 | } | |
2597 | ||
f12144dd | 2598 | attrs.expr = TREE_OPERAND (attrs.expr, 0); |
d05d7551 | 2599 | attrs.offset += suboffset; |
ae7e9ddd | 2600 | attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) |
754c3d5d | 2601 | / BITS_PER_UNIT); |
e7dfe4bb RH |
2602 | } |
2603 | /* Similarly for the decl. */ | |
f12144dd RS |
2604 | else if (DECL_P (attrs.expr) |
2605 | && DECL_SIZE_UNIT (attrs.expr) | |
d05d7551 RS |
2606 | && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr)) |
2607 | && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)), | |
2608 | size) | |
2609 | && known_ge (attrs.offset, 0)) | |
e7dfe4bb RH |
2610 | break; |
2611 | else | |
2612 | { | |
2613 | /* The widened memory access overflows the expression, which means | |
2614 | that it could alias another expression. Zap it. */ | |
f12144dd | 2615 | attrs.expr = NULL_TREE; |
e7dfe4bb RH |
2616 | break; |
2617 | } | |
2618 | } | |
2619 | ||
f12144dd | 2620 | if (! attrs.expr) |
754c3d5d | 2621 | attrs.offset_known_p = false; |
e7dfe4bb RH |
2622 | |
2623 | /* The widened memory may alias other stuff, so zap the alias set. */ | |
2624 | /* ??? Maybe use get_alias_set on any remaining expression. */ | |
f12144dd | 2625 | attrs.alias = 0; |
754c3d5d RS |
2626 | attrs.size_known_p = true; |
2627 | attrs.size = size; | |
f12144dd | 2628 | set_mem_attrs (new_rtx, &attrs); |
60564289 | 2629 | return new_rtx; |
e7dfe4bb | 2630 | } |
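A sketch matching the comment above: a target without QImode memory operations widening a byte reference to a full word; the masking/extraction of the byte is left out, and BYTE_MEM is hypothetical.

/* Hypothetical example: widen BYTE_MEM to an SImode access starting at
   offset 0; the caller must still mask out the wanted byte.  */
rtx word_mem = widen_memory_access (byte_mem, SImode, 0);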
23b2ce53 | 2631 | \f |
f6129d66 RH |
2632 | /* A fake decl that is used as the MEM_EXPR of spill slots. */ |
2633 | static GTY(()) tree spill_slot_decl; | |
2634 | ||
3d7e23f6 RH |
2635 | tree |
2636 | get_spill_slot_decl (bool force_build_p) | |
f6129d66 RH |
2637 | { |
2638 | tree d = spill_slot_decl; | |
2639 | rtx rd; | |
2640 | ||
3d7e23f6 | 2641 | if (d || !force_build_p) |
f6129d66 RH |
2642 | return d; |
2643 | ||
c2255bc4 AH |
2644 | d = build_decl (DECL_SOURCE_LOCATION (current_function_decl), |
2645 | VAR_DECL, get_identifier ("%sfp"), void_type_node); | |
f6129d66 RH |
2646 | DECL_ARTIFICIAL (d) = 1; |
2647 | DECL_IGNORED_P (d) = 1; | |
2648 | TREE_USED (d) = 1; | |
f6129d66 RH |
2649 | spill_slot_decl = d; |
2650 | ||
2651 | rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx); | |
2652 | MEM_NOTRAP_P (rd) = 1; | |
d05d7551 | 2653 | mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]); |
f12144dd RS |
2654 | attrs.alias = new_alias_set (); |
2655 | attrs.expr = d; | |
2656 | set_mem_attrs (rd, &attrs); | |
f6129d66 RH |
2657 | SET_DECL_RTL (d, rd); |
2658 | ||
2659 | return d; | |
2660 | } | |
2661 | ||
2662 | /* Given MEM, a result from assign_stack_local, fill in the memory | |
2663 | attributes as appropriate for a register allocator spill slot. | |
2664 | These slots are not aliasable by other memory. We arrange for | |
2665 | them all to use a single MEM_EXPR, so that the aliasing code can | |
2666 | work properly in the case of shared spill slots. */ | |
2667 | ||
2668 | void | |
2669 | set_mem_attrs_for_spill (rtx mem) | |
2670 | { | |
f12144dd | 2671 | rtx addr; |
f6129d66 | 2672 | |
d05d7551 | 2673 | mem_attrs attrs (*get_mem_attrs (mem)); |
f12144dd RS |
2674 | attrs.expr = get_spill_slot_decl (true); |
2675 | attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr)); | |
2676 | attrs.addrspace = ADDR_SPACE_GENERIC; | |
f6129d66 RH |
2677 | |
2678 | /* We expect the incoming memory to be of the form: | |
2679 | (mem:MODE (plus (reg sfp) (const_int offset))) | |
2680 | with perhaps the plus missing for offset = 0. */ | |
2681 | addr = XEXP (mem, 0); | |
754c3d5d | 2682 | attrs.offset_known_p = true; |
d05d7551 | 2683 | strip_offset (addr, &attrs.offset); |
f6129d66 | 2684 | |
f12144dd | 2685 | set_mem_attrs (mem, &attrs); |
f6129d66 RH |
2686 | MEM_NOTRAP_P (mem) = 1; |
2687 | } | |
2688 | \f | |
23b2ce53 RS |
2689 | /* Return a newly created CODE_LABEL rtx with a unique label number. */ |
2690 | ||
7dcc3ab5 | 2691 | rtx_code_label * |
502b8322 | 2692 | gen_label_rtx (void) |
23b2ce53 | 2693 | { |
7dcc3ab5 DM |
2694 | return as_a <rtx_code_label *> ( |
2695 | gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX, | |
2696 | NULL, label_num++, NULL)); | |
23b2ce53 RS |
2697 | } |
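A minimal sketch of the usual pairing of gen_label_rtx with emit_label during expansion; the conditional branch in between is elided.

/* Hypothetical example: create a forward label, branch to it somewhere,
   and bind it at the join point.  */
rtx_code_label *done = gen_label_rtx ();
/* ... emit a conditional jump to DONE here ...  */
emit_label (done);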
2698 | \f | |
2699 | /* For procedure integration. */ | |
2700 | ||
23b2ce53 | 2701 | /* Install new pointers to the first and last insns in the chain. |
86fe05e0 | 2702 | Also, set cur_insn_uid to one higher than the last in use. |
23b2ce53 RS |
2703 | Used for an inline-procedure after copying the insn chain. */ |
2704 | ||
2705 | void | |
fee3e72c | 2706 | set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last) |
23b2ce53 | 2707 | { |
fee3e72c | 2708 | rtx_insn *insn; |
86fe05e0 | 2709 | |
5936d944 JH |
2710 | set_first_insn (first); |
2711 | set_last_insn (last); | |
86fe05e0 RK |
2712 | cur_insn_uid = 0; |
2713 | ||
b5b8b0ac AO |
2714 | if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS) |
2715 | { | |
2716 | int debug_count = 0; | |
2717 | ||
2718 | cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1; | |
2719 | cur_debug_insn_uid = 0; | |
2720 | ||
2721 | for (insn = first; insn; insn = NEXT_INSN (insn)) | |
2722 | if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID) | |
2723 | cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn)); | |
2724 | else | |
2725 | { | |
2726 | cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn)); | |
2727 | if (DEBUG_INSN_P (insn)) | |
2728 | debug_count++; | |
2729 | } | |
2730 | ||
2731 | if (debug_count) | |
2732 | cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count; | |
2733 | else | |
2734 | cur_debug_insn_uid++; | |
2735 | } | |
2736 | else | |
2737 | for (insn = first; insn; insn = NEXT_INSN (insn)) | |
2738 | cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn)); | |
86fe05e0 RK |
2739 | |
2740 | cur_insn_uid++; | |
23b2ce53 | 2741 | } |
23b2ce53 | 2742 | \f |
750c9258 | 2743 | /* Go through all the RTL insn bodies and copy any invalid shared |
d1b81779 | 2744 | structure. This routine should only be called once. */ |
23b2ce53 | 2745 | |
fd743bc1 | 2746 | static void |
6bb9bf63 | 2747 | unshare_all_rtl_1 (rtx_insn *insn) |
23b2ce53 | 2748 | { |
d1b81779 | 2749 | /* Unshare just about everything else. */ |
2c07f13b | 2750 | unshare_all_rtl_in_chain (insn); |
750c9258 | 2751 | |
23b2ce53 RS |
2752 | /* Make sure the addresses of stack slots found outside the insn chain |
2753 | (such as, in DECL_RTL of a variable) are not shared | |
2754 | with the insn chain. | |
2755 | ||
2756 | This special care is necessary when the stack slot MEM does not | |
2757 | actually appear in the insn chain. If it does appear, its address | |
2758 | is unshared from all else at that point. */ | |
8c39f8ae TS |
2759 | unsigned int i; |
2760 | rtx temp; | |
2761 | FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp) | |
2762 | (*stack_slot_list)[i] = copy_rtx_if_shared (temp); | |
23b2ce53 RS |
2763 | } |
2764 | ||
750c9258 | 2765 | /* Go through all the RTL insn bodies and copy any invalid shared |
d1b81779 GK |
2766 | structure, again. This is a fairly expensive thing to do so it |
2767 | should be done sparingly. */ | |
2768 | ||
2769 | void | |
6bb9bf63 | 2770 | unshare_all_rtl_again (rtx_insn *insn) |
d1b81779 | 2771 | { |
6bb9bf63 | 2772 | rtx_insn *p; |
624c87aa RE |
2773 | tree decl; |
2774 | ||
d1b81779 | 2775 | for (p = insn; p; p = NEXT_INSN (p)) |
2c3c49de | 2776 | if (INSN_P (p)) |
d1b81779 GK |
2777 | { |
2778 | reset_used_flags (PATTERN (p)); | |
2779 | reset_used_flags (REG_NOTES (p)); | |
776bebcd JJ |
2780 | if (CALL_P (p)) |
2781 | reset_used_flags (CALL_INSN_FUNCTION_USAGE (p)); | |
d1b81779 | 2782 | } |
624c87aa | 2783 | |
2d4aecb3 | 2784 | /* Make sure that virtual stack slots are not shared. */ |
5eb2a9f2 | 2785 | set_used_decls (DECL_INITIAL (cfun->decl)); |
2d4aecb3 | 2786 | |
624c87aa | 2787 | /* Make sure that virtual parameters are not shared. */ |
910ad8de | 2788 | for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl)) |
5eb2a9f2 | 2789 | set_used_flags (DECL_RTL (decl)); |
624c87aa | 2790 | |
8c39f8ae TS |
2791 | rtx temp; |
2792 | unsigned int i; | |
2793 | FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp) | |
2794 | reset_used_flags (temp); | |
624c87aa | 2795 | |
b4aaa77b | 2796 | unshare_all_rtl_1 (insn); |
fd743bc1 PB |
2797 | } |
2798 | ||
c2924966 | 2799 | unsigned int |
fd743bc1 PB |
2800 | unshare_all_rtl (void) |
2801 | { | |
b4aaa77b | 2802 | unshare_all_rtl_1 (get_insns ()); |
60ebe8ce JJ |
2803 | |
2804 | for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl)) | |
2805 | { | |
2806 | if (DECL_RTL_SET_P (decl)) | |
2807 | SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl))); | |
2808 | DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl)); | |
2809 | } | |
2810 | ||
c2924966 | 2811 | return 0; |
d1b81779 GK |
2812 | } |
2813 | ||
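The used-flag discipline behind unshare_all_rtl can be shown outside RTL. The following standalone sketch (plain C, not GCC code) applies the same idea to a two-child node type: the first visit marks a node, and any later reference to an already-marked node is replaced by a shallow copy, turning a DAG into a tree. copy_rtx_if_shared below does exactly this for rtxes.

#include <stdlib.h>

struct node { int used; struct node *kid[2]; };

static struct node *
copy_if_shared (struct node *n)
{
  if (!n)
    return NULL;
  if (n->used)
    {
      /* Second (or later) reference: unshare it with a shallow copy.  */
      struct node *copy = (struct node *) malloc (sizeof *copy);
      *copy = *n;
      copy->used = 0;
      n = copy;
    }
  n->used = 1;
  for (int i = 0; i < 2; i++)
    n->kid[i] = copy_if_shared (n->kid[i]);
  return n;
}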
ef330312 | 2814 | |
2c07f13b JH |
2815 | /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2816 | Recursively does the same for subexpressions. */ | |
2817 | ||
2818 | static void | |
2819 | verify_rtx_sharing (rtx orig, rtx insn) | |
2820 | { | |
2821 | rtx x = orig; | |
2822 | int i; | |
2823 | enum rtx_code code; | |
2824 | const char *format_ptr; | |
2825 | ||
2826 | if (x == 0) | |
2827 | return; | |
2828 | ||
2829 | code = GET_CODE (x); | |
2830 | ||
2831 | /* These types may be freely shared. */ | |
2832 | ||
2833 | switch (code) | |
2834 | { | |
2835 | case REG: | |
0ca5af51 AO |
2836 | case DEBUG_EXPR: |
2837 | case VALUE: | |
d8116890 | 2838 | CASE_CONST_ANY: |
2c07f13b JH |
2839 | case SYMBOL_REF: |
2840 | case LABEL_REF: | |
2841 | case CODE_LABEL: | |
2842 | case PC: | |
2843 | case CC0: | |
3810076b | 2844 | case RETURN: |
26898771 | 2845 | case SIMPLE_RETURN: |
2c07f13b | 2846 | case SCRATCH: |
3e89ed8d | 2847 | /* SCRATCH must be shared because each one represents a distinct value. */
c5c5ba89 | 2848 | return; |
3e89ed8d | 2849 | case CLOBBER: |
c5c5ba89 JH |
2850 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
2851 | clobbers or clobbers of hard registers that originated as pseudos. | |
2852 | This is needed to allow safe register renaming. */ | |
d7ae3739 EB |
2853 | if (REG_P (XEXP (x, 0)) |
2854 | && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0))) | |
2855 | && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0)))) | |
3e89ed8d JH |
2856 | return; |
2857 | break; | |
2c07f13b JH |
2858 | |
2859 | case CONST: | |
6fb5fa3c | 2860 | if (shared_const_p (orig)) |
2c07f13b JH |
2861 | return; |
2862 | break; | |
2863 | ||
2864 | case MEM: | |
2865 | /* A MEM is allowed to be shared if its address is constant. */ | |
2866 | if (CONSTANT_ADDRESS_P (XEXP (x, 0)) | |
2867 | || reload_completed || reload_in_progress) | |
2868 | return; | |
2869 | ||
2870 | break; | |
2871 | ||
2872 | default: | |
2873 | break; | |
2874 | } | |
2875 | ||
2876 | /* This rtx may not be shared. If it has already been seen, | |
2877 | replace it with a copy of itself. */ | |
b2b29377 | 2878 | if (flag_checking && RTX_FLAG (x, used)) |
2c07f13b | 2879 | { |
ab532386 | 2880 | error ("invalid rtl sharing found in the insn"); |
2c07f13b | 2881 | debug_rtx (insn); |
ab532386 | 2882 | error ("shared rtx"); |
2c07f13b | 2883 | debug_rtx (x); |
ab532386 | 2884 | internal_error ("internal consistency failure"); |
2c07f13b | 2885 | } |
1a2caa7a | 2886 | gcc_assert (!RTX_FLAG (x, used)); |
b8698a0f | 2887 | |
2c07f13b JH |
2888 | RTX_FLAG (x, used) = 1; |
2889 | ||
6614fd40 | 2890 | /* Now scan the subexpressions recursively. */ |
2c07f13b JH |
2891 | |
2892 | format_ptr = GET_RTX_FORMAT (code); | |
2893 | ||
2894 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
2895 | { | |
2896 | switch (*format_ptr++) | |
2897 | { | |
2898 | case 'e': | |
2899 | verify_rtx_sharing (XEXP (x, i), insn); | |
2900 | break; | |
2901 | ||
2902 | case 'E': | |
2903 | if (XVEC (x, i) != NULL) | |
2904 | { | |
2905 | int j; | |
2906 | int len = XVECLEN (x, i); | |
2907 | ||
2908 | for (j = 0; j < len; j++) | |
2909 | { | |
1a2caa7a NS |
2910 | /* We allow sharing of ASM_OPERANDS inside a single
2911 | instruction. */ | |
2c07f13b | 2912 | if (j && GET_CODE (XVECEXP (x, i, j)) == SET |
1a2caa7a NS |
2913 | && (GET_CODE (SET_SRC (XVECEXP (x, i, j))) |
2914 | == ASM_OPERANDS)) | |
2c07f13b JH |
2915 | verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn); |
2916 | else | |
2917 | verify_rtx_sharing (XVECEXP (x, i, j), insn); | |
2918 | } | |
2919 | } | |
2920 | break; | |
2921 | } | |
2922 | } | |
2923 | return; | |
2924 | } | |
2925 | ||
0e0f87d4 SB |
2926 | /* Reset used-flags for INSN. */ |
2927 | ||
2928 | static void | |
2929 | reset_insn_used_flags (rtx insn) | |
2930 | { | |
2931 | gcc_assert (INSN_P (insn)); | |
2932 | reset_used_flags (PATTERN (insn)); | |
2933 | reset_used_flags (REG_NOTES (insn)); | |
2934 | if (CALL_P (insn)) | |
2935 | reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn)); | |
2936 | } | |
2937 | ||
a24243a0 | 2938 | /* Go through all the RTL insn bodies and clear all the USED bits. */ |
2c07f13b | 2939 | |
a24243a0 AK |
2940 | static void |
2941 | reset_all_used_flags (void) | |
2c07f13b | 2942 | { |
dc01c3d1 | 2943 | rtx_insn *p; |
2c07f13b JH |
2944 | |
2945 | for (p = get_insns (); p; p = NEXT_INSN (p)) | |
2946 | if (INSN_P (p)) | |
2947 | { | |
0e0f87d4 SB |
2948 | rtx pat = PATTERN (p); |
2949 | if (GET_CODE (pat) != SEQUENCE) | |
2950 | reset_insn_used_flags (p); | |
2951 | else | |
2954a813 | 2952 | { |
0e0f87d4 SB |
2953 | gcc_assert (REG_NOTES (p) == NULL); |
2954 | for (int i = 0; i < XVECLEN (pat, 0); i++) | |
748e88da JDA |
2955 | { |
2956 | rtx insn = XVECEXP (pat, 0, i); | |
2957 | if (INSN_P (insn)) | |
2958 | reset_insn_used_flags (insn); | |
2959 | } | |
2954a813 | 2960 | } |
2c07f13b | 2961 | } |
a24243a0 AK |
2962 | } |
2963 | ||
0e0f87d4 SB |
2964 | /* Verify sharing in INSN. */ |
2965 | ||
2966 | static void | |
2967 | verify_insn_sharing (rtx insn) | |
2968 | { | |
2969 | gcc_assert (INSN_P (insn)); | |
4b498f72 JJ |
2970 | verify_rtx_sharing (PATTERN (insn), insn); |
2971 | verify_rtx_sharing (REG_NOTES (insn), insn); | |
0e0f87d4 | 2972 | if (CALL_P (insn)) |
4b498f72 | 2973 | verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn); |
0e0f87d4 SB |
2974 | } |
2975 | ||
a24243a0 AK |
2976 | /* Go through all the RTL insn bodies and check that there is no unexpected |
2977 | sharing between the subexpressions. */
2978 | ||
2979 | DEBUG_FUNCTION void | |
2980 | verify_rtl_sharing (void) | |
2981 | { | |
dc01c3d1 | 2982 | rtx_insn *p; |
a24243a0 AK |
2983 | |
2984 | timevar_push (TV_VERIFY_RTL_SHARING); | |
2985 | ||
2986 | reset_all_used_flags (); | |
2c07f13b JH |
2987 | |
2988 | for (p = get_insns (); p; p = NEXT_INSN (p)) | |
2989 | if (INSN_P (p)) | |
2990 | { | |
0e0f87d4 SB |
2991 | rtx pat = PATTERN (p); |
2992 | if (GET_CODE (pat) != SEQUENCE) | |
2993 | verify_insn_sharing (p); | |
2994 | else | |
2995 | for (int i = 0; i < XVECLEN (pat, 0); i++) | |
748e88da JDA |
2996 | { |
2997 | rtx insn = XVECEXP (pat, 0, i); | |
2998 | if (INSN_P (insn)) | |
2999 | verify_insn_sharing (insn); | |
3000 | } | |
2c07f13b | 3001 | } |
a222c01a | 3002 | |
a24243a0 AK |
3003 | reset_all_used_flags (); |
3004 | ||
a222c01a | 3005 | timevar_pop (TV_VERIFY_RTL_SHARING); |
2c07f13b JH |
3006 | } |
3007 | ||
d1b81779 GK |
3008 | /* Go through all the RTL insn bodies and copy any invalid shared structure. |
3009 | Assumes the mark bits are cleared at entry. */ | |
3010 | ||
2c07f13b | 3011 | void |
dc01c3d1 | 3012 | unshare_all_rtl_in_chain (rtx_insn *insn) |
d1b81779 GK |
3013 | { |
3014 | for (; insn; insn = NEXT_INSN (insn)) | |
2c3c49de | 3015 | if (INSN_P (insn)) |
d1b81779 GK |
3016 | { |
3017 | PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn)); | |
3018 | REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn)); | |
776bebcd JJ |
3019 | if (CALL_P (insn)) |
3020 | CALL_INSN_FUNCTION_USAGE (insn) | |
3021 | = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn)); | |
d1b81779 GK |
3022 | } |
3023 | } | |
3024 | ||
2d4aecb3 | 3025 | /* Go through all virtual stack slots of a function and mark them as |
5eb2a9f2 RS |
3026 | shared. We never replace the DECL_RTLs themselves with a copy, |
3027 | but expressions mentioned in a DECL_RTL cannot be shared with
3028 | expressions in the instruction stream. | |
3029 | ||
3030 | Note that reload may convert pseudo registers into memories in-place. | |
3031 | Pseudo registers are always shared, but MEMs never are. Thus if we | |
3032 | reset the used flags on MEMs in the instruction stream, we must set | |
3033 | them again on MEMs that appear in DECL_RTLs. */ | |
3034 | ||
2d4aecb3 | 3035 | static void |
5eb2a9f2 | 3036 | set_used_decls (tree blk) |
2d4aecb3 AO |
3037 | { |
3038 | tree t; | |
3039 | ||
3040 | /* Mark decls. */ | |
910ad8de | 3041 | for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t)) |
19e7881c | 3042 | if (DECL_RTL_SET_P (t)) |
5eb2a9f2 | 3043 | set_used_flags (DECL_RTL (t)); |
2d4aecb3 AO |
3044 | |
3045 | /* Now process sub-blocks. */ | |
87caf699 | 3046 | for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t)) |
5eb2a9f2 | 3047 | set_used_decls (t); |
2d4aecb3 AO |
3048 | } |
3049 | ||
23b2ce53 | 3050 | /* Mark ORIG as in use, and return a copy of it if it was already in use. |
ff954f39 AP |
3051 | Recursively does the same for subexpressions. Uses |
3052 | copy_rtx_if_shared_1 to reduce stack space. */ | |
23b2ce53 RS |
3053 | |
3054 | rtx | |
502b8322 | 3055 | copy_rtx_if_shared (rtx orig) |
23b2ce53 | 3056 | { |
32b32b16 AP |
3057 | copy_rtx_if_shared_1 (&orig); |
3058 | return orig; | |
3059 | } | |
3060 | ||
ff954f39 AP |
3061 | /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in |
3062 | use. Recursively does the same for subexpressions. */ | |
3063 | ||
32b32b16 AP |
3064 | static void |
3065 | copy_rtx_if_shared_1 (rtx *orig1) | |
3066 | { | |
3067 | rtx x; | |
b3694847 SS |
3068 | int i; |
3069 | enum rtx_code code; | |
32b32b16 | 3070 | rtx *last_ptr; |
b3694847 | 3071 | const char *format_ptr; |
23b2ce53 | 3072 | int copied = 0; |
32b32b16 AP |
3073 | int length; |
3074 | ||
3075 | /* Repeat is used to turn tail-recursion into iteration. */ | |
3076 | repeat: | |
3077 | x = *orig1; | |
23b2ce53 RS |
3078 | |
3079 | if (x == 0) | |
32b32b16 | 3080 | return; |
23b2ce53 RS |
3081 | |
3082 | code = GET_CODE (x); | |
3083 | ||
3084 | /* These types may be freely shared. */ | |
3085 | ||
3086 | switch (code) | |
3087 | { | |
3088 | case REG: | |
0ca5af51 AO |
3089 | case DEBUG_EXPR: |
3090 | case VALUE: | |
d8116890 | 3091 | CASE_CONST_ANY: |
23b2ce53 | 3092 | case SYMBOL_REF: |
2c07f13b | 3093 | case LABEL_REF: |
23b2ce53 RS |
3094 | case CODE_LABEL: |
3095 | case PC: | |
3096 | case CC0: | |
276e0224 | 3097 | case RETURN: |
26898771 | 3098 | case SIMPLE_RETURN: |
23b2ce53 | 3099 | case SCRATCH: |
0f41302f | 3100 | /* SCRATCH must be shared because each one represents a distinct value. */
32b32b16 | 3101 | return; |
3e89ed8d | 3102 | case CLOBBER: |
c5c5ba89 JH |
3103 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
3104 | clobbers or clobbers of hard registers that originated as pseudos. | |
3105 | This is needed to allow safe register renaming. */ | |
d7ae3739 EB |
3106 | if (REG_P (XEXP (x, 0)) |
3107 | && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0))) | |
3108 | && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0)))) | |
3e89ed8d JH |
3109 | return; |
3110 | break; | |
23b2ce53 | 3111 | |
b851ea09 | 3112 | case CONST: |
6fb5fa3c | 3113 | if (shared_const_p (x)) |
32b32b16 | 3114 | return; |
b851ea09 RK |
3115 | break; |
3116 | ||
b5b8b0ac | 3117 | case DEBUG_INSN: |
23b2ce53 RS |
3118 | case INSN: |
3119 | case JUMP_INSN: | |
3120 | case CALL_INSN: | |
3121 | case NOTE: | |
23b2ce53 RS |
3122 | case BARRIER: |
3123 | /* The chain of insns is not being copied. */ | |
32b32b16 | 3124 | return; |
23b2ce53 | 3125 | |
e9a25f70 JL |
3126 | default: |
3127 | break; | |
23b2ce53 RS |
3128 | } |
3129 | ||
3130 | /* This rtx may not be shared. If it has already been seen, | |
3131 | replace it with a copy of itself. */ | |
3132 | ||
2adc7f12 | 3133 | if (RTX_FLAG (x, used)) |
23b2ce53 | 3134 | { |
aacd3885 | 3135 | x = shallow_copy_rtx (x); |
23b2ce53 RS |
3136 | copied = 1; |
3137 | } | |
2adc7f12 | 3138 | RTX_FLAG (x, used) = 1; |
23b2ce53 RS |
3139 | |
3140 | /* Now scan the subexpressions recursively. | |
3141 | We can store any replaced subexpressions directly into X | |
3142 | since we know X is not shared! Any vectors in X | |
3143 | must be copied if X was copied. */ | |
3144 | ||
3145 | format_ptr = GET_RTX_FORMAT (code); | |
32b32b16 AP |
3146 | length = GET_RTX_LENGTH (code); |
3147 | last_ptr = NULL; | |
b8698a0f | 3148 | |
32b32b16 | 3149 | for (i = 0; i < length; i++) |
23b2ce53 RS |
3150 | { |
3151 | switch (*format_ptr++) | |
3152 | { | |
3153 | case 'e': | |
32b32b16 AP |
3154 | if (last_ptr) |
3155 | copy_rtx_if_shared_1 (last_ptr); | |
3156 | last_ptr = &XEXP (x, i); | |
23b2ce53 RS |
3157 | break; |
3158 | ||
3159 | case 'E': | |
3160 | if (XVEC (x, i) != NULL) | |
3161 | { | |
b3694847 | 3162 | int j; |
f0722107 | 3163 | int len = XVECLEN (x, i); |
b8698a0f | 3164 | |
6614fd40 KH |
3165 | /* Copy the vector iff I copied the rtx and the length |
3166 | is nonzero. */ | |
f0722107 | 3167 | if (copied && len > 0) |
8f985ec4 | 3168 | XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem); |
b8698a0f | 3169 | |
5d3cc252 | 3170 | /* Call recursively on everything inside the vector. */
f0722107 | 3171 | for (j = 0; j < len; j++) |
32b32b16 AP |
3172 | { |
3173 | if (last_ptr) | |
3174 | copy_rtx_if_shared_1 (last_ptr); | |
3175 | last_ptr = &XVECEXP (x, i, j); | |
3176 | } | |
23b2ce53 RS |
3177 | } |
3178 | break; | |
3179 | } | |
3180 | } | |
32b32b16 AP |
3181 | *orig1 = x; |
3182 | if (last_ptr) | |
3183 | { | |
3184 | orig1 = last_ptr; | |
3185 | goto repeat; | |
3186 | } | |
3187 | return; | |
23b2ce53 RS |
3188 | } |
3189 | ||
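The repeat label and last_ptr bookkeeping above implement a general stack-saving trick: recurse on every operand except one, then loop on the deferred operand instead of making a tail call. A standalone sketch of the same pattern (plain C, not GCC code):

struct tnode { int nkids; struct tnode *kid[4]; };

static void
visit (struct tnode *n)
{
  struct tnode **last;
 repeat:
  if (!n)
    return;
  /* ... process N itself here ... */
  last = NULL;
  for (int i = 0; i < n->nkids; i++)
    {
      if (last)
        visit (*last);     /* Recurse on the child deferred last round.  */
      last = &n->kid[i];   /* Defer this child.  */
    }
  if (last)
    {
      n = *last;           /* Handle the final child iteratively.  */
      goto repeat;
    }
}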
76369a82 | 3190 | /* Set the USED bit in X and its non-shareable subparts to FLAG. */ |
23b2ce53 | 3191 | |
76369a82 NF |
3192 | static void |
3193 | mark_used_flags (rtx x, int flag) | |
23b2ce53 | 3194 | { |
b3694847 SS |
3195 | int i, j; |
3196 | enum rtx_code code; | |
3197 | const char *format_ptr; | |
32b32b16 | 3198 | int length; |
23b2ce53 | 3199 | |
32b32b16 AP |
3200 | /* Repeat is used to turn tail-recursion into iteration. */ |
3201 | repeat: | |
23b2ce53 RS |
3202 | if (x == 0) |
3203 | return; | |
3204 | ||
3205 | code = GET_CODE (x); | |
3206 | ||
9faa82d8 | 3207 | /* These types may be freely shared so we needn't do any resetting |
23b2ce53 RS |
3208 | for them. */ |
3209 | ||
3210 | switch (code) | |
3211 | { | |
3212 | case REG: | |
0ca5af51 AO |
3213 | case DEBUG_EXPR: |
3214 | case VALUE: | |
d8116890 | 3215 | CASE_CONST_ANY: |
23b2ce53 RS |
3216 | case SYMBOL_REF: |
3217 | case CODE_LABEL: | |
3218 | case PC: | |
3219 | case CC0: | |
276e0224 | 3220 | case RETURN: |
26898771 | 3221 | case SIMPLE_RETURN: |
23b2ce53 RS |
3222 | return; |
3223 | ||
b5b8b0ac | 3224 | case DEBUG_INSN: |
23b2ce53 RS |
3225 | case INSN: |
3226 | case JUMP_INSN: | |
3227 | case CALL_INSN: | |
3228 | case NOTE: | |
3229 | case LABEL_REF: | |
3230 | case BARRIER: | |
3231 | /* The chain of insns is not being copied. */ | |
3232 | return; | |
750c9258 | 3233 | |
e9a25f70 JL |
3234 | default: |
3235 | break; | |
23b2ce53 RS |
3236 | } |
3237 | ||
76369a82 | 3238 | RTX_FLAG (x, used) = flag; |
23b2ce53 RS |
3239 | |
3240 | format_ptr = GET_RTX_FORMAT (code); | |
32b32b16 | 3241 | length = GET_RTX_LENGTH (code); |
b8698a0f | 3242 | |
32b32b16 | 3243 | for (i = 0; i < length; i++) |
23b2ce53 RS |
3244 | { |
3245 | switch (*format_ptr++) | |
3246 | { | |
3247 | case 'e': | |
32b32b16 AP |
3248 | if (i == length-1) |
3249 | { | |
3250 | x = XEXP (x, i); | |
3251 | goto repeat; | |
3252 | } | |
76369a82 | 3253 | mark_used_flags (XEXP (x, i), flag); |
23b2ce53 RS |
3254 | break; |
3255 | ||
3256 | case 'E': | |
3257 | for (j = 0; j < XVECLEN (x, i); j++) | |
76369a82 | 3258 | mark_used_flags (XVECEXP (x, i, j), flag); |
23b2ce53 RS |
3259 | break; |
3260 | } | |
3261 | } | |
3262 | } | |
2c07f13b | 3263 | |
76369a82 | 3264 | /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used |
2c07f13b JH |
3265 | to look for shared sub-parts. */ |
3266 | ||
3267 | void | |
76369a82 | 3268 | reset_used_flags (rtx x) |
2c07f13b | 3269 | { |
76369a82 NF |
3270 | mark_used_flags (x, 0); |
3271 | } | |
2c07f13b | 3272 | |
76369a82 NF |
3273 | /* Set all the USED bits in X to allow copy_rtx_if_shared to be used |
3274 | to look for shared sub-parts. */ | |
2c07f13b | 3275 | |
76369a82 NF |
3276 | void |
3277 | set_used_flags (rtx x) | |
3278 | { | |
3279 | mark_used_flags (x, 1); | |
2c07f13b | 3280 | } |
23b2ce53 RS |
3281 | \f |
3282 | /* Copy X if necessary so that it won't be altered by changes in OTHER. | |
3283 | Return X or the rtx for the pseudo reg the value of X was copied into. | |
3284 | OTHER must be valid as a SET_DEST. */ | |
3285 | ||
3286 | rtx | |
502b8322 | 3287 | make_safe_from (rtx x, rtx other) |
23b2ce53 RS |
3288 | { |
3289 | while (1) | |
3290 | switch (GET_CODE (other)) | |
3291 | { | |
3292 | case SUBREG: | |
3293 | other = SUBREG_REG (other); | |
3294 | break; | |
3295 | case STRICT_LOW_PART: | |
3296 | case SIGN_EXTEND: | |
3297 | case ZERO_EXTEND: | |
3298 | other = XEXP (other, 0); | |
3299 | break; | |
3300 | default: | |
3301 | goto done; | |
3302 | } | |
3303 | done: | |
3c0cb5de | 3304 | if ((MEM_P (other) |
23b2ce53 | 3305 | && ! CONSTANT_P (x) |
f8cfc6aa | 3306 | && !REG_P (x) |
23b2ce53 | 3307 | && GET_CODE (x) != SUBREG) |
f8cfc6aa | 3308 | || (REG_P (other) |
23b2ce53 RS |
3309 | && (REGNO (other) < FIRST_PSEUDO_REGISTER |
3310 | || reg_mentioned_p (other, x)))) | |
3311 | { | |
3312 | rtx temp = gen_reg_rtx (GET_MODE (x)); | |
3313 | emit_move_insn (temp, x); | |
3314 | return temp; | |
3315 | } | |
3316 | return x; | |
3317 | } | |
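A hedged sketch of the typical call pattern (the helper name and operands are illustrative; emit_move_insn is the standard move emitter used throughout this file): protect a value that must survive an assignment which might clobber it.

/* Hypothetical expander helper: emit TARGET = OP0 while keeping OP1
   usable afterwards, copying OP1 to a fresh pseudo first if TARGET
   might clobber it.  */
static rtx
emit_set_preserving (rtx target, rtx op0, rtx op1)
{
  op1 = make_safe_from (op1, target);
  emit_move_insn (target, op0);
  return op1;  /* The (possibly copied) value that is still valid.  */
}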
3318 | \f | |
3319 | /* Emission of insns (adding them to the doubly-linked list). */ | |
3320 | ||
23b2ce53 RS |
3321 | /* Return the last insn emitted, even if it is in a sequence now pushed. */ |
3322 | ||
db76cf1e | 3323 | rtx_insn * |
502b8322 | 3324 | get_last_insn_anywhere (void) |
23b2ce53 | 3325 | { |
614d5bd8 AM |
3326 | struct sequence_stack *seq; |
3327 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
3328 | if (seq->last != 0) | |
3329 | return seq->last; | |
23b2ce53 RS |
3330 | return 0; |
3331 | } | |
3332 | ||
2a496e8b JDA |
3333 | /* Return the first nonnote insn emitted in current sequence or current |
3334 | function. This routine looks inside SEQUENCEs. */ | |
3335 | ||
e4685bc8 | 3336 | rtx_insn * |
502b8322 | 3337 | get_first_nonnote_insn (void) |
2a496e8b | 3338 | { |
dc01c3d1 | 3339 | rtx_insn *insn = get_insns (); |
91373fe8 JDA |
3340 | |
3341 | if (insn) | |
3342 | { | |
3343 | if (NOTE_P (insn)) | |
3344 | for (insn = next_insn (insn); | |
3345 | insn && NOTE_P (insn); | |
3346 | insn = next_insn (insn)) | |
3347 | continue; | |
3348 | else | |
3349 | { | |
2ca202e7 | 3350 | if (NONJUMP_INSN_P (insn) |
91373fe8 | 3351 | && GET_CODE (PATTERN (insn)) == SEQUENCE) |
dc01c3d1 | 3352 | insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
91373fe8 JDA |
3353 | } |
3354 | } | |
2a496e8b JDA |
3355 | |
3356 | return insn; | |
3357 | } | |
3358 | ||
3359 | /* Return the last nonnote insn emitted in current sequence or current | |
3360 | function. This routine looks inside SEQUENCEs. */ | |
3361 | ||
e4685bc8 | 3362 | rtx_insn * |
502b8322 | 3363 | get_last_nonnote_insn (void) |
2a496e8b | 3364 | { |
dc01c3d1 | 3365 | rtx_insn *insn = get_last_insn (); |
91373fe8 JDA |
3366 | |
3367 | if (insn) | |
3368 | { | |
3369 | if (NOTE_P (insn)) | |
3370 | for (insn = previous_insn (insn); | |
3371 | insn && NOTE_P (insn); | |
3372 | insn = previous_insn (insn)) | |
3373 | continue; | |
3374 | else | |
3375 | { | |
dc01c3d1 DM |
3376 | if (NONJUMP_INSN_P (insn)) |
3377 | if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn))) | |
3378 | insn = seq->insn (seq->len () - 1); | |
91373fe8 JDA |
3379 | } |
3380 | } | |
2a496e8b JDA |
3381 | |
3382 | return insn; | |
3383 | } | |
3384 | ||
b5b8b0ac AO |
3385 | /* Return the number of actual (non-debug) insns emitted in this |
3386 | function. */ | |
3387 | ||
3388 | int | |
3389 | get_max_insn_count (void) | |
3390 | { | |
3391 | int n = cur_insn_uid; | |
3392 | ||
3393 | /* The table size must be stable across -g, to avoid codegen | |
3394 | differences due to debug insns, and not be affected by | |
3395 | -fmin-insn-uid, to avoid excessive table size and to simplify | |
3396 | debugging of -fcompare-debug failures. */ | |
3397 | if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID) | |
3398 | n -= cur_debug_insn_uid; | |
3399 | else | |
3400 | n -= MIN_NONDEBUG_INSN_UID; | |
3401 | ||
3402 | return n; | |
3403 | } | |
3404 | ||
23b2ce53 RS |
3405 | \f |
3406 | /* Return the next insn. If it is a SEQUENCE, return the first insn | |
3407 | of the sequence. */ | |
3408 | ||
eb51c837 | 3409 | rtx_insn * |
4ce524a1 | 3410 | next_insn (rtx_insn *insn) |
23b2ce53 | 3411 | { |
75547801 KG |
3412 | if (insn) |
3413 | { | |
3414 | insn = NEXT_INSN (insn); | |
3415 | if (insn && NONJUMP_INSN_P (insn) | |
3416 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
dc01c3d1 | 3417 | insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
75547801 | 3418 | } |
23b2ce53 | 3419 | |
dc01c3d1 | 3420 | return insn; |
23b2ce53 RS |
3421 | } |
3422 | ||
3423 | /* Return the previous insn. If it is a SEQUENCE, return the last insn | |
3424 | of the sequence. */ | |
3425 | ||
eb51c837 | 3426 | rtx_insn * |
4ce524a1 | 3427 | previous_insn (rtx_insn *insn) |
23b2ce53 | 3428 | { |
75547801 KG |
3429 | if (insn) |
3430 | { | |
3431 | insn = PREV_INSN (insn); | |
dc01c3d1 DM |
3432 | if (insn && NONJUMP_INSN_P (insn)) |
3433 | if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn))) | |
3434 | insn = seq->insn (seq->len () - 1); | |
75547801 | 3435 | } |
23b2ce53 | 3436 | |
dc01c3d1 | 3437 | return insn; |
23b2ce53 RS |
3438 | } |
3439 | ||
3440 | /* Return the next insn after INSN that is not a NOTE. This routine does not | |
3441 | look inside SEQUENCEs. */ | |
3442 | ||
eb51c837 | 3443 | rtx_insn * |
c9b0a227 | 3444 | next_nonnote_insn (rtx_insn *insn) |
23b2ce53 | 3445 | { |
75547801 KG |
3446 | while (insn) |
3447 | { | |
3448 | insn = NEXT_INSN (insn); | |
3449 | if (insn == 0 || !NOTE_P (insn)) | |
3450 | break; | |
3451 | } | |
23b2ce53 | 3452 | |
dc01c3d1 | 3453 | return insn; |
23b2ce53 RS |
3454 | } |
3455 | ||
f40dd646 AO |
3456 | /* Return the next insn after INSN that is not a DEBUG_INSN. This |
3457 | routine does not look inside SEQUENCEs. */ | |
1e211590 | 3458 | |
eb51c837 | 3459 | rtx_insn * |
f40dd646 | 3460 | next_nondebug_insn (rtx_insn *insn) |
1e211590 DD |
3461 | { |
3462 | while (insn) | |
3463 | { | |
3464 | insn = NEXT_INSN (insn); | |
f40dd646 | 3465 | if (insn == 0 || !DEBUG_INSN_P (insn)) |
1e211590 | 3466 | break; |
1e211590 DD |
3467 | } |
3468 | ||
dc01c3d1 | 3469 | return insn; |
1e211590 DD |
3470 | } |
3471 | ||
23b2ce53 RS |
3472 | /* Return the previous insn before INSN that is not a NOTE. This routine does |
3473 | not look inside SEQUENCEs. */ | |
3474 | ||
eb51c837 | 3475 | rtx_insn * |
c9b0a227 | 3476 | prev_nonnote_insn (rtx_insn *insn) |
23b2ce53 | 3477 | { |
75547801 KG |
3478 | while (insn) |
3479 | { | |
3480 | insn = PREV_INSN (insn); | |
3481 | if (insn == 0 || !NOTE_P (insn)) | |
3482 | break; | |
3483 | } | |
23b2ce53 | 3484 | |
dc01c3d1 | 3485 | return insn; |
23b2ce53 RS |
3486 | } |
3487 | ||
f40dd646 AO |
3488 | /* Return the previous insn before INSN that is not a DEBUG_INSN. |
3489 | This routine does not look inside SEQUENCEs. */ | |
896aa4ea | 3490 | |
eb51c837 | 3491 | rtx_insn * |
f40dd646 | 3492 | prev_nondebug_insn (rtx_insn *insn) |
896aa4ea DD |
3493 | { |
3494 | while (insn) | |
3495 | { | |
3496 | insn = PREV_INSN (insn); | |
f40dd646 | 3497 | if (insn == 0 || !DEBUG_INSN_P (insn)) |
896aa4ea | 3498 | break; |
896aa4ea DD |
3499 | } |
3500 | ||
dc01c3d1 | 3501 | return insn; |
896aa4ea DD |
3502 | } |
3503 | ||
f40dd646 AO |
3504 | /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN. |
3505 | This routine does not look inside SEQUENCEs. */ | |
b5b8b0ac | 3506 | |
eb51c837 | 3507 | rtx_insn * |
f40dd646 | 3508 | next_nonnote_nondebug_insn (rtx_insn *insn) |
b5b8b0ac AO |
3509 | { |
3510 | while (insn) | |
3511 | { | |
3512 | insn = NEXT_INSN (insn); | |
f40dd646 | 3513 | if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) |
b5b8b0ac AO |
3514 | break; |
3515 | } | |
3516 | ||
dc01c3d1 | 3517 | return insn; |
b5b8b0ac AO |
3518 | } |
3519 | ||
f40dd646 AO |
3520 | /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN, |
3521 | but stop the search before we enter another basic block. This | |
3522 | routine does not look inside SEQUENCEs. */ | |
b5b8b0ac | 3523 | |
eb51c837 | 3524 | rtx_insn * |
f40dd646 | 3525 | next_nonnote_nondebug_insn_bb (rtx_insn *insn) |
b5b8b0ac AO |
3526 | { |
3527 | while (insn) | |
3528 | { | |
f40dd646 AO |
3529 | insn = NEXT_INSN (insn); |
3530 | if (insn == 0) | |
3531 | break; | |
3532 | if (DEBUG_INSN_P (insn)) | |
3533 | continue; | |
3534 | if (!NOTE_P (insn)) | |
b5b8b0ac | 3535 | break; |
f40dd646 AO |
3536 | if (NOTE_INSN_BASIC_BLOCK_P (insn)) |
3537 | return NULL; | |
b5b8b0ac AO |
3538 | } |
3539 | ||
dc01c3d1 | 3540 | return insn; |
b5b8b0ac AO |
3541 | } |
3542 | ||
f40dd646 | 3543 | /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN. |
f0fc0803 JJ |
3544 | This routine does not look inside SEQUENCEs. */ |
3545 | ||
eb51c837 | 3546 | rtx_insn * |
f40dd646 | 3547 | prev_nonnote_nondebug_insn (rtx_insn *insn) |
f0fc0803 JJ |
3548 | { |
3549 | while (insn) | |
3550 | { | |
f40dd646 | 3551 | insn = PREV_INSN (insn); |
f0fc0803 JJ |
3552 | if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) |
3553 | break; | |
3554 | } | |
3555 | ||
dc01c3d1 | 3556 | return insn; |
f0fc0803 JJ |
3557 | } |
3558 | ||
f40dd646 AO |
3559 | /* Return the previous insn before INSN that is not a NOTE nor |
3560 | DEBUG_INSN, but stop the search before we enter another basic | |
3561 | block. This routine does not look inside SEQUENCEs. */ | |
f0fc0803 | 3562 | |
eb51c837 | 3563 | rtx_insn * |
f40dd646 | 3564 | prev_nonnote_nondebug_insn_bb (rtx_insn *insn) |
f0fc0803 JJ |
3565 | { |
3566 | while (insn) | |
3567 | { | |
3568 | insn = PREV_INSN (insn); | |
f40dd646 | 3569 | if (insn == 0) |
f0fc0803 | 3570 | break; |
f40dd646 AO |
3571 | if (DEBUG_INSN_P (insn)) |
3572 | continue; | |
3573 | if (!NOTE_P (insn)) | |
3574 | break; | |
3575 | if (NOTE_INSN_BASIC_BLOCK_P (insn)) | |
3576 | return NULL; | |
f0fc0803 JJ |
3577 | } |
3578 | ||
dc01c3d1 | 3579 | return insn; |
f0fc0803 JJ |
3580 | } |
3581 | ||
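A hedged sketch of how these walkers tend to be used (BB_HEAD and BB_END are the usual basic-block accessors; the loop shape is illustrative, not a quote from a real pass): visit only the non-note, non-debug instructions of one block.

static void
scan_real_insns (basic_block bb)
{
  for (rtx_insn *insn = BB_HEAD (bb);
       insn;
       insn = next_nonnote_nondebug_insn_bb (insn))
    {
      if (INSN_P (insn))
        {
          /* ... examine INSN ... */
        }
      if (insn == BB_END (bb))
        break;
    }
}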
23b2ce53 RS |
3582 | /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN; |
3583 | or 0, if there is none. This routine does not look inside | |
0f41302f | 3584 | SEQUENCEs. */ |
23b2ce53 | 3585 | |
eb51c837 | 3586 | rtx_insn * |
dc01c3d1 | 3587 | next_real_insn (rtx uncast_insn) |
23b2ce53 | 3588 | { |
dc01c3d1 DM |
3589 | rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn); |
3590 | ||
75547801 KG |
3591 | while (insn) |
3592 | { | |
3593 | insn = NEXT_INSN (insn); | |
3594 | if (insn == 0 || INSN_P (insn)) | |
3595 | break; | |
3596 | } | |
23b2ce53 | 3597 | |
dc01c3d1 | 3598 | return insn; |
23b2ce53 RS |
3599 | } |
3600 | ||
3601 | /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN; | |
3602 | or 0, if there is none. This routine does not look inside | |
3603 | SEQUENCEs. */ | |
3604 | ||
eb51c837 | 3605 | rtx_insn * |
d8fd56b2 | 3606 | prev_real_insn (rtx_insn *insn) |
23b2ce53 | 3607 | { |
75547801 KG |
3608 | while (insn) |
3609 | { | |
3610 | insn = PREV_INSN (insn); | |
3611 | if (insn == 0 || INSN_P (insn)) | |
3612 | break; | |
3613 | } | |
23b2ce53 | 3614 | |
dc01c3d1 | 3615 | return insn; |
23b2ce53 RS |
3616 | } |
3617 | ||
ee960939 OH |
3618 | /* Return the last CALL_INSN in the current list, or 0 if there is none. |
3619 | This routine does not look inside SEQUENCEs. */ | |
3620 | ||
049cfc4a | 3621 | rtx_call_insn * |
502b8322 | 3622 | last_call_insn (void) |
ee960939 | 3623 | { |
049cfc4a | 3624 | rtx_insn *insn; |
ee960939 OH |
3625 | |
3626 | for (insn = get_last_insn (); | |
4b4bf941 | 3627 | insn && !CALL_P (insn); |
ee960939 OH |
3628 | insn = PREV_INSN (insn)) |
3629 | ; | |
3630 | ||
049cfc4a | 3631 | return safe_as_a <rtx_call_insn *> (insn); |
ee960939 OH |
3632 | } |
3633 | ||
23b2ce53 | 3634 | /* Find the next insn after INSN that really does something. This routine |
9c517bf3 AK |
3635 | does not look inside SEQUENCEs. After reload this also skips over |
3636 | standalone USE and CLOBBER insns. */
23b2ce53 | 3637 | |
69732dcb | 3638 | int |
7c9796ed | 3639 | active_insn_p (const rtx_insn *insn) |
69732dcb | 3640 | { |
4b4bf941 | 3641 | return (CALL_P (insn) || JUMP_P (insn) |
39718607 | 3642 | || JUMP_TABLE_DATA_P (insn) /* FIXME */ |
4b4bf941 | 3643 | || (NONJUMP_INSN_P (insn) |
23b8ba81 RH |
3644 | && (! reload_completed |
3645 | || (GET_CODE (PATTERN (insn)) != USE | |
3646 | && GET_CODE (PATTERN (insn)) != CLOBBER)))); | |
69732dcb RH |
3647 | } |
3648 | ||
eb51c837 | 3649 | rtx_insn * |
7c9796ed | 3650 | next_active_insn (rtx_insn *insn) |
23b2ce53 | 3651 | { |
75547801 KG |
3652 | while (insn) |
3653 | { | |
3654 | insn = NEXT_INSN (insn); | |
3655 | if (insn == 0 || active_insn_p (insn)) | |
3656 | break; | |
3657 | } | |
23b2ce53 | 3658 | |
dc01c3d1 | 3659 | return insn; |
23b2ce53 RS |
3660 | } |
3661 | ||
3662 | /* Find the last insn before INSN that really does something. This routine | |
9c517bf3 AK |
3663 | does not look inside SEQUENCEs. After reload this also skips over |
3664 | standalone USE and CLOBBER insns. */
23b2ce53 | 3665 | |
eb51c837 | 3666 | rtx_insn * |
7c9796ed | 3667 | prev_active_insn (rtx_insn *insn) |
23b2ce53 | 3668 | { |
75547801 KG |
3669 | while (insn) |
3670 | { | |
3671 | insn = PREV_INSN (insn); | |
3672 | if (insn == 0 || active_insn_p (insn)) | |
3673 | break; | |
3674 | } | |
23b2ce53 | 3675 | |
dc01c3d1 | 3676 | return insn; |
23b2ce53 | 3677 | } |
23b2ce53 | 3678 | \f |
23b2ce53 RS |
3679 | /* Return the next insn that uses CC0 after INSN, which is assumed to |
3680 | set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter | |
3681 | applied to the result of this function should yield INSN). | |
3682 | ||
3683 | Normally, this is simply the next insn. However, if a REG_CC_USER note | |
3684 | is present, it contains the insn that uses CC0. | |
3685 | ||
3686 | Return 0 if we can't find the insn. */ | |
3687 | ||
75b46023 | 3688 | rtx_insn * |
475edec0 | 3689 | next_cc0_user (rtx_insn *insn) |
23b2ce53 | 3690 | { |
906c4e36 | 3691 | rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX); |
23b2ce53 RS |
3692 | |
3693 | if (note) | |
75b46023 | 3694 | return safe_as_a <rtx_insn *> (XEXP (note, 0)); |
23b2ce53 RS |
3695 | |
3696 | insn = next_nonnote_insn (insn); | |
4b4bf941 | 3697 | if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
dc01c3d1 | 3698 | insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
23b2ce53 | 3699 | |
2c3c49de | 3700 | if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn))) |
dc01c3d1 | 3701 | return insn; |
23b2ce53 RS |
3702 | |
3703 | return 0; | |
3704 | } | |
3705 | ||
3706 | /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER | |
3707 | note, it is the previous insn. */ | |
3708 | ||
75b46023 | 3709 | rtx_insn * |
5c8db5b4 | 3710 | prev_cc0_setter (rtx_insn *insn) |
23b2ce53 | 3711 | { |
906c4e36 | 3712 | rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX); |
23b2ce53 RS |
3713 | |
3714 | if (note) | |
75b46023 | 3715 | return safe_as_a <rtx_insn *> (XEXP (note, 0)); |
23b2ce53 RS |
3716 | |
3717 | insn = prev_nonnote_insn (insn); | |
5b0264cb | 3718 | gcc_assert (sets_cc0_p (PATTERN (insn))); |
23b2ce53 | 3719 | |
dc01c3d1 | 3720 | return insn; |
23b2ce53 | 3721 | } |
e5bef2e4 | 3722 | |
594f8779 RZ |
3723 | /* Return true if X contains an RTX_AUTOINC class rtx whose register operand is REG. */
3724 | ||
3725 | static int | |
9021b8ec | 3726 | find_auto_inc (const_rtx x, const_rtx reg) |
594f8779 | 3727 | { |
9021b8ec RS |
3728 | subrtx_iterator::array_type array; |
3729 | FOR_EACH_SUBRTX (iter, array, x, NONCONST) | |
594f8779 | 3730 | { |
9021b8ec RS |
3731 | const_rtx x = *iter; |
3732 | if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC | |
3733 | && rtx_equal_p (reg, XEXP (x, 0))) | |
3734 | return true; | |
594f8779 | 3735 | } |
9021b8ec | 3736 | return false; |
594f8779 | 3737 | } |
594f8779 | 3738 | |
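find_auto_inc relies on the generic sub-rtx walker from rtl-iter.h. A hedged sketch of the same idiom with an invented helper name, scanning an expression for any MEM:

static bool
contains_mem_p (const_rtx x)
{
  subrtx_iterator::array_type array;
  /* Visit X and every sub-rtx of X.  */
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (MEM_P (*iter))
      return true;
  return false;
}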
e5bef2e4 HB |
3739 | /* Increment the label uses for all labels present in rtx. */ |
3740 | ||
3741 | static void | |
502b8322 | 3742 | mark_label_nuses (rtx x) |
e5bef2e4 | 3743 | { |
b3694847 SS |
3744 | enum rtx_code code; |
3745 | int i, j; | |
3746 | const char *fmt; | |
e5bef2e4 HB |
3747 | |
3748 | code = GET_CODE (x); | |
04a121a7 TS |
3749 | if (code == LABEL_REF && LABEL_P (label_ref_label (x))) |
3750 | LABEL_NUSES (label_ref_label (x))++; | |
e5bef2e4 HB |
3751 | |
3752 | fmt = GET_RTX_FORMAT (code); | |
3753 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3754 | { | |
3755 | if (fmt[i] == 'e') | |
0fb7aeda | 3756 | mark_label_nuses (XEXP (x, i)); |
e5bef2e4 | 3757 | else if (fmt[i] == 'E') |
0fb7aeda | 3758 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
e5bef2e4 HB |
3759 | mark_label_nuses (XVECEXP (x, i, j)); |
3760 | } | |
3761 | } | |
3762 | ||
23b2ce53 RS |
3763 | \f |
3764 | /* Try splitting insns that can be split for better scheduling. | |
3765 | PAT is the pattern which might split. | |
3766 | TRIAL is the insn providing PAT. | |
cc2902df | 3767 | LAST is nonzero if we should return the last insn of the sequence produced. |
23b2ce53 RS |
3768 | |
3769 | If this routine succeeds in splitting, it returns the first or last | |
11147ebe | 3770 | replacement insn depending on the value of LAST. Otherwise, it |
23b2ce53 RS |
3771 | returns TRIAL. If the insn to be returned can be split, it will be. */ |
3772 | ||
53f04688 | 3773 | rtx_insn * |
bb5c4956 | 3774 | try_split (rtx pat, rtx_insn *trial, int last) |
23b2ce53 | 3775 | { |
d4eff95b | 3776 | rtx_insn *before, *after; |
dc01c3d1 DM |
3777 | rtx note; |
3778 | rtx_insn *seq, *tem; | |
5fa396ad | 3779 | profile_probability probability; |
dc01c3d1 | 3780 | rtx_insn *insn_last, *insn; |
599aedd9 | 3781 | int njumps = 0; |
e67d1102 | 3782 | rtx_insn *call_insn = NULL; |
6b24c259 | 3783 | |
cd9c1ca8 RH |
3784 | /* We're not good at redistributing frame information. */ |
3785 | if (RTX_FRAME_RELATED_P (trial)) | |
dc01c3d1 | 3786 | return trial; |
cd9c1ca8 | 3787 | |
6b24c259 JH |
3788 | if (any_condjump_p (trial) |
3789 | && (note = find_reg_note (trial, REG_BR_PROB, 0))) | |
5fa396ad JH |
3790 | split_branch_probability |
3791 | = profile_probability::from_reg_br_prob_note (XINT (note, 0)); | |
3792 | else | |
3793 | split_branch_probability = profile_probability::uninitialized (); | |
3794 | ||
6b24c259 JH |
3795 | probability = split_branch_probability; |
3796 | ||
bb5c4956 | 3797 | seq = split_insns (pat, trial); |
6b24c259 | 3798 | |
5fa396ad | 3799 | split_branch_probability = profile_probability::uninitialized (); |
23b2ce53 | 3800 | |
599aedd9 | 3801 | if (!seq) |
dc01c3d1 | 3802 | return trial; |
599aedd9 RH |
3803 | |
3804 | /* Avoid infinite loop if any insn of the result matches | |
3805 | the original pattern. */ | |
3806 | insn_last = seq; | |
3807 | while (1) | |
23b2ce53 | 3808 | { |
599aedd9 RH |
3809 | if (INSN_P (insn_last) |
3810 | && rtx_equal_p (PATTERN (insn_last), pat)) | |
dc01c3d1 | 3811 | return trial; |
599aedd9 RH |
3812 | if (!NEXT_INSN (insn_last)) |
3813 | break; | |
3814 | insn_last = NEXT_INSN (insn_last); | |
3815 | } | |
750c9258 | 3816 | |
6fb5fa3c DB |
3817 | /* We will be adding the new sequence to the function. The splitters |
3818 | may have introduced invalid RTL sharing, so unshare the sequence now. */ | |
3819 | unshare_all_rtl_in_chain (seq); | |
3820 | ||
339ba33b | 3821 | /* Mark labels and copy flags. */ |
599aedd9 RH |
3822 | for (insn = insn_last; insn ; insn = PREV_INSN (insn)) |
3823 | { | |
4b4bf941 | 3824 | if (JUMP_P (insn)) |
599aedd9 | 3825 | { |
339ba33b RS |
3826 | if (JUMP_P (trial)) |
3827 | CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial); | |
599aedd9 RH |
3828 | mark_jump_label (PATTERN (insn), insn, 0); |
3829 | njumps++; | |
5fa396ad | 3830 | if (probability.initialized_p () |
599aedd9 RH |
3831 | && any_condjump_p (insn) |
3832 | && !find_reg_note (insn, REG_BR_PROB, 0)) | |
2f937369 | 3833 | { |
599aedd9 RH |
3834 | /* We can preserve the REG_BR_PROB notes only if exactly |
3835 | one jump is created, otherwise the machine description | |
3836 | is responsible for this step using | |
3837 | the split_branch_probability variable. */
5b0264cb | 3838 | gcc_assert (njumps == 1); |
5fa396ad | 3839 | add_reg_br_prob_note (insn, probability); |
2f937369 | 3840 | } |
599aedd9 RH |
3841 | } |
3842 | } | |
3843 | ||
3844 | /* If we are splitting a CALL_INSN, look for the CALL_INSN | |
65712d5c | 3845 | in SEQ and copy any additional information across. */ |
4b4bf941 | 3846 | if (CALL_P (trial)) |
599aedd9 RH |
3847 | { |
3848 | for (insn = insn_last; insn ; insn = PREV_INSN (insn)) | |
4b4bf941 | 3849 | if (CALL_P (insn)) |
599aedd9 | 3850 | { |
dc01c3d1 DM |
3851 | rtx_insn *next; |
3852 | rtx *p; | |
65712d5c | 3853 | |
4f660b15 RO |
3854 | gcc_assert (call_insn == NULL_RTX); |
3855 | call_insn = insn; | |
3856 | ||
65712d5c RS |
3857 | /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the |
3858 | target may have explicitly specified. */ | |
3859 | p = &CALL_INSN_FUNCTION_USAGE (insn); | |
f6a1f3f6 RH |
3860 | while (*p) |
3861 | p = &XEXP (*p, 1); | |
3862 | *p = CALL_INSN_FUNCTION_USAGE (trial); | |
65712d5c RS |
3863 | |
3864 | /* If the old call was a sibling call, the new one must | |
3865 | be too. */ | |
599aedd9 | 3866 | SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial); |
65712d5c RS |
3867 | |
3868 | /* If the new call is the last instruction in the sequence, | |
3869 | it will effectively replace the old call in-situ. Otherwise | |
3870 | we must move any following NOTE_INSN_CALL_ARG_LOCATION note | |
3871 | so that it comes immediately after the new call. */ | |
3872 | if (NEXT_INSN (insn)) | |
65f3dedb RS |
3873 | for (next = NEXT_INSN (trial); |
3874 | next && NOTE_P (next); | |
3875 | next = NEXT_INSN (next)) | |
3876 | if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION) | |
65712d5c RS |
3877 | { |
3878 | remove_insn (next); | |
3879 | add_insn_after (next, insn, NULL); | |
65f3dedb | 3880 | break; |
65712d5c | 3881 | } |
599aedd9 RH |
3882 | } |
3883 | } | |
4b5e8abe | 3884 | |
599aedd9 RH |
3885 | /* Copy notes, particularly those related to the CFG. */ |
3886 | for (note = REG_NOTES (trial); note; note = XEXP (note, 1)) | |
3887 | { | |
3888 | switch (REG_NOTE_KIND (note)) | |
3889 | { | |
3890 | case REG_EH_REGION: | |
1d65f45c | 3891 | copy_reg_eh_region_note_backward (note, insn_last, NULL); |
599aedd9 | 3892 | break; |
216183ce | 3893 | |
599aedd9 RH |
3894 | case REG_NORETURN: |
3895 | case REG_SETJMP: | |
0a35513e | 3896 | case REG_TM: |
5c5f0b65 | 3897 | case REG_CALL_NOCF_CHECK: |
594f8779 | 3898 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
216183ce | 3899 | { |
4b4bf941 | 3900 | if (CALL_P (insn)) |
65c5f2a6 | 3901 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
216183ce | 3902 | } |
599aedd9 | 3903 | break; |
d6e95df8 | 3904 | |
599aedd9 | 3905 | case REG_NON_LOCAL_GOTO: |
594f8779 | 3906 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
2f937369 | 3907 | { |
4b4bf941 | 3908 | if (JUMP_P (insn)) |
65c5f2a6 | 3909 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
2f937369 | 3910 | } |
599aedd9 | 3911 | break; |
e5bef2e4 | 3912 | |
594f8779 | 3913 | case REG_INC: |
760edf20 TS |
3914 | if (!AUTO_INC_DEC) |
3915 | break; | |
3916 | ||
594f8779 RZ |
3917 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
3918 | { | |
3919 | rtx reg = XEXP (note, 0); | |
3920 | if (!FIND_REG_INC_NOTE (insn, reg) | |
9021b8ec | 3921 | && find_auto_inc (PATTERN (insn), reg)) |
65c5f2a6 | 3922 | add_reg_note (insn, REG_INC, reg); |
594f8779 RZ |
3923 | } |
3924 | break; | |
594f8779 | 3925 | |
9a08d230 | 3926 | case REG_ARGS_SIZE: |
e5b51ca0 | 3927 | fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0))); |
9a08d230 RH |
3928 | break; |
3929 | ||
4f660b15 RO |
3930 | case REG_CALL_DECL: |
3931 | gcc_assert (call_insn != NULL_RTX); | |
3932 | add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0)); | |
3933 | break; | |
3934 | ||
599aedd9 RH |
3935 | default: |
3936 | break; | |
23b2ce53 | 3937 | } |
599aedd9 RH |
3938 | } |
3939 | ||
3940 | /* If there are LABELS inside the split insns, increment the
3941 | usage count so we don't delete the label. */ | |
cf7c4aa6 | 3942 | if (INSN_P (trial)) |
599aedd9 RH |
3943 | { |
3944 | insn = insn_last; | |
3945 | while (insn != NULL_RTX) | |
23b2ce53 | 3946 | { |
cf7c4aa6 | 3947 | /* JUMP_P insns have already been "marked" above. */ |
4b4bf941 | 3948 | if (NONJUMP_INSN_P (insn)) |
599aedd9 | 3949 | mark_label_nuses (PATTERN (insn)); |
23b2ce53 | 3950 | |
599aedd9 RH |
3951 | insn = PREV_INSN (insn); |
3952 | } | |
23b2ce53 RS |
3953 | } |
3954 | ||
d4eff95b JC |
3955 | before = PREV_INSN (trial); |
3956 | after = NEXT_INSN (trial); | |
3957 | ||
5368224f | 3958 | tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial)); |
599aedd9 RH |
3959 | |
3960 | delete_insn (trial); | |
599aedd9 RH |
3961 | |
3962 | /* Recursively call try_split for each new insn created; by the | |
3963 | time control returns here that insn will be fully split, so | |
3964 | set LAST and continue from the insn after the one returned. | |
3965 | We can't use next_active_insn here since AFTER may be a note. | |
3966 | Ignore deleted insns, which can occur if not optimizing. */
3967 | for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem)) | |
4654c0cf | 3968 | if (! tem->deleted () && INSN_P (tem)) |
599aedd9 RH |
3969 | tem = try_split (PATTERN (tem), tem, 1); |
3970 | ||
3971 | /* Return either the first or the last insn, depending on which was | |
3972 | requested. */ | |
3973 | return last | |
5936d944 | 3974 | ? (after ? PREV_INSN (after) : get_last_insn ()) |
599aedd9 | 3975 | : NEXT_INSN (before); |
23b2ce53 RS |
3976 | } |
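A hedged sketch of how a splitting pass might drive try_split over the whole chain (compare the recursive call above and the note about continuing after the returned insn); real passes such as split_all_insns do more bookkeeping, so this is illustrative only.

static void
split_everything (void)
{
  for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && !insn->deleted ())
      /* With LAST = 1, try_split returns the last replacement insn,
         so the loop resumes right after the split sequence.  */
      insn = try_split (PATTERN (insn), insn, 1);
}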
3977 | \f | |
3978 | /* Make and return an INSN rtx, initializing all its slots. | |
4b1f5e8c | 3979 | Store PATTERN in the pattern slot. */
23b2ce53 | 3980 | |
167b9fae | 3981 | rtx_insn * |
502b8322 | 3982 | make_insn_raw (rtx pattern) |
23b2ce53 | 3983 | { |
167b9fae | 3984 | rtx_insn *insn; |
23b2ce53 | 3985 | |
167b9fae | 3986 | insn = as_a <rtx_insn *> (rtx_alloc (INSN)); |
23b2ce53 | 3987 | |
43127294 | 3988 | INSN_UID (insn) = cur_insn_uid++; |
23b2ce53 RS |
3989 | PATTERN (insn) = pattern; |
3990 | INSN_CODE (insn) = -1; | |
1632afca | 3991 | REG_NOTES (insn) = NULL; |
5368224f | 3992 | INSN_LOCATION (insn) = curr_insn_location (); |
ba4f7968 | 3993 | BLOCK_FOR_INSN (insn) = NULL; |
23b2ce53 | 3994 | |
47984720 NC |
3995 | #ifdef ENABLE_RTL_CHECKING |
3996 | if (insn | |
2c3c49de | 3997 | && INSN_P (insn) |
47984720 NC |
3998 | && (returnjump_p (insn) |
3999 | || (GET_CODE (insn) == SET | |
4000 | && SET_DEST (insn) == pc_rtx))) | |
4001 | { | |
d4ee4d25 | 4002 | warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n"); |
47984720 NC |
4003 | debug_rtx (insn); |
4004 | } | |
4005 | #endif | |
750c9258 | 4006 | |
23b2ce53 RS |
4007 | return insn; |
4008 | } | |
4009 | ||
b5b8b0ac AO |
4010 | /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */ |
4011 | ||
167b9fae | 4012 | static rtx_insn * |
b5b8b0ac AO |
4013 | make_debug_insn_raw (rtx pattern) |
4014 | { | |
167b9fae | 4015 | rtx_debug_insn *insn; |
b5b8b0ac | 4016 | |
167b9fae | 4017 | insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN)); |
b5b8b0ac AO |
4018 | INSN_UID (insn) = cur_debug_insn_uid++; |
4019 | if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID) | |
4020 | INSN_UID (insn) = cur_insn_uid++; | |
4021 | ||
4022 | PATTERN (insn) = pattern; | |
4023 | INSN_CODE (insn) = -1; | |
4024 | REG_NOTES (insn) = NULL; | |
5368224f | 4025 | INSN_LOCATION (insn) = curr_insn_location (); |
b5b8b0ac AO |
4026 | BLOCK_FOR_INSN (insn) = NULL; |
4027 | ||
4028 | return insn; | |
4029 | } | |
4030 | ||
2f937369 | 4031 | /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */ |
23b2ce53 | 4032 | |
167b9fae | 4033 | static rtx_insn * |
502b8322 | 4034 | make_jump_insn_raw (rtx pattern) |
23b2ce53 | 4035 | { |
167b9fae | 4036 | rtx_jump_insn *insn; |
23b2ce53 | 4037 | |
167b9fae | 4038 | insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN)); |
1632afca | 4039 | INSN_UID (insn) = cur_insn_uid++; |
23b2ce53 RS |
4040 | |
4041 | PATTERN (insn) = pattern; | |
4042 | INSN_CODE (insn) = -1; | |
1632afca RS |
4043 | REG_NOTES (insn) = NULL; |
4044 | JUMP_LABEL (insn) = NULL; | |
5368224f | 4045 | INSN_LOCATION (insn) = curr_insn_location (); |
ba4f7968 | 4046 | BLOCK_FOR_INSN (insn) = NULL; |
23b2ce53 RS |
4047 | |
4048 | return insn; | |
4049 | } | |
aff507f4 | 4050 | |
2f937369 | 4051 | /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */ |
aff507f4 | 4052 | |
167b9fae | 4053 | static rtx_insn * |
502b8322 | 4054 | make_call_insn_raw (rtx pattern) |
aff507f4 | 4055 | { |
167b9fae | 4056 | rtx_call_insn *insn; |
aff507f4 | 4057 | |
167b9fae | 4058 | insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN)); |
aff507f4 RK |
4059 | INSN_UID (insn) = cur_insn_uid++; |
4060 | ||
4061 | PATTERN (insn) = pattern; | |
4062 | INSN_CODE (insn) = -1; | |
aff507f4 RK |
4063 | REG_NOTES (insn) = NULL; |
4064 | CALL_INSN_FUNCTION_USAGE (insn) = NULL; | |
5368224f | 4065 | INSN_LOCATION (insn) = curr_insn_location (); |
ba4f7968 | 4066 | BLOCK_FOR_INSN (insn) = NULL; |
aff507f4 RK |
4067 | |
4068 | return insn; | |
4069 | } | |
96fba521 SB |
4070 | |
4071 | /* Like `make_insn_raw' but make a NOTE instead of an insn. */ | |
4072 | ||
66e8df53 | 4073 | static rtx_note * |
96fba521 SB |
4074 | make_note_raw (enum insn_note subtype) |
4075 | { | |
4076 | /* Some notes are never created this way at all. These notes are | |
4077 | only created by patching out insns. */ | |
4078 | gcc_assert (subtype != NOTE_INSN_DELETED_LABEL | |
4079 | && subtype != NOTE_INSN_DELETED_DEBUG_LABEL); | |
4080 | ||
66e8df53 | 4081 | rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE)); |
96fba521 SB |
4082 | INSN_UID (note) = cur_insn_uid++; |
4083 | NOTE_KIND (note) = subtype; | |
4084 | BLOCK_FOR_INSN (note) = NULL; | |
4085 | memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note))); | |
4086 | return note; | |
4087 | } | |
23b2ce53 | 4088 | \f |
96fba521 SB |
4089 | /* Link INSN into the doubly-linked list between PREV and NEXT.
4090 | INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects, | |
4091 | but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */ | |
4092 | ||
4093 | static inline void | |
9152e0aa | 4094 | link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next) |
96fba521 | 4095 | { |
0f82e5c9 DM |
4096 | SET_PREV_INSN (insn) = prev; |
4097 | SET_NEXT_INSN (insn) = next; | |
96fba521 SB |
4098 | if (prev != NULL) |
4099 | { | |
0f82e5c9 | 4100 | SET_NEXT_INSN (prev) = insn; |
96fba521 SB |
4101 | if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE) |
4102 | { | |
e6eda746 DM |
4103 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev)); |
4104 | SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn; | |
96fba521 SB |
4105 | } |
4106 | } | |
4107 | if (next != NULL) | |
4108 | { | |
0f82e5c9 | 4109 | SET_PREV_INSN (next) = insn; |
96fba521 | 4110 | if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) |
e6eda746 DM |
4111 | { |
4112 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next)); | |
4113 | SET_PREV_INSN (sequence->insn (0)) = insn; | |
4114 | } | |
96fba521 | 4115 | } |
3ccb989e SB |
4116 | |
4117 | if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
4118 | { | |
e6eda746 DM |
4119 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn)); |
4120 | SET_PREV_INSN (sequence->insn (0)) = prev; | |
4121 | SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next; | |
3ccb989e | 4122 | } |
96fba521 SB |
4123 | } |
4124 | ||
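Setting aside the SEQUENCE handling, the core of link_insn_into_chain is ordinary doubly-linked-list splicing. A standalone sketch (plain C, not GCC code) of that core, where either neighbour may be NULL at the ends of the chain:

struct link { struct link *prev, *next; };

static void
link_between (struct link *node, struct link *prev, struct link *next)
{
  node->prev = prev;
  node->next = next;
  if (prev)
    prev->next = node;  /* Splice after PREV unless NODE becomes first.  */
  if (next)
    next->prev = node;  /* Splice before NEXT unless NODE becomes last.  */
}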
23b2ce53 RS |
4125 | /* Add INSN to the end of the doubly-linked list. |
4126 | INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */ | |
4127 | ||
4128 | void | |
9152e0aa | 4129 | add_insn (rtx_insn *insn) |
23b2ce53 | 4130 | { |
9152e0aa | 4131 | rtx_insn *prev = get_last_insn (); |
96fba521 | 4132 | link_insn_into_chain (insn, prev, NULL); |
01512446 | 4133 | if (get_insns () == NULL) |
5936d944 | 4134 | set_first_insn (insn); |
5936d944 | 4135 | set_last_insn (insn); |
23b2ce53 RS |
4136 | } |
4137 | ||
96fba521 | 4138 | /* Add INSN into the doubly-linked list after insn AFTER. */ |
23b2ce53 | 4139 | |
96fba521 | 4140 | static void |
9152e0aa | 4141 | add_insn_after_nobb (rtx_insn *insn, rtx_insn *after) |
23b2ce53 | 4142 | { |
9152e0aa | 4143 | rtx_insn *next = NEXT_INSN (after); |
23b2ce53 | 4144 | |
4654c0cf | 4145 | gcc_assert (!optimize || !after->deleted ()); |
ba213285 | 4146 | |
96fba521 | 4147 | link_insn_into_chain (insn, after, next); |
23b2ce53 | 4148 | |
96fba521 | 4149 | if (next == NULL) |
23b2ce53 | 4150 | { |
614d5bd8 AM |
4151 | struct sequence_stack *seq; |
4152 | ||
4153 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
4154 | if (after == seq->last) | |
4155 | { | |
4156 | seq->last = insn; | |
4157 | break; | |
4158 | } | |
23b2ce53 | 4159 | } |
96fba521 SB |
4160 | } |
4161 | ||
4162 | /* Add INSN into the doubly-linked list before insn BEFORE. */ | |
4163 | ||
4164 | static void | |
9152e0aa | 4165 | add_insn_before_nobb (rtx_insn *insn, rtx_insn *before) |
96fba521 | 4166 | { |
9152e0aa | 4167 | rtx_insn *prev = PREV_INSN (before); |
96fba521 | 4168 | |
4654c0cf | 4169 | gcc_assert (!optimize || !before->deleted ()); |
96fba521 SB |
4170 | |
4171 | link_insn_into_chain (insn, prev, before); | |
4172 | ||
4173 | if (prev == NULL) | |
23b2ce53 | 4174 | { |
614d5bd8 | 4175 | struct sequence_stack *seq; |
a0ae8e8d | 4176 | |
614d5bd8 AM |
4177 | for (seq = get_current_sequence (); seq; seq = seq->next) |
4178 | if (before == seq->first) | |
4179 | { | |
4180 | seq->first = insn; | |
4181 | break; | |
4182 | } | |
4183 | ||
4184 | gcc_assert (seq); | |
23b2ce53 | 4185 | } |
96fba521 SB |
4186 | } |
4187 | ||
4188 | /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN. | |
4189 | If BB is NULL, an attempt is made to infer the bb from AFTER.
4190 | ||
4191 | This and the next function should be the only functions called | |
4192 | to insert an insn once delay slots have been filled since only | |
4193 | they know how to update a SEQUENCE. */ | |
23b2ce53 | 4194 | |
96fba521 | 4195 | void |
9152e0aa | 4196 | add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb) |
96fba521 | 4197 | { |
1130d5e3 | 4198 | rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); |
9152e0aa | 4199 | rtx_insn *after = as_a <rtx_insn *> (uncast_after); |
96fba521 | 4200 | add_insn_after_nobb (insn, after); |
4b4bf941 JQ |
4201 | if (!BARRIER_P (after) |
4202 | && !BARRIER_P (insn) | |
3c030e88 JH |
4203 | && (bb = BLOCK_FOR_INSN (after))) |
4204 | { | |
4205 | set_block_for_insn (insn, bb); | |
38c1593d | 4206 | if (INSN_P (insn)) |
6fb5fa3c | 4207 | df_insn_rescan (insn); |
3c030e88 | 4208 | /* Should not happen as first in the BB is always |
a1f300c0 | 4209 | either NOTE or LABEL. */ |
a813c111 | 4210 | if (BB_END (bb) == after |
3c030e88 | 4211 | /* Avoid clobbering of structure when creating new BB. */ |
4b4bf941 | 4212 | && !BARRIER_P (insn) |
a38e7aa5 | 4213 | && !NOTE_INSN_BASIC_BLOCK_P (insn)) |
1130d5e3 | 4214 | BB_END (bb) = insn; |
3c030e88 | 4215 | } |
23b2ce53 RS |
4216 | } |
4217 | ||
96fba521 SB |
4218 | /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN. |
4219 | If BB is NULL, an attempt is made to infer the bb from before. | |
4220 | ||
4221 | This and the previous function should be the only functions called | |
4222 | to insert an insn once delay slots have been filled since only | |
4223 | they know how to update a SEQUENCE. */ | |
a0ae8e8d RK |
4224 | |
4225 | void | |
9152e0aa | 4226 | add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb) |
a0ae8e8d | 4227 | { |
9152e0aa DM |
4228 | rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); |
4229 | rtx_insn *before = as_a <rtx_insn *> (uncast_before); | |
96fba521 | 4230 | add_insn_before_nobb (insn, before); |
a0ae8e8d | 4231 | |
b8698a0f | 4232 | if (!bb |
6fb5fa3c DB |
4233 | && !BARRIER_P (before) |
4234 | && !BARRIER_P (insn)) | |
4235 | bb = BLOCK_FOR_INSN (before); | |
4236 | ||
4237 | if (bb) | |
3c030e88 JH |
4238 | { |
4239 | set_block_for_insn (insn, bb); | |
38c1593d | 4240 | if (INSN_P (insn)) |
6fb5fa3c | 4241 | df_insn_rescan (insn); |
5b0264cb | 4242 | /* Should not happen, as the first insn in the BB is always either a NOTE |
43e05e45 | 4243 | or a LABEL. */ |
5b0264cb NS |
4244 | gcc_assert (BB_HEAD (bb) != insn |
4245 | /* Avoid clobbering of structure when creating new BB. */ | |
4246 | || BARRIER_P (insn) | |
a38e7aa5 | 4247 | || NOTE_INSN_BASIC_BLOCK_P (insn)); |
3c030e88 | 4248 | } |
a0ae8e8d RK |
4249 | } |
4250 | ||
6fb5fa3c DB |
4251 | /* Replace insn with a deleted instruction note. */ | |
4252 | ||
0ce2b299 EB |
4253 | void |
4254 | set_insn_deleted (rtx insn) | |
6fb5fa3c | 4255 | { |
39718607 | 4256 | if (INSN_P (insn)) |
b2908ba6 | 4257 | df_insn_delete (as_a <rtx_insn *> (insn)); |
6fb5fa3c DB |
4258 | PUT_CODE (insn, NOTE); |
4259 | NOTE_KIND (insn) = NOTE_INSN_DELETED; | |
4260 | } | |
4261 | ||
4262 | ||
1f397f45 SB |
4263 | /* Unlink INSN from the insn chain. |
4264 | ||
4265 | This function knows how to handle sequences. | |
4266 | ||
4267 | This function does not invalidate data flow information associated with | |
4268 | INSN (i.e. does not call df_insn_delete). That makes this function | |
4269 | usable for only disconnecting an insn from the chain and re-emitting it | |
4270 | elsewhere later. | |
4271 | ||
4272 | To later insert INSN elsewhere in the insn chain via add_insn and | |
4273 | similar functions, PREV_INSN and NEXT_INSN must be nullified by | |
4274 | the caller. Nullifying them here breaks many insn chain walks. | |
4275 | ||
4276 | To really delete an insn and related DF information, use delete_insn. */ | |
4277 | ||
89e99eea | 4278 | void |
dc01c3d1 | 4279 | remove_insn (rtx uncast_insn) |
89e99eea | 4280 | { |
dc01c3d1 | 4281 | rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); |
1130d5e3 DM |
4282 | rtx_insn *next = NEXT_INSN (insn); |
4283 | rtx_insn *prev = PREV_INSN (insn); | |
53c17031 JH |
4284 | basic_block bb; |
4285 | ||
89e99eea DB |
4286 | if (prev) |
4287 | { | |
0f82e5c9 | 4288 | SET_NEXT_INSN (prev) = next; |
4b4bf941 | 4289 | if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE) |
89e99eea | 4290 | { |
e6eda746 DM |
4291 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev)); |
4292 | SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next; | |
89e99eea DB |
4293 | } |
4294 | } | |
89e99eea DB |
4295 | else |
4296 | { | |
614d5bd8 AM |
4297 | struct sequence_stack *seq; |
4298 | ||
4299 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
4300 | if (insn == seq->first) | |
89e99eea | 4301 | { |
614d5bd8 | 4302 | seq->first = next; |
89e99eea DB |
4303 | break; |
4304 | } | |
4305 | ||
614d5bd8 | 4306 | gcc_assert (seq); |
89e99eea DB |
4307 | } |
4308 | ||
4309 | if (next) | |
4310 | { | |
0f82e5c9 | 4311 | SET_PREV_INSN (next) = prev; |
4b4bf941 | 4312 | if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) |
e6eda746 DM |
4313 | { |
4314 | rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next)); | |
4315 | SET_PREV_INSN (sequence->insn (0)) = prev; | |
4316 | } | |
89e99eea | 4317 | } |
89e99eea DB |
4318 | else |
4319 | { | |
614d5bd8 AM |
4320 | struct sequence_stack *seq; |
4321 | ||
4322 | for (seq = get_current_sequence (); seq; seq = seq->next) | |
4323 | if (insn == seq->last) | |
89e99eea | 4324 | { |
614d5bd8 | 4325 | seq->last = prev; |
89e99eea DB |
4326 | break; |
4327 | } | |
4328 | ||
614d5bd8 | 4329 | gcc_assert (seq); |
89e99eea | 4330 | } |
80eb8028 | 4331 | |
80eb8028 | 4332 | /* Fix up basic block boundaries, if necessary. */ |
4b4bf941 | 4333 | if (!BARRIER_P (insn) |
53c17031 JH |
4334 | && (bb = BLOCK_FOR_INSN (insn))) |
4335 | { | |
a813c111 | 4336 | if (BB_HEAD (bb) == insn) |
53c17031 | 4337 | { |
3bf1e984 RK |
4338 | /* Never ever delete the basic block note without deleting the whole | |
4339 | basic block. */ | |
5b0264cb | 4340 | gcc_assert (!NOTE_P (insn)); |
1130d5e3 | 4341 | BB_HEAD (bb) = next; |
53c17031 | 4342 | } |
a813c111 | 4343 | if (BB_END (bb) == insn) |
1130d5e3 | 4344 | BB_END (bb) = prev; |
53c17031 | 4345 | } |
89e99eea DB |
4346 | } |
4347 | ||
ee960939 OH |
4348 | /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */ |
4349 | ||
4350 | void | |
502b8322 | 4351 | add_function_usage_to (rtx call_insn, rtx call_fusage) |
ee960939 | 4352 | { |
5b0264cb | 4353 | gcc_assert (call_insn && CALL_P (call_insn)); |
ee960939 OH |
4354 | |
4355 | /* Put the register usage information on the CALL. If there is already | |
4356 | some usage information, put ours at the end. */ | |
4357 | if (CALL_INSN_FUNCTION_USAGE (call_insn)) | |
4358 | { | |
4359 | rtx link; | |
4360 | ||
4361 | for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0; | |
4362 | link = XEXP (link, 1)) | |
4363 | ; | |
4364 | ||
4365 | XEXP (link, 1) = call_fusage; | |
4366 | } | |
4367 | else | |
4368 | CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage; | |
4369 | } | |
4370 | ||
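
A minimal usage sketch of add_function_usage_to, assuming the standard use_reg helper from expr.c; the helper name note_extra_call_use and its parameters are hypothetical:

/* Hypothetical helper: note that CALL_INSN also uses the hard register
   REGNO in mode MODE, by appending a (use (reg)) to its function usage.  */
static void
note_extra_call_use (rtx_insn *call_insn, unsigned int regno, machine_mode mode)
{
  rtx fusage = NULL_RTX;

  /* use_reg chains an EXPR_LIST of USE expressions onto *FUSAGE.  */
  use_reg (&fusage, gen_rtx_REG (mode, regno));
  add_function_usage_to (call_insn, fusage);
}
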
23b2ce53 RS |
4371 | /* Delete all insns made since FROM. |
4372 | FROM becomes the new last instruction. */ | |
4373 | ||
4374 | void | |
fee3e72c | 4375 | delete_insns_since (rtx_insn *from) |
23b2ce53 RS |
4376 | { |
4377 | if (from == 0) | |
5936d944 | 4378 | set_first_insn (0); |
23b2ce53 | 4379 | else |
0f82e5c9 | 4380 | SET_NEXT_INSN (from) = 0; |
5936d944 | 4381 | set_last_insn (from); |
23b2ce53 RS |
4382 | } |
4383 | ||
5dab5552 MS |
4384 | /* This function is deprecated, please use sequences instead. |
4385 | ||
4386 | Move a consecutive bunch of insns to a different place in the chain. | |
23b2ce53 RS |
4387 | The insns to be moved are those between FROM and TO. |
4388 | They are moved to a new position after the insn AFTER. | |
4389 | AFTER must not be FROM or TO or any insn in between. | |
4390 | ||
4391 | This function does not know about SEQUENCEs and hence should not be | |
4392 | called after delay-slot filling has been done. */ | |
4393 | ||
4394 | void | |
fee3e72c | 4395 | reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after) |
23b2ce53 | 4396 | { |
b2b29377 MM |
4397 | if (flag_checking) |
4398 | { | |
4399 | for (rtx_insn *x = from; x != to; x = NEXT_INSN (x)) | |
4400 | gcc_assert (after != x); | |
4401 | gcc_assert (after != to); | |
4402 | } | |
4f8344eb | 4403 | |
23b2ce53 RS |
4404 | /* Splice this bunch out of where it is now. */ |
4405 | if (PREV_INSN (from)) | |
0f82e5c9 | 4406 | SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to); |
23b2ce53 | 4407 | if (NEXT_INSN (to)) |
0f82e5c9 | 4408 | SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from); |
5936d944 JH |
4409 | if (get_last_insn () == to) |
4410 | set_last_insn (PREV_INSN (from)); | |
4411 | if (get_insns () == from) | |
4412 | set_first_insn (NEXT_INSN (to)); | |
23b2ce53 RS |
4413 | |
4414 | /* Make the new neighbors point to it and it to them. */ | |
4415 | if (NEXT_INSN (after)) | |
0f82e5c9 | 4416 | SET_PREV_INSN (NEXT_INSN (after)) = to; |
23b2ce53 | 4417 | |
0f82e5c9 DM |
4418 | SET_NEXT_INSN (to) = NEXT_INSN (after); |
4419 | SET_PREV_INSN (from) = after; | |
4420 | SET_NEXT_INSN (after) = from; | |
c3284718 | 4421 | if (after == get_last_insn ()) |
5936d944 | 4422 | set_last_insn (to); |
23b2ce53 RS |
4423 | } |
4424 | ||
3c030e88 JH |
4425 | /* Same as function above, but take care to update BB boundaries. */ |
4426 | void | |
ac9d2d2c | 4427 | reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after) |
3c030e88 | 4428 | { |
ac9d2d2c | 4429 | rtx_insn *prev = PREV_INSN (from); |
3c030e88 JH |
4430 | basic_block bb, bb2; |
4431 | ||
4432 | reorder_insns_nobb (from, to, after); | |
4433 | ||
4b4bf941 | 4434 | if (!BARRIER_P (after) |
3c030e88 JH |
4435 | && (bb = BLOCK_FOR_INSN (after))) |
4436 | { | |
b2908ba6 | 4437 | rtx_insn *x; |
6fb5fa3c | 4438 | df_set_bb_dirty (bb); |
68252e27 | 4439 | |
4b4bf941 | 4440 | if (!BARRIER_P (from) |
3c030e88 JH |
4441 | && (bb2 = BLOCK_FOR_INSN (from))) |
4442 | { | |
a813c111 | 4443 | if (BB_END (bb2) == to) |
1130d5e3 | 4444 | BB_END (bb2) = prev; |
6fb5fa3c | 4445 | df_set_bb_dirty (bb2); |
3c030e88 JH |
4446 | } |
4447 | ||
a813c111 | 4448 | if (BB_END (bb) == after) |
1130d5e3 | 4449 | BB_END (bb) = to; |
3c030e88 JH |
4450 | |
4451 | for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x)) | |
7bd5ed5c | 4452 | if (!BARRIER_P (x)) |
63642d5a | 4453 | df_insn_change_bb (x, bb); |
3c030e88 JH |
4454 | } |
4455 | } | |
4456 | ||
23b2ce53 | 4457 | \f |
2f937369 DM |
4458 | /* Emit insn(s) of given code and pattern |
4459 | at a specified place within the doubly-linked list. | |
23b2ce53 | 4460 | |
2f937369 DM |
4461 | All of the emit_foo global entry points accept an object |
4462 | X which is either an insn list or a PATTERN of a single | |
4463 | instruction. | |
23b2ce53 | 4464 | |
2f937369 DM |
4465 | There are thus a few canonical ways to generate code and |
4466 | emit it at a specific place in the instruction stream. For | |
4467 | example, consider the instruction named SPOT and the fact that | |
4468 | we would like to emit some instructions before SPOT. We might | |
4469 | do it like this: | |
23b2ce53 | 4470 | |
2f937369 DM |
4471 | start_sequence (); |
4472 | ... emit the new instructions ... | |
4473 | insns_head = get_insns (); | |
4474 | end_sequence (); | |
23b2ce53 | 4475 | |
2f937369 | 4476 | emit_insn_before (insns_head, SPOT); |
23b2ce53 | 4477 | |
2f937369 DM |
4478 | It used to be common to generate SEQUENCE rtl instead, but that |
4479 | is a relic of the past which no longer occurs. The reason is that | |
4480 | SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE | |
4481 | generated would almost certainly die right after it was created. */ | |
23b2ce53 | 4482 | |
cd459bf8 | 4483 | static rtx_insn * |
5f02387d | 4484 | emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb, |
167b9fae | 4485 | rtx_insn *(*make_raw) (rtx)) |
23b2ce53 | 4486 | { |
167b9fae | 4487 | rtx_insn *insn; |
23b2ce53 | 4488 | |
5b0264cb | 4489 | gcc_assert (before); |
2f937369 DM |
4490 | |
4491 | if (x == NULL_RTX) | |
cd459bf8 | 4492 | return safe_as_a <rtx_insn *> (last); |
2f937369 DM |
4493 | |
4494 | switch (GET_CODE (x)) | |
23b2ce53 | 4495 | { |
b5b8b0ac | 4496 | case DEBUG_INSN: |
2f937369 DM |
4497 | case INSN: |
4498 | case JUMP_INSN: | |
4499 | case CALL_INSN: | |
4500 | case CODE_LABEL: | |
4501 | case BARRIER: | |
4502 | case NOTE: | |
167b9fae | 4503 | insn = as_a <rtx_insn *> (x); |
2f937369 DM |
4504 | while (insn) |
4505 | { | |
167b9fae | 4506 | rtx_insn *next = NEXT_INSN (insn); |
6fb5fa3c | 4507 | add_insn_before (insn, before, bb); |
2f937369 DM |
4508 | last = insn; |
4509 | insn = next; | |
4510 | } | |
4511 | break; | |
4512 | ||
4513 | #ifdef ENABLE_RTL_CHECKING | |
4514 | case SEQUENCE: | |
5b0264cb | 4515 | gcc_unreachable (); |
2f937369 DM |
4516 | break; |
4517 | #endif | |
4518 | ||
4519 | default: | |
5f02387d | 4520 | last = (*make_raw) (x); |
6fb5fa3c | 4521 | add_insn_before (last, before, bb); |
2f937369 | 4522 | break; |
23b2ce53 RS |
4523 | } |
4524 | ||
cd459bf8 | 4525 | return safe_as_a <rtx_insn *> (last); |
23b2ce53 RS |
4526 | } |
4527 | ||
5f02387d NF |
4528 | /* Make X be output before the instruction BEFORE. */ |
4529 | ||
cd459bf8 | 4530 | rtx_insn * |
596f2b17 | 4531 | emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb) |
5f02387d NF |
4532 | { |
4533 | return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw); | |
4534 | } | |
4535 | ||
2f937369 | 4536 | /* Make an instruction with body X and code JUMP_INSN |
23b2ce53 RS |
4537 | and output it before the instruction BEFORE. */ |
4538 | ||
1476d1bd | 4539 | rtx_jump_insn * |
596f2b17 | 4540 | emit_jump_insn_before_noloc (rtx x, rtx_insn *before) |
23b2ce53 | 4541 | { |
1476d1bd MM |
4542 | return as_a <rtx_jump_insn *> ( |
4543 | emit_pattern_before_noloc (x, before, NULL_RTX, NULL, | |
4544 | make_jump_insn_raw)); | |
23b2ce53 RS |
4545 | } |
4546 | ||
2f937369 | 4547 | /* Make an instruction with body X and code CALL_INSN |
969d70ca JH |
4548 | and output it before the instruction BEFORE. */ |
4549 | ||
cd459bf8 | 4550 | rtx_insn * |
596f2b17 | 4551 | emit_call_insn_before_noloc (rtx x, rtx_insn *before) |
969d70ca | 4552 | { |
5f02387d NF |
4553 | return emit_pattern_before_noloc (x, before, NULL_RTX, NULL, |
4554 | make_call_insn_raw); | |
969d70ca JH |
4555 | } |
4556 | ||
b5b8b0ac AO |
4557 | /* Make an instruction with body X and code DEBUG_INSN |
4558 | and output it before the instruction BEFORE. */ | |
4559 | ||
cd459bf8 | 4560 | rtx_insn * |
b5b8b0ac AO |
4561 | emit_debug_insn_before_noloc (rtx x, rtx before) |
4562 | { | |
5f02387d NF |
4563 | return emit_pattern_before_noloc (x, before, NULL_RTX, NULL, |
4564 | make_debug_insn_raw); | |
b5b8b0ac AO |
4565 | } |
4566 | ||
23b2ce53 | 4567 | /* Make an insn of code BARRIER |
e881bb1b | 4568 | and output it before the insn BEFORE. */ |
23b2ce53 | 4569 | |
cd459bf8 | 4570 | rtx_barrier * |
502b8322 | 4571 | emit_barrier_before (rtx before) |
23b2ce53 | 4572 | { |
cd459bf8 | 4573 | rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
23b2ce53 RS |
4574 | |
4575 | INSN_UID (insn) = cur_insn_uid++; | |
4576 | ||
6fb5fa3c | 4577 | add_insn_before (insn, before, NULL); |
23b2ce53 RS |
4578 | return insn; |
4579 | } | |
4580 | ||
e881bb1b RH |
4581 | /* Emit the label LABEL before the insn BEFORE. */ |
4582 | ||
1476d1bd | 4583 | rtx_code_label * |
596f2b17 | 4584 | emit_label_before (rtx label, rtx_insn *before) |
e881bb1b | 4585 | { |
468660d3 SB |
4586 | gcc_checking_assert (INSN_UID (label) == 0); |
4587 | INSN_UID (label) = cur_insn_uid++; | |
4588 | add_insn_before (label, before, NULL); | |
1476d1bd | 4589 | return as_a <rtx_code_label *> (label); |
e881bb1b | 4590 | } |
23b2ce53 | 4591 | \f |
2f937369 DM |
4592 | /* Helper for emit_insn_after, handles lists of instructions |
4593 | efficiently. */ | |
23b2ce53 | 4594 | |
e6eda746 DM |
4595 | static rtx_insn * |
4596 | emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb) | |
23b2ce53 | 4597 | { |
e6eda746 | 4598 | rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); |
1130d5e3 DM |
4599 | rtx_insn *last; |
4600 | rtx_insn *after_after; | |
6fb5fa3c DB |
4601 | if (!bb && !BARRIER_P (after)) |
4602 | bb = BLOCK_FOR_INSN (after); | |
23b2ce53 | 4603 | |
6fb5fa3c | 4604 | if (bb) |
23b2ce53 | 4605 | { |
6fb5fa3c | 4606 | df_set_bb_dirty (bb); |
2f937369 | 4607 | for (last = first; NEXT_INSN (last); last = NEXT_INSN (last)) |
4b4bf941 | 4608 | if (!BARRIER_P (last)) |
6fb5fa3c DB |
4609 | { |
4610 | set_block_for_insn (last, bb); | |
4611 | df_insn_rescan (last); | |
4612 | } | |
4b4bf941 | 4613 | if (!BARRIER_P (last)) |
6fb5fa3c DB |
4614 | { |
4615 | set_block_for_insn (last, bb); | |
4616 | df_insn_rescan (last); | |
4617 | } | |
a813c111 | 4618 | if (BB_END (bb) == after) |
1130d5e3 | 4619 | BB_END (bb) = last; |
23b2ce53 RS |
4620 | } |
4621 | else | |
2f937369 DM |
4622 | for (last = first; NEXT_INSN (last); last = NEXT_INSN (last)) |
4623 | continue; | |
4624 | ||
4625 | after_after = NEXT_INSN (after); | |
4626 | ||
0f82e5c9 DM |
4627 | SET_NEXT_INSN (after) = first; |
4628 | SET_PREV_INSN (first) = after; | |
4629 | SET_NEXT_INSN (last) = after_after; | |
2f937369 | 4630 | if (after_after) |
0f82e5c9 | 4631 | SET_PREV_INSN (after_after) = last; |
2f937369 | 4632 | |
c3284718 | 4633 | if (after == get_last_insn ()) |
5936d944 | 4634 | set_last_insn (last); |
e855c69d | 4635 | |
2f937369 DM |
4636 | return last; |
4637 | } | |
4638 | ||
cd459bf8 | 4639 | static rtx_insn * |
e6eda746 | 4640 | emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb, |
167b9fae | 4641 | rtx_insn *(*make_raw)(rtx)) |
2f937369 | 4642 | { |
e6eda746 DM |
4643 | rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); |
4644 | rtx_insn *last = after; | |
2f937369 | 4645 | |
5b0264cb | 4646 | gcc_assert (after); |
2f937369 DM |
4647 | |
4648 | if (x == NULL_RTX) | |
e6eda746 | 4649 | return last; |
2f937369 DM |
4650 | |
4651 | switch (GET_CODE (x)) | |
23b2ce53 | 4652 | { |
b5b8b0ac | 4653 | case DEBUG_INSN: |
2f937369 DM |
4654 | case INSN: |
4655 | case JUMP_INSN: | |
4656 | case CALL_INSN: | |
4657 | case CODE_LABEL: | |
4658 | case BARRIER: | |
4659 | case NOTE: | |
1130d5e3 | 4660 | last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb); |
2f937369 DM |
4661 | break; |
4662 | ||
4663 | #ifdef ENABLE_RTL_CHECKING | |
4664 | case SEQUENCE: | |
5b0264cb | 4665 | gcc_unreachable (); |
2f937369 DM |
4666 | break; |
4667 | #endif | |
4668 | ||
4669 | default: | |
5f02387d | 4670 | last = (*make_raw) (x); |
6fb5fa3c | 4671 | add_insn_after (last, after, bb); |
2f937369 | 4672 | break; |
23b2ce53 RS |
4673 | } |
4674 | ||
e6eda746 | 4675 | return last; |
23b2ce53 RS |
4676 | } |
4677 | ||
5f02387d NF |
4678 | /* Make X be output after the insn AFTER and set the BB of insn. If |
4679 | BB is NULL, an attempt is made to infer the BB from AFTER. */ | |
4680 | ||
cd459bf8 | 4681 | rtx_insn * |
5f02387d NF |
4682 | emit_insn_after_noloc (rtx x, rtx after, basic_block bb) |
4683 | { | |
4684 | return emit_pattern_after_noloc (x, after, bb, make_insn_raw); | |
4685 | } | |
4686 | ||
255680cf | 4687 | |
2f937369 | 4688 | /* Make an insn of code JUMP_INSN with body X |
23b2ce53 RS |
4689 | and output it after the insn AFTER. */ |
4690 | ||
1476d1bd | 4691 | rtx_jump_insn * |
a7102479 | 4692 | emit_jump_insn_after_noloc (rtx x, rtx after) |
23b2ce53 | 4693 | { |
1476d1bd MM |
4694 | return as_a <rtx_jump_insn *> ( |
4695 | emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw)); | |
2f937369 DM |
4696 | } |
4697 | ||
4698 | /* Make an instruction with body X and code CALL_INSN | |
4699 | and output it after the instruction AFTER. */ | |
4700 | ||
cd459bf8 | 4701 | rtx_insn * |
a7102479 | 4702 | emit_call_insn_after_noloc (rtx x, rtx after) |
2f937369 | 4703 | { |
5f02387d | 4704 | return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw); |
23b2ce53 RS |
4705 | } |
4706 | ||
b5b8b0ac AO |
4707 | /* Make an instruction with body X and code DEBUG_INSN | |
4708 | and output it after the instruction AFTER. */ | |
4709 | ||
cd459bf8 | 4710 | rtx_insn * |
b5b8b0ac AO |
4711 | emit_debug_insn_after_noloc (rtx x, rtx after) |
4712 | { | |
5f02387d | 4713 | return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw); |
b5b8b0ac AO |
4714 | } |
4715 | ||
23b2ce53 RS |
4716 | /* Make an insn of code BARRIER |
4717 | and output it after the insn AFTER. */ | |
4718 | ||
cd459bf8 | 4719 | rtx_barrier * |
502b8322 | 4720 | emit_barrier_after (rtx after) |
23b2ce53 | 4721 | { |
cd459bf8 | 4722 | rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
23b2ce53 RS |
4723 | |
4724 | INSN_UID (insn) = cur_insn_uid++; | |
4725 | ||
6fb5fa3c | 4726 | add_insn_after (insn, after, NULL); |
23b2ce53 RS |
4727 | return insn; |
4728 | } | |
4729 | ||
4730 | /* Emit the label LABEL after the insn AFTER. */ | |
4731 | ||
cd459bf8 | 4732 | rtx_insn * |
596f2b17 | 4733 | emit_label_after (rtx label, rtx_insn *after) |
23b2ce53 | 4734 | { |
468660d3 SB |
4735 | gcc_checking_assert (INSN_UID (label) == 0); |
4736 | INSN_UID (label) = cur_insn_uid++; | |
4737 | add_insn_after (label, after, NULL); | |
cd459bf8 | 4738 | return as_a <rtx_insn *> (label); |
23b2ce53 | 4739 | } |
96fba521 SB |
4740 | \f |
4741 | /* Notes require a bit of special handling: Some notes need to have their | |
4742 | BLOCK_FOR_INSN set, others should never have it set, and some should | |
4743 | have it set or clear depending on the context. */ | |
4744 | ||
4745 | /* Return true iff a note of kind SUBTYPE should be emitted with routines | |
4746 | that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the | |
4747 | caller is asked to emit a note before BB_HEAD, or after BB_END. */ | |
4748 | ||
4749 | static bool | |
4750 | note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p) | |
4751 | { | |
4752 | switch (subtype) | |
4753 | { | |
4754 | /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */ | |
4755 | case NOTE_INSN_SWITCH_TEXT_SECTIONS: | |
4756 | return true; | |
4757 | ||
4758 | /* Notes for var tracking and EH region markers can appear between or | |
4759 | inside basic blocks. If the caller is emitting on the basic block | |
4760 | boundary, do not set BLOCK_FOR_INSN on the new note. */ | |
4761 | case NOTE_INSN_VAR_LOCATION: | |
4762 | case NOTE_INSN_CALL_ARG_LOCATION: | |
4763 | case NOTE_INSN_EH_REGION_BEG: | |
4764 | case NOTE_INSN_EH_REGION_END: | |
4765 | return on_bb_boundary_p; | |
4766 | ||
4767 | /* Otherwise, BLOCK_FOR_INSN must be set. */ | |
4768 | default: | |
4769 | return false; | |
4770 | } | |
4771 | } | |
23b2ce53 RS |
4772 | |
4773 | /* Emit a note of subtype SUBTYPE after the insn AFTER. */ | |
4774 | ||
66e8df53 | 4775 | rtx_note * |
589e43f9 | 4776 | emit_note_after (enum insn_note subtype, rtx_insn *after) |
23b2ce53 | 4777 | { |
66e8df53 | 4778 | rtx_note *note = make_note_raw (subtype); |
96fba521 SB |
4779 | basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after); |
4780 | bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after); | |
4781 | ||
4782 | if (note_outside_basic_block_p (subtype, on_bb_boundary_p)) | |
4783 | add_insn_after_nobb (note, after); | |
4784 | else | |
4785 | add_insn_after (note, after, bb); | |
4786 | return note; | |
4787 | } | |
4788 | ||
4789 | /* Emit a note of subtype SUBTYPE before the insn BEFORE. */ | |
4790 | ||
66e8df53 | 4791 | rtx_note * |
89b6250d | 4792 | emit_note_before (enum insn_note subtype, rtx_insn *before) |
96fba521 | 4793 | { |
66e8df53 | 4794 | rtx_note *note = make_note_raw (subtype); |
96fba521 SB |
4795 | basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before); |
4796 | bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before); | |
4797 | ||
4798 | if (note_outside_basic_block_p (subtype, on_bb_boundary_p)) | |
4799 | add_insn_before_nobb (note, before); | |
4800 | else | |
4801 | add_insn_before (note, before, bb); | |
23b2ce53 RS |
4802 | return note; |
4803 | } | |
23b2ce53 | 4804 | \f |
e8110d6f NF |
4805 | /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC. |
4806 | MAKE_RAW indicates how to turn PATTERN into a real insn. */ | |
4807 | ||
cd459bf8 | 4808 | static rtx_insn * |
dc01c3d1 | 4809 | emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc, |
167b9fae | 4810 | rtx_insn *(*make_raw) (rtx)) |
0d682900 | 4811 | { |
dc01c3d1 | 4812 | rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); |
e67d1102 | 4813 | rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw); |
0d682900 | 4814 | |
a7102479 | 4815 | if (pattern == NULL_RTX || !loc) |
e67d1102 | 4816 | return last; |
dd3adcf8 | 4817 | |
2f937369 DM |
4818 | after = NEXT_INSN (after); |
4819 | while (1) | |
4820 | { | |
20d4397a EB |
4821 | if (active_insn_p (after) |
4822 | && !JUMP_TABLE_DATA_P (after) /* FIXME */ | |
4823 | && !INSN_LOCATION (after)) | |
5368224f | 4824 | INSN_LOCATION (after) = loc; |
2f937369 DM |
4825 | if (after == last) |
4826 | break; | |
4827 | after = NEXT_INSN (after); | |
4828 | } | |
e67d1102 | 4829 | return last; |
0d682900 JH |
4830 | } |
4831 | ||
e8110d6f NF |
4832 | /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN |
4833 | into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after | |
4834 | any DEBUG_INSNs. */ | |
4835 | ||
cd459bf8 | 4836 | static rtx_insn * |
dc01c3d1 | 4837 | emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns, |
167b9fae | 4838 | rtx_insn *(*make_raw) (rtx)) |
a7102479 | 4839 | { |
dc01c3d1 DM |
4840 | rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); |
4841 | rtx_insn *prev = after; | |
b5b8b0ac | 4842 | |
e8110d6f NF |
4843 | if (skip_debug_insns) |
4844 | while (DEBUG_INSN_P (prev)) | |
4845 | prev = PREV_INSN (prev); | |
b5b8b0ac AO |
4846 | |
4847 | if (INSN_P (prev)) | |
5368224f | 4848 | return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev), |
e8110d6f | 4849 | make_raw); |
a7102479 | 4850 | else |
e8110d6f | 4851 | return emit_pattern_after_noloc (pattern, after, NULL, make_raw); |
a7102479 JH |
4852 | } |
4853 | ||
5368224f | 4854 | /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
cd459bf8 | 4855 | rtx_insn * |
e8110d6f | 4856 | emit_insn_after_setloc (rtx pattern, rtx after, int loc) |
0d682900 | 4857 | { |
e8110d6f NF |
4858 | return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw); |
4859 | } | |
2f937369 | 4860 | |
5368224f | 4861 | /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
cd459bf8 | 4862 | rtx_insn * |
e8110d6f NF |
4863 | emit_insn_after (rtx pattern, rtx after) |
4864 | { | |
4865 | return emit_pattern_after (pattern, after, true, make_insn_raw); | |
4866 | } | |
dd3adcf8 | 4867 | |
5368224f | 4868 | /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
1476d1bd | 4869 | rtx_jump_insn * |
e8110d6f NF |
4870 | emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc) |
4871 | { | |
1476d1bd MM |
4872 | return as_a <rtx_jump_insn *> ( |
4873 | emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw)); | |
0d682900 JH |
4874 | } |
4875 | ||
5368224f | 4876 | /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
1476d1bd | 4877 | rtx_jump_insn * |
a7102479 JH |
4878 | emit_jump_insn_after (rtx pattern, rtx after) |
4879 | { | |
1476d1bd MM |
4880 | return as_a <rtx_jump_insn *> ( |
4881 | emit_pattern_after (pattern, after, true, make_jump_insn_raw)); | |
a7102479 JH |
4882 | } |
4883 | ||
5368224f | 4884 | /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
cd459bf8 | 4885 | rtx_insn * |
502b8322 | 4886 | emit_call_insn_after_setloc (rtx pattern, rtx after, int loc) |
0d682900 | 4887 | { |
e8110d6f | 4888 | return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw); |
0d682900 JH |
4889 | } |
4890 | ||
5368224f | 4891 | /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
cd459bf8 | 4892 | rtx_insn * |
a7102479 JH |
4893 | emit_call_insn_after (rtx pattern, rtx after) |
4894 | { | |
e8110d6f | 4895 | return emit_pattern_after (pattern, after, true, make_call_insn_raw); |
a7102479 JH |
4896 | } |
4897 | ||
5368224f | 4898 | /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
cd459bf8 | 4899 | rtx_insn * |
b5b8b0ac AO |
4900 | emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc) |
4901 | { | |
e8110d6f | 4902 | return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw); |
b5b8b0ac AO |
4903 | } |
4904 | ||
5368224f | 4905 | /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
cd459bf8 | 4906 | rtx_insn * |
b5b8b0ac AO |
4907 | emit_debug_insn_after (rtx pattern, rtx after) |
4908 | { | |
e8110d6f | 4909 | return emit_pattern_after (pattern, after, false, make_debug_insn_raw); |
b5b8b0ac AO |
4910 | } |
4911 | ||
e8110d6f NF |
4912 | /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC. |
4913 | MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP | |
4914 | indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN, | |
4915 | CALL_INSN, etc. */ | |
4916 | ||
cd459bf8 | 4917 | static rtx_insn * |
dc01c3d1 | 4918 | emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp, |
167b9fae | 4919 | rtx_insn *(*make_raw) (rtx)) |
0d682900 | 4920 | { |
dc01c3d1 DM |
4921 | rtx_insn *before = as_a <rtx_insn *> (uncast_before); |
4922 | rtx_insn *first = PREV_INSN (before); | |
4923 | rtx_insn *last = emit_pattern_before_noloc (pattern, before, | |
4924 | insnp ? before : NULL_RTX, | |
4925 | NULL, make_raw); | |
a7102479 JH |
4926 | |
4927 | if (pattern == NULL_RTX || !loc) | |
dc01c3d1 | 4928 | return last; |
a7102479 | 4929 | |
26cb3993 JH |
4930 | if (!first) |
4931 | first = get_insns (); | |
4932 | else | |
4933 | first = NEXT_INSN (first); | |
a7102479 JH |
4934 | while (1) |
4935 | { | |
20d4397a EB |
4936 | if (active_insn_p (first) |
4937 | && !JUMP_TABLE_DATA_P (first) /* FIXME */ | |
4938 | && !INSN_LOCATION (first)) | |
5368224f | 4939 | INSN_LOCATION (first) = loc; |
a7102479 JH |
4940 | if (first == last) |
4941 | break; | |
4942 | first = NEXT_INSN (first); | |
4943 | } | |
dc01c3d1 | 4944 | return last; |
a7102479 JH |
4945 | } |
4946 | ||
e8110d6f NF |
4947 | /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN |
4948 | into a real insn. SKIP_DEBUG_INSNS indicates whether to insert | |
4949 | before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an | |
4950 | INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */ | |
4951 | ||
cd459bf8 | 4952 | static rtx_insn * |
dc01c3d1 | 4953 | emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns, |
167b9fae | 4954 | bool insnp, rtx_insn *(*make_raw) (rtx)) |
a7102479 | 4955 | { |
dc01c3d1 DM |
4956 | rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before); |
4957 | rtx_insn *next = before; | |
b5b8b0ac | 4958 | |
e8110d6f NF |
4959 | if (skip_debug_insns) |
4960 | while (DEBUG_INSN_P (next)) | |
4961 | next = PREV_INSN (next); | |
b5b8b0ac AO |
4962 | |
4963 | if (INSN_P (next)) | |
5368224f | 4964 | return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next), |
e8110d6f | 4965 | insnp, make_raw); |
a7102479 | 4966 | else |
e8110d6f | 4967 | return emit_pattern_before_noloc (pattern, before, |
9b2ea071 | 4968 | insnp ? before : NULL_RTX, |
e8110d6f | 4969 | NULL, make_raw); |
a7102479 JH |
4970 | } |
4971 | ||
5368224f | 4972 | /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
cd459bf8 | 4973 | rtx_insn * |
596f2b17 | 4974 | emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc) |
a7102479 | 4975 | { |
e8110d6f NF |
4976 | return emit_pattern_before_setloc (pattern, before, loc, true, |
4977 | make_insn_raw); | |
4978 | } | |
a7102479 | 4979 | |
5368224f | 4980 | /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ |
cd459bf8 | 4981 | rtx_insn * |
e8110d6f NF |
4982 | emit_insn_before (rtx pattern, rtx before) |
4983 | { | |
4984 | return emit_pattern_before (pattern, before, true, true, make_insn_raw); | |
4985 | } | |
a7102479 | 4986 | |
5368224f | 4987 | /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
1476d1bd | 4988 | rtx_jump_insn * |
596f2b17 | 4989 | emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc) |
e8110d6f | 4990 | { |
1476d1bd MM |
4991 | return as_a <rtx_jump_insn *> ( |
4992 | emit_pattern_before_setloc (pattern, before, loc, false, | |
4993 | make_jump_insn_raw)); | |
a7102479 JH |
4994 | } |
4995 | ||
5368224f | 4996 | /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ |
1476d1bd | 4997 | rtx_jump_insn * |
a7102479 JH |
4998 | emit_jump_insn_before (rtx pattern, rtx before) |
4999 | { | |
1476d1bd MM |
5000 | return as_a <rtx_jump_insn *> ( |
5001 | emit_pattern_before (pattern, before, true, false, | |
5002 | make_jump_insn_raw)); | |
a7102479 JH |
5003 | } |
5004 | ||
5368224f | 5005 | /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
cd459bf8 | 5006 | rtx_insn * |
596f2b17 | 5007 | emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc) |
a7102479 | 5008 | { |
e8110d6f NF |
5009 | return emit_pattern_before_setloc (pattern, before, loc, false, |
5010 | make_call_insn_raw); | |
0d682900 | 5011 | } |
a7102479 | 5012 | |
e8110d6f | 5013 | /* Like emit_call_insn_before_noloc, |
5368224f | 5014 | but set insn_location according to BEFORE. */ |
cd459bf8 | 5015 | rtx_insn * |
596f2b17 | 5016 | emit_call_insn_before (rtx pattern, rtx_insn *before) |
a7102479 | 5017 | { |
e8110d6f NF |
5018 | return emit_pattern_before (pattern, before, true, false, |
5019 | make_call_insn_raw); | |
a7102479 | 5020 | } |
b5b8b0ac | 5021 | |
5368224f | 5022 | /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
cd459bf8 | 5023 | rtx_insn * |
b5b8b0ac AO |
5024 | emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc) |
5025 | { | |
e8110d6f NF |
5026 | return emit_pattern_before_setloc (pattern, before, loc, false, |
5027 | make_debug_insn_raw); | |
b5b8b0ac AO |
5028 | } |
5029 | ||
e8110d6f | 5030 | /* Like emit_debug_insn_before_noloc, |
5368224f | 5031 | but set insn_location according to BEFORE. */ |
cd459bf8 | 5032 | rtx_insn * |
3a6216b0 | 5033 | emit_debug_insn_before (rtx pattern, rtx_insn *before) |
b5b8b0ac | 5034 | { |
e8110d6f NF |
5035 | return emit_pattern_before (pattern, before, false, false, |
5036 | make_debug_insn_raw); | |
b5b8b0ac | 5037 | } |
0d682900 | 5038 | \f |
2f937369 DM |
5039 | /* Take X and emit it at the end of the doubly-linked |
5040 | INSN list. | |
23b2ce53 RS |
5041 | |
5042 | Returns the last insn emitted. */ | |
5043 | ||
cd459bf8 | 5044 | rtx_insn * |
502b8322 | 5045 | emit_insn (rtx x) |
23b2ce53 | 5046 | { |
cd459bf8 DM |
5047 | rtx_insn *last = get_last_insn (); |
5048 | rtx_insn *insn; | |
23b2ce53 | 5049 | |
2f937369 DM |
5050 | if (x == NULL_RTX) |
5051 | return last; | |
23b2ce53 | 5052 | |
2f937369 DM |
5053 | switch (GET_CODE (x)) |
5054 | { | |
b5b8b0ac | 5055 | case DEBUG_INSN: |
2f937369 DM |
5056 | case INSN: |
5057 | case JUMP_INSN: | |
5058 | case CALL_INSN: | |
5059 | case CODE_LABEL: | |
5060 | case BARRIER: | |
5061 | case NOTE: | |
cd459bf8 | 5062 | insn = as_a <rtx_insn *> (x); |
2f937369 | 5063 | while (insn) |
23b2ce53 | 5064 | { |
cd459bf8 | 5065 | rtx_insn *next = NEXT_INSN (insn); |
23b2ce53 | 5066 | add_insn (insn); |
2f937369 DM |
5067 | last = insn; |
5068 | insn = next; | |
23b2ce53 | 5069 | } |
2f937369 | 5070 | break; |
23b2ce53 | 5071 | |
2f937369 | 5072 | #ifdef ENABLE_RTL_CHECKING |
39718607 | 5073 | case JUMP_TABLE_DATA: |
2f937369 | 5074 | case SEQUENCE: |
5b0264cb | 5075 | gcc_unreachable (); |
2f937369 DM |
5076 | break; |
5077 | #endif | |
23b2ce53 | 5078 | |
2f937369 DM |
5079 | default: |
5080 | last = make_insn_raw (x); | |
5081 | add_insn (last); | |
5082 | break; | |
23b2ce53 RS |
5083 | } |
5084 | ||
5085 | return last; | |
5086 | } | |
5087 | ||
b5b8b0ac AO |
5088 | /* Make an insn of code DEBUG_INSN with pattern X |
5089 | and add it to the end of the doubly-linked list. */ | |
5090 | ||
cd459bf8 | 5091 | rtx_insn * |
b5b8b0ac AO |
5092 | emit_debug_insn (rtx x) |
5093 | { | |
cd459bf8 DM |
5094 | rtx_insn *last = get_last_insn (); |
5095 | rtx_insn *insn; | |
b5b8b0ac AO |
5096 | |
5097 | if (x == NULL_RTX) | |
5098 | return last; | |
5099 | ||
5100 | switch (GET_CODE (x)) | |
5101 | { | |
5102 | case DEBUG_INSN: | |
5103 | case INSN: | |
5104 | case JUMP_INSN: | |
5105 | case CALL_INSN: | |
5106 | case CODE_LABEL: | |
5107 | case BARRIER: | |
5108 | case NOTE: | |
cd459bf8 | 5109 | insn = as_a <rtx_insn *> (x); |
b5b8b0ac AO |
5110 | while (insn) |
5111 | { | |
cd459bf8 | 5112 | rtx_insn *next = NEXT_INSN (insn); |
b5b8b0ac AO |
5113 | add_insn (insn); |
5114 | last = insn; | |
5115 | insn = next; | |
5116 | } | |
5117 | break; | |
5118 | ||
5119 | #ifdef ENABLE_RTL_CHECKING | |
39718607 | 5120 | case JUMP_TABLE_DATA: |
b5b8b0ac AO |
5121 | case SEQUENCE: |
5122 | gcc_unreachable (); | |
5123 | break; | |
5124 | #endif | |
5125 | ||
5126 | default: | |
5127 | last = make_debug_insn_raw (x); | |
5128 | add_insn (last); | |
5129 | break; | |
5130 | } | |
5131 | ||
5132 | return last; | |
5133 | } | |
5134 | ||
2f937369 DM |
5135 | /* Make an insn of code JUMP_INSN with pattern X |
5136 | and add it to the end of the doubly-linked list. */ | |
23b2ce53 | 5137 | |
cd459bf8 | 5138 | rtx_insn * |
502b8322 | 5139 | emit_jump_insn (rtx x) |
23b2ce53 | 5140 | { |
cd459bf8 DM |
5141 | rtx_insn *last = NULL; |
5142 | rtx_insn *insn; | |
23b2ce53 | 5143 | |
2f937369 | 5144 | switch (GET_CODE (x)) |
23b2ce53 | 5145 | { |
b5b8b0ac | 5146 | case DEBUG_INSN: |
2f937369 DM |
5147 | case INSN: |
5148 | case JUMP_INSN: | |
5149 | case CALL_INSN: | |
5150 | case CODE_LABEL: | |
5151 | case BARRIER: | |
5152 | case NOTE: | |
cd459bf8 | 5153 | insn = as_a <rtx_insn *> (x); |
2f937369 DM |
5154 | while (insn) |
5155 | { | |
cd459bf8 | 5156 | rtx_insn *next = NEXT_INSN (insn); |
2f937369 DM |
5157 | add_insn (insn); |
5158 | last = insn; | |
5159 | insn = next; | |
5160 | } | |
5161 | break; | |
e0a5c5eb | 5162 | |
2f937369 | 5163 | #ifdef ENABLE_RTL_CHECKING |
39718607 | 5164 | case JUMP_TABLE_DATA: |
2f937369 | 5165 | case SEQUENCE: |
5b0264cb | 5166 | gcc_unreachable (); |
2f937369 DM |
5167 | break; |
5168 | #endif | |
e0a5c5eb | 5169 | |
2f937369 DM |
5170 | default: |
5171 | last = make_jump_insn_raw (x); | |
5172 | add_insn (last); | |
5173 | break; | |
3c030e88 | 5174 | } |
e0a5c5eb RS |
5175 | |
5176 | return last; | |
5177 | } | |
5178 | ||
2f937369 | 5179 | /* Make an insn of code CALL_INSN with pattern X |
23b2ce53 RS |
5180 | and add it to the end of the doubly-linked list. */ |
5181 | ||
cd459bf8 | 5182 | rtx_insn * |
502b8322 | 5183 | emit_call_insn (rtx x) |
23b2ce53 | 5184 | { |
cd459bf8 | 5185 | rtx_insn *insn; |
2f937369 DM |
5186 | |
5187 | switch (GET_CODE (x)) | |
23b2ce53 | 5188 | { |
b5b8b0ac | 5189 | case DEBUG_INSN: |
2f937369 DM |
5190 | case INSN: |
5191 | case JUMP_INSN: | |
5192 | case CALL_INSN: | |
5193 | case CODE_LABEL: | |
5194 | case BARRIER: | |
5195 | case NOTE: | |
5196 | insn = emit_insn (x); | |
5197 | break; | |
23b2ce53 | 5198 | |
2f937369 DM |
5199 | #ifdef ENABLE_RTL_CHECKING |
5200 | case SEQUENCE: | |
39718607 | 5201 | case JUMP_TABLE_DATA: |
5b0264cb | 5202 | gcc_unreachable (); |
2f937369 DM |
5203 | break; |
5204 | #endif | |
23b2ce53 | 5205 | |
2f937369 DM |
5206 | default: |
5207 | insn = make_call_insn_raw (x); | |
23b2ce53 | 5208 | add_insn (insn); |
2f937369 | 5209 | break; |
23b2ce53 | 5210 | } |
2f937369 DM |
5211 | |
5212 | return insn; | |
23b2ce53 RS |
5213 | } |
5214 | ||
5215 | /* Add the label LABEL to the end of the doubly-linked list. */ | |
5216 | ||
1476d1bd MM |
5217 | rtx_code_label * |
5218 | emit_label (rtx uncast_label) | |
23b2ce53 | 5219 | { |
1476d1bd MM |
5220 | rtx_code_label *label = as_a <rtx_code_label *> (uncast_label); |
5221 | ||
468660d3 SB |
5222 | gcc_checking_assert (INSN_UID (label) == 0); |
5223 | INSN_UID (label) = cur_insn_uid++; | |
1476d1bd MM |
5224 | add_insn (label); |
5225 | return label; | |
23b2ce53 RS |
5226 | } |
5227 | ||
39718607 SB |
5228 | /* Make an insn of code JUMP_TABLE_DATA |
5229 | and add it to the end of the doubly-linked list. */ | |
5230 | ||
4598afdd | 5231 | rtx_jump_table_data * |
39718607 SB |
5232 | emit_jump_table_data (rtx table) |
5233 | { | |
4598afdd DM |
5234 | rtx_jump_table_data *jump_table_data = |
5235 | as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA)); | |
39718607 SB |
5236 | INSN_UID (jump_table_data) = cur_insn_uid++; |
5237 | PATTERN (jump_table_data) = table; | |
5238 | BLOCK_FOR_INSN (jump_table_data) = NULL; | |
5239 | add_insn (jump_table_data); | |
5240 | return jump_table_data; | |
5241 | } | |
5242 | ||
23b2ce53 RS |
5243 | /* Make an insn of code BARRIER |
5244 | and add it to the end of the doubly-linked list. */ | |
5245 | ||
cd459bf8 | 5246 | rtx_barrier * |
502b8322 | 5247 | emit_barrier (void) |
23b2ce53 | 5248 | { |
cd459bf8 | 5249 | rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
23b2ce53 RS |
5250 | INSN_UID (barrier) = cur_insn_uid++; |
5251 | add_insn (barrier); | |
5252 | return barrier; | |
5253 | } | |
5254 | ||
5f2fc772 | 5255 | /* Emit a copy of note ORIG. */ |
502b8322 | 5256 | |
66e8df53 DM |
5257 | rtx_note * |
5258 | emit_note_copy (rtx_note *orig) | |
5f2fc772 | 5259 | { |
96fba521 | 5260 | enum insn_note kind = (enum insn_note) NOTE_KIND (orig); |
66e8df53 | 5261 | rtx_note *note = make_note_raw (kind); |
5f2fc772 | 5262 | NOTE_DATA (note) = NOTE_DATA (orig); |
5f2fc772 | 5263 | add_insn (note); |
2e040219 | 5264 | return note; |
23b2ce53 RS |
5265 | } |
5266 | ||
2e040219 NS |
5267 | /* Make an insn of code NOTE or type NOTE_NO |
5268 | and add it to the end of the doubly-linked list. */ | |
23b2ce53 | 5269 | |
66e8df53 | 5270 | rtx_note * |
a38e7aa5 | 5271 | emit_note (enum insn_note kind) |
23b2ce53 | 5272 | { |
66e8df53 | 5273 | rtx_note *note = make_note_raw (kind); |
23b2ce53 RS |
5274 | add_insn (note); |
5275 | return note; | |
5276 | } | |
5277 | ||
c41c1387 RS |
5278 | /* Emit a clobber of lvalue X. */ |
5279 | ||
cd459bf8 | 5280 | rtx_insn * |
c41c1387 RS |
5281 | emit_clobber (rtx x) |
5282 | { | |
5283 | /* CONCATs should not appear in the insn stream. */ | |
5284 | if (GET_CODE (x) == CONCAT) | |
5285 | { | |
5286 | emit_clobber (XEXP (x, 0)); | |
5287 | return emit_clobber (XEXP (x, 1)); | |
5288 | } | |
5289 | return emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); | |
5290 | } | |
5291 | ||
5292 | /* Return a sequence of insns to clobber lvalue X. */ | |
5293 | ||
cd459bf8 | 5294 | rtx_insn * |
c41c1387 RS |
5295 | gen_clobber (rtx x) |
5296 | { | |
cd459bf8 | 5297 | rtx_insn *seq; |
c41c1387 RS |
5298 | |
5299 | start_sequence (); | |
5300 | emit_clobber (x); | |
5301 | seq = get_insns (); | |
5302 | end_sequence (); | |
5303 | return seq; | |
5304 | } | |
5305 | ||
5306 | /* Emit a use of rvalue X. */ | |
5307 | ||
cd459bf8 | 5308 | rtx_insn * |
c41c1387 RS |
5309 | emit_use (rtx x) |
5310 | { | |
5311 | /* CONCATs should not appear in the insn stream. */ | |
5312 | if (GET_CODE (x) == CONCAT) | |
5313 | { | |
5314 | emit_use (XEXP (x, 0)); | |
5315 | return emit_use (XEXP (x, 1)); | |
5316 | } | |
5317 | return emit_insn (gen_rtx_USE (VOIDmode, x)); | |
5318 | } | |
5319 | ||
5320 | /* Return a sequence of insns to use rvalue X. */ | |
5321 | ||
cd459bf8 | 5322 | rtx_insn * |
c41c1387 RS |
5323 | gen_use (rtx x) |
5324 | { | |
cd459bf8 | 5325 | rtx_insn *seq; |
c41c1387 RS |
5326 | |
5327 | start_sequence (); | |
5328 | emit_use (x); | |
5329 | seq = get_insns (); | |
5330 | end_sequence (); | |
5331 | return seq; | |
5332 | } | |
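
A small sketch of how gen_use can be combined with the emit_*_before entry points above; the helper name keep_live_until is hypothetical:

/* Hypothetical helper: keep VALUE live up to SPOT by emitting a USE of it
   just before SPOT.  gen_use builds the insns in a fresh sequence;
   emit_insn_before splices them into the chain.  */
static void
keep_live_until (rtx value, rtx_insn *spot)
{
  emit_insn_before (gen_use (value), spot);
}
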
5333 | ||
c8912e53 RS |
5334 | /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction. |
5335 | Return the set in INSN that such notes describe, or NULL if the notes | |
5336 | have no meaning for INSN. */ | |
5337 | ||
5338 | rtx | |
5339 | set_for_reg_notes (rtx insn) | |
5340 | { | |
5341 | rtx pat, reg; | |
5342 | ||
5343 | if (!INSN_P (insn)) | |
5344 | return NULL_RTX; | |
5345 | ||
5346 | pat = PATTERN (insn); | |
5347 | if (GET_CODE (pat) == PARALLEL) | |
5348 | { | |
5349 | /* We do not use single_set because that ignores SETs of unused | |
5350 | registers. REG_EQUAL and REG_EQUIV notes really do require the | |
5351 | PARALLEL to have a single SET. */ | |
5352 | if (multiple_sets (insn)) | |
5353 | return NULL_RTX; | |
5354 | pat = XVECEXP (pat, 0, 0); | |
5355 | } | |
5356 | ||
5357 | if (GET_CODE (pat) != SET) | |
5358 | return NULL_RTX; | |
5359 | ||
5360 | reg = SET_DEST (pat); | |
5361 | ||
5362 | /* Notes apply to the contents of a STRICT_LOW_PART. */ | |
7f7379f6 KV |
5363 | if (GET_CODE (reg) == STRICT_LOW_PART |
5364 | || GET_CODE (reg) == ZERO_EXTRACT) | |
c8912e53 RS |
5365 | reg = XEXP (reg, 0); |
5366 | ||
5367 | /* Check that we have a register. */ | |
5368 | if (!(REG_P (reg) || GET_CODE (reg) == SUBREG)) | |
5369 | return NULL_RTX; | |
5370 | ||
5371 | return pat; | |
5372 | } | |
5373 | ||
87b47c85 | 5374 | /* Place a note of KIND on insn INSN with DATUM as the datum. If a |
30f7a378 | 5375 | note of this type already exists, remove it first. */ |
87b47c85 | 5376 | |
3d238248 | 5377 | rtx |
502b8322 | 5378 | set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum) |
87b47c85 AM |
5379 | { |
5380 | rtx note = find_reg_note (insn, kind, NULL_RTX); | |
5381 | ||
52488da1 JW |
5382 | switch (kind) |
5383 | { | |
5384 | case REG_EQUAL: | |
5385 | case REG_EQUIV: | |
8073cbd4 EB |
5386 | /* We need to support the REG_EQUAL on USE trick of find_reloads. */ |
5387 | if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE) | |
c8912e53 | 5388 | return NULL_RTX; |
52488da1 JW |
5389 | |
5390 | /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes. | |
5391 | It serves no useful purpose and breaks eliminate_regs. */ | |
5392 | if (GET_CODE (datum) == ASM_OPERANDS) | |
5393 | return NULL_RTX; | |
109374e2 RS |
5394 | |
5395 | /* Notes with side effects are dangerous. Even if the side-effect | |
5396 | initially mirrors one in PATTERN (INSN), later optimizations | |
5397 | might alter the way that the final register value is calculated | |
5398 | and so move or alter the side-effect in some way. The note would | |
5399 | then no longer be a valid substitution for SET_SRC. */ | |
5400 | if (side_effects_p (datum)) | |
5401 | return NULL_RTX; | |
52488da1 JW |
5402 | break; |
5403 | ||
5404 | default: | |
5405 | break; | |
5406 | } | |
3d238248 | 5407 | |
c8912e53 RS |
5408 | if (note) |
5409 | XEXP (note, 0) = datum; | |
5410 | else | |
5411 | { | |
5412 | add_reg_note (insn, kind, datum); | |
5413 | note = REG_NOTES (insn); | |
5414 | } | |
6fb5fa3c DB |
5415 | |
5416 | switch (kind) | |
3d238248 | 5417 | { |
6fb5fa3c DB |
5418 | case REG_EQUAL: |
5419 | case REG_EQUIV: | |
b2908ba6 | 5420 | df_notes_rescan (as_a <rtx_insn *> (insn)); |
6fb5fa3c DB |
5421 | break; |
5422 | default: | |
5423 | break; | |
3d238248 | 5424 | } |
87b47c85 | 5425 | |
c8912e53 | 5426 | return note; |
87b47c85 | 5427 | } |
7543f918 JR |
5428 | |
5429 | /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */ | |
5430 | rtx | |
5431 | set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst) | |
5432 | { | |
c8912e53 | 5433 | rtx set = set_for_reg_notes (insn); |
7543f918 JR |
5434 | |
5435 | if (set && SET_DEST (set) == dst) | |
5436 | return set_unique_reg_note (insn, kind, datum); | |
5437 | return NULL_RTX; | |
5438 | } | |
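
A brief sketch of how a pass might use set_dst_reg_note to record a known value; the helper name note_known_value is hypothetical:

/* Hypothetical helper: if INSN's single SET writes TARGET, attach a
   REG_EQUAL note saying the result equals VALUE.  Returns true if the
   note was attached; set_dst_reg_note performs the SET_DEST check and
   rejects unsuitable data (e.g. VALUE with side effects).  */
static bool
note_known_value (rtx_insn *insn, rtx target, rtx value)
{
  return set_dst_reg_note (insn, REG_EQUAL, value, target) != NULL_RTX;
}
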
23b2ce53 | 5439 | \f |
9d8895c9 RS |
5440 | /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a |
5441 | following barrier if the instruction needs one and if ALLOW_BARRIER_P | |
5442 | is true. | |
5443 | ||
23b2ce53 RS |
5444 | If X is a label, it is simply added into the insn chain. */ |
5445 | ||
cd459bf8 | 5446 | rtx_insn * |
9d8895c9 | 5447 | emit (rtx x, bool allow_barrier_p) |
23b2ce53 RS |
5448 | { |
5449 | enum rtx_code code = classify_insn (x); | |
5450 | ||
5b0264cb | 5451 | switch (code) |
23b2ce53 | 5452 | { |
5b0264cb NS |
5453 | case CODE_LABEL: |
5454 | return emit_label (x); | |
5455 | case INSN: | |
5456 | return emit_insn (x); | |
5457 | case JUMP_INSN: | |
5458 | { | |
cd459bf8 | 5459 | rtx_insn *insn = emit_jump_insn (x); |
9d8895c9 RS |
5460 | if (allow_barrier_p |
5461 | && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)) | |
5b0264cb NS |
5462 | return emit_barrier (); |
5463 | return insn; | |
5464 | } | |
5465 | case CALL_INSN: | |
5466 | return emit_call_insn (x); | |
b5b8b0ac AO |
5467 | case DEBUG_INSN: |
5468 | return emit_debug_insn (x); | |
5b0264cb NS |
5469 | default: |
5470 | gcc_unreachable (); | |
23b2ce53 | 5471 | } |
23b2ce53 RS |
5472 | } |
5473 | \f | |
e2500fed | 5474 | /* Space for free sequence stack entries. */ |
1431042e | 5475 | static GTY ((deletable)) struct sequence_stack *free_sequence_stack; |
e2500fed | 5476 | |
4dfa0342 RH |
5477 | /* Begin emitting insns to a sequence. If this sequence will contain |
5478 | something that might cause the compiler to pop arguments to function | |
5479 | calls (because those pops have previously been deferred; see | |
5480 | INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust | |
5481 | before calling this function. That will ensure that the deferred | |
5482 | pops are not accidentally emitted in the middle of this sequence. */ | |
23b2ce53 RS |
5483 | |
5484 | void | |
502b8322 | 5485 | start_sequence (void) |
23b2ce53 RS |
5486 | { |
5487 | struct sequence_stack *tem; | |
5488 | ||
e2500fed GK |
5489 | if (free_sequence_stack != NULL) |
5490 | { | |
5491 | tem = free_sequence_stack; | |
5492 | free_sequence_stack = tem->next; | |
5493 | } | |
5494 | else | |
766090c2 | 5495 | tem = ggc_alloc<sequence_stack> (); |
23b2ce53 | 5496 | |
614d5bd8 | 5497 | tem->next = get_current_sequence ()->next; |
5936d944 JH |
5498 | tem->first = get_insns (); |
5499 | tem->last = get_last_insn (); | |
614d5bd8 | 5500 | get_current_sequence ()->next = tem; |
23b2ce53 | 5501 | |
5936d944 JH |
5502 | set_first_insn (0); |
5503 | set_last_insn (0); | |
23b2ce53 RS |
5504 | } |
5505 | ||
5c7a310f MM |
5506 | /* Set up the insn chain starting with FIRST as the current sequence, |
5507 | saving the previously current one. See the documentation for | |
5508 | start_sequence for more information about how to use this function. */ | |
23b2ce53 RS |
5509 | |
5510 | void | |
fee3e72c | 5511 | push_to_sequence (rtx_insn *first) |
23b2ce53 | 5512 | { |
fee3e72c | 5513 | rtx_insn *last; |
23b2ce53 RS |
5514 | |
5515 | start_sequence (); | |
5516 | ||
e84a58ff EB |
5517 | for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last)) |
5518 | ; | |
23b2ce53 | 5519 | |
5936d944 JH |
5520 | set_first_insn (first); |
5521 | set_last_insn (last); | |
23b2ce53 RS |
5522 | } |
5523 | ||
bb27eeda SE |
5524 | /* Like push_to_sequence, but take the last insn as an argument to avoid |
5525 | looping through the list. */ | |
5526 | ||
5527 | void | |
fee3e72c | 5528 | push_to_sequence2 (rtx_insn *first, rtx_insn *last) |
bb27eeda SE |
5529 | { |
5530 | start_sequence (); | |
5531 | ||
5936d944 JH |
5532 | set_first_insn (first); |
5533 | set_last_insn (last); | |
bb27eeda SE |
5534 | } |
5535 | ||
f15ae3a1 TW |
5536 | /* Set up the outer-level insn chain |
5537 | as the current sequence, saving the previously current one. */ | |
5538 | ||
5539 | void | |
502b8322 | 5540 | push_topmost_sequence (void) |
f15ae3a1 | 5541 | { |
614d5bd8 | 5542 | struct sequence_stack *top; |
f15ae3a1 TW |
5543 | |
5544 | start_sequence (); | |
5545 | ||
614d5bd8 | 5546 | top = get_topmost_sequence (); |
5936d944 JH |
5547 | set_first_insn (top->first); |
5548 | set_last_insn (top->last); | |
f15ae3a1 TW |
5549 | } |
5550 | ||
5551 | /* After emitting to the outer-level insn chain, update the outer-level | |
5552 | insn chain, and restore the previous saved state. */ | |
5553 | ||
5554 | void | |
502b8322 | 5555 | pop_topmost_sequence (void) |
f15ae3a1 | 5556 | { |
614d5bd8 | 5557 | struct sequence_stack *top; |
f15ae3a1 | 5558 | |
614d5bd8 | 5559 | top = get_topmost_sequence (); |
5936d944 JH |
5560 | top->first = get_insns (); |
5561 | top->last = get_last_insn (); | |
f15ae3a1 TW |
5562 | |
5563 | end_sequence (); | |
5564 | } | |
5565 | ||
23b2ce53 RS |
5566 | /* After emitting to a sequence, restore previous saved state. |
5567 | ||
5c7a310f | 5568 | To get the contents of the sequence just made, you must call |
2f937369 | 5569 | `get_insns' *before* calling here. |
5c7a310f MM |
5570 | |
5571 | If the compiler might have deferred popping arguments while | |
5572 | generating this sequence, and this sequence will not be immediately | |
5573 | inserted into the instruction stream, use do_pending_stack_adjust | |
2f937369 | 5574 | before calling get_insns. That will ensure that the deferred |
5c7a310f MM |
5575 | pops are inserted into this sequence, and not into some random |
5576 | location in the instruction stream. See INHIBIT_DEFER_POP for more | |
5577 | information about deferred popping of arguments. */ | |
23b2ce53 RS |
5578 | |
5579 | void | |
502b8322 | 5580 | end_sequence (void) |
23b2ce53 | 5581 | { |
614d5bd8 | 5582 | struct sequence_stack *tem = get_current_sequence ()->next; |
23b2ce53 | 5583 | |
5936d944 JH |
5584 | set_first_insn (tem->first); |
5585 | set_last_insn (tem->last); | |
614d5bd8 | 5586 | get_current_sequence ()->next = tem->next; |
23b2ce53 | 5587 | |
e2500fed GK |
5588 | memset (tem, 0, sizeof (*tem)); |
5589 | tem->next = free_sequence_stack; | |
5590 | free_sequence_stack = tem; | |
23b2ce53 RS |
5591 | } |
5592 | ||
5593 | /* Return 1 if currently emitting into a sequence. */ | |
5594 | ||
5595 | int | |
502b8322 | 5596 | in_sequence_p (void) |
23b2ce53 | 5597 | { |
614d5bd8 | 5598 | return get_current_sequence ()->next != 0; |
23b2ce53 | 5599 | } |
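
A concrete sketch of the start_sequence / get_insns / end_sequence discipline described above, combined with emitting the collected insns at a specific point; the helper name insert_set_before is hypothetical:

/* Hypothetical helper: build (set DEST SRC) in a fresh sequence and
   splice the resulting insns into the chain just before SPOT.  */
static rtx_insn *
insert_set_before (rtx dest, rtx src, rtx_insn *spot)
{
  rtx_insn *insns;

  start_sequence ();
  emit_insn (gen_rtx_SET (dest, src));
  insns = get_insns ();   /* Must be read before end_sequence.  */
  end_sequence ();        /* Restore the previously current chain.  */

  return emit_insn_before (insns, spot);
}
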
23b2ce53 | 5600 | \f |
59ec66dc MM |
5601 | /* Put the various virtual registers into REGNO_REG_RTX. */ |
5602 | ||
2bbdec73 | 5603 | static void |
bd60bab2 | 5604 | init_virtual_regs (void) |
59ec66dc | 5605 | { |
bd60bab2 JH |
5606 | regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx; |
5607 | regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx; | |
5608 | regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx; | |
5609 | regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx; | |
5610 | regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx; | |
32990d5b JJ |
5611 | regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM] |
5612 | = virtual_preferred_stack_boundary_rtx; | |
49ad7cfa BS |
5613 | } |
5614 | ||
da43a810 BS |
5615 | \f |
5616 | /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */ | |
5617 | static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS]; | |
5618 | static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS]; | |
5619 | static int copy_insn_n_scratches; | |
5620 | ||
5621 | /* When an insn is being copied by copy_insn_1, this is nonzero if we have | |
5622 | copied an ASM_OPERANDS. | |
5623 | In that case, it is the original input-operand vector. */ | |
5624 | static rtvec orig_asm_operands_vector; | |
5625 | ||
5626 | /* When an insn is being copied by copy_insn_1, this is nonzero if we have | |
5627 | copied an ASM_OPERANDS. | |
5628 | In that case, it is the copied input-operand vector. */ | |
5629 | static rtvec copy_asm_operands_vector; | |
5630 | ||
5631 | /* Likewise for the constraints vector. */ | |
5632 | static rtvec orig_asm_constraints_vector; | |
5633 | static rtvec copy_asm_constraints_vector; | |
5634 | ||
5635 | /* Recursively create a new copy of an rtx for copy_insn. | |
5636 | This function differs from copy_rtx in that it handles SCRATCHes and | |
5637 | ASM_OPERANDs properly. | |
5638 | Normally, this function is not used directly; use copy_insn as front end. | |
5639 | However, you could first copy an insn pattern with copy_insn and then use | |
5640 | this function afterwards to properly copy any REG_NOTEs containing | |
5641 | SCRATCHes. */ | |
5642 | ||
5643 | rtx | |
502b8322 | 5644 | copy_insn_1 (rtx orig) |
da43a810 | 5645 | { |
b3694847 SS |
5646 | rtx copy; |
5647 | int i, j; | |
5648 | RTX_CODE code; | |
5649 | const char *format_ptr; | |
da43a810 | 5650 | |
cd9c1ca8 RH |
5651 | if (orig == NULL) |
5652 | return NULL; | |
5653 | ||
da43a810 BS |
5654 | code = GET_CODE (orig); |
5655 | ||
5656 | switch (code) | |
5657 | { | |
5658 | case REG: | |
a52a87c3 | 5659 | case DEBUG_EXPR: |
d8116890 | 5660 | CASE_CONST_ANY: |
da43a810 BS |
5661 | case SYMBOL_REF: |
5662 | case CODE_LABEL: | |
5663 | case PC: | |
5664 | case CC0: | |
276e0224 | 5665 | case RETURN: |
26898771 | 5666 | case SIMPLE_RETURN: |
da43a810 | 5667 | return orig; |
3e89ed8d | 5668 | case CLOBBER: |
c5c5ba89 JH |
5669 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
5670 | clobbers or clobbers of hard registers that originated as pseudos. | |
5671 | This is needed to allow safe register renaming. */ | |
d7ae3739 EB |
5672 | if (REG_P (XEXP (orig, 0)) |
5673 | && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))) | |
5674 | && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0)))) | |
3e89ed8d JH |
5675 | return orig; |
5676 | break; | |
da43a810 BS |
5677 | |
5678 | case SCRATCH: | |
5679 | for (i = 0; i < copy_insn_n_scratches; i++) | |
5680 | if (copy_insn_scratch_in[i] == orig) | |
5681 | return copy_insn_scratch_out[i]; | |
5682 | break; | |
5683 | ||
5684 | case CONST: | |
6fb5fa3c | 5685 | if (shared_const_p (orig)) |
da43a810 BS |
5686 | return orig; |
5687 | break; | |
750c9258 | 5688 | |
da43a810 BS |
5689 | /* A MEM with a constant address is not sharable. The problem is that |
5690 | the constant address may need to be reloaded. If the mem is shared, | |
5691 | then reloading one copy of this mem will cause all copies to appear | |
5692 | to have been reloaded. */ | |
5693 | ||
5694 | default: | |
5695 | break; | |
5696 | } | |
5697 | ||
aacd3885 RS |
5698 | /* Copy the various flags, fields, and other information. We assume |
5699 | that all fields need copying, and then clear the fields that should | |
da43a810 BS |
5700 | not be copied. That is the sensible default behavior, and forces |
5701 | us to explicitly document why we are *not* copying a flag. */ | |
aacd3885 | 5702 | copy = shallow_copy_rtx (orig); |
da43a810 | 5703 | |
da43a810 | 5704 | /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */ |
ec8e098d | 5705 | if (INSN_P (orig)) |
da43a810 | 5706 | { |
2adc7f12 JJ |
5707 | RTX_FLAG (copy, jump) = 0; |
5708 | RTX_FLAG (copy, call) = 0; | |
5709 | RTX_FLAG (copy, frame_related) = 0; | |
da43a810 | 5710 | } |
750c9258 | 5711 | |
da43a810 BS |
5712 | format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); |
5713 | ||
5714 | for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++) | |
aacd3885 RS |
5715 | switch (*format_ptr++) |
5716 | { | |
5717 | case 'e': | |
5718 | if (XEXP (orig, i) != NULL) | |
5719 | XEXP (copy, i) = copy_insn_1 (XEXP (orig, i)); | |
5720 | break; | |
da43a810 | 5721 | |
aacd3885 RS |
5722 | case 'E': |
5723 | case 'V': | |
5724 | if (XVEC (orig, i) == orig_asm_constraints_vector) | |
5725 | XVEC (copy, i) = copy_asm_constraints_vector; | |
5726 | else if (XVEC (orig, i) == orig_asm_operands_vector) | |
5727 | XVEC (copy, i) = copy_asm_operands_vector; | |
5728 | else if (XVEC (orig, i) != NULL) | |
5729 | { | |
5730 | XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); | |
5731 | for (j = 0; j < XVECLEN (copy, i); j++) | |
5732 | XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j)); | |
5733 | } | |
5734 | break; | |
da43a810 | 5735 | |
aacd3885 RS |
5736 | case 't': |
5737 | case 'w': | |
5738 | case 'i': | |
91914e56 | 5739 | case 'p': |
aacd3885 RS |
5740 | case 's': |
5741 | case 'S': | |
5742 | case 'u': | |
5743 | case '0': | |
5744 | /* These are left unchanged. */ | |
5745 | break; | |
da43a810 | 5746 | |
aacd3885 RS |
5747 | default: |
5748 | gcc_unreachable (); | |
5749 | } | |
da43a810 BS |
5750 | |
5751 | if (code == SCRATCH) | |
5752 | { | |
5753 | i = copy_insn_n_scratches++; | |
5b0264cb | 5754 | gcc_assert (i < MAX_RECOG_OPERANDS); |
da43a810 BS |
5755 | copy_insn_scratch_in[i] = orig; |
5756 | copy_insn_scratch_out[i] = copy; | |
5757 | } | |
5758 | else if (code == ASM_OPERANDS) | |
5759 | { | |
6462bb43 AO |
5760 | orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig); |
5761 | copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy); | |
5762 | orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig); | |
5763 | copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy); | |
da43a810 BS |
5764 | } |
5765 | ||
5766 | return copy; | |
5767 | } | |
5768 | ||
5769 | /* Create a new copy of an rtx. | |
5770 | This function differs from copy_rtx in that it handles SCRATCHes and | |
5771 | ASM_OPERANDs properly. | |
5772 | INSN doesn't really have to be a full INSN; it could be just the | |
5773 | pattern. */ | |
5774 | rtx | |
502b8322 | 5775 | copy_insn (rtx insn) |
da43a810 BS |
5776 | { |
5777 | copy_insn_n_scratches = 0; | |
5778 | orig_asm_operands_vector = 0; | |
5779 | orig_asm_constraints_vector = 0; | |
5780 | copy_asm_operands_vector = 0; | |
5781 | copy_asm_constraints_vector = 0; | |
5782 | return copy_insn_1 (insn); | |
5783 | } | |
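/* Illustrative sketch, not part of the original source: as the comment
   above copy_insn_1 notes, a caller that also wants REG_NOTEs copied with
   the same SCRATCH/ASM_OPERANDS sharing could do, roughly:

     rtx pat = copy_insn (PATTERN (insn));	  /* resets and fills the maps */
     rtx notes = copy_insn_1 (REG_NOTES (insn));  /* reuses those maps */

   where INSN is a hypothetical existing insn; the second call maps each
   SCRATCH in the notes to the copy made while copying the pattern.  */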
59ec66dc | 5784 | |
8e383849 JR |
5785 | /* Return a copy of INSN that can be used in a SEQUENCE delay slot, |
5786 | on the assumption that INSN itself remains in its original place. */ |
5787 | ||
f8f0516e DM |
5788 | rtx_insn * |
5789 | copy_delay_slot_insn (rtx_insn *insn) | |
8e383849 JR |
5790 | { |
5791 | /* Copy INSN with its rtx_code, all its notes, location etc. */ | |
f8f0516e | 5792 | insn = as_a <rtx_insn *> (copy_rtx (insn)); |
8e383849 JR |
5793 | INSN_UID (insn) = cur_insn_uid++; |
5794 | return insn; | |
5795 | } | |
5796 | ||
23b2ce53 RS |
5797 | /* Initialize data structures and variables in this file |
5798 | before generating rtl for each function. */ | |
5799 | ||
5800 | void | |
502b8322 | 5801 | init_emit (void) |
23b2ce53 | 5802 | { |
5936d944 JH |
5803 | set_first_insn (NULL); |
5804 | set_last_insn (NULL); | |
b5b8b0ac AO |
5805 | if (MIN_NONDEBUG_INSN_UID) |
5806 | cur_insn_uid = MIN_NONDEBUG_INSN_UID; | |
5807 | else | |
5808 | cur_insn_uid = 1; | |
5809 | cur_debug_insn_uid = 1; | |
23b2ce53 | 5810 | reg_rtx_no = LAST_VIRTUAL_REGISTER + 1; |
23b2ce53 | 5811 | first_label_num = label_num; |
614d5bd8 | 5812 | get_current_sequence ()->next = NULL; |
23b2ce53 | 5813 | |
23b2ce53 RS |
5814 | /* Init the tables that describe all the pseudo regs. */ |
5815 | ||
3e029763 | 5816 | crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101; |
23b2ce53 | 5817 | |
3e029763 | 5818 | crtl->emit.regno_pointer_align |
1b4572a8 | 5819 | = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length); |
86fe05e0 | 5820 | |
f44986d7 DM |
5821 | regno_reg_rtx |
5822 | = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length); | |
0d4903b8 | 5823 | |
e50126e8 | 5824 | /* Put copies of all the hard registers into regno_reg_rtx. */ |
6cde4876 | 5825 | memcpy (regno_reg_rtx, |
5fb0e246 | 5826 | initial_regno_reg_rtx, |
6cde4876 | 5827 | FIRST_PSEUDO_REGISTER * sizeof (rtx)); |
e50126e8 | 5828 | |
23b2ce53 | 5829 | /* Put copies of all the virtual register rtx into regno_reg_rtx. */ |
bd60bab2 | 5830 | init_virtual_regs (); |
740ab4a2 RK |
5831 | |
5832 | /* Indicate that the virtual registers and stack locations are | |
5833 | all pointers. */ | |
3502dc9c JDA |
5834 | REG_POINTER (stack_pointer_rtx) = 1; |
5835 | REG_POINTER (frame_pointer_rtx) = 1; | |
5836 | REG_POINTER (hard_frame_pointer_rtx) = 1; | |
5837 | REG_POINTER (arg_pointer_rtx) = 1; | |
740ab4a2 | 5838 | |
3502dc9c JDA |
5839 | REG_POINTER (virtual_incoming_args_rtx) = 1; |
5840 | REG_POINTER (virtual_stack_vars_rtx) = 1; | |
5841 | REG_POINTER (virtual_stack_dynamic_rtx) = 1; | |
5842 | REG_POINTER (virtual_outgoing_args_rtx) = 1; | |
5843 | REG_POINTER (virtual_cfa_rtx) = 1; | |
5e82e7bd | 5844 | |
86fe05e0 | 5845 | #ifdef STACK_BOUNDARY |
bdb429a5 RK |
5846 | REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY; |
5847 | REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY; | |
5848 | REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY; | |
5849 | REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY; | |
5850 | ||
5851 | REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY; | |
5852 | REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY; | |
5853 | REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY; | |
5854 | REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY; | |
da75ca93 | 5855 | |
bdb429a5 | 5856 | REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD; |
86fe05e0 RK |
5857 | #endif |
5858 | ||
5e82e7bd JVA |
5859 | #ifdef INIT_EXPANDERS |
5860 | INIT_EXPANDERS; | |
5861 | #endif | |
23b2ce53 RS |
5862 | } |
5863 | ||
9b4473b6 RS |
5864 | /* Return true if X is a valid element for a duplicated vector constant |
5865 | of the given mode. */ | |
5866 | ||
5867 | bool | |
5868 | valid_for_const_vec_duplicate_p (machine_mode, rtx x) | |
5869 | { | |
5870 | return (CONST_SCALAR_INT_P (x) | |
5871 | || CONST_DOUBLE_AS_FLOAT_P (x) | |
5872 | || CONST_FIXED_P (x)); | |
5873 | } | |
5874 | ||
59d06c05 | 5875 | /* Like gen_const_vec_duplicate, but ignore const_tiny_rtx. */ |
69ef87e2 AH |
5876 | |
5877 | static rtx | |
59d06c05 | 5878 | gen_const_vec_duplicate_1 (machine_mode mode, rtx el) |
69ef87e2 | 5879 | { |
59d06c05 RS |
5880 | int nunits = GET_MODE_NUNITS (mode); |
5881 | rtvec v = rtvec_alloc (nunits); | |
5882 | for (int i = 0; i < nunits; ++i) | |
5883 | RTVEC_ELT (v, i) = el; | |
5884 | return gen_rtx_raw_CONST_VECTOR (mode, v); | |
5885 | } | |
69ef87e2 | 5886 | |
59d06c05 RS |
5887 | /* Generate a vector constant of mode MODE in which every element has |
5888 | value ELT. */ | |
69ef87e2 | 5889 | |
59d06c05 RS |
5890 | rtx |
5891 | gen_const_vec_duplicate (machine_mode mode, rtx elt) | |
5892 | { | |
5893 | scalar_mode inner_mode = GET_MODE_INNER (mode); | |
5894 | if (elt == CONST0_RTX (inner_mode)) | |
5895 | return CONST0_RTX (mode); | |
5896 | else if (elt == CONST1_RTX (inner_mode)) | |
5897 | return CONST1_RTX (mode); | |
5898 | else if (elt == CONSTM1_RTX (inner_mode)) | |
5899 | return CONSTM1_RTX (mode); | |
5900 | ||
5901 | return gen_const_vec_duplicate_1 (mode, elt); | |
5902 | } | |
5903 | ||
5904 | /* Return a vector rtx of mode MODE in which every element has value X. | |
5905 | The result will be a constant if X is constant. */ | |
5906 | ||
5907 | rtx | |
5908 | gen_vec_duplicate (machine_mode mode, rtx x) | |
5909 | { | |
5909 | { |
9b4473b6 | 5910 | if (valid_for_const_vec_duplicate_p (mode, x)) |
59d06c05 RS |
5911 | return gen_const_vec_duplicate (mode, x); |
5912 | return gen_rtx_VEC_DUPLICATE (mode, x); | |
5913 | } | |
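/* Illustrative examples (a sketch, assuming the target has SImode and
   V4SImode; not part of the original source):

     gen_vec_duplicate (V4SImode, const1_rtx)
       => CONST1_RTX (V4SImode), via gen_const_vec_duplicate, because
          const1_rtx is a valid constant element;
     gen_vec_duplicate (V4SImode, some_reg)
       => (vec_duplicate:V4SI (reg:SI ...)), a VEC_DUPLICATE rtx,

   where some_reg is a hypothetical SImode register rtx.  */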
15ed7b52 | 5914 | |
ef339d6e RS |
5915 | /* A subroutine of const_vec_series_p that handles the case in which |
5916 | X is known to be an integer CONST_VECTOR. */ | |
5917 | ||
5918 | bool | |
5919 | const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out) | |
5920 | { | |
5921 | unsigned int nelts = CONST_VECTOR_NUNITS (x); | |
5922 | if (nelts < 2) | |
5923 | return false; | |
5924 | ||
5925 | scalar_mode inner = GET_MODE_INNER (GET_MODE (x)); | |
5926 | rtx base = CONST_VECTOR_ELT (x, 0); | |
5927 | rtx step = simplify_binary_operation (MINUS, inner, | |
5928 | CONST_VECTOR_ELT (x, 1), base); | |
5929 | if (rtx_equal_p (step, CONST0_RTX (inner))) | |
5930 | return false; | |
5931 | ||
5932 | for (unsigned int i = 2; i < nelts; ++i) | |
5933 | { | |
5934 | rtx diff = simplify_binary_operation (MINUS, inner, | |
5935 | CONST_VECTOR_ELT (x, i), | |
5936 | CONST_VECTOR_ELT (x, i - 1)); | |
5937 | if (!rtx_equal_p (step, diff)) | |
5938 | return false; | |
5939 | } | |
5940 | ||
5941 | *base_out = base; | |
5942 | *step_out = step; | |
5943 | return true; | |
5944 | } | |
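/* Illustrative sketch (not part of the original source): for an integer
   CONST_VECTOR whose elements are {1, 3, 5, 7}, this returns true with
   *BASE_OUT = 1 and *STEP_OUT = 2, since every successive difference
   simplifies to the same nonzero constant.  A constant duplicate such as
   {4, 4, 4, 4} is rejected because the step would be zero.  */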
5945 | ||
5946 | /* Generate a vector constant of mode MODE in which element I has | |
5947 | the value BASE + I * STEP. */ | |
5948 | ||
5949 | rtx | |
5950 | gen_const_vec_series (machine_mode mode, rtx base, rtx step) | |
5951 | { | |
5952 | gcc_assert (CONSTANT_P (base) && CONSTANT_P (step)); | |
5953 | ||
5954 | int nunits = GET_MODE_NUNITS (mode); | |
5955 | rtvec v = rtvec_alloc (nunits); | |
5956 | scalar_mode inner_mode = GET_MODE_INNER (mode); | |
5957 | RTVEC_ELT (v, 0) = base; | |
5958 | for (int i = 1; i < nunits; ++i) | |
5959 | RTVEC_ELT (v, i) = simplify_gen_binary (PLUS, inner_mode, | |
5960 | RTVEC_ELT (v, i - 1), step); | |
5961 | return gen_rtx_raw_CONST_VECTOR (mode, v); | |
5962 | } | |
5963 | ||
5964 | /* Generate a vector of mode MODE in which element I has the value | |
5965 | BASE + I * STEP. The result will be a constant if BASE and STEP | |
5966 | are both constants. */ | |
5967 | ||
5968 | rtx | |
5969 | gen_vec_series (machine_mode mode, rtx base, rtx step) | |
5970 | { | |
5971 | if (step == const0_rtx) | |
5972 | return gen_vec_duplicate (mode, base); | |
5973 | if (CONSTANT_P (base) && CONSTANT_P (step)) | |
5974 | return gen_const_vec_series (mode, base, step); | |
5975 | return gen_rtx_VEC_SERIES (mode, base, step); | |
5976 | } | |
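/* Illustrative sketch (not part of the original source), assuming the
   target has V4SImode:

     gen_vec_series (V4SImode, const0_rtx, const1_rtx)

   goes through gen_const_vec_series and yields the CONST_VECTOR
   {0, 1, 2, 3}, each element being the previous one plus the step.
   With a zero step the call degenerates to gen_vec_duplicate of BASE;
   with non-constant operands it produces a VEC_SERIES rtx.  */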
5977 | ||
59d06c05 RS |
5978 | /* Generate a new vector constant for mode MODE and constant value |
5979 | CONSTANT. */ | |
69ef87e2 | 5980 | |
59d06c05 RS |
5981 | static rtx |
5982 | gen_const_vector (machine_mode mode, int constant) | |
5983 | { | |
5984 | machine_mode inner = GET_MODE_INNER (mode); | |
69ef87e2 | 5985 | |
59d06c05 RS |
5986 | gcc_assert (!DECIMAL_FLOAT_MODE_P (inner)); |
5987 | ||
5988 | rtx el = const_tiny_rtx[constant][(int) inner]; | |
5989 | gcc_assert (el); | |
69ef87e2 | 5990 | |
59d06c05 | 5991 | return gen_const_vec_duplicate_1 (mode, el); |
69ef87e2 AH |
5992 | } |
5993 | ||
a06e3c40 | 5994 | /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when |
a73b091d | 5995 | all elements are zero, and the one vector when all elements are one. */ |
a06e3c40 | 5996 | rtx |
ef4bddc2 | 5997 | gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v) |
a06e3c40 | 5998 | { |
59d06c05 | 5999 | gcc_assert (GET_MODE_NUNITS (mode) == GET_NUM_ELEM (v)); |
a73b091d JW |
6000 | |
6001 | /* If the values are all the same, check to see if we can use one of the | |
6002 | standard constant vectors. */ | |
59d06c05 RS |
6003 | if (rtvec_all_equal_p (v)) |
6004 | return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0)); | |
a73b091d JW |
6005 | |
6006 | return gen_rtx_raw_CONST_VECTOR (mode, v); | |
a06e3c40 R |
6007 | } |
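/* Illustrative sketch (not part of the original source): if V holds four
   copies of const0_rtx for V4SImode, rtvec_all_equal_p is true and the
   call returns the shared CONST0_RTX (V4SImode) instead of allocating a
   fresh CONST_VECTOR; a vector with mixed elements falls through to
   gen_rtx_raw_CONST_VECTOR.  */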
6008 | ||
b5deb7b6 SL |
6009 | /* Initialise global register information required by all functions. */ |
6010 | ||
6011 | void | |
6012 | init_emit_regs (void) | |
6013 | { | |
6014 | int i; | |
ef4bddc2 | 6015 | machine_mode mode; |
1c3f523e | 6016 | mem_attrs *attrs; |
b5deb7b6 SL |
6017 | |
6018 | /* Reset register attributes */ | |
aebf76a2 | 6019 | reg_attrs_htab->empty (); |
b5deb7b6 SL |
6020 | |
6021 | /* We need reg_raw_mode, so initialize the modes now. */ | |
6022 | init_reg_modes_target (); | |
6023 | ||
6024 | /* Assign register numbers to the globally defined register rtx. */ | |
b5deb7b6 SL |
6025 | stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM); |
6026 | frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM); | |
6027 | hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM); | |
6028 | arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM); | |
6029 | virtual_incoming_args_rtx = | |
6030 | gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM); | |
6031 | virtual_stack_vars_rtx = | |
6032 | gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM); | |
6033 | virtual_stack_dynamic_rtx = | |
6034 | gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM); | |
6035 | virtual_outgoing_args_rtx = | |
6036 | gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM); | |
6037 | virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM); | |
32990d5b JJ |
6038 | virtual_preferred_stack_boundary_rtx = |
6039 | gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM); | |
b5deb7b6 SL |
6040 | |
6041 | /* Initialize RTL for commonly used hard registers. These are | |
6042 | copied into regno_reg_rtx as we begin to compile each function. */ | |
6043 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
5fb0e246 | 6044 | initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i); |
b5deb7b6 SL |
6045 | |
6046 | #ifdef RETURN_ADDRESS_POINTER_REGNUM | |
6047 | return_address_pointer_rtx | |
6048 | = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM); | |
6049 | #endif | |
6050 | ||
ca72dad5 | 6051 | pic_offset_table_rtx = NULL_RTX; |
b5deb7b6 SL |
6052 | if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM) |
6053 | pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM); | |
1c3f523e RS |
6054 | |
6055 | for (i = 0; i < (int) MAX_MACHINE_MODE; i++) | |
6056 | { | |
ef4bddc2 | 6057 | mode = (machine_mode) i; |
766090c2 | 6058 | attrs = ggc_cleared_alloc<mem_attrs> (); |
1c3f523e RS |
6059 | attrs->align = BITS_PER_UNIT; |
6060 | attrs->addrspace = ADDR_SPACE_GENERIC; | |
6061 | if (mode != BLKmode) | |
6062 | { | |
754c3d5d RS |
6063 | attrs->size_known_p = true; |
6064 | attrs->size = GET_MODE_SIZE (mode); | |
1c3f523e RS |
6065 | if (STRICT_ALIGNMENT) |
6066 | attrs->align = GET_MODE_ALIGNMENT (mode); | |
6067 | } | |
6068 | mode_mem_attrs[i] = attrs; | |
6069 | } | |
af364399 ML |
6070 | |
6071 | split_branch_probability = profile_probability::uninitialized (); | |
b5deb7b6 SL |
6072 | } |
6073 | ||
aa3a12d6 RS |
6074 | /* Initialize global machine_mode variables. */ |
6075 | ||
6076 | void | |
6077 | init_derived_machine_modes (void) | |
6078 | { | |
501623d4 RS |
6079 | opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode; |
6080 | FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT) | |
aa3a12d6 | 6081 | { |
501623d4 RS |
6082 | scalar_int_mode mode = mode_iter.require (); |
6083 | ||
aa3a12d6 | 6084 | if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT |
501623d4 RS |
6085 | && !opt_byte_mode.exists ()) |
6086 | opt_byte_mode = mode; | |
aa3a12d6 RS |
6087 | |
6088 | if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD | |
501623d4 RS |
6089 | && !opt_word_mode.exists ()) |
6090 | opt_word_mode = mode; | |
aa3a12d6 RS |
6091 | } |
6092 | ||
501623d4 RS |
6093 | byte_mode = opt_byte_mode.require (); |
6094 | word_mode = opt_word_mode.require (); | |
f95c5b8e RS |
6095 | ptr_mode = as_a <scalar_int_mode> |
6096 | (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ()); | |
aa3a12d6 RS |
6097 | } |
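/* Worked example (a sketch, not part of the original source): on a target
   with BITS_PER_UNIT == 8, BITS_PER_WORD == 32 and POINTER_SIZE == 32,
   and Pmode an ordinary MODE_INT mode, the loop above selects QImode for
   byte_mode and SImode for word_mode, and ptr_mode becomes the 32-bit
   integer mode in Pmode's class.  */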
6098 | ||
2d888286 | 6099 | /* Create some permanent unique rtl objects shared between all functions. */ |
23b2ce53 RS |
6100 | |
6101 | void | |
2d888286 | 6102 | init_emit_once (void) |
23b2ce53 RS |
6103 | { |
6104 | int i; | |
ef4bddc2 | 6105 | machine_mode mode; |
857c7b46 | 6106 | scalar_float_mode double_mode; |
16d22000 | 6107 | opt_scalar_mode smode_iter; |
23b2ce53 | 6108 | |
807e902e KZ |
6109 | /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE, |
6110 | CONST_FIXED, and memory attribute hash tables. */ | |
aebf76a2 | 6111 | const_int_htab = hash_table<const_int_hasher>::create_ggc (37); |
173b24b9 | 6112 | |
807e902e | 6113 | #if TARGET_SUPPORTS_WIDE_INT |
aebf76a2 | 6114 | const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37); |
807e902e | 6115 | #endif |
aebf76a2 | 6116 | const_double_htab = hash_table<const_double_hasher>::create_ggc (37); |
5692c7bc | 6117 | |
0c12fc9b RS |
6118 | if (NUM_POLY_INT_COEFFS > 1) |
6119 | const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37); | |
6120 | ||
aebf76a2 | 6121 | const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37); |
091a3ac7 | 6122 | |
aebf76a2 | 6123 | reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37); |
67673f5c | 6124 | |
5da077de | 6125 | #ifdef INIT_EXPANDERS |
414c4dc4 NC |
6126 | /* This is to initialize {init|mark|free}_machine_status before the first |
6127 | call to push_function_context_to. This is needed by the Chill front | |
a1f300c0 | 6128 | end which calls push_function_context_to before the first call to |
5da077de AS |
6129 | init_function_start. */ |
6130 | INIT_EXPANDERS; | |
6131 | #endif | |
6132 | ||
23b2ce53 RS |
6133 | /* Create the unique rtx's for certain rtx codes and operand values. */ |
6134 | ||
ecf835e9 KN |
6135 | /* Process stack-limiting command-line options. */ |
6136 | if (opt_fstack_limit_symbol_arg != NULL) | |
6137 | stack_limit_rtx | |
6138 | = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg)); | |
6139 | if (opt_fstack_limit_register_no >= 0) | |
6140 | stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no); | |
6141 | ||
a2a8cc44 | 6142 | /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case |
c5c76735 | 6143 | tries to use these variables. */ |
23b2ce53 | 6144 | for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++) |
750c9258 | 6145 | const_int_rtx[i + MAX_SAVED_CONST_INT] = |
f1b690f1 | 6146 | gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i); |
23b2ce53 | 6147 | |
68d75312 JC |
6148 | if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT |
6149 | && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT) | |
5da077de | 6150 | const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT]; |
68d75312 | 6151 | else |
3b80f6ca | 6152 | const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE); |
23b2ce53 | 6153 | |
857c7b46 | 6154 | double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require (); |
aa3a12d6 | 6155 | |
807e902e KZ |
6156 | real_from_integer (&dconst0, double_mode, 0, SIGNED); |
6157 | real_from_integer (&dconst1, double_mode, 1, SIGNED); | |
6158 | real_from_integer (&dconst2, double_mode, 2, SIGNED); | |
aefa9d43 KG |
6159 | |
6160 | dconstm1 = dconst1; | |
6161 | dconstm1.sign = 1; | |
03f2ea93 RS |
6162 | |
6163 | dconsthalf = dconst1; | |
1e92bbb9 | 6164 | SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1); |
23b2ce53 | 6165 | |
e7c82a99 | 6166 | for (i = 0; i < 3; i++) |
23b2ce53 | 6167 | { |
aefa9d43 | 6168 | const REAL_VALUE_TYPE *const r = |
b216cd4a ZW |
6169 | (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2); |
6170 | ||
c94843d2 | 6171 | FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT) |
15ed7b52 | 6172 | const_tiny_rtx[i][(int) mode] = |
555affd7 | 6173 | const_double_from_real_value (*r, mode); |
15ed7b52 | 6174 | |
c94843d2 | 6175 | FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT) |
5692c7bc | 6176 | const_tiny_rtx[i][(int) mode] = |
555affd7 | 6177 | const_double_from_real_value (*r, mode); |
23b2ce53 | 6178 | |
906c4e36 | 6179 | const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i); |
23b2ce53 | 6180 | |
c94843d2 | 6181 | FOR_EACH_MODE_IN_CLASS (mode, MODE_INT) |
906c4e36 | 6182 | const_tiny_rtx[i][(int) mode] = GEN_INT (i); |
33d3e559 | 6183 | |
ede6c734 MS |
6184 | for (mode = MIN_MODE_PARTIAL_INT; |
6185 | mode <= MAX_MODE_PARTIAL_INT; | |
ef4bddc2 | 6186 | mode = (machine_mode)((int)(mode) + 1)) |
33d3e559 | 6187 | const_tiny_rtx[i][(int) mode] = GEN_INT (i); |
23b2ce53 RS |
6188 | } |
6189 | ||
e7c82a99 JJ |
6190 | const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx; |
6191 | ||
c94843d2 | 6192 | FOR_EACH_MODE_IN_CLASS (mode, MODE_INT) |
e7c82a99 JJ |
6193 | const_tiny_rtx[3][(int) mode] = constm1_rtx; |
6194 | ||
ede6c734 MS |
6195 | for (mode = MIN_MODE_PARTIAL_INT; |
6196 | mode <= MAX_MODE_PARTIAL_INT; | |
ef4bddc2 | 6197 | mode = (machine_mode)((int)(mode) + 1)) |
c8a89d2a | 6198 | const_tiny_rtx[3][(int) mode] = constm1_rtx; |
c94843d2 RS |
6199 | |
6200 | FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT) | |
e90721b1 AP |
6201 | { |
6202 | rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)]; | |
6203 | const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner); | |
6204 | } | |
6205 | ||
c94843d2 | 6206 | FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT) |
e90721b1 AP |
6207 | { |
6208 | rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)]; | |
6209 | const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner); | |
6210 | } | |
6211 | ||
c94843d2 | 6212 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT) |
a73b091d JW |
6213 | { |
6214 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6215 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
e7c82a99 | 6216 | const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3); |
a73b091d | 6217 | } |
69ef87e2 | 6218 | |
c94843d2 | 6219 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT) |
a73b091d JW |
6220 | { |
6221 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6222 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
6223 | } | |
69ef87e2 | 6224 | |
16d22000 | 6225 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT) |
325217ed | 6226 | { |
16d22000 RS |
6227 | scalar_mode smode = smode_iter.require (); |
6228 | FCONST0 (smode).data.high = 0; | |
6229 | FCONST0 (smode).data.low = 0; | |
6230 | FCONST0 (smode).mode = smode; | |
6231 | const_tiny_rtx[0][(int) smode] | |
6232 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
325217ed CF |
6233 | } |
6234 | ||
16d22000 | 6235 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT) |
325217ed | 6236 | { |
16d22000 RS |
6237 | scalar_mode smode = smode_iter.require (); |
6238 | FCONST0 (smode).data.high = 0; | |
6239 | FCONST0 (smode).data.low = 0; | |
6240 | FCONST0 (smode).mode = smode; | |
6241 | const_tiny_rtx[0][(int) smode] | |
6242 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
325217ed CF |
6243 | } |
6244 | ||
16d22000 | 6245 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM) |
325217ed | 6246 | { |
16d22000 RS |
6247 | scalar_mode smode = smode_iter.require (); |
6248 | FCONST0 (smode).data.high = 0; | |
6249 | FCONST0 (smode).data.low = 0; | |
6250 | FCONST0 (smode).mode = smode; | |
6251 | const_tiny_rtx[0][(int) smode] | |
6252 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
325217ed CF |
6253 | |
6254 | /* We store the value 1. */ | |
16d22000 RS |
6255 | FCONST1 (smode).data.high = 0; |
6256 | FCONST1 (smode).data.low = 0; | |
6257 | FCONST1 (smode).mode = smode; | |
6258 | FCONST1 (smode).data | |
6259 | = double_int_one.lshift (GET_MODE_FBIT (smode), | |
9be0ac8c | 6260 | HOST_BITS_PER_DOUBLE_INT, |
16d22000 RS |
6261 | SIGNED_FIXED_POINT_MODE_P (smode)); |
6262 | const_tiny_rtx[1][(int) smode] | |
6263 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode); | |
325217ed CF |
6264 | } |
6265 | ||
16d22000 | 6266 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM) |
325217ed | 6267 | { |
16d22000 RS |
6268 | scalar_mode smode = smode_iter.require (); |
6269 | FCONST0 (smode).data.high = 0; | |
6270 | FCONST0 (smode).data.low = 0; | |
6271 | FCONST0 (smode).mode = smode; | |
6272 | const_tiny_rtx[0][(int) smode] | |
6273 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode); | |
325217ed CF |
6274 | |
6275 | /* We store the value 1. */ | |
16d22000 RS |
6276 | FCONST1 (smode).data.high = 0; |
6277 | FCONST1 (smode).data.low = 0; | |
6278 | FCONST1 (smode).mode = smode; | |
6279 | FCONST1 (smode).data | |
6280 | = double_int_one.lshift (GET_MODE_FBIT (smode), | |
9be0ac8c | 6281 | HOST_BITS_PER_DOUBLE_INT, |
16d22000 RS |
6282 | SIGNED_FIXED_POINT_MODE_P (smode)); |
6283 | const_tiny_rtx[1][(int) smode] | |
6284 | = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode); | |
091a3ac7 CF |
6285 | } |
6286 | ||
c94843d2 | 6287 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT) |
091a3ac7 CF |
6288 | { |
6289 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6290 | } | |
6291 | ||
c94843d2 | 6292 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT) |
091a3ac7 CF |
6293 | { |
6294 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6295 | } | |
6296 | ||
c94843d2 | 6297 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM) |
091a3ac7 CF |
6298 | { |
6299 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6300 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
6301 | } | |
6302 | ||
c94843d2 | 6303 | FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM) |
091a3ac7 CF |
6304 | { |
6305 | const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6306 | const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); | |
325217ed CF |
6307 | } |
6308 | ||
dbbbbf3b | 6309 | for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i) |
ef4bddc2 | 6310 | if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC) |
dbbbbf3b | 6311 | const_tiny_rtx[0][i] = const0_rtx; |
23b2ce53 | 6312 | |
f0417c82 RH |
6313 | const_tiny_rtx[0][(int) BImode] = const0_rtx; |
6314 | if (STORE_FLAG_VALUE == 1) | |
6315 | const_tiny_rtx[1][(int) BImode] = const1_rtx; | |
ca4adc91 | 6316 | |
16d22000 | 6317 | FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS) |
d5e254e1 | 6318 | { |
16d22000 RS |
6319 | scalar_mode smode = smode_iter.require (); |
6320 | wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode)); | |
6321 | const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode); | |
d5e254e1 IE |
6322 | } |
6323 | ||
ca4adc91 RS |
6324 | pc_rtx = gen_rtx_fmt_ (PC, VOIDmode); |
6325 | ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode); | |
6326 | simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode); | |
6327 | cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode); | |
1476d1bd MM |
6328 | invalid_insn_rtx = gen_rtx_INSN (VOIDmode, |
6329 | /*prev_insn=*/NULL, | |
6330 | /*next_insn=*/NULL, | |
6331 | /*bb=*/NULL, | |
6332 | /*pattern=*/NULL_RTX, | |
6333 | /*location=*/-1, | |
6334 | CODE_FOR_nothing, | |
6335 | /*reg_notes=*/NULL_RTX); | |
23b2ce53 | 6336 | } |
a11759a3 | 6337 | \f |
969d70ca JH |
6338 | /* Produce an exact duplicate of insn INSN after AFTER. |
6339 | Take care to update libcall regions if present. */ |
6340 | ||
cd459bf8 | 6341 | rtx_insn * |
a1950df3 | 6342 | emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after) |
969d70ca | 6343 | { |
cd459bf8 DM |
6344 | rtx_insn *new_rtx; |
6345 | rtx link; | |
969d70ca JH |
6346 | |
6347 | switch (GET_CODE (insn)) | |
6348 | { | |
6349 | case INSN: | |
60564289 | 6350 | new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after); |
969d70ca JH |
6351 | break; |
6352 | ||
6353 | case JUMP_INSN: | |
60564289 | 6354 | new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after); |
ec27069c | 6355 | CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn); |
969d70ca JH |
6356 | break; |
6357 | ||
b5b8b0ac AO |
6358 | case DEBUG_INSN: |
6359 | new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after); | |
6360 | break; | |
6361 | ||
969d70ca | 6362 | case CALL_INSN: |
60564289 | 6363 | new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after); |
969d70ca | 6364 | if (CALL_INSN_FUNCTION_USAGE (insn)) |
60564289 | 6365 | CALL_INSN_FUNCTION_USAGE (new_rtx) |
969d70ca | 6366 | = copy_insn (CALL_INSN_FUNCTION_USAGE (insn)); |
60564289 KG |
6367 | SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn); |
6368 | RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn); | |
6369 | RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn); | |
b8698a0f | 6370 | RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx) |
becfd6e5 | 6371 | = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn); |
969d70ca JH |
6372 | break; |
6373 | ||
6374 | default: | |
5b0264cb | 6375 | gcc_unreachable (); |
969d70ca JH |
6376 | } |
6377 | ||
6378 | /* Update LABEL_NUSES. */ | |
60564289 | 6379 | mark_jump_label (PATTERN (new_rtx), new_rtx, 0); |
969d70ca | 6380 | |
5368224f | 6381 | INSN_LOCATION (new_rtx) = INSN_LOCATION (insn); |
ba4f7968 | 6382 | |
0a3d71f5 JW |
6383 | /* If the old insn is frame related, then so is the new one. This is |
6384 | primarily needed for IA-64 unwind info which marks epilogue insns, | |
6385 | which may be duplicated by the basic block reordering code. */ | |
60564289 | 6386 | RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn); |
0a3d71f5 | 6387 | |
1581a12c BS |
6388 | /* Locate the end of existing REG_NOTES in NEW_RTX. */ |
6389 | rtx *ptail = ®_NOTES (new_rtx); | |
6390 | while (*ptail != NULL_RTX) | |
6391 | ptail = &XEXP (*ptail, 1); | |
6392 | ||
cf7c4aa6 HPN |
6393 | /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label |
6394 | will make them. REG_LABEL_TARGETs are created there too, but are | |
6395 | supposed to be sticky, so we copy them. */ | |
969d70ca | 6396 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
cf7c4aa6 | 6397 | if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND) |
969d70ca | 6398 | { |
1581a12c BS |
6399 | *ptail = duplicate_reg_note (link); |
6400 | ptail = &XEXP (*ptail, 1); | |
969d70ca JH |
6401 | } |
6402 | ||
60564289 KG |
6403 | INSN_CODE (new_rtx) = INSN_CODE (insn); |
6404 | return new_rtx; | |
969d70ca | 6405 | } |
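/* Usage sketch (not part of the original source): a pass duplicating an
   insn at the end of the current sequence might write

     rtx_insn *copy = emit_copy_of_insn_after (insn, get_last_insn ());

   where INSN is the insn to duplicate; the copy gets a fresh pattern via
   copy_insn, the original location and frame-related flag, and all
   REG_NOTES except REG_LABEL_OPERAND, which mark_jump_label recreates.  */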
e2500fed | 6406 | |
1431042e | 6407 | static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER]; |
3e89ed8d | 6408 | rtx |
ef4bddc2 | 6409 | gen_hard_reg_clobber (machine_mode mode, unsigned int regno) |
3e89ed8d JH |
6410 | { |
6411 | if (hard_reg_clobbers[mode][regno]) | |
6412 | return hard_reg_clobbers[mode][regno]; | |
6413 | else | |
6414 | return (hard_reg_clobbers[mode][regno] = | |
6415 | gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno))); | |
6416 | } | |
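/* Illustrative note (not part of the original source): the result is
   memoized in hard_reg_clobbers, so repeated calls with the same MODE and
   REGNO return the identical rtx, and such clobbers can be compared with
   pointer equality rather than rtx_equal_p.  */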
6417 | ||
5368224f DC |
6418 | location_t prologue_location; |
6419 | location_t epilogue_location; | |
78bde837 SB |
6420 | |
6421 | /* Hold current location information and last location information, so the | |
6422 | data structures are built lazily only when some instructions in a given |
6423 | place are needed. */ |
3a50da34 | 6424 | static location_t curr_location; |
78bde837 | 6425 | |
5368224f | 6426 | /* Allocate insn location datastructure. */ |
78bde837 | 6427 | void |
5368224f | 6428 | insn_locations_init (void) |
78bde837 | 6429 | { |
5368224f | 6430 | prologue_location = epilogue_location = 0; |
78bde837 | 6431 | curr_location = UNKNOWN_LOCATION; |
78bde837 SB |
6432 | } |
6433 | ||
6434 | /* At the end of emit stage, clear current location. */ | |
6435 | void | |
5368224f | 6436 | insn_locations_finalize (void) |
78bde837 | 6437 | { |
5368224f DC |
6438 | epilogue_location = curr_location; |
6439 | curr_location = UNKNOWN_LOCATION; | |
78bde837 SB |
6440 | } |
6441 | ||
6442 | /* Set current location. */ | |
6443 | void | |
5368224f | 6444 | set_curr_insn_location (location_t location) |
78bde837 | 6445 | { |
78bde837 SB |
6446 | curr_location = location; |
6447 | } | |
6448 | ||
6449 | /* Get current location. */ | |
6450 | location_t | |
5368224f | 6451 | curr_insn_location (void) |
78bde837 SB |
6452 | { |
6453 | return curr_location; | |
6454 | } | |
6455 | ||
78bde837 SB |
6456 | /* Return lexical scope block insn belongs to. */ |
6457 | tree | |
a1950df3 | 6458 | insn_scope (const rtx_insn *insn) |
78bde837 | 6459 | { |
5368224f | 6460 | return LOCATION_BLOCK (INSN_LOCATION (insn)); |
78bde837 SB |
6461 | } |
6462 | ||
6463 | /* Return line number of the statement that produced this insn. */ | |
6464 | int | |
a1950df3 | 6465 | insn_line (const rtx_insn *insn) |
78bde837 | 6466 | { |
5368224f | 6467 | return LOCATION_LINE (INSN_LOCATION (insn)); |
78bde837 SB |
6468 | } |
6469 | ||
6470 | /* Return source file of the statement that produced this insn. */ | |
6471 | const char * | |
a1950df3 | 6472 | insn_file (const rtx_insn *insn) |
78bde837 | 6473 | { |
5368224f | 6474 | return LOCATION_FILE (INSN_LOCATION (insn)); |
78bde837 | 6475 | } |
8930883e | 6476 | |
ffa4602f EB |
6477 | /* Return expanded location of the statement that produced this insn. */ |
6478 | expanded_location | |
a1950df3 | 6479 | insn_location (const rtx_insn *insn) |
ffa4602f EB |
6480 | { |
6481 | return expand_location (INSN_LOCATION (insn)); | |
6482 | } | |
6483 | ||
8930883e MK |
6484 | /* Return true if memory model MODEL requires a pre-operation (release-style) |
6485 | barrier or a post-operation (acquire-style) barrier. While not universal, | |
6486 | this function matches the behavior of several targets. */ |
6487 | ||
6488 | bool | |
6489 | need_atomic_barrier_p (enum memmodel model, bool pre) | |
6490 | { | |
40ad260d | 6491 | switch (model & MEMMODEL_BASE_MASK) |
8930883e MK |
6492 | { |
6493 | case MEMMODEL_RELAXED: | |
6494 | case MEMMODEL_CONSUME: | |
6495 | return false; | |
6496 | case MEMMODEL_RELEASE: | |
6497 | return pre; | |
6498 | case MEMMODEL_ACQUIRE: | |
6499 | return !pre; | |
6500 | case MEMMODEL_ACQ_REL: | |
6501 | case MEMMODEL_SEQ_CST: | |
6502 | return true; | |
6503 | default: | |
6504 | gcc_unreachable (); | |
6505 | } | |
6506 | } | |
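/* Illustrative examples (not part of the original source):

     need_atomic_barrier_p (MEMMODEL_RELEASE, /*pre=*/true)   => true
     need_atomic_barrier_p (MEMMODEL_RELEASE, /*pre=*/false)  => false
     need_atomic_barrier_p (MEMMODEL_ACQUIRE, /*pre=*/false)  => true
     need_atomic_barrier_p (MEMMODEL_SEQ_CST, pre)            => true

   i.e. release-style models need only the pre-operation barrier,
   acquire-style models only the post-operation one, and the fully
   ordered models need both.  */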
8194c537 | 6507 | |
abd3c800 RS |
6508 | /* Return a constant shift amount for shifting a value of mode MODE |
6509 | by VALUE bits. */ | |
6510 | ||
6511 | rtx | |
0c12fc9b | 6512 | gen_int_shift_amount (machine_mode, poly_int64 value) |
abd3c800 RS |
6513 | { |
6514 | /* Use a 64-bit mode, to avoid any truncation. | |
6515 | ||
6516 | ??? Perhaps this should be automatically derived from the .md files | |
6517 | instead, or perhaps have a target hook. */ | |
6518 | scalar_int_mode shift_mode = (BITS_PER_UNIT == 8 | |
6519 | ? DImode | |
6520 | : int_mode_for_size (64, 0).require ()); | |
6521 | return gen_int_mode (value, shift_mode); | |
6522 | } | |
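/* Illustrative note (not part of the original source): because the amount
   is encoded in a 64-bit integer mode, a call such as
   gen_int_shift_amount (QImode, 65) yields the CONST_INT 65 rather than a
   value truncated to QImode's width, letting callers represent
   out-of-range shift counts exactly.  */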
6523 | ||
8194c537 DM |
6524 | /* Initialize fields of rtl_data related to stack alignment. */ |
6525 | ||
6526 | void | |
6527 | rtl_data::init_stack_alignment () | |
6528 | { | |
6529 | stack_alignment_needed = STACK_BOUNDARY; | |
6530 | max_used_stack_slot_alignment = STACK_BOUNDARY; | |
6531 | stack_alignment_estimated = 0; | |
6532 | preferred_stack_boundary = STACK_BOUNDARY; | |
6533 | } | |
6534 | ||
8930883e | 6535 | \f |
e2500fed | 6536 | #include "gt-emit-rtl.h" |