1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "params.h"
58 #include "builtins.h"
59 #include "rtl-iter.h"
60 #include "stor-layout.h"
61 #include "opts.h"
62 #include "predict.h"
63
64 struct target_rtl default_target_rtl;
65 #if SWITCHABLE_TARGET
66 struct target_rtl *this_target_rtl = &default_target_rtl;
67 #endif
68
69 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
70
71 /* Commonly used modes. */
72
73 scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
74 scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
75 scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
76
77 /* Data structures maintained for the currently processed function in RTL form. */
78
79 struct rtl_data x_rtl;
80
81 /* Indexed by pseudo register number, gives the rtx for that pseudo.
82 Allocated in parallel with regno_pointer_align.
83 FIXME: We could put it into the emit_status struct, but gengtype cannot
84 handle a length attribute nested in top-level structures. */
85
86 rtx * regno_reg_rtx;
87
88 /* This is *not* reset after each function. It gives each CODE_LABEL
89 in the entire compilation a unique label number. */
90
91 static GTY(()) int label_num = 1;
92
93 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
95 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
96 is set only for MODE_INT and MODE_VECTOR_INT modes. */
97
98 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
99
100 rtx const_true_rtx;
101
102 REAL_VALUE_TYPE dconst0;
103 REAL_VALUE_TYPE dconst1;
104 REAL_VALUE_TYPE dconst2;
105 REAL_VALUE_TYPE dconstm1;
106 REAL_VALUE_TYPE dconsthalf;
107
108 /* Record fixed-point constant 0 and 1. */
109 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111
112 /* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
116
117 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
118
119 /* Standard pieces of rtx, to be substituted directly into things. */
120 rtx pc_rtx;
121 rtx ret_rtx;
122 rtx simple_return_rtx;
123 rtx cc0_rtx;
124
125 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
126 this pointer should normally never be dereferenced), but is required to be
127 distinct from NULL_RTX. Currently used by peephole2 pass. */
128 rtx_insn *invalid_insn_rtx;
129
130 /* A hash table storing CONST_INTs whose absolute value is greater
131 than MAX_SAVED_CONST_INT. */
132
133 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
134 {
135 typedef HOST_WIDE_INT compare_type;
136
137 static hashval_t hash (rtx i);
138 static bool equal (rtx i, HOST_WIDE_INT h);
139 };
140
141 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
142
143 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
144 {
145 static hashval_t hash (rtx x);
146 static bool equal (rtx x, rtx y);
147 };
148
149 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
150
151 /* A hash table storing register attribute structures. */
152 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
153 {
154 static hashval_t hash (reg_attrs *x);
155 static bool equal (reg_attrs *a, reg_attrs *b);
156 };
157
158 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
159
160 /* A hash table storing all CONST_DOUBLEs. */
161 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
162 {
163 static hashval_t hash (rtx x);
164 static bool equal (rtx x, rtx y);
165 };
166
167 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
168
169 /* A hash table storing all CONST_FIXEDs. */
170 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
171 {
172 static hashval_t hash (rtx x);
173 static bool equal (rtx x, rtx y);
174 };
175
176 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
177
178 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
179 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
180 #define first_label_num (crtl->emit.x_first_label_num)
181
182 static void set_used_decls (tree);
183 static void mark_label_nuses (rtx);
184 #if TARGET_SUPPORTS_WIDE_INT
185 static rtx lookup_const_wide_int (rtx);
186 #endif
187 static rtx lookup_const_double (rtx);
188 static rtx lookup_const_fixed (rtx);
189 static reg_attrs *get_reg_attrs (tree, int);
190 static rtx gen_const_vector (machine_mode, int);
191 static void copy_rtx_if_shared_1 (rtx *orig);
192
193 /* Probability of the conditional branch currently processed by try_split. */
194 profile_probability split_branch_probability;
195 \f
196 /* Returns a hash code for X (which is really a CONST_INT). */
197
198 hashval_t
199 const_int_hasher::hash (rtx x)
200 {
201 return (hashval_t) INTVAL (x);
202 }
203
204 /* Returns nonzero if the value represented by X (which is really a
205 CONST_INT) is the same as that given by Y (which is really a
206 HOST_WIDE_INT *). */
207
208 bool
209 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
210 {
211 return (INTVAL (x) == y);
212 }
213
214 #if TARGET_SUPPORTS_WIDE_INT
215 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
216
217 hashval_t
218 const_wide_int_hasher::hash (rtx x)
219 {
220 int i;
221 unsigned HOST_WIDE_INT hash = 0;
222 const_rtx xr = x;
223
224 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
225 hash += CONST_WIDE_INT_ELT (xr, i);
226
227 return (hashval_t) hash;
228 }
229
230 /* Returns nonzero if the value represented by X (which is really a
231 CONST_WIDE_INT) is the same as that given by Y (which is really a
232 CONST_WIDE_INT). */
233
234 bool
235 const_wide_int_hasher::equal (rtx x, rtx y)
236 {
237 int i;
238 const_rtx xr = x;
239 const_rtx yr = y;
240 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
241 return false;
242
243 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
244 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
245 return false;
246
247 return true;
248 }
249 #endif
250
251 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
252 hashval_t
253 const_double_hasher::hash (rtx x)
254 {
255 const_rtx const value = x;
256 hashval_t h;
257
258 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
259 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
260 else
261 {
262 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
263 /* MODE is used in the comparison, so it should be in the hash. */
264 h ^= GET_MODE (value);
265 }
266 return h;
267 }
268
269 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
270 is the same as that represented by Y (really a CONST_DOUBLE). */
271 bool
272 const_double_hasher::equal (rtx x, rtx y)
273 {
274 const_rtx const a = x, b = y;
275
276 if (GET_MODE (a) != GET_MODE (b))
277 return 0;
278 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
279 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
280 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
281 else
282 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
283 CONST_DOUBLE_REAL_VALUE (b));
284 }
285
286 /* Returns a hash code for X (which is really a CONST_FIXED). */
287
288 hashval_t
289 const_fixed_hasher::hash (rtx x)
290 {
291 const_rtx const value = x;
292 hashval_t h;
293
294 h = fixed_hash (CONST_FIXED_VALUE (value));
295 /* MODE is used in the comparison, so it should be in the hash. */
296 h ^= GET_MODE (value);
297 return h;
298 }
299
300 /* Returns nonzero if the value represented by X is the same as that
301 represented by Y. */
302
303 bool
304 const_fixed_hasher::equal (rtx x, rtx y)
305 {
306 const_rtx const a = x, b = y;
307
308 if (GET_MODE (a) != GET_MODE (b))
309 return 0;
310 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
311 }
312
313 /* Return true if the given memory attributes are equal. */
314
315 bool
316 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
317 {
318 if (p == q)
319 return true;
320 if (!p || !q)
321 return false;
322 return (p->alias == q->alias
323 && p->offset_known_p == q->offset_known_p
324 && (!p->offset_known_p || p->offset == q->offset)
325 && p->size_known_p == q->size_known_p
326 && (!p->size_known_p || p->size == q->size)
327 && p->align == q->align
328 && p->addrspace == q->addrspace
329 && (p->expr == q->expr
330 || (p->expr != NULL_TREE && q->expr != NULL_TREE
331 && operand_equal_p (p->expr, q->expr, 0))));
332 }
333
334 /* Set MEM's memory attributes so that they are the same as ATTRS. */
335
336 static void
337 set_mem_attrs (rtx mem, mem_attrs *attrs)
338 {
339 /* If everything is the default, we can just clear the attributes. */
340 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
341 {
342 MEM_ATTRS (mem) = 0;
343 return;
344 }
345
346 if (!MEM_ATTRS (mem)
347 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
348 {
349 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
350 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
351 }
352 }
353
354 /* Returns a hash code for X (which is really a reg_attrs *). */
355
356 hashval_t
357 reg_attr_hasher::hash (reg_attrs *x)
358 {
359 const reg_attrs *const p = x;
360
361 return ((p->offset * 1000) ^ (intptr_t) p->decl);
362 }
363
364 /* Returns nonzero if the value represented by X is the same as that given by
365 Y. */
366
367 bool
368 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
369 {
370 const reg_attrs *const p = x;
371 const reg_attrs *const q = y;
372
373 return (p->decl == q->decl && p->offset == q->offset);
374 }
375 /* Allocate a new reg_attrs structure for DECL and OFFSET, and insert
376 it into the hash table if one identical to it is not already in
377 the table. */
378
379 static reg_attrs *
380 get_reg_attrs (tree decl, int offset)
381 {
382 reg_attrs attrs;
383
384 /* If everything is the default, we can just return zero. */
385 if (decl == 0 && offset == 0)
386 return 0;
387
388 attrs.decl = decl;
389 attrs.offset = offset;
390
391 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
392 if (*slot == 0)
393 {
394 *slot = ggc_alloc<reg_attrs> ();
395 memcpy (*slot, &attrs, sizeof (reg_attrs));
396 }
397
398 return *slot;
399 }
400
401
402 #if !HAVE_blockage
403 /* Generate an empty ASM_INPUT, used to block attempts to schedule insns
404 across it and to prevent register equivalences from being seen across it. */
405
406 rtx
407 gen_blockage (void)
408 {
409 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
410 MEM_VOLATILE_P (x) = true;
411 return x;
412 }
413 #endif
414
415
416 /* Set the mode and register number of X to MODE and REGNO. */
417
418 void
419 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
420 {
421 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
422 ? hard_regno_nregs (regno, mode)
423 : 1);
424 PUT_MODE_RAW (x, mode);
425 set_regno_raw (x, regno, nregs);
426 }
427
428 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
429 don't attempt to share with the various global pieces of rtl (such as
430 frame_pointer_rtx). */
431
432 rtx
433 gen_raw_REG (machine_mode mode, unsigned int regno)
434 {
435 rtx x = rtx_alloc (REG MEM_STAT_INFO);
436 set_mode_and_regno (x, mode, regno);
437 REG_ATTRS (x) = NULL;
438 ORIGINAL_REGNO (x) = regno;
439 return x;
440 }
441
442 /* There are some RTL codes that require special attention; the generation
443 functions do the raw handling. If you add to this list, modify
444 special_rtx in gengenrtl.c as well. */
445
446 rtx_expr_list *
447 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
448 {
449 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
450 expr_list));
451 }
452
453 rtx_insn_list *
454 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
455 {
456 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
457 insn_list));
458 }
459
460 rtx_insn *
461 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
462 basic_block bb, rtx pattern, int location, int code,
463 rtx reg_notes)
464 {
465 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
466 prev_insn, next_insn,
467 bb, pattern, location, code,
468 reg_notes));
469 }
470
471 rtx
472 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
473 {
474 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
475 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
476
477 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
478 if (const_true_rtx && arg == STORE_FLAG_VALUE)
479 return const_true_rtx;
480 #endif
481
482 /* Look up the CONST_INT in the hash table. */
483 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
484 INSERT);
485 if (*slot == 0)
486 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
487
488 return *slot;
489 }
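/* Because small CONST_INTs are shared, pointer equality can be used to
   test for particular values.  A minimal sketch:

     rtx x = gen_rtx_CONST_INT (VOIDmode, 0);
     gcc_assert (x == const0_rtx);
     gcc_assert (GEN_INT (-1) == constm1_rtx);  */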
490
491 rtx
492 gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
493 {
494 return GEN_INT (trunc_int_for_mode (c, mode));
495 }
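/* An illustrative sketch of the truncation: gen_int_mode (0xff, QImode)
   truncates and sign-extends the value for QImode, so it returns
   (const_int -1) rather than (const_int 255):

     gcc_assert (gen_int_mode (0xff, QImode) == constm1_rtx);  */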
496
497 /* CONST_DOUBLEs might be created from pairs of integers, or from
498 REAL_VALUE_TYPEs. Also, their length is known only at run time,
499 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
500
501 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
502 hash table. If so, return its counterpart; otherwise add it
503 to the hash table and return it. */
504 static rtx
505 lookup_const_double (rtx real)
506 {
507 rtx *slot = const_double_htab->find_slot (real, INSERT);
508 if (*slot == 0)
509 *slot = real;
510
511 return *slot;
512 }
513
514 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
515 VALUE in mode MODE. */
516 rtx
517 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
518 {
519 rtx real = rtx_alloc (CONST_DOUBLE);
520 PUT_MODE (real, mode);
521
522 real->u.rv = value;
523
524 return lookup_const_double (real);
525 }
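/* A minimal usage sketch: the cached REAL_VALUE_TYPE constants defined
   above can be turned into shared CONST_DOUBLEs, e.g.

     rtx one = const_double_from_real_value (dconst1, DFmode);

   Repeated calls with the same value and mode return the same rtx,
   courtesy of lookup_const_double.  */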
526
527 /* Determine whether FIXED, a CONST_FIXED, already exists in the
528 hash table. If so, return its counterpart; otherwise add it
529 to the hash table and return it. */
530
531 static rtx
532 lookup_const_fixed (rtx fixed)
533 {
534 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
535 if (*slot == 0)
536 *slot = fixed;
537
538 return *slot;
539 }
540
541 /* Return a CONST_FIXED rtx for a fixed-point value specified by
542 VALUE in mode MODE. */
543
544 rtx
545 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
546 {
547 rtx fixed = rtx_alloc (CONST_FIXED);
548 PUT_MODE (fixed, mode);
549
550 fixed->u.fv = value;
551
552 return lookup_const_fixed (fixed);
553 }
554
555 #if TARGET_SUPPORTS_WIDE_INT == 0
556 /* Construct a double_int from rtx CST. */
557
558 double_int
559 rtx_to_double_int (const_rtx cst)
560 {
561 double_int r;
562
563 if (CONST_INT_P (cst))
564 r = double_int::from_shwi (INTVAL (cst));
565 else if (CONST_DOUBLE_AS_INT_P (cst))
566 {
567 r.low = CONST_DOUBLE_LOW (cst);
568 r.high = CONST_DOUBLE_HIGH (cst);
569 }
570 else
571 gcc_unreachable ();
572
573 return r;
574 }
575 #endif
576
577 #if TARGET_SUPPORTS_WIDE_INT
578 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
579 If so, return its counterpart; otherwise add it to the hash table and
580 return it. */
581
582 static rtx
583 lookup_const_wide_int (rtx wint)
584 {
585 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
586 if (*slot == 0)
587 *slot = wint;
588
589 return *slot;
590 }
591 #endif
592
593 /* Return an rtx constant for V, given that the constant has mode MODE.
594 The returned rtx will be a CONST_INT if V fits, otherwise it will be
595 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
596 (if TARGET_SUPPORTS_WIDE_INT). */
597
598 rtx
599 immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
600 {
601 unsigned int len = v.get_len ();
602 /* Not scalar_int_mode because we also allow pointer bound modes. */
603 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
604
605 /* Allow truncation but not extension since we do not know if the
606 number is signed or unsigned. */
607 gcc_assert (prec <= v.get_precision ());
608
609 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
610 return gen_int_mode (v.elt (0), mode);
611
612 #if TARGET_SUPPORTS_WIDE_INT
613 {
614 unsigned int i;
615 rtx value;
616 unsigned int blocks_needed
617 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
618
619 if (len > blocks_needed)
620 len = blocks_needed;
621
622 value = const_wide_int_alloc (len);
623
624 /* It is so tempting to just put the mode in here. Must control
625 myself ... */
626 PUT_MODE (value, VOIDmode);
627 CWI_PUT_NUM_ELEM (value, len);
628
629 for (i = 0; i < len; i++)
630 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
631
632 return lookup_const_wide_int (value);
633 }
634 #else
635 return immed_double_const (v.elt (0), v.elt (1), mode);
636 #endif
637 }
638
639 #if TARGET_SUPPORTS_WIDE_INT == 0
640 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
641 of ints: I0 is the low-order word and I1 is the high-order word.
642 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
643 implied upper bits are copies of the high bit of i1. The value
644 itself is neither signed nor unsigned. Do not use this routine for
645 non-integer modes; convert to REAL_VALUE_TYPE and use
646 const_double_from_real_value. */
647
648 rtx
649 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
650 {
651 rtx value;
652 unsigned int i;
653
654 /* There are the following cases (note that there are no modes with
655 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
656
657 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
658 gen_int_mode.
659 2) If the value of the integer fits into HOST_WIDE_INT anyway
660 (i.e., i1 consists only of copies of the sign bit, and the signs
661 of i0 and i1 are the same), then we return a CONST_INT for i0.
662 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
663 scalar_mode smode;
664 if (is_a <scalar_mode> (mode, &smode)
665 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
666 return gen_int_mode (i0, mode);
667
668 /* If this integer fits in one word, return a CONST_INT. */
669 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
670 return GEN_INT (i0);
671
672 /* We use VOIDmode for integers. */
673 value = rtx_alloc (CONST_DOUBLE);
674 PUT_MODE (value, VOIDmode);
675
676 CONST_DOUBLE_LOW (value) = i0;
677 CONST_DOUBLE_HIGH (value) = i1;
678
679 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
680 XWINT (value, i) = 0;
681
682 return lookup_const_double (value);
683 }
684 #endif
685
686 rtx
687 gen_rtx_REG (machine_mode mode, unsigned int regno)
688 {
689 /* In case the MD file explicitly references the frame pointer, have
690 all such references point to the same frame pointer. This is
691 used during frame pointer elimination to distinguish the explicit
692 references to these registers from pseudos that happened to be
693 assigned to them.
694
695 If we have eliminated the frame pointer or arg pointer, we will
696 be using it as a normal register, for example as a spill
697 register. In such cases, we might be accessing it in a mode that
698 is not Pmode and therefore cannot use the pre-allocated rtx.
699
700 Also don't do this when we are making new REGs in reload, since
701 we don't want to get confused with the real pointers. */
702
703 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
704 {
705 if (regno == FRAME_POINTER_REGNUM
706 && (!reload_completed || frame_pointer_needed))
707 return frame_pointer_rtx;
708
709 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
710 && regno == HARD_FRAME_POINTER_REGNUM
711 && (!reload_completed || frame_pointer_needed))
712 return hard_frame_pointer_rtx;
713 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
714 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
715 && regno == ARG_POINTER_REGNUM)
716 return arg_pointer_rtx;
717 #endif
718 #ifdef RETURN_ADDRESS_POINTER_REGNUM
719 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
720 return return_address_pointer_rtx;
721 #endif
722 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
723 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
724 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
725 return pic_offset_table_rtx;
726 if (regno == STACK_POINTER_REGNUM)
727 return stack_pointer_rtx;
728 }
729
730 #if 0
731 /* If the per-function register table has been set up, try to re-use
732 an existing entry in that table to avoid useless generation of RTL.
733
734 This code is disabled for now until we can fix the various backends
735 which depend on having non-shared hard registers in some cases. Long
736 term we want to re-enable this code as it can significantly cut down
737 on the amount of useless RTL that gets generated.
738
739 We'll also need to fix some code that runs after reload that wants to
740 set ORIGINAL_REGNO. */
741
742 if (cfun
743 && cfun->emit
744 && regno_reg_rtx
745 && regno < FIRST_PSEUDO_REGISTER
746 && reg_raw_mode[regno] == mode)
747 return regno_reg_rtx[regno];
748 #endif
749
750 return gen_raw_REG (mode, regno);
751 }
752
753 rtx
754 gen_rtx_MEM (machine_mode mode, rtx addr)
755 {
756 rtx rt = gen_rtx_raw_MEM (mode, addr);
757
758 /* This field is not cleared by the mere allocation of the rtx, so
759 we clear it here. */
760 MEM_ATTRS (rt) = 0;
761
762 return rt;
763 }
764
765 /* Generate a MEM referring to non-trapping constant memory. */
766
767 rtx
768 gen_const_mem (machine_mode mode, rtx addr)
769 {
770 rtx mem = gen_rtx_MEM (mode, addr);
771 MEM_READONLY_P (mem) = 1;
772 MEM_NOTRAP_P (mem) = 1;
773 return mem;
774 }
775
776 /* Generate a MEM referring to fixed portions of the frame, e.g., register
777 save areas. */
778
779 rtx
780 gen_frame_mem (machine_mode mode, rtx addr)
781 {
782 rtx mem = gen_rtx_MEM (mode, addr);
783 MEM_NOTRAP_P (mem) = 1;
784 set_mem_alias_set (mem, get_frame_alias_set ());
785 return mem;
786 }
787
788 /* Generate a MEM referring to a temporary use of the stack, not part
789 of the fixed stack frame. For example, something which is pushed
790 by a target splitter. */
791 rtx
792 gen_tmp_stack_mem (machine_mode mode, rtx addr)
793 {
794 rtx mem = gen_rtx_MEM (mode, addr);
795 MEM_NOTRAP_P (mem) = 1;
796 if (!cfun->calls_alloca)
797 set_mem_alias_set (mem, get_frame_alias_set ());
798 return mem;
799 }
800
801 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
802 this construct would be valid, and false otherwise. */
803
804 bool
805 validate_subreg (machine_mode omode, machine_mode imode,
806 const_rtx reg, unsigned int offset)
807 {
808 unsigned int isize = GET_MODE_SIZE (imode);
809 unsigned int osize = GET_MODE_SIZE (omode);
810
811 /* All subregs must be aligned. */
812 if (offset % osize != 0)
813 return false;
814
815 /* The subreg offset cannot be outside the inner object. */
816 if (offset >= isize)
817 return false;
818
819 unsigned int regsize = REGMODE_NATURAL_SIZE (imode);
820
821 /* ??? This should not be here. Temporarily continue to allow word_mode
822 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
823 Generally, backends are doing something sketchy but it'll take time to
824 fix them all. */
825 if (omode == word_mode)
826 ;
827 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
828 is the culprit here, and not the backends. */
829 else if (osize >= regsize && isize >= osize)
830 ;
831 /* Allow component subregs of complex and vector. Though given the below
832 extraction rules, it's not always clear what that means. */
833 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
834 && GET_MODE_INNER (imode) == omode)
835 ;
836 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
837 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
838 represent this. It's questionable if this ought to be represented at
839 all -- why can't this all be hidden in post-reload splitters that make
840 arbitrary mode changes to the registers themselves? */
841 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
842 ;
843 /* Subregs involving floating point modes are not allowed to
844 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
845 (subreg:SI (reg:DF) 0) isn't. */
846 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
847 {
848 if (! (isize == osize
849 /* LRA can use subreg to store a floating point value in
850 an integer mode. Although the floating point and the
851 integer modes need the same number of hard registers,
852 the size of the floating point mode can be less than that of
853 the integer mode. LRA also uses subregs when a register must
854 be used in different modes within one insn. */
855 || lra_in_progress))
856 return false;
857 }
858
859 /* Paradoxical subregs must have offset zero. */
860 if (osize > isize)
861 return offset == 0;
862
863 /* This is a normal subreg. Verify that the offset is representable. */
864
865 /* For hard registers, we already have most of these rules collected in
866 subreg_offset_representable_p. */
867 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
868 {
869 unsigned int regno = REGNO (reg);
870
871 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
872 && GET_MODE_INNER (imode) == omode)
873 ;
874 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
875 return false;
876
877 return subreg_offset_representable_p (regno, imode, offset, omode);
878 }
879
880 /* For pseudo registers, we want most of the same checks. Namely:
881
882 Assume that the pseudo register will be allocated to hard registers
883 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
884 the remainder must correspond to the lowpart of the containing hard
885 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
886 otherwise it is at the lowest offset.
887
888 Given that we've already checked the mode and offset alignment,
889 we only have to check subblock subregs here. */
890 if (osize < regsize
891 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
892 {
893 unsigned int block_size = MIN (isize, regsize);
894 unsigned int offset_within_block = offset % block_size;
895 if (BYTES_BIG_ENDIAN
896 ? offset_within_block != block_size - osize
897 : offset_within_block != 0)
898 return false;
899 }
900 return true;
901 }
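/* An illustrative sketch, assuming a 32-bit little-endian target where
   REGMODE_NATURAL_SIZE (DImode) is 4 and REG is a DImode pseudo: both
   word-aligned halves are valid, while a misaligned offset fails the
   alignment check above.

     gcc_assert (validate_subreg (SImode, DImode, reg, 0));
     gcc_assert (validate_subreg (SImode, DImode, reg, 4));
     gcc_assert (!validate_subreg (SImode, DImode, reg, 2));  */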
902
903 rtx
904 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
905 {
906 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
907 return gen_rtx_raw_SUBREG (mode, reg, offset);
908 }
909
910 /* Generate a SUBREG representing the least-significant part of REG if MODE
911 is smaller than the mode of REG; otherwise generate a paradoxical SUBREG. */
912
913 rtx
914 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
915 {
916 machine_mode inmode;
917
918 inmode = GET_MODE (reg);
919 if (inmode == VOIDmode)
920 inmode = mode;
921 return gen_rtx_SUBREG (mode, reg,
922 subreg_lowpart_offset (mode, inmode));
923 }
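/* For example, given a SImode register X, gen_lowpart_SUBREG (QImode, x)
   yields (subreg:QI (reg:SI) 0) on a little-endian target and
   (subreg:QI (reg:SI) 3) on a big-endian one (a sketch, assuming
   BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN), since subreg_lowpart_offset
   selects the byte holding the least significant part.  */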
924
925 rtx
926 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
927 enum var_init_status status)
928 {
929 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
930 PAT_VAR_LOCATION_STATUS (x) = status;
931 return x;
932 }
933 \f
934
935 /* Create an rtvec and store within it the RTXen passed in the arguments. */
936
937 rtvec
938 gen_rtvec (int n, ...)
939 {
940 int i;
941 rtvec rt_val;
942 va_list p;
943
944 va_start (p, n);
945
946 /* Don't allocate an empty rtvec... */
947 if (n == 0)
948 {
949 va_end (p);
950 return NULL_RTVEC;
951 }
952
953 rt_val = rtvec_alloc (n);
954
955 for (i = 0; i < n; i++)
956 rt_val->elem[i] = va_arg (p, rtx);
957
958 va_end (p);
959 return rt_val;
960 }
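/* A typical use is building the body of a PARALLEL (a sketch; SET and
   CLOBBER stand for previously constructed rtxes):

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber));  */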
961
962 rtvec
963 gen_rtvec_v (int n, rtx *argp)
964 {
965 int i;
966 rtvec rt_val;
967
968 /* Don't allocate an empty rtvec... */
969 if (n == 0)
970 return NULL_RTVEC;
971
972 rt_val = rtvec_alloc (n);
973
974 for (i = 0; i < n; i++)
975 rt_val->elem[i] = *argp++;
976
977 return rt_val;
978 }
979
980 rtvec
981 gen_rtvec_v (int n, rtx_insn **argp)
982 {
983 int i;
984 rtvec rt_val;
985
986 /* Don't allocate an empty rtvec... */
987 if (n == 0)
988 return NULL_RTVEC;
989
990 rt_val = rtvec_alloc (n);
991
992 for (i = 0; i < n; i++)
993 rt_val->elem[i] = *argp++;
994
995 return rt_val;
996 }
997
998 \f
999 /* Return the number of bytes between the start of an OUTER_MODE
1000 in-memory value and the start of an INNER_MODE in-memory value,
1001 given that the former is a lowpart of the latter. It may be a
1002 paradoxical lowpart, in which case the offset will be negative
1003 on big-endian targets. */
1004
1005 int
1006 byte_lowpart_offset (machine_mode outer_mode,
1007 machine_mode inner_mode)
1008 {
1009 if (paradoxical_subreg_p (outer_mode, inner_mode))
1010 return -subreg_lowpart_offset (inner_mode, outer_mode);
1011 else
1012 return subreg_lowpart_offset (outer_mode, inner_mode);
1013 }
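/* Worked examples (a sketch): byte_lowpart_offset (HImode, SImode) is 0
   on little-endian targets and 2 on big-endian ones; in the paradoxical
   direction, byte_lowpart_offset (SImode, HImode) is 0 and -2
   respectively.  */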
1014
1015 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1016 from address X. For paradoxical big-endian subregs this is a
1017 negative value, otherwise it's the same as OFFSET. */
1018
1019 int
1020 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1021 unsigned int offset)
1022 {
1023 if (paradoxical_subreg_p (outer_mode, inner_mode))
1024 {
1025 gcc_assert (offset == 0);
1026 return -subreg_lowpart_offset (inner_mode, outer_mode);
1027 }
1028 return offset;
1029 }
1030
1031 /* As above, but return the offset that existing subreg X would have
1032 if SUBREG_REG (X) were stored in memory. The only significant thing
1033 about the current SUBREG_REG is its mode. */
1034
1035 int
1036 subreg_memory_offset (const_rtx x)
1037 {
1038 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1039 SUBREG_BYTE (x));
1040 }
1041 \f
1042 /* Generate a REG rtx for a new pseudo register of mode MODE.
1043 This pseudo is assigned the next sequential register number. */
1044
1045 rtx
1046 gen_reg_rtx (machine_mode mode)
1047 {
1048 rtx val;
1049 unsigned int align = GET_MODE_ALIGNMENT (mode);
1050
1051 gcc_assert (can_create_pseudo_p ());
1052
1053 /* If a virtual register with bigger mode alignment is generated,
1054 increase stack alignment estimation because it might be spilled
1055 to stack later. */
1056 if (SUPPORTS_STACK_ALIGNMENT
1057 && crtl->stack_alignment_estimated < align
1058 && !crtl->stack_realign_processed)
1059 {
1060 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1061 if (crtl->stack_alignment_estimated < min_align)
1062 crtl->stack_alignment_estimated = min_align;
1063 }
1064
1065 if (generating_concat_p
1066 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1067 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1068 {
1069 /* For complex modes, don't make a single pseudo.
1070 Instead, make a CONCAT of two pseudos.
1071 This allows noncontiguous allocation of the real and imaginary parts,
1072 which makes much better code. Besides, allocating DCmode
1073 pseudos overstrains reload on some machines like the 386. */
1074 rtx realpart, imagpart;
1075 machine_mode partmode = GET_MODE_INNER (mode);
1076
1077 realpart = gen_reg_rtx (partmode);
1078 imagpart = gen_reg_rtx (partmode);
1079 return gen_rtx_CONCAT (mode, realpart, imagpart);
1080 }
1081
1082 /* Do not call gen_reg_rtx with uninitialized crtl. */
1083 gcc_assert (crtl->emit.regno_pointer_align_length);
1084
1085 crtl->emit.ensure_regno_capacity ();
1086 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1087
1088 val = gen_raw_REG (mode, reg_rtx_no);
1089 regno_reg_rtx[reg_rtx_no++] = val;
1090 return val;
1091 }
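/* A minimal sketch: expanders typically obtain scratch pseudos with

     rtx tmp = gen_reg_rtx (SImode);

   which is valid only while can_create_pseudo_p (), i.e. before
   register allocation has begun.  */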
1092
1093 /* Make sure regno_pointer_align and regno_reg_rtx are large
1094 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1095
1096 void
1097 emit_status::ensure_regno_capacity ()
1098 {
1099 int old_size = regno_pointer_align_length;
1100
1101 if (reg_rtx_no < old_size)
1102 return;
1103
1104 int new_size = old_size * 2;
1105 while (reg_rtx_no >= new_size)
1106 new_size *= 2;
1107
1108 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1109 memset (tmp + old_size, 0, new_size - old_size);
1110 regno_pointer_align = (unsigned char *) tmp;
1111
1112 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1113 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1114 regno_reg_rtx = new1;
1115
1116 crtl->emit.regno_pointer_align_length = new_size;
1117 }
1118
1119 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1120
1121 bool
1122 reg_is_parm_p (rtx reg)
1123 {
1124 tree decl;
1125
1126 gcc_assert (REG_P (reg));
1127 decl = REG_EXPR (reg);
1128 return (decl && TREE_CODE (decl) == PARM_DECL);
1129 }
1130
1131 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1132 to the REG_OFFSET. */
1133
1134 static void
1135 update_reg_offset (rtx new_rtx, rtx reg, int offset)
1136 {
1137 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1138 REG_OFFSET (reg) + offset);
1139 }
1140
1141 /* Generate a register with same attributes as REG, but with OFFSET
1142 added to the REG_OFFSET. */
1143
1144 rtx
1145 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1146 int offset)
1147 {
1148 rtx new_rtx = gen_rtx_REG (mode, regno);
1149
1150 update_reg_offset (new_rtx, reg, offset);
1151 return new_rtx;
1152 }
1153
1154 /* Generate a new pseudo-register with the same attributes as REG, but
1155 with OFFSET added to the REG_OFFSET. */
1156
1157 rtx
1158 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1159 {
1160 rtx new_rtx = gen_reg_rtx (mode);
1161
1162 update_reg_offset (new_rtx, reg, offset);
1163 return new_rtx;
1164 }
1165
1166 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1167 new register is a (possibly paradoxical) lowpart of the old one. */
1168
1169 void
1170 adjust_reg_mode (rtx reg, machine_mode mode)
1171 {
1172 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1173 PUT_MODE (reg, mode);
1174 }
1175
1176 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1177 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1178
1179 void
1180 set_reg_attrs_from_value (rtx reg, rtx x)
1181 {
1182 int offset;
1183 bool can_be_reg_pointer = true;
1184
1185 /* Don't call mark_reg_pointer for incompatible pointer sign
1186 extension. */
1187 while (GET_CODE (x) == SIGN_EXTEND
1188 || GET_CODE (x) == ZERO_EXTEND
1189 || GET_CODE (x) == TRUNCATE
1190 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1191 {
1192 #if defined(POINTERS_EXTEND_UNSIGNED)
1193 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1194 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1195 || (paradoxical_subreg_p (x)
1196 && ! (SUBREG_PROMOTED_VAR_P (x)
1197 && SUBREG_CHECK_PROMOTED_SIGN (x,
1198 POINTERS_EXTEND_UNSIGNED))))
1199 && !targetm.have_ptr_extend ())
1200 can_be_reg_pointer = false;
1201 #endif
1202 x = XEXP (x, 0);
1203 }
1204
1205 /* Hard registers can be reused for multiple purposes within the same
1206 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1207 on them is wrong. */
1208 if (HARD_REGISTER_P (reg))
1209 return;
1210
1211 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1212 if (MEM_P (x))
1213 {
1214 if (MEM_OFFSET_KNOWN_P (x))
1215 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1216 MEM_OFFSET (x) + offset);
1217 if (can_be_reg_pointer && MEM_POINTER (x))
1218 mark_reg_pointer (reg, 0);
1219 }
1220 else if (REG_P (x))
1221 {
1222 if (REG_ATTRS (x))
1223 update_reg_offset (reg, x, offset);
1224 if (can_be_reg_pointer && REG_POINTER (x))
1225 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1226 }
1227 }
1228
1229 /* Generate a REG rtx for a new pseudo register, copying the mode
1230 and attributes from X. */
1231
1232 rtx
1233 gen_reg_rtx_and_attrs (rtx x)
1234 {
1235 rtx reg = gen_reg_rtx (GET_MODE (x));
1236 set_reg_attrs_from_value (reg, x);
1237 return reg;
1238 }
1239
1240 /* Set the register attributes for registers contained in PARM_RTX.
1241 Use needed values from memory attributes of MEM. */
1242
1243 void
1244 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1245 {
1246 if (REG_P (parm_rtx))
1247 set_reg_attrs_from_value (parm_rtx, mem);
1248 else if (GET_CODE (parm_rtx) == PARALLEL)
1249 {
1250 /* Check for a NULL entry in the first slot, used to indicate that the
1251 parameter goes both on the stack and in registers. */
1252 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1253 for (; i < XVECLEN (parm_rtx, 0); i++)
1254 {
1255 rtx x = XVECEXP (parm_rtx, 0, i);
1256 if (REG_P (XEXP (x, 0)))
1257 REG_ATTRS (XEXP (x, 0))
1258 = get_reg_attrs (MEM_EXPR (mem),
1259 INTVAL (XEXP (x, 1)));
1260 }
1261 }
1262 }
1263
1264 /* Set the REG_ATTRS for registers in value X, given that X represents
1265 decl T. */
1266
1267 void
1268 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1269 {
1270 if (!t)
1271 return;
1272 tree tdecl = t;
1273 if (GET_CODE (x) == SUBREG)
1274 {
1275 gcc_assert (subreg_lowpart_p (x));
1276 x = SUBREG_REG (x);
1277 }
1278 if (REG_P (x))
1279 REG_ATTRS (x)
1280 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1281 DECL_P (tdecl)
1282 ? DECL_MODE (tdecl)
1283 : TYPE_MODE (TREE_TYPE (tdecl))));
1284 if (GET_CODE (x) == CONCAT)
1285 {
1286 if (REG_P (XEXP (x, 0)))
1287 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1288 if (REG_P (XEXP (x, 1)))
1289 REG_ATTRS (XEXP (x, 1))
1290 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1291 }
1292 if (GET_CODE (x) == PARALLEL)
1293 {
1294 int i, start;
1295
1296 /* Check for a NULL entry, used to indicate that the parameter goes
1297 both on the stack and in registers. */
1298 if (XEXP (XVECEXP (x, 0, 0), 0))
1299 start = 0;
1300 else
1301 start = 1;
1302
1303 for (i = start; i < XVECLEN (x, 0); i++)
1304 {
1305 rtx y = XVECEXP (x, 0, i);
1306 if (REG_P (XEXP (y, 0)))
1307 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1308 }
1309 }
1310 }
1311
1312 /* Assign the RTX X to declaration T. */
1313
1314 void
1315 set_decl_rtl (tree t, rtx x)
1316 {
1317 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1318 if (x)
1319 set_reg_attrs_for_decl_rtl (t, x);
1320 }
1321
1322 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1323 if the ABI requires the parameter to be passed by reference. */
1324
1325 void
1326 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1327 {
1328 DECL_INCOMING_RTL (t) = x;
1329 if (x && !by_reference_p)
1330 set_reg_attrs_for_decl_rtl (t, x);
1331 }
1332
1333 /* Identify REG (which may be a CONCAT) as a user register. */
1334
1335 void
1336 mark_user_reg (rtx reg)
1337 {
1338 if (GET_CODE (reg) == CONCAT)
1339 {
1340 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1341 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1342 }
1343 else
1344 {
1345 gcc_assert (REG_P (reg));
1346 REG_USERVAR_P (reg) = 1;
1347 }
1348 }
1349
1350 /* Identify REG as a probable pointer register and show its alignment
1351 as ALIGN, if nonzero. */
1352
1353 void
1354 mark_reg_pointer (rtx reg, int align)
1355 {
1356 if (! REG_POINTER (reg))
1357 {
1358 REG_POINTER (reg) = 1;
1359
1360 if (align)
1361 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1362 }
1363 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1364 /* We can no longer be sure just how aligned this pointer is. */
1365 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1366 }
1367
1368 /* Return 1 plus largest pseudo reg number used in the current function. */
1369
1370 int
1371 max_reg_num (void)
1372 {
1373 return reg_rtx_no;
1374 }
1375
1376 /* Return 1 + the largest label number used so far in the current function. */
1377
1378 int
1379 max_label_num (void)
1380 {
1381 return label_num;
1382 }
1383
1384 /* Return first label number used in this function (if any were used). */
1385
1386 int
1387 get_first_label_num (void)
1388 {
1389 return first_label_num;
1390 }
1391
1392 /* If the rtx for label was created during the expansion of a nested
1393 function, then first_label_num won't include this label number.
1394 Fix this now so that array indices work later. */
1395
1396 void
1397 maybe_set_first_label_num (rtx_code_label *x)
1398 {
1399 if (CODE_LABEL_NUMBER (x) < first_label_num)
1400 first_label_num = CODE_LABEL_NUMBER (x);
1401 }
1402
1403 /* For use by the RTL function loader, when mingling with normal
1404 functions.
1405 Ensure that label_num is greater than the label num of X, to avoid
1406 duplicate labels in the generated assembler. */
1407
1408 void
1409 maybe_set_max_label_num (rtx_code_label *x)
1410 {
1411 if (CODE_LABEL_NUMBER (x) >= label_num)
1412 label_num = CODE_LABEL_NUMBER (x) + 1;
1413 }
1414
1415 \f
1416 /* Return a value representing some low-order bits of X, where the number
1417 of low-order bits is given by MODE. Note that no conversion is done
1418 between floating-point and fixed-point values; rather, the bit
1419 representation is returned.
1420
1421 This function handles the cases in common between gen_lowpart, below,
1422 and two variants in cse.c and combine.c. These are the cases that can
1423 be safely handled at all points in the compilation.
1424
1425 If this is not a case we can handle, return 0. */
1426
1427 rtx
1428 gen_lowpart_common (machine_mode mode, rtx x)
1429 {
1430 int msize = GET_MODE_SIZE (mode);
1431 int xsize;
1432 machine_mode innermode;
1433
1434 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1435 so we have to make one up. Yuk. */
1436 innermode = GET_MODE (x);
1437 if (CONST_INT_P (x)
1438 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1439 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1440 else if (innermode == VOIDmode)
1441 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1442
1443 xsize = GET_MODE_SIZE (innermode);
1444
1445 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1446
1447 if (innermode == mode)
1448 return x;
1449
1450 if (SCALAR_FLOAT_MODE_P (mode))
1451 {
1452 /* Don't allow paradoxical FLOAT_MODE subregs. */
1453 if (msize > xsize)
1454 return 0;
1455 }
1456 else
1457 {
1458 /* MODE must occupy no more of the underlying registers than X. */
1459 unsigned int regsize = REGMODE_NATURAL_SIZE (innermode);
1460 unsigned int mregs = CEIL (msize, regsize);
1461 unsigned int xregs = CEIL (xsize, regsize);
1462 if (mregs > xregs)
1463 return 0;
1464 }
1465
1466 scalar_int_mode int_mode, int_innermode, from_mode;
1467 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1468 && is_a <scalar_int_mode> (mode, &int_mode)
1469 && is_a <scalar_int_mode> (innermode, &int_innermode)
1470 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1471 {
1472 /* If we are getting the low-order part of something that has been
1473 sign- or zero-extended, we can either just use the object being
1474 extended or make a narrower extension. If we want an even smaller
1475 piece than the size of the object being extended, call ourselves
1476 recursively.
1477
1478 This case is used mostly by combine and cse. */
1479
1480 if (from_mode == int_mode)
1481 return XEXP (x, 0);
1482 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1483 return gen_lowpart_common (int_mode, XEXP (x, 0));
1484 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1485 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1486 }
1487 else if (GET_CODE (x) == SUBREG || REG_P (x)
1488 || GET_CODE (x) == CONCAT || const_vec_p (x)
1489 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1490 return lowpart_subreg (mode, x, innermode);
1491
1492 /* Otherwise, we can't do this. */
1493 return 0;
1494 }
1495 \f
1496 rtx
1497 gen_highpart (machine_mode mode, rtx x)
1498 {
1499 unsigned int msize = GET_MODE_SIZE (mode);
1500 rtx result;
1501
1502 /* This case loses if X is a subreg. To catch bugs early,
1503 complain if an invalid MODE is used even in other cases. */
1504 gcc_assert (msize <= UNITS_PER_WORD
1505 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1506
1507 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1508 subreg_highpart_offset (mode, GET_MODE (x)));
1509 gcc_assert (result);
1510
1511 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1512 the target if we have a MEM. gen_highpart must return a valid operand,
1513 emitting code if necessary to do so. */
1514 if (MEM_P (result))
1515 {
1516 result = validize_mem (result);
1517 gcc_assert (result);
1518 }
1519
1520 return result;
1521 }
1522
1523 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1524 can be a VOIDmode constant. */
1525 rtx
1526 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1527 {
1528 if (GET_MODE (exp) != VOIDmode)
1529 {
1530 gcc_assert (GET_MODE (exp) == innermode);
1531 return gen_highpart (outermode, exp);
1532 }
1533 return simplify_gen_subreg (outermode, exp, innermode,
1534 subreg_highpart_offset (outermode, innermode));
1535 }
1536
1537 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1538 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1539
1540 unsigned int
1541 subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes)
1542 {
1543 if (outer_bytes > inner_bytes)
1544 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1545 return 0;
1546
1547 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1548 return inner_bytes - outer_bytes;
1549 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1550 return 0;
1551 else
1552 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1553 }
1554
1555 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1556 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1557
1558 unsigned int
1559 subreg_size_highpart_offset (unsigned int outer_bytes,
1560 unsigned int inner_bytes)
1561 {
1562 gcc_assert (inner_bytes >= outer_bytes);
1563
1564 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1565 return 0;
1566 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1567 return inner_bytes - outer_bytes;
1568 else
1569 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1570 (inner_bytes - outer_bytes)
1571 * BITS_PER_UNIT);
1572 }
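/* For instance, with OUTER_BYTES == 4 and INNER_BYTES == 8, the lowpart
   offset is 0 and the highpart offset is 4 on a little-endian target,
   and the reverse on a big-endian one (a sketch, assuming
   BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN).  */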
1573
1574 /* Return 1 iff X, assumed to be a SUBREG,
1575 refers to the least significant part of its containing reg.
1576 If X is not a SUBREG, always return 1 (it is its own low part!). */
1577
1578 int
1579 subreg_lowpart_p (const_rtx x)
1580 {
1581 if (GET_CODE (x) != SUBREG)
1582 return 1;
1583 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1584 return 0;
1585
1586 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1587 == SUBREG_BYTE (x));
1588 }
1589 \f
1590 /* Return subword OFFSET of operand OP.
1591 The word number, OFFSET, is interpreted as the word number starting
1592 at the low-order address. OFFSET 0 is the low-order word if not
1593 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1594
1595 If we cannot extract the required word, we return zero. Otherwise,
1596 an rtx corresponding to the requested word will be returned.
1597
1598 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1599 reload has completed, a valid address will always be returned. After
1600 reload, if a valid address cannot be returned, we return zero.
1601
1602 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1603 it is the responsibility of the caller.
1604
1605 MODE is the mode of OP in case it is a CONST_INT.
1606
1607 ??? This is still rather broken for some cases. The problem for the
1608 moment is that all callers of this thing provide no 'goal mode' to
1609 tell us to work with. This exists because all callers were written
1610 in a word based SUBREG world.
1611 Use of this function can now be replaced by simplify_subreg in
1612 most cases.
1613 */
1614
1615 rtx
1616 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1617 {
1618 if (mode == VOIDmode)
1619 mode = GET_MODE (op);
1620
1621 gcc_assert (mode != VOIDmode);
1622
1623 /* If OP is narrower than a word, fail. */
1624 if (mode != BLKmode
1625 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1626 return 0;
1627
1628 /* If we want a word outside OP, return zero. */
1629 if (mode != BLKmode
1630 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1631 return const0_rtx;
1632
1633 /* Form a new MEM at the requested address. */
1634 if (MEM_P (op))
1635 {
1636 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1637
1638 if (! validate_address)
1639 return new_rtx;
1640
1641 else if (reload_completed)
1642 {
1643 if (! strict_memory_address_addr_space_p (word_mode,
1644 XEXP (new_rtx, 0),
1645 MEM_ADDR_SPACE (op)))
1646 return 0;
1647 }
1648 else
1649 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1650 }
1651
1652 /* Rest can be handled by simplify_subreg. */
1653 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1654 }
1655
1656 /* Similar to `operand_subword', but never return 0. If we can't
1657 extract the required subword, put OP into a register and try again.
1658 The second attempt must succeed. We always validate the address in
1659 this case.
1660
1661 MODE is the mode of OP, in case it is CONST_INT. */
1662
1663 rtx
1664 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1665 {
1666 rtx result = operand_subword (op, offset, 1, mode);
1667
1668 if (result)
1669 return result;
1670
1671 if (mode != BLKmode && mode != VOIDmode)
1672 {
1673 /* If this is a register which cannot be accessed by words, copy it
1674 to a pseudo register. */
1675 if (REG_P (op))
1676 op = copy_to_reg (op);
1677 else
1678 op = force_reg (mode, op);
1679 }
1680
1681 result = operand_subword (op, offset, 1, mode);
1682 gcc_assert (result);
1683
1684 return result;
1685 }
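/* A common pattern when expanding multiword operations (a sketch,
   assuming DImode spans two words on the target):

     rtx lo = operand_subword_force (op, 0, DImode);
     rtx hi = operand_subword_force (op, 1, DImode);

   OFFSET 0 is the low-order word only if !WORDS_BIG_ENDIAN, as
   documented for operand_subword above.  */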
1686 \f
1687 /* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
1688 equal, and 0 otherwise. */
1689
1690 int
1691 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1692 {
1693 if (expr1 == expr2)
1694 return 1;
1695
1696 if (! expr1 || ! expr2)
1697 return 0;
1698
1699 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1700 return 0;
1701
1702 return operand_equal_p (expr1, expr2, 0);
1703 }
1704
1705 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1706 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1707 -1 if not known. */
1708
1709 int
1710 get_mem_align_offset (rtx mem, unsigned int align)
1711 {
1712 tree expr;
1713 unsigned HOST_WIDE_INT offset;
1714
1715 /* This function can't use
1716 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1717 || (MAX (MEM_ALIGN (mem),
1718 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1719 < align))
1720 return -1;
1721 else
1722 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1723 for two reasons:
1724 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1725 for <variable>. get_inner_reference doesn't handle it and
1726 even if it did, the alignment in that case needs to be determined
1727 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1728 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1729 isn't sufficiently aligned, the object it is in might be. */
1730 gcc_assert (MEM_P (mem));
1731 expr = MEM_EXPR (mem);
1732 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1733 return -1;
1734
1735 offset = MEM_OFFSET (mem);
1736 if (DECL_P (expr))
1737 {
1738 if (DECL_ALIGN (expr) < align)
1739 return -1;
1740 }
1741 else if (INDIRECT_REF_P (expr))
1742 {
1743 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1744 return -1;
1745 }
1746 else if (TREE_CODE (expr) == COMPONENT_REF)
1747 {
1748 while (1)
1749 {
1750 tree inner = TREE_OPERAND (expr, 0);
1751 tree field = TREE_OPERAND (expr, 1);
1752 tree byte_offset = component_ref_field_offset (expr);
1753 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1754
1755 if (!byte_offset
1756 || !tree_fits_uhwi_p (byte_offset)
1757 || !tree_fits_uhwi_p (bit_offset))
1758 return -1;
1759
1760 offset += tree_to_uhwi (byte_offset);
1761 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1762
1763 if (inner == NULL_TREE)
1764 {
1765 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1766 < (unsigned int) align)
1767 return -1;
1768 break;
1769 }
1770 else if (DECL_P (inner))
1771 {
1772 if (DECL_ALIGN (inner) < align)
1773 return -1;
1774 break;
1775 }
1776 else if (TREE_CODE (inner) != COMPONENT_REF)
1777 return -1;
1778 expr = inner;
1779 }
1780 }
1781 else
1782 return -1;
1783
1784 return offset & ((align / BITS_PER_UNIT) - 1);
1785 }
1786
1787 /* Given REF (a MEM) and T, either the type of REF or the expression
1788 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1789 if we are making a new object of this type. BITPOS is nonzero if
1790 there is an offset outstanding on T that will be applied later. */
1791
1792 void
1793 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1794 HOST_WIDE_INT bitpos)
1795 {
1796 HOST_WIDE_INT apply_bitpos = 0;
1797 tree type;
1798 struct mem_attrs attrs, *defattrs, *refattrs;
1799 addr_space_t as;
1800
1801 /* It can happen that type_for_mode was given a mode for which there
1802 is no language-level type, in which case it returns NULL, which
1803 we can see here. */
1804 if (t == NULL_TREE)
1805 return;
1806
1807 type = TYPE_P (t) ? t : TREE_TYPE (t);
1808 if (type == error_mark_node)
1809 return;
1810
1811 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1812 wrong answer, as it assumes that DECL_RTL already has the right alias
1813 info. Callers should not set DECL_RTL until after the call to
1814 set_mem_attributes. */
1815 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1816
1817 memset (&attrs, 0, sizeof (attrs));
1818
1819 /* Get the alias set from the expression or type (perhaps using a
1820 front-end routine) and use it. */
1821 attrs.alias = get_alias_set (t);
1822
1823 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1824 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1825
1826 /* Default values from pre-existing memory attributes if present. */
1827 refattrs = MEM_ATTRS (ref);
1828 if (refattrs)
1829 {
1830 /* ??? Can this ever happen? Calling this routine on a MEM that
1831 already carries memory attributes should probably be invalid. */
1832 attrs.expr = refattrs->expr;
1833 attrs.offset_known_p = refattrs->offset_known_p;
1834 attrs.offset = refattrs->offset;
1835 attrs.size_known_p = refattrs->size_known_p;
1836 attrs.size = refattrs->size;
1837 attrs.align = refattrs->align;
1838 }
1839
1840 /* Otherwise, default values from the mode of the MEM reference. */
1841 else
1842 {
1843 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1844 gcc_assert (!defattrs->expr);
1845 gcc_assert (!defattrs->offset_known_p);
1846
1847 /* Respect mode size. */
1848 attrs.size_known_p = defattrs->size_known_p;
1849 attrs.size = defattrs->size;
1850 /* ??? Is this really necessary? We probably should always get
1851 the size from the type below. */
1852
1853 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1854 if T is an object, always compute the object alignment below. */
1855 if (TYPE_P (t))
1856 attrs.align = defattrs->align;
1857 else
1858 attrs.align = BITS_PER_UNIT;
1859 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1860 e.g. if the type carries an alignment attribute. Should we be
1861 able to simply always use TYPE_ALIGN? */
1862 }
1863
1864 /* We can set the alignment from the type if we are making an object or if
1865 this is an INDIRECT_REF. */
1866 if (objectp || TREE_CODE (t) == INDIRECT_REF)
1867 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1868
1869 /* If the size is known, we can set that. */
1870 tree new_size = TYPE_SIZE_UNIT (type);
1871
1872 /* The address-space is that of the type. */
1873 as = TYPE_ADDR_SPACE (type);
1874
1875 /* If T is not a type, we may be able to deduce some more information about
1876 the expression. */
1877 if (! TYPE_P (t))
1878 {
1879 tree base;
1880
1881 if (TREE_THIS_VOLATILE (t))
1882 MEM_VOLATILE_P (ref) = 1;
1883
1884 /* Now remove any conversions: they don't change what the underlying
1885 object is. Likewise for SAVE_EXPR. */
1886 while (CONVERT_EXPR_P (t)
1887 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1888 || TREE_CODE (t) == SAVE_EXPR)
1889 t = TREE_OPERAND (t, 0);
1890
1891 /* Note whether this expression can trap. */
1892 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1893
1894 base = get_base_address (t);
1895 if (base)
1896 {
1897 if (DECL_P (base)
1898 && TREE_READONLY (base)
1899 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1900 && !TREE_THIS_VOLATILE (base))
1901 MEM_READONLY_P (ref) = 1;
1902
1903 /* Mark static const strings readonly as well. */
1904 if (TREE_CODE (base) == STRING_CST
1905 && TREE_READONLY (base)
1906 && TREE_STATIC (base))
1907 MEM_READONLY_P (ref) = 1;
1908
1909 /* Address-space information is on the base object. */
1910 if (TREE_CODE (base) == MEM_REF
1911 || TREE_CODE (base) == TARGET_MEM_REF)
1912 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1913 0))));
1914 else
1915 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1916 }
1917
1918 /* If this expression uses its parent's alias set, mark it such
1919 that we won't change it. */
1920 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1921 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1922
1923 /* If this is a decl, set the attributes of the MEM from it. */
1924 if (DECL_P (t))
1925 {
1926 attrs.expr = t;
1927 attrs.offset_known_p = true;
1928 attrs.offset = 0;
1929 apply_bitpos = bitpos;
1930 new_size = DECL_SIZE_UNIT (t);
1931 }
1932
1933 /* ??? If we end up with a constant here do record a MEM_EXPR. */
1934 else if (CONSTANT_CLASS_P (t))
1935 ;
1936
1937 /* If this is a field reference, record it. */
1938 else if (TREE_CODE (t) == COMPONENT_REF)
1939 {
1940 attrs.expr = t;
1941 attrs.offset_known_p = true;
1942 attrs.offset = 0;
1943 apply_bitpos = bitpos;
1944 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1945 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1946 }
1947
1948 /* If this is an array reference, look for an outer field reference. */
1949 else if (TREE_CODE (t) == ARRAY_REF)
1950 {
1951 tree off_tree = size_zero_node;
1952 /* We can't modify t, because we use it at the end of the
1953 function. */
1954 tree t2 = t;
1955
1956 do
1957 {
1958 tree index = TREE_OPERAND (t2, 1);
1959 tree low_bound = array_ref_low_bound (t2);
1960 tree unit_size = array_ref_element_size (t2);
1961
1962 /* We assume all arrays have sizes that are a multiple of a byte.
1963 First subtract the lower bound, if any, in the type of the
1964 index, then convert to sizetype and multiply by the size of
1965 the array element. */
1966 if (! integer_zerop (low_bound))
1967 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1968 index, low_bound);
1969
1970 off_tree = size_binop (PLUS_EXPR,
1971 size_binop (MULT_EXPR,
1972 fold_convert (sizetype,
1973 index),
1974 unit_size),
1975 off_tree);
1976 t2 = TREE_OPERAND (t2, 0);
1977 }
1978 while (TREE_CODE (t2) == ARRAY_REF);
1979
1980 if (DECL_P (t2)
1981 || (TREE_CODE (t2) == COMPONENT_REF
1982 /* For trailing arrays t2 doesn't have a size that
1983 covers all valid accesses. */
1984 && ! array_at_struct_end_p (t)))
1985 {
1986 attrs.expr = t2;
1987 attrs.offset_known_p = false;
1988 if (tree_fits_uhwi_p (off_tree))
1989 {
1990 attrs.offset_known_p = true;
1991 attrs.offset = tree_to_uhwi (off_tree);
1992 apply_bitpos = bitpos;
1993 }
1994 }
1995 /* Else do not record a MEM_EXPR. */
1996 }
1997
1998 /* If this is an indirect reference, record it. */
1999 else if (TREE_CODE (t) == MEM_REF
2000 || TREE_CODE (t) == TARGET_MEM_REF)
2001 {
2002 attrs.expr = t;
2003 attrs.offset_known_p = true;
2004 attrs.offset = 0;
2005 apply_bitpos = bitpos;
2006 }
2007
2008 /* Compute the alignment. */
2009 unsigned int obj_align;
2010 unsigned HOST_WIDE_INT obj_bitpos;
2011 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2012 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
2013 if (obj_bitpos != 0)
2014 obj_align = least_bit_hwi (obj_bitpos);
2015 attrs.align = MAX (attrs.align, obj_align);
2016 }
2017
2018 if (tree_fits_uhwi_p (new_size))
2019 {
2020 attrs.size_known_p = true;
2021 attrs.size = tree_to_uhwi (new_size);
2022 }
2023
2024 /* If we modified OFFSET based on T, then subtract the outstanding
2025 bit position offset. Similarly, increase the size of the accessed
2026 object to contain the negative offset. */
2027 if (apply_bitpos)
2028 {
2029 gcc_assert (attrs.offset_known_p);
2030 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
2031 if (attrs.size_known_p)
2032 attrs.size += apply_bitpos / BITS_PER_UNIT;
2033 }
2034
2035 /* Now set the attributes we computed above. */
2036 attrs.addrspace = as;
2037 set_mem_attrs (ref, &attrs);
2038 }
2039
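/* Like set_mem_attributes_minus_bitpos, but for the common case of
   BITPOS == 0, i.e. no offset is outstanding on T.  */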
2040 void
2041 set_mem_attributes (rtx ref, tree t, int objectp)
2042 {
2043 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2044 }
2045
2046 /* Set the alias set of MEM to SET. */
2047
2048 void
2049 set_mem_alias_set (rtx mem, alias_set_type set)
2050 {
2051 struct mem_attrs attrs;
2052
2053 /* If the new and old alias sets don't conflict, something is wrong. */
2054 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2055 attrs = *get_mem_attrs (mem);
2056 attrs.alias = set;
2057 set_mem_attrs (mem, &attrs);
2058 }
2059
2060 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2061
2062 void
2063 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2064 {
2065 struct mem_attrs attrs;
2066
2067 attrs = *get_mem_attrs (mem);
2068 attrs.addrspace = addrspace;
2069 set_mem_attrs (mem, &attrs);
2070 }
2071
2072 /* Set the alignment of MEM to ALIGN bits. */
2073
2074 void
2075 set_mem_align (rtx mem, unsigned int align)
2076 {
2077 struct mem_attrs attrs;
2078
2079 attrs = *get_mem_attrs (mem);
2080 attrs.align = align;
2081 set_mem_attrs (mem, &attrs);
2082 }
2083
2084 /* Set the expr for MEM to EXPR. */
2085
2086 void
2087 set_mem_expr (rtx mem, tree expr)
2088 {
2089 struct mem_attrs attrs;
2090
2091 attrs = *get_mem_attrs (mem);
2092 attrs.expr = expr;
2093 set_mem_attrs (mem, &attrs);
2094 }
2095
2096 /* Set the offset of MEM to OFFSET. */
2097
2098 void
2099 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2100 {
2101 struct mem_attrs attrs;
2102
2103 attrs = *get_mem_attrs (mem);
2104 attrs.offset_known_p = true;
2105 attrs.offset = offset;
2106 set_mem_attrs (mem, &attrs);
2107 }
2108
2109 /* Clear the offset of MEM. */
2110
2111 void
2112 clear_mem_offset (rtx mem)
2113 {
2114 struct mem_attrs attrs;
2115
2116 attrs = *get_mem_attrs (mem);
2117 attrs.offset_known_p = false;
2118 set_mem_attrs (mem, &attrs);
2119 }
2120
2121 /* Set the size of MEM to SIZE. */
2122
2123 void
2124 set_mem_size (rtx mem, HOST_WIDE_INT size)
2125 {
2126 struct mem_attrs attrs;
2127
2128 attrs = *get_mem_attrs (mem);
2129 attrs.size_known_p = true;
2130 attrs.size = size;
2131 set_mem_attrs (mem, &attrs);
2132 }
2133
2134 /* Clear the size of MEM. */
2135
2136 void
2137 clear_mem_size (rtx mem)
2138 {
2139 struct mem_attrs attrs;
2140
2141 attrs = *get_mem_attrs (mem);
2142 attrs.size_known_p = false;
2143 set_mem_attrs (mem, &attrs);
2144 }
2145 \f
2146 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2147 and its address changed to ADDR. (VOIDmode means don't change the mode.
2148 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2149 returned memory location is required to be valid. INPLACE is true if any
2150 changes can be made directly to MEMREF or false if MEMREF must be treated
2151 as immutable.
2152
2153 The memory attributes are not changed. */
2154
2155 static rtx
2156 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2157 bool inplace)
2158 {
2159 addr_space_t as;
2160 rtx new_rtx;
2161
2162 gcc_assert (MEM_P (memref));
2163 as = MEM_ADDR_SPACE (memref);
2164 if (mode == VOIDmode)
2165 mode = GET_MODE (memref);
2166 if (addr == 0)
2167 addr = XEXP (memref, 0);
2168 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2169 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2170 return memref;
2171
2172 /* Don't validate the address for LRA. LRA can make the address
2173 valid by itself in the most efficient way. */
2174 if (validate && !lra_in_progress)
2175 {
2176 if (reload_in_progress || reload_completed)
2177 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2178 else
2179 addr = memory_address_addr_space (mode, addr, as);
2180 }
2181
2182 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2183 return memref;
2184
2185 if (inplace)
2186 {
2187 XEXP (memref, 0) = addr;
2188 return memref;
2189 }
2190
2191 new_rtx = gen_rtx_MEM (mode, addr);
2192 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2193 return new_rtx;
2194 }
2195
2196 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2197 way we are changing MEMREF, so we only preserve the alias set. */
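/* For example, change_address (m, QImode, a) yields a QImode MEM at
   address A whose expr and offset are cleared and whose size and
   alignment come from the QImode defaults, while the alias set of M
   is kept.  (M and A are hypothetical names.)  */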
2198
2199 rtx
2200 change_address (rtx memref, machine_mode mode, rtx addr)
2201 {
2202 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2203 machine_mode mmode = GET_MODE (new_rtx);
2204 struct mem_attrs attrs, *defattrs;
2205
2206 attrs = *get_mem_attrs (memref);
2207 defattrs = mode_mem_attrs[(int) mmode];
2208 attrs.expr = NULL_TREE;
2209 attrs.offset_known_p = false;
2210 attrs.size_known_p = defattrs->size_known_p;
2211 attrs.size = defattrs->size;
2212 attrs.align = defattrs->align;
2213
2214 /* If there are no changes, just return the original memory reference. */
2215 if (new_rtx == memref)
2216 {
2217 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2218 return new_rtx;
2219
2220 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2221 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2222 }
2223
2224 set_mem_attrs (new_rtx, &attrs);
2225 return new_rtx;
2226 }
2227
2228 /* Return a memory reference like MEMREF, but with its mode changed
2229 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2230 nonzero, the memory address is forced to be valid.
2231 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2232 and the caller is responsible for adjusting MEMREF base register.
2233 If ADJUST_OBJECT is zero, the underlying object associated with the
2234 memory reference is left unchanged and the caller is responsible for
2235 dealing with it. Otherwise, if the new memory reference is outside
2236 the underlying object, even partially, then the object is dropped.
2237 SIZE, if nonzero, is the size of an access in cases where MODE
2238 has no inherent size. */
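/* As an illustrative sketch: adjust_address_1 (m, SImode, 4, 1, 1, 0, 0)
   applied to (mem:DImode (reg:P r)) would typically return
   (mem:SImode (plus:P (reg:P r) (const_int 4))), with MEM_OFFSET
   increased by 4, MEM_SIZE taken from SImode and MEM_ALIGN capped at
   the alignment implied by the offset.  */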
2239
2240 rtx
2241 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2242 int validate, int adjust_address, int adjust_object,
2243 HOST_WIDE_INT size)
2244 {
2245 rtx addr = XEXP (memref, 0);
2246 rtx new_rtx;
2247 scalar_int_mode address_mode;
2248 int pbits;
2249 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2250 unsigned HOST_WIDE_INT max_align;
2251 #ifdef POINTERS_EXTEND_UNSIGNED
2252 scalar_int_mode pointer_mode
2253 = targetm.addr_space.pointer_mode (attrs.addrspace);
2254 #endif
2255
2256 /* VOIDmode means no mode change for change_address_1. */
2257 if (mode == VOIDmode)
2258 mode = GET_MODE (memref);
2259
2260 /* Take the size of non-BLKmode accesses from the mode. */
2261 defattrs = mode_mem_attrs[(int) mode];
2262 if (defattrs->size_known_p)
2263 size = defattrs->size;
2264
2265 /* If there are no changes, just return the original memory reference. */
2266 if (mode == GET_MODE (memref) && !offset
2267 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2268 && (!validate || memory_address_addr_space_p (mode, addr,
2269 attrs.addrspace)))
2270 return memref;
2271
2272 /* ??? Prefer to create garbage instead of creating shared rtl.
2273 This may happen even if offset is nonzero -- consider
2274 (plus (plus reg reg) const_int) -- so do this always. */
2275 addr = copy_rtx (addr);
2276
2277 /* Convert a possibly large offset to a signed value within the
2278 range of the target address space. */
2279 address_mode = get_address_mode (memref);
2280 pbits = GET_MODE_BITSIZE (address_mode);
2281 if (HOST_BITS_PER_WIDE_INT > pbits)
2282 {
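/* Sign-extend OFFSET from PBITS bits: shift it up so that bit
   PBITS - 1 lands in the host sign bit, then arithmetic-shift it
   back down.  */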
2283 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2284 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2285 >> shift);
2286 }
2287
2288 if (adjust_address)
2289 {
2290 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2291 object, we can merge it into the LO_SUM. */
2292 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2293 && offset >= 0
2294 && (unsigned HOST_WIDE_INT) offset
2295 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2296 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2297 plus_constant (address_mode,
2298 XEXP (addr, 1), offset));
2299 #ifdef POINTERS_EXTEND_UNSIGNED
2300 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2301 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2302 the fact that pointers are not allowed to overflow. */
2303 else if (POINTERS_EXTEND_UNSIGNED > 0
2304 && GET_CODE (addr) == ZERO_EXTEND
2305 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2306 && trunc_int_for_mode (offset, pointer_mode) == offset)
2307 addr = gen_rtx_ZERO_EXTEND (address_mode,
2308 plus_constant (pointer_mode,
2309 XEXP (addr, 0), offset));
2310 #endif
2311 else
2312 addr = plus_constant (address_mode, addr, offset);
2313 }
2314
2315 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2316
2317 /* If the address is a REG, change_address_1 rightfully returns memref,
2318 but this would destroy memref's MEM_ATTRS. */
2319 if (new_rtx == memref && offset != 0)
2320 new_rtx = copy_rtx (new_rtx);
2321
2322 /* Conservatively drop the object if we don't know where we start from. */
2323 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2324 {
2325 attrs.expr = NULL_TREE;
2326 attrs.alias = 0;
2327 }
2328
2329 /* Compute the new values of the memory attributes due to this adjustment.
2330 We add the offsets and update the alignment. */
2331 if (attrs.offset_known_p)
2332 {
2333 attrs.offset += offset;
2334
2335 /* Drop the object if the new left end is not within its bounds. */
2336 if (adjust_object && attrs.offset < 0)
2337 {
2338 attrs.expr = NULL_TREE;
2339 attrs.alias = 0;
2340 }
2341 }
2342
2343 /* Compute the new alignment by taking the MIN of the alignment and the
2344 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2345 is zero. */
2346 if (offset != 0)
2347 {
2348 max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
2349 attrs.align = MIN (attrs.align, max_align);
2350 }
2351
2352 if (size)
2353 {
2354 /* Drop the object if the new right end is not within its bounds. */
2355 if (adjust_object && (offset + size) > attrs.size)
2356 {
2357 attrs.expr = NULL_TREE;
2358 attrs.alias = 0;
2359 }
2360 attrs.size_known_p = true;
2361 attrs.size = size;
2362 }
2363 else if (attrs.size_known_p)
2364 {
2365 gcc_assert (!adjust_object);
2366 attrs.size -= offset;
2367 /* ??? The store_by_pieces machinery generates negative sizes,
2368 so don't assert for that here. */
2369 }
2370
2371 set_mem_attrs (new_rtx, &attrs);
2372
2373 return new_rtx;
2374 }
2375
2376 /* Return a memory reference like MEMREF, but with its mode changed
2377 to MODE and its address changed to ADDR, which is assumed to be
2378 MEMREF offset by OFFSET bytes. If VALIDATE is
2379 nonzero, the memory address is forced to be valid. */
2380
2381 rtx
2382 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2383 HOST_WIDE_INT offset, int validate)
2384 {
2385 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2386 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2387 }
2388
2389 /* Return a memory reference like MEMREF, but whose address is changed by
2390 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2391 known to be in OFFSET (possibly 1). */
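/* For example, offset_address (m, r, 4), where R is known to hold a
   multiple of 4, keeps MEM_ALIGN at up to 32 bits but drops the
   now-unknown MEM_OFFSET.  */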
2392
2393 rtx
2394 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2395 {
2396 rtx new_rtx, addr = XEXP (memref, 0);
2397 machine_mode address_mode;
2398 struct mem_attrs attrs, *defattrs;
2399
2400 attrs = *get_mem_attrs (memref);
2401 address_mode = get_address_mode (memref);
2402 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2403
2404 /* At this point we don't know _why_ the address is invalid. It
2405 could have secondary memory references, multiplies or anything.
2406
2407 However, if we did go and rearrange things, we can wind up not
2408 being able to recognize the magic around pic_offset_table_rtx.
2409 This stuff is fragile, and is yet another example of why it is
2410 bad to expose PIC machinery too early. */
2411 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2412 attrs.addrspace)
2413 && GET_CODE (addr) == PLUS
2414 && XEXP (addr, 0) == pic_offset_table_rtx)
2415 {
2416 addr = force_reg (GET_MODE (addr), addr);
2417 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2418 }
2419
2420 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2421 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2422
2423 /* If there are no changes, just return the original memory reference. */
2424 if (new_rtx == memref)
2425 return new_rtx;
2426
2427 /* Update the alignment to reflect the offset. Reset the offset, which
2428 we don't know. */
2429 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2430 attrs.offset_known_p = false;
2431 attrs.size_known_p = defattrs->size_known_p;
2432 attrs.size = defattrs->size;
2433 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2434 set_mem_attrs (new_rtx, &attrs);
2435 return new_rtx;
2436 }
2437
2438 /* Return a memory reference like MEMREF, but with its address changed to
2439 ADDR. The caller is asserting that the actual piece of memory pointed
2440 to is the same, just the form of the address is being changed, such as
2441 by putting something into a register. INPLACE is true if any changes
2442 can be made directly to MEMREF or false if MEMREF must be treated as
2443 immutable. */
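/* A common use (an illustrative sketch) is forcing the address into a
   register while keeping all memory attributes:
   m = replace_equiv_address (m, force_reg (Pmode, XEXP (m, 0)), false).  */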
2444
2445 rtx
2446 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2447 {
2448 /* change_address_1 copies the memory attribute structure without change
2449 and that's exactly what we want here. */
2450 update_temp_slot_address (XEXP (memref, 0), addr);
2451 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2452 }
2453
2454 /* Likewise, but the reference is not required to be valid. */
2455
2456 rtx
2457 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2458 {
2459 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2460 }
2461
2462 /* Return a memory reference like MEMREF, but with its mode widened to
2463 MODE and offset by OFFSET. This would be used by targets that e.g.
2464 cannot issue QImode memory operations and have to use SImode memory
2465 operations plus masking logic. */
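/* A sketch of the intended use: such a target can widen
   (mem:QI (reg:P r)) to (mem:SI (reg:P r)) with OFFSET 0 and mask out
   the byte afterwards.  The MEM_EXPR is stripped back to a containing
   object (or zapped) whenever the wider access would overrun the
   recorded object, and the alias set is cleared since the access may
   now touch neighbouring objects.  */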
2466
2467 rtx
2468 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2469 {
2470 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2471 struct mem_attrs attrs;
2472 unsigned int size = GET_MODE_SIZE (mode);
2473
2474 /* If there are no changes, just return the original memory reference. */
2475 if (new_rtx == memref)
2476 return new_rtx;
2477
2478 attrs = *get_mem_attrs (new_rtx);
2479
2480 /* If we don't know what offset we were at within the expression, then
2481 we can't know if we've overstepped the bounds. */
2482 if (! attrs.offset_known_p)
2483 attrs.expr = NULL_TREE;
2484
2485 while (attrs.expr)
2486 {
2487 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2488 {
2489 tree field = TREE_OPERAND (attrs.expr, 1);
2490 tree offset = component_ref_field_offset (attrs.expr);
2491
2492 if (! DECL_SIZE_UNIT (field))
2493 {
2494 attrs.expr = NULL_TREE;
2495 break;
2496 }
2497
2498 /* Is the field at least as large as the access? If so, ok,
2499 otherwise strip back to the containing structure. */
2500 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2501 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2502 && attrs.offset >= 0)
2503 break;
2504
2505 if (! tree_fits_uhwi_p (offset))
2506 {
2507 attrs.expr = NULL_TREE;
2508 break;
2509 }
2510
2511 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2512 attrs.offset += tree_to_uhwi (offset);
2513 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2514 / BITS_PER_UNIT);
2515 }
2516 /* Similarly for the decl. */
2517 else if (DECL_P (attrs.expr)
2518 && DECL_SIZE_UNIT (attrs.expr)
2519 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2520 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2521 && (! attrs.offset_known_p || attrs.offset >= 0))
2522 break;
2523 else
2524 {
2525 /* The widened memory access overflows the expression, which means
2526 that it could alias another expression. Zap it. */
2527 attrs.expr = NULL_TREE;
2528 break;
2529 }
2530 }
2531
2532 if (! attrs.expr)
2533 attrs.offset_known_p = false;
2534
2535 /* The widened memory may alias other stuff, so zap the alias set. */
2536 /* ??? Maybe use get_alias_set on any remaining expression. */
2537 attrs.alias = 0;
2538 attrs.size_known_p = true;
2539 attrs.size = size;
2540 set_mem_attrs (new_rtx, &attrs);
2541 return new_rtx;
2542 }
2543 \f
2544 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2545 static GTY(()) tree spill_slot_decl;
2546
2547 tree
2548 get_spill_slot_decl (bool force_build_p)
2549 {
2550 tree d = spill_slot_decl;
2551 rtx rd;
2552 struct mem_attrs attrs;
2553
2554 if (d || !force_build_p)
2555 return d;
2556
2557 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2558 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2559 DECL_ARTIFICIAL (d) = 1;
2560 DECL_IGNORED_P (d) = 1;
2561 TREE_USED (d) = 1;
2562 spill_slot_decl = d;
2563
2564 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2565 MEM_NOTRAP_P (rd) = 1;
2566 attrs = *mode_mem_attrs[(int) BLKmode];
2567 attrs.alias = new_alias_set ();
2568 attrs.expr = d;
2569 set_mem_attrs (rd, &attrs);
2570 SET_DECL_RTL (d, rd);
2571
2572 return d;
2573 }
2574
2575 /* Given MEM, a result from assign_stack_local, fill in the memory
2576 attributes as appropriate for a register allocator spill slot.
2577 These slots are not aliasable by other memory. We arrange for
2578 them all to use a single MEM_EXPR, so that the aliasing code can
2579 work properly in the case of shared spill slots. */
2580
2581 void
2582 set_mem_attrs_for_spill (rtx mem)
2583 {
2584 struct mem_attrs attrs;
2585 rtx addr;
2586
2587 attrs = *get_mem_attrs (mem);
2588 attrs.expr = get_spill_slot_decl (true);
2589 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2590 attrs.addrspace = ADDR_SPACE_GENERIC;
2591
2592 /* We expect the incoming memory to be of the form:
2593 (mem:MODE (plus (reg sfp) (const_int offset)))
2594 with perhaps the plus missing for offset = 0. */
2595 addr = XEXP (mem, 0);
2596 attrs.offset_known_p = true;
2597 attrs.offset = 0;
2598 if (GET_CODE (addr) == PLUS
2599 && CONST_INT_P (XEXP (addr, 1)))
2600 attrs.offset = INTVAL (XEXP (addr, 1));
2601
2602 set_mem_attrs (mem, &attrs);
2603 MEM_NOTRAP_P (mem) = 1;
2604 }
2605 \f
2606 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2607
2608 rtx_code_label *
2609 gen_label_rtx (void)
2610 {
2611 return as_a <rtx_code_label *> (
2612 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2613 NULL, label_num++, NULL));
2614 }
2615 \f
2616 /* For procedure integration. */
2617
2618 /* Install new pointers to the first and last insns in the chain.
2619 Also, set cur_insn_uid to one higher than the last in use.
2620 Used for an inline-procedure after copying the insn chain. */
2621
2622 void
2623 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2624 {
2625 rtx_insn *insn;
2626
2627 set_first_insn (first);
2628 set_last_insn (last);
2629 cur_insn_uid = 0;
2630
2631 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2632 {
2633 int debug_count = 0;
2634
2635 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2636 cur_debug_insn_uid = 0;
2637
2638 for (insn = first; insn; insn = NEXT_INSN (insn))
2639 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2640 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2641 else
2642 {
2643 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2644 if (DEBUG_INSN_P (insn))
2645 debug_count++;
2646 }
2647
2648 if (debug_count)
2649 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2650 else
2651 cur_debug_insn_uid++;
2652 }
2653 else
2654 for (insn = first; insn; insn = NEXT_INSN (insn))
2655 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2656
2657 cur_insn_uid++;
2658 }
2659 \f
2660 /* Go through all the RTL insn bodies and copy any invalid shared
2661 structure. This routine should only be called once. */
2662
2663 static void
2664 unshare_all_rtl_1 (rtx_insn *insn)
2665 {
2666 /* Unshare just about everything else. */
2667 unshare_all_rtl_in_chain (insn);
2668
2669 /* Make sure the addresses of stack slots found outside the insn chain
2670 (such as, in DECL_RTL of a variable) are not shared
2671 with the insn chain.
2672
2673 This special care is necessary when the stack slot MEM does not
2674 actually appear in the insn chain. If it does appear, its address
2675 is unshared from all else at that point. */
2676 unsigned int i;
2677 rtx temp;
2678 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2679 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2680 }
2681
2682 /* Go through all the RTL insn bodies and copy any invalid shared
2683 structure, again. This is a fairly expensive thing to do so it
2684 should be done sparingly. */
2685
2686 void
2687 unshare_all_rtl_again (rtx_insn *insn)
2688 {
2689 rtx_insn *p;
2690 tree decl;
2691
2692 for (p = insn; p; p = NEXT_INSN (p))
2693 if (INSN_P (p))
2694 {
2695 reset_used_flags (PATTERN (p));
2696 reset_used_flags (REG_NOTES (p));
2697 if (CALL_P (p))
2698 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2699 }
2700
2701 /* Make sure that virtual stack slots are not shared. */
2702 set_used_decls (DECL_INITIAL (cfun->decl));
2703
2704 /* Make sure that virtual parameters are not shared. */
2705 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2706 set_used_flags (DECL_RTL (decl));
2707
2708 rtx temp;
2709 unsigned int i;
2710 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2711 reset_used_flags (temp);
2712
2713 unshare_all_rtl_1 (insn);
2714 }
2715
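/* Unshare all RTL in the current function, including the DECL_RTL and
   DECL_INCOMING_RTL of its arguments.  Used as the body of an RTL pass;
   returns no extra TODO flags.  */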
2716 unsigned int
2717 unshare_all_rtl (void)
2718 {
2719 unshare_all_rtl_1 (get_insns ());
2720
2721 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2722 {
2723 if (DECL_RTL_SET_P (decl))
2724 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2725 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2726 }
2727
2728 return 0;
2729 }
2730
2731
2732 /* Check that ORIG is not marked when it should not be and mark ORIG as
2733 in use. Recursively do the same for subexpressions. */
2734
2735 static void
2736 verify_rtx_sharing (rtx orig, rtx insn)
2737 {
2738 rtx x = orig;
2739 int i;
2740 enum rtx_code code;
2741 const char *format_ptr;
2742
2743 if (x == 0)
2744 return;
2745
2746 code = GET_CODE (x);
2747
2748 /* These types may be freely shared. */
2749
2750 switch (code)
2751 {
2752 case REG:
2753 case DEBUG_EXPR:
2754 case VALUE:
2755 CASE_CONST_ANY:
2756 case SYMBOL_REF:
2757 case LABEL_REF:
2758 case CODE_LABEL:
2759 case PC:
2760 case CC0:
2761 case RETURN:
2762 case SIMPLE_RETURN:
2763 case SCRATCH:
2764 /* SCRATCHes must be shared because they represent distinct values. */
2765 return;
2766 case CLOBBER:
2767 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2768 clobbers or clobbers of hard registers that originated as pseudos.
2769 This is needed to allow safe register renaming. */
2770 if (REG_P (XEXP (x, 0))
2771 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2772 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2773 return;
2774 break;
2775
2776 case CONST:
2777 if (shared_const_p (orig))
2778 return;
2779 break;
2780
2781 case MEM:
2782 /* A MEM is allowed to be shared if its address is constant. */
2783 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2784 || reload_completed || reload_in_progress)
2785 return;
2786
2787 break;
2788
2789 default:
2790 break;
2791 }
2792
2793 /* This rtx may not be shared. If it has already been seen,
2794 replace it with a copy of itself. */
2795 if (flag_checking && RTX_FLAG (x, used))
2796 {
2797 error ("invalid rtl sharing found in the insn");
2798 debug_rtx (insn);
2799 error ("shared rtx");
2800 debug_rtx (x);
2801 internal_error ("internal consistency failure");
2802 }
2803 gcc_assert (!RTX_FLAG (x, used));
2804
2805 RTX_FLAG (x, used) = 1;
2806
2807 /* Now scan the subexpressions recursively. */
2808
2809 format_ptr = GET_RTX_FORMAT (code);
2810
2811 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2812 {
2813 switch (*format_ptr++)
2814 {
2815 case 'e':
2816 verify_rtx_sharing (XEXP (x, i), insn);
2817 break;
2818
2819 case 'E':
2820 if (XVEC (x, i) != NULL)
2821 {
2822 int j;
2823 int len = XVECLEN (x, i);
2824
2825 for (j = 0; j < len; j++)
2826 {
2827 /* We allow sharing of ASM_OPERANDS inside a single
2828 instruction. */
2829 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2830 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2831 == ASM_OPERANDS))
2832 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2833 else
2834 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2835 }
2836 }
2837 break;
2838 }
2839 }
2840 return;
2841 }
2842
2843 /* Reset used-flags for INSN. */
2844
2845 static void
2846 reset_insn_used_flags (rtx insn)
2847 {
2848 gcc_assert (INSN_P (insn));
2849 reset_used_flags (PATTERN (insn));
2850 reset_used_flags (REG_NOTES (insn));
2851 if (CALL_P (insn))
2852 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2853 }
2854
2855 /* Go through all the RTL insn bodies and clear all the USED bits. */
2856
2857 static void
2858 reset_all_used_flags (void)
2859 {
2860 rtx_insn *p;
2861
2862 for (p = get_insns (); p; p = NEXT_INSN (p))
2863 if (INSN_P (p))
2864 {
2865 rtx pat = PATTERN (p);
2866 if (GET_CODE (pat) != SEQUENCE)
2867 reset_insn_used_flags (p);
2868 else
2869 {
2870 gcc_assert (REG_NOTES (p) == NULL);
2871 for (int i = 0; i < XVECLEN (pat, 0); i++)
2872 {
2873 rtx insn = XVECEXP (pat, 0, i);
2874 if (INSN_P (insn))
2875 reset_insn_used_flags (insn);
2876 }
2877 }
2878 }
2879 }
2880
2881 /* Verify sharing in INSN. */
2882
2883 static void
2884 verify_insn_sharing (rtx insn)
2885 {
2886 gcc_assert (INSN_P (insn));
2887 verify_rtx_sharing (PATTERN (insn), insn);
2888 verify_rtx_sharing (REG_NOTES (insn), insn);
2889 if (CALL_P (insn))
2890 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2891 }
2892
2893 /* Go through all the RTL insn bodies and check that there is no unexpected
2894 sharing in between the subexpressions. */
2895
2896 DEBUG_FUNCTION void
2897 verify_rtl_sharing (void)
2898 {
2899 rtx_insn *p;
2900
2901 timevar_push (TV_VERIFY_RTL_SHARING);
2902
2903 reset_all_used_flags ();
2904
2905 for (p = get_insns (); p; p = NEXT_INSN (p))
2906 if (INSN_P (p))
2907 {
2908 rtx pat = PATTERN (p);
2909 if (GET_CODE (pat) != SEQUENCE)
2910 verify_insn_sharing (p);
2911 else
2912 for (int i = 0; i < XVECLEN (pat, 0); i++)
2913 {
2914 rtx insn = XVECEXP (pat, 0, i);
2915 if (INSN_P (insn))
2916 verify_insn_sharing (insn);
2917 }
2918 }
2919
2920 reset_all_used_flags ();
2921
2922 timevar_pop (TV_VERIFY_RTL_SHARING);
2923 }
2924
2925 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2926 Assumes the mark bits are cleared at entry. */
2927
2928 void
2929 unshare_all_rtl_in_chain (rtx_insn *insn)
2930 {
2931 for (; insn; insn = NEXT_INSN (insn))
2932 if (INSN_P (insn))
2933 {
2934 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2935 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2936 if (CALL_P (insn))
2937 CALL_INSN_FUNCTION_USAGE (insn)
2938 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2939 }
2940 }
2941
2942 /* Go through all virtual stack slots of a function and mark them as
2943 shared. We never replace the DECL_RTLs themselves with a copy,
2944 but expressions mentioned in a DECL_RTL cannot be shared with
2945 expressions in the instruction stream.
2946
2947 Note that reload may convert pseudo registers into memories in-place.
2948 Pseudo registers are always shared, but MEMs never are. Thus if we
2949 reset the used flags on MEMs in the instruction stream, we must set
2950 them again on MEMs that appear in DECL_RTLs. */
2951
2952 static void
2953 set_used_decls (tree blk)
2954 {
2955 tree t;
2956
2957 /* Mark decls. */
2958 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2959 if (DECL_RTL_SET_P (t))
2960 set_used_flags (DECL_RTL (t));
2961
2962 /* Now process sub-blocks. */
2963 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2964 set_used_decls (t);
2965 }
2966
2967 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2968 Recursively does the same for subexpressions. Uses
2969 copy_rtx_if_shared_1 to reduce stack space. */
2970
2971 rtx
2972 copy_rtx_if_shared (rtx orig)
2973 {
2974 copy_rtx_if_shared_1 (&orig);
2975 return orig;
2976 }
2977
2978 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2979 use. Recursively does the same for subexpressions. */
2980
2981 static void
2982 copy_rtx_if_shared_1 (rtx *orig1)
2983 {
2984 rtx x;
2985 int i;
2986 enum rtx_code code;
2987 rtx *last_ptr;
2988 const char *format_ptr;
2989 int copied = 0;
2990 int length;
2991
2992 /* Repeat is used to turn tail-recursion into iteration. */
2993 repeat:
2994 x = *orig1;
2995
2996 if (x == 0)
2997 return;
2998
2999 code = GET_CODE (x);
3000
3001 /* These types may be freely shared. */
3002
3003 switch (code)
3004 {
3005 case REG:
3006 case DEBUG_EXPR:
3007 case VALUE:
3008 CASE_CONST_ANY:
3009 case SYMBOL_REF:
3010 case LABEL_REF:
3011 case CODE_LABEL:
3012 case PC:
3013 case CC0:
3014 case RETURN:
3015 case SIMPLE_RETURN:
3016 case SCRATCH:
3017 /* SCRATCHes must be shared because they represent distinct values. */
3018 return;
3019 case CLOBBER:
3020 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3021 clobbers or clobbers of hard registers that originated as pseudos.
3022 This is needed to allow safe register renaming. */
3023 if (REG_P (XEXP (x, 0))
3024 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3025 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3026 return;
3027 break;
3028
3029 case CONST:
3030 if (shared_const_p (x))
3031 return;
3032 break;
3033
3034 case DEBUG_INSN:
3035 case INSN:
3036 case JUMP_INSN:
3037 case CALL_INSN:
3038 case NOTE:
3039 case BARRIER:
3040 /* The chain of insns is not being copied. */
3041 return;
3042
3043 default:
3044 break;
3045 }
3046
3047 /* This rtx may not be shared. If it has already been seen,
3048 replace it with a copy of itself. */
3049
3050 if (RTX_FLAG (x, used))
3051 {
3052 x = shallow_copy_rtx (x);
3053 copied = 1;
3054 }
3055 RTX_FLAG (x, used) = 1;
3056
3057 /* Now scan the subexpressions recursively.
3058 We can store any replaced subexpressions directly into X
3059 since we know X is not shared! Any vectors in X
3060 must be copied if X was copied. */
3061
3062 format_ptr = GET_RTX_FORMAT (code);
3063 length = GET_RTX_LENGTH (code);
3064 last_ptr = NULL;
3065
3066 for (i = 0; i < length; i++)
3067 {
3068 switch (*format_ptr++)
3069 {
3070 case 'e':
3071 if (last_ptr)
3072 copy_rtx_if_shared_1 (last_ptr);
3073 last_ptr = &XEXP (x, i);
3074 break;
3075
3076 case 'E':
3077 if (XVEC (x, i) != NULL)
3078 {
3079 int j;
3080 int len = XVECLEN (x, i);
3081
3082 /* Copy the vector iff we copied the rtx and the length
3083 is nonzero. */
3084 if (copied && len > 0)
3085 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3086
3087 /* Call recursively on all inside the vector. */
3088 for (j = 0; j < len; j++)
3089 {
3090 if (last_ptr)
3091 copy_rtx_if_shared_1 (last_ptr);
3092 last_ptr = &XVECEXP (x, i, j);
3093 }
3094 }
3095 break;
3096 }
3097 }
3098 *orig1 = x;
3099 if (last_ptr)
3100 {
3101 orig1 = last_ptr;
3102 goto repeat;
3103 }
3104 return;
3105 }
3106
3107 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3108
3109 static void
3110 mark_used_flags (rtx x, int flag)
3111 {
3112 int i, j;
3113 enum rtx_code code;
3114 const char *format_ptr;
3115 int length;
3116
3117 /* Repeat is used to turn tail-recursion into iteration. */
3118 repeat:
3119 if (x == 0)
3120 return;
3121
3122 code = GET_CODE (x);
3123
3124 /* These types may be freely shared so we needn't do any resetting
3125 for them. */
3126
3127 switch (code)
3128 {
3129 case REG:
3130 case DEBUG_EXPR:
3131 case VALUE:
3132 CASE_CONST_ANY:
3133 case SYMBOL_REF:
3134 case CODE_LABEL:
3135 case PC:
3136 case CC0:
3137 case RETURN:
3138 case SIMPLE_RETURN:
3139 return;
3140
3141 case DEBUG_INSN:
3142 case INSN:
3143 case JUMP_INSN:
3144 case CALL_INSN:
3145 case NOTE:
3146 case LABEL_REF:
3147 case BARRIER:
3148 /* The chain of insns is not being copied. */
3149 return;
3150
3151 default:
3152 break;
3153 }
3154
3155 RTX_FLAG (x, used) = flag;
3156
3157 format_ptr = GET_RTX_FORMAT (code);
3158 length = GET_RTX_LENGTH (code);
3159
3160 for (i = 0; i < length; i++)
3161 {
3162 switch (*format_ptr++)
3163 {
3164 case 'e':
3165 if (i == length-1)
3166 {
3167 x = XEXP (x, i);
3168 goto repeat;
3169 }
3170 mark_used_flags (XEXP (x, i), flag);
3171 break;
3172
3173 case 'E':
3174 for (j = 0; j < XVECLEN (x, i); j++)
3175 mark_used_flags (XVECEXP (x, i, j), flag);
3176 break;
3177 }
3178 }
3179 }
3180
3181 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3182 to look for shared sub-parts. */
3183
3184 void
3185 reset_used_flags (rtx x)
3186 {
3187 mark_used_flags (x, 0);
3188 }
3189
3190 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3191 to look for shared sub-parts. */
3192
3193 void
3194 set_used_flags (rtx x)
3195 {
3196 mark_used_flags (x, 1);
3197 }
3198 \f
3199 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3200 Return X or the rtx for the pseudo reg the value of X was copied into.
3201 OTHER must be valid as a SET_DEST. */
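/* For instance, if OTHER is a MEM and X is neither a constant, a REG
   nor a SUBREG, X is first copied into a fresh pseudo, since a store
   through OTHER could otherwise modify X.  */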
3202
3203 rtx
3204 make_safe_from (rtx x, rtx other)
3205 {
3206 while (1)
3207 switch (GET_CODE (other))
3208 {
3209 case SUBREG:
3210 other = SUBREG_REG (other);
3211 break;
3212 case STRICT_LOW_PART:
3213 case SIGN_EXTEND:
3214 case ZERO_EXTEND:
3215 other = XEXP (other, 0);
3216 break;
3217 default:
3218 goto done;
3219 }
3220 done:
3221 if ((MEM_P (other)
3222 && ! CONSTANT_P (x)
3223 && !REG_P (x)
3224 && GET_CODE (x) != SUBREG)
3225 || (REG_P (other)
3226 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3227 || reg_mentioned_p (other, x))))
3228 {
3229 rtx temp = gen_reg_rtx (GET_MODE (x));
3230 emit_move_insn (temp, x);
3231 return temp;
3232 }
3233 return x;
3234 }
3235 \f
3236 /* Emission of insns (adding them to the doubly-linked list). */
3237
3238 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3239
3240 rtx_insn *
3241 get_last_insn_anywhere (void)
3242 {
3243 struct sequence_stack *seq;
3244 for (seq = get_current_sequence (); seq; seq = seq->next)
3245 if (seq->last != 0)
3246 return seq->last;
3247 return 0;
3248 }
3249
3250 /* Return the first nonnote insn emitted in current sequence or current
3251 function. This routine looks inside SEQUENCEs. */
3252
3253 rtx_insn *
3254 get_first_nonnote_insn (void)
3255 {
3256 rtx_insn *insn = get_insns ();
3257
3258 if (insn)
3259 {
3260 if (NOTE_P (insn))
3261 for (insn = next_insn (insn);
3262 insn && NOTE_P (insn);
3263 insn = next_insn (insn))
3264 continue;
3265 else
3266 {
3267 if (NONJUMP_INSN_P (insn)
3268 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3269 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3270 }
3271 }
3272
3273 return insn;
3274 }
3275
3276 /* Return the last nonnote insn emitted in current sequence or current
3277 function. This routine looks inside SEQUENCEs. */
3278
3279 rtx_insn *
3280 get_last_nonnote_insn (void)
3281 {
3282 rtx_insn *insn = get_last_insn ();
3283
3284 if (insn)
3285 {
3286 if (NOTE_P (insn))
3287 for (insn = previous_insn (insn);
3288 insn && NOTE_P (insn);
3289 insn = previous_insn (insn))
3290 continue;
3291 else
3292 {
3293 if (NONJUMP_INSN_P (insn))
3294 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3295 insn = seq->insn (seq->len () - 1);
3296 }
3297 }
3298
3299 return insn;
3300 }
3301
3302 /* Return the number of actual (non-debug) insns emitted in this
3303 function. */
3304
3305 int
3306 get_max_insn_count (void)
3307 {
3308 int n = cur_insn_uid;
3309
3310 /* The table size must be stable across -g, to avoid codegen
3311 differences due to debug insns, and not be affected by
3312 -fmin-insn-uid, to avoid excessive table size and to simplify
3313 debugging of -fcompare-debug failures. */
3314 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3315 n -= cur_debug_insn_uid;
3316 else
3317 n -= MIN_NONDEBUG_INSN_UID;
3318
3319 return n;
3320 }
3321
3322 \f
3323 /* Return the next insn. If the next insn holds a SEQUENCE, return
3324 the first insn of the sequence. */
3325
3326 rtx_insn *
3327 next_insn (rtx_insn *insn)
3328 {
3329 if (insn)
3330 {
3331 insn = NEXT_INSN (insn);
3332 if (insn && NONJUMP_INSN_P (insn)
3333 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3334 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3335 }
3336
3337 return insn;
3338 }
3339
3340 /* Return the previous insn. If the previous insn holds a SEQUENCE,
3341 return the last insn of the sequence. */
3342
3343 rtx_insn *
3344 previous_insn (rtx_insn *insn)
3345 {
3346 if (insn)
3347 {
3348 insn = PREV_INSN (insn);
3349 if (insn && NONJUMP_INSN_P (insn))
3350 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3351 insn = seq->insn (seq->len () - 1);
3352 }
3353
3354 return insn;
3355 }
3356
3357 /* Return the next insn after INSN that is not a NOTE. This routine does not
3358 look inside SEQUENCEs. */
3359
3360 rtx_insn *
3361 next_nonnote_insn (rtx_insn *insn)
3362 {
3363 while (insn)
3364 {
3365 insn = NEXT_INSN (insn);
3366 if (insn == 0 || !NOTE_P (insn))
3367 break;
3368 }
3369
3370 return insn;
3371 }
3372
3373 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3374 routine does not look inside SEQUENCEs. */
3375
3376 rtx_insn *
3377 next_nondebug_insn (rtx_insn *insn)
3378 {
3379 while (insn)
3380 {
3381 insn = NEXT_INSN (insn);
3382 if (insn == 0 || !DEBUG_INSN_P (insn))
3383 break;
3384 }
3385
3386 return insn;
3387 }
3388
3389 /* Return the previous insn before INSN that is not a NOTE. This routine does
3390 not look inside SEQUENCEs. */
3391
3392 rtx_insn *
3393 prev_nonnote_insn (rtx_insn *insn)
3394 {
3395 while (insn)
3396 {
3397 insn = PREV_INSN (insn);
3398 if (insn == 0 || !NOTE_P (insn))
3399 break;
3400 }
3401
3402 return insn;
3403 }
3404
3405 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3406 This routine does not look inside SEQUENCEs. */
3407
3408 rtx_insn *
3409 prev_nondebug_insn (rtx_insn *insn)
3410 {
3411 while (insn)
3412 {
3413 insn = PREV_INSN (insn);
3414 if (insn == 0 || !DEBUG_INSN_P (insn))
3415 break;
3416 }
3417
3418 return insn;
3419 }
3420
3421 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3422 This routine does not look inside SEQUENCEs. */
3423
3424 rtx_insn *
3425 next_nonnote_nondebug_insn (rtx_insn *insn)
3426 {
3427 while (insn)
3428 {
3429 insn = NEXT_INSN (insn);
3430 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3431 break;
3432 }
3433
3434 return insn;
3435 }
3436
3437 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3438 but stop the search before we enter another basic block. This
3439 routine does not look inside SEQUENCEs. */
3440
3441 rtx_insn *
3442 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3443 {
3444 while (insn)
3445 {
3446 insn = NEXT_INSN (insn);
3447 if (insn == 0)
3448 break;
3449 if (DEBUG_INSN_P (insn))
3450 continue;
3451 if (!NOTE_P (insn))
3452 break;
3453 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3454 return NULL;
3455 }
3456
3457 return insn;
3458 }
3459
3460 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3461 This routine does not look inside SEQUENCEs. */
3462
3463 rtx_insn *
3464 prev_nonnote_nondebug_insn (rtx_insn *insn)
3465 {
3466 while (insn)
3467 {
3468 insn = PREV_INSN (insn);
3469 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3470 break;
3471 }
3472
3473 return insn;
3474 }
3475
3476 /* Return the previous insn before INSN that is not a NOTE nor
3477 DEBUG_INSN, but stop the search before we enter another basic
3478 block. This routine does not look inside SEQUENCEs. */
3479
3480 rtx_insn *
3481 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3482 {
3483 while (insn)
3484 {
3485 insn = PREV_INSN (insn);
3486 if (insn == 0)
3487 break;
3488 if (DEBUG_INSN_P (insn))
3489 continue;
3490 if (!NOTE_P (insn))
3491 break;
3492 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3493 return NULL;
3494 }
3495
3496 return insn;
3497 }
3498
3499 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3500 or 0, if there is none. This routine does not look inside
3501 SEQUENCEs. */
3502
3503 rtx_insn *
3504 next_real_insn (rtx uncast_insn)
3505 {
3506 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3507
3508 while (insn)
3509 {
3510 insn = NEXT_INSN (insn);
3511 if (insn == 0 || INSN_P (insn))
3512 break;
3513 }
3514
3515 return insn;
3516 }
3517
3518 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3519 or 0, if there is none. This routine does not look inside
3520 SEQUENCEs. */
3521
3522 rtx_insn *
3523 prev_real_insn (rtx_insn *insn)
3524 {
3525 while (insn)
3526 {
3527 insn = PREV_INSN (insn);
3528 if (insn == 0 || INSN_P (insn))
3529 break;
3530 }
3531
3532 return insn;
3533 }
3534
3535 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3536 This routine does not look inside SEQUENCEs. */
3537
3538 rtx_call_insn *
3539 last_call_insn (void)
3540 {
3541 rtx_insn *insn;
3542
3543 for (insn = get_last_insn ();
3544 insn && !CALL_P (insn);
3545 insn = PREV_INSN (insn))
3546 ;
3547
3548 return safe_as_a <rtx_call_insn *> (insn);
3549 }
3550
3551 /* Return nonzero if INSN really does something. After reload this
3552 excludes standalone USE and CLOBBER insns. Used by
3553 next_active_insn and prev_active_insn below. */
3554
3555 int
3556 active_insn_p (const rtx_insn *insn)
3557 {
3558 return (CALL_P (insn) || JUMP_P (insn)
3559 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3560 || (NONJUMP_INSN_P (insn)
3561 && (! reload_completed
3562 || (GET_CODE (PATTERN (insn)) != USE
3563 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3564 }
3565
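/* Return the next active insn after INSN, or 0 if there is none.  This
   routine does not look inside SEQUENCEs.  */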
3566 rtx_insn *
3567 next_active_insn (rtx_insn *insn)
3568 {
3569 while (insn)
3570 {
3571 insn = NEXT_INSN (insn);
3572 if (insn == 0 || active_insn_p (insn))
3573 break;
3574 }
3575
3576 return insn;
3577 }
3578
3579 /* Find the last insn before INSN that really does something. This routine
3580 does not look inside SEQUENCEs. After reload this also skips over
3581 standalone USE and CLOBBER insns. */
3582
3583 rtx_insn *
3584 prev_active_insn (rtx_insn *insn)
3585 {
3586 while (insn)
3587 {
3588 insn = PREV_INSN (insn);
3589 if (insn == 0 || active_insn_p (insn))
3590 break;
3591 }
3592
3593 return insn;
3594 }
3595 \f
3596 /* Return the next insn that uses CC0 after INSN, which is assumed to
3597 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3598 applied to the result of this function should yield INSN).
3599
3600 Normally, this is simply the next insn. However, if a REG_CC_USER note
3601 is present, it contains the insn that uses CC0.
3602
3603 Return 0 if we can't find the insn. */
3604
3605 rtx_insn *
3606 next_cc0_user (rtx_insn *insn)
3607 {
3608 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3609
3610 if (note)
3611 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3612
3613 insn = next_nonnote_insn (insn);
3614 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3615 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3616
3617 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3618 return insn;
3619
3620 return 0;
3621 }
3622
3623 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3624 note, it is the previous insn. */
3625
3626 rtx_insn *
3627 prev_cc0_setter (rtx_insn *insn)
3628 {
3629 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3630
3631 if (note)
3632 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3633
3634 insn = prev_nonnote_insn (insn);
3635 gcc_assert (sets_cc0_p (PATTERN (insn)));
3636
3637 return insn;
3638 }
3639
3640 /* Return true if X contains an RTX_AUTOINC rtx whose operand matches REG. */
3641
3642 static int
3643 find_auto_inc (const_rtx x, const_rtx reg)
3644 {
3645 subrtx_iterator::array_type array;
3646 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3647 {
3648 const_rtx x = *iter;
3649 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3650 && rtx_equal_p (reg, XEXP (x, 0)))
3651 return true;
3652 }
3653 return false;
3654 }
3655
3656 /* Increment the label uses for all labels present in rtx X. */
3657
3658 static void
3659 mark_label_nuses (rtx x)
3660 {
3661 enum rtx_code code;
3662 int i, j;
3663 const char *fmt;
3664
3665 code = GET_CODE (x);
3666 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3667 LABEL_NUSES (label_ref_label (x))++;
3668
3669 fmt = GET_RTX_FORMAT (code);
3670 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3671 {
3672 if (fmt[i] == 'e')
3673 mark_label_nuses (XEXP (x, i));
3674 else if (fmt[i] == 'E')
3675 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3676 mark_label_nuses (XVECEXP (x, i, j));
3677 }
3678 }
3679
3680 \f
3681 /* Try splitting insns that can be split for better scheduling.
3682 PAT is the pattern which might split.
3683 TRIAL is the insn providing PAT.
3684 LAST is nonzero if we should return the last insn of the sequence produced.
3685
3686 If this routine succeeds in splitting, it returns the first or last
3687 replacement insn depending on the value of LAST. Otherwise, it
3688 returns TRIAL. If the insn to be returned can be split, it will be. */
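/* A typical use (a sketch): a pass that wants INSN fully split may call
   insn = try_split (PATTERN (insn), insn, 1);
   and continue from the last insn of the resulting sequence, exactly as
   the recursive call near the end of this function does.  */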
3689
3690 rtx_insn *
3691 try_split (rtx pat, rtx_insn *trial, int last)
3692 {
3693 rtx_insn *before, *after;
3694 rtx note;
3695 rtx_insn *seq, *tem;
3696 profile_probability probability;
3697 rtx_insn *insn_last, *insn;
3698 int njumps = 0;
3699 rtx_insn *call_insn = NULL;
3700
3701 /* We're not good at redistributing frame information. */
3702 if (RTX_FRAME_RELATED_P (trial))
3703 return trial;
3704
3705 if (any_condjump_p (trial)
3706 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3707 split_branch_probability
3708 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3709 else
3710 split_branch_probability = profile_probability::uninitialized ();
3711
3712 probability = split_branch_probability;
3713
3714 seq = split_insns (pat, trial);
3715
3716 split_branch_probability = profile_probability::uninitialized ();
3717
3718 if (!seq)
3719 return trial;
3720
3721 /* Avoid infinite loop if any insn of the result matches
3722 the original pattern. */
3723 insn_last = seq;
3724 while (1)
3725 {
3726 if (INSN_P (insn_last)
3727 && rtx_equal_p (PATTERN (insn_last), pat))
3728 return trial;
3729 if (!NEXT_INSN (insn_last))
3730 break;
3731 insn_last = NEXT_INSN (insn_last);
3732 }
3733
3734 /* We will be adding the new sequence to the function. The splitters
3735 may have introduced invalid RTL sharing, so unshare the sequence now. */
3736 unshare_all_rtl_in_chain (seq);
3737
3738 /* Mark labels and copy flags. */
3739 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3740 {
3741 if (JUMP_P (insn))
3742 {
3743 if (JUMP_P (trial))
3744 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3745 mark_jump_label (PATTERN (insn), insn, 0);
3746 njumps++;
3747 if (probability.initialized_p ()
3748 && any_condjump_p (insn)
3749 && !find_reg_note (insn, REG_BR_PROB, 0))
3750 {
3751 /* We can preserve the REG_BR_PROB notes only if exactly
3752 one jump is created, otherwise the machine description
3753 is responsible for this step using the
3754 split_branch_probability variable. */
3755 gcc_assert (njumps == 1);
3756 add_reg_br_prob_note (insn, probability);
3757 }
3758 }
3759 }
3760
3761 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3762 in SEQ and copy any additional information across. */
3763 if (CALL_P (trial))
3764 {
3765 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3766 if (CALL_P (insn))
3767 {
3768 rtx_insn *next;
3769 rtx *p;
3770
3771 gcc_assert (call_insn == NULL_RTX);
3772 call_insn = insn;
3773
3774 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3775 target may have explicitly specified. */
3776 p = &CALL_INSN_FUNCTION_USAGE (insn);
3777 while (*p)
3778 p = &XEXP (*p, 1);
3779 *p = CALL_INSN_FUNCTION_USAGE (trial);
3780
3781 /* If the old call was a sibling call, the new one must
3782 be too. */
3783 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3784
3785 /* If the new call is the last instruction in the sequence,
3786 it will effectively replace the old call in-situ. Otherwise
3787 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3788 so that it comes immediately after the new call. */
3789 if (NEXT_INSN (insn))
3790 for (next = NEXT_INSN (trial);
3791 next && NOTE_P (next);
3792 next = NEXT_INSN (next))
3793 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3794 {
3795 remove_insn (next);
3796 add_insn_after (next, insn, NULL);
3797 break;
3798 }
3799 }
3800 }
3801
3802 /* Copy notes, particularly those related to the CFG. */
3803 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3804 {
3805 switch (REG_NOTE_KIND (note))
3806 {
3807 case REG_EH_REGION:
3808 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3809 break;
3810
3811 case REG_NORETURN:
3812 case REG_SETJMP:
3813 case REG_TM:
3814 case REG_CALL_NOCF_CHECK:
3815 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3816 {
3817 if (CALL_P (insn))
3818 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3819 }
3820 break;
3821
3822 case REG_NON_LOCAL_GOTO:
3823 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3824 {
3825 if (JUMP_P (insn))
3826 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3827 }
3828 break;
3829
3830 case REG_INC:
3831 if (!AUTO_INC_DEC)
3832 break;
3833
3834 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3835 {
3836 rtx reg = XEXP (note, 0);
3837 if (!FIND_REG_INC_NOTE (insn, reg)
3838 && find_auto_inc (PATTERN (insn), reg))
3839 add_reg_note (insn, REG_INC, reg);
3840 }
3841 break;
3842
3843 case REG_ARGS_SIZE:
3844 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3845 break;
3846
3847 case REG_CALL_DECL:
3848 gcc_assert (call_insn != NULL_RTX);
3849 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3850 break;
3851
3852 default:
3853 break;
3854 }
3855 }
3856
3857 /* If there are LABELS inside the split insns, increment the
3858 usage count so we don't delete the label. */
3859 if (INSN_P (trial))
3860 {
3861 insn = insn_last;
3862 while (insn != NULL_RTX)
3863 {
3864 /* JUMP_P insns have already been "marked" above. */
3865 if (NONJUMP_INSN_P (insn))
3866 mark_label_nuses (PATTERN (insn));
3867
3868 insn = PREV_INSN (insn);
3869 }
3870 }
3871
3872 before = PREV_INSN (trial);
3873 after = NEXT_INSN (trial);
3874
3875 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3876
3877 delete_insn (trial);
3878
3879 /* Recursively call try_split for each new insn created; by the
3880 time control returns here that insn will be fully split, so
3881 pass a nonzero LAST and continue from the insn after the one returned.
3882 We can't use next_active_insn here since AFTER may be a note.
3883 Ignore deleted insns, which can occur if not optimizing. */
3884 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3885 if (! tem->deleted () && INSN_P (tem))
3886 tem = try_split (PATTERN (tem), tem, 1);
3887
3888 /* Return either the first or the last insn, depending on which was
3889 requested. */
3890 return last
3891 ? (after ? PREV_INSN (after) : get_last_insn ())
3892 : NEXT_INSN (before);
3893 }
3894 \f
3895 /* Make and return an INSN rtx, initializing all its slots.
3896 Store PATTERN in the pattern slots. */
3897
3898 rtx_insn *
3899 make_insn_raw (rtx pattern)
3900 {
3901 rtx_insn *insn;
3902
3903 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3904
3905 INSN_UID (insn) = cur_insn_uid++;
3906 PATTERN (insn) = pattern;
3907 INSN_CODE (insn) = -1;
3908 REG_NOTES (insn) = NULL;
3909 INSN_LOCATION (insn) = curr_insn_location ();
3910 BLOCK_FOR_INSN (insn) = NULL;
3911
3912 #ifdef ENABLE_RTL_CHECKING
3913 if (insn
3914 && INSN_P (insn)
3915 && (returnjump_p (insn)
3916 || (GET_CODE (insn) == SET
3917 && SET_DEST (insn) == pc_rtx)))
3918 {
3919 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3920 debug_rtx (insn);
3921 }
3922 #endif
3923
3924 return insn;
3925 }
3926
3927 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3928
3929 static rtx_insn *
3930 make_debug_insn_raw (rtx pattern)
3931 {
3932 rtx_debug_insn *insn;
3933
3934 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3935 INSN_UID (insn) = cur_debug_insn_uid++;
3936 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3937 INSN_UID (insn) = cur_insn_uid++;
3938
3939 PATTERN (insn) = pattern;
3940 INSN_CODE (insn) = -1;
3941 REG_NOTES (insn) = NULL;
3942 INSN_LOCATION (insn) = curr_insn_location ();
3943 BLOCK_FOR_INSN (insn) = NULL;
3944
3945 return insn;
3946 }
3947
3948 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3949
3950 static rtx_insn *
3951 make_jump_insn_raw (rtx pattern)
3952 {
3953 rtx_jump_insn *insn;
3954
3955 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3956 INSN_UID (insn) = cur_insn_uid++;
3957
3958 PATTERN (insn) = pattern;
3959 INSN_CODE (insn) = -1;
3960 REG_NOTES (insn) = NULL;
3961 JUMP_LABEL (insn) = NULL;
3962 INSN_LOCATION (insn) = curr_insn_location ();
3963 BLOCK_FOR_INSN (insn) = NULL;
3964
3965 return insn;
3966 }
3967
3968 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3969
3970 static rtx_insn *
3971 make_call_insn_raw (rtx pattern)
3972 {
3973 rtx_call_insn *insn;
3974
3975 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3976 INSN_UID (insn) = cur_insn_uid++;
3977
3978 PATTERN (insn) = pattern;
3979 INSN_CODE (insn) = -1;
3980 REG_NOTES (insn) = NULL;
3981 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3982 INSN_LOCATION (insn) = curr_insn_location ();
3983 BLOCK_FOR_INSN (insn) = NULL;
3984
3985 return insn;
3986 }
3987
3988 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3989
3990 static rtx_note *
3991 make_note_raw (enum insn_note subtype)
3992 {
3993 /* Some notes are never created this way at all. These notes are
3994 only created by patching out insns. */
3995 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3996 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3997
3998 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3999 INSN_UID (note) = cur_insn_uid++;
4000 NOTE_KIND (note) = subtype;
4001 BLOCK_FOR_INSN (note) = NULL;
4002 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4003 return note;
4004 }
4005 \f
4006 /* Link INSN into the doubly-linked list between PREV and NEXT.
4007 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4008 but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL. */
4009
4010 static inline void
4011 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4012 {
4013 SET_PREV_INSN (insn) = prev;
4014 SET_NEXT_INSN (insn) = next;
4015 if (prev != NULL)
4016 {
4017 SET_NEXT_INSN (prev) = insn;
4018 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4019 {
4020 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4021 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4022 }
4023 }
4024 if (next != NULL)
4025 {
4026 SET_PREV_INSN (next) = insn;
4027 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4028 {
4029 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4030 SET_PREV_INSN (sequence->insn (0)) = insn;
4031 }
4032 }
4033
4034 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4035 {
4036 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4037 SET_PREV_INSN (sequence->insn (0)) = prev;
4038 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4039 }
4040 }
4041
4042 /* Add INSN to the end of the doubly-linked list.
4043 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4044
4045 void
4046 add_insn (rtx_insn *insn)
4047 {
4048 rtx_insn *prev = get_last_insn ();
4049 link_insn_into_chain (insn, prev, NULL);
4050 if (NULL == get_insns ())
4051 set_first_insn (insn);
4052 set_last_insn (insn);
4053 }
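
/* For illustration, a minimal sketch of direct use (the SET and the
   registers DEST and SRC are placeholders; most callers go through the
   emit_* entry points below instead):

	rtx_insn *insn = make_insn_raw (gen_rtx_SET (dest, src));
	add_insn (insn);
	gcc_assert (get_last_insn () == insn);
 */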
4054
4055 /* Add INSN into the doubly-linked list after insn AFTER. */
4056
4057 static void
4058 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4059 {
4060 rtx_insn *next = NEXT_INSN (after);
4061
4062 gcc_assert (!optimize || !after->deleted ());
4063
4064 link_insn_into_chain (insn, after, next);
4065
4066 if (next == NULL)
4067 {
4068 struct sequence_stack *seq;
4069
4070 for (seq = get_current_sequence (); seq; seq = seq->next)
4071 if (after == seq->last)
4072 {
4073 seq->last = insn;
4074 break;
4075 }
4076 }
4077 }
4078
4079 /* Add INSN into the doubly-linked list before insn BEFORE. */
4080
4081 static void
4082 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4083 {
4084 rtx_insn *prev = PREV_INSN (before);
4085
4086 gcc_assert (!optimize || !before->deleted ());
4087
4088 link_insn_into_chain (insn, prev, before);
4089
4090 if (prev == NULL)
4091 {
4092 struct sequence_stack *seq;
4093
4094 for (seq = get_current_sequence (); seq; seq = seq->next)
4095 if (before == seq->first)
4096 {
4097 seq->first = insn;
4098 break;
4099 }
4100
4101 gcc_assert (seq);
4102 }
4103 }
4104
4105 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4106 If BB is NULL, an attempt is made to infer the bb from AFTER.
4107
4108 This and the next function should be the only functions called
4109 to insert an insn once delay slots have been filled since only
4110 they know how to update a SEQUENCE. */
4111
4112 void
4113 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4114 {
4115 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4116 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4117 add_insn_after_nobb (insn, after);
4118 if (!BARRIER_P (after)
4119 && !BARRIER_P (insn)
4120 && (bb = BLOCK_FOR_INSN (after)))
4121 {
4122 set_block_for_insn (insn, bb);
4123 if (INSN_P (insn))
4124 df_insn_rescan (insn);
4125 /* Should not happen, as the first insn in the BB is always
4126 either a NOTE or a LABEL. */
4127 if (BB_END (bb) == after
4128 /* Avoid clobbering of structure when creating new BB. */
4129 && !BARRIER_P (insn)
4130 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4131 BB_END (bb) = insn;
4132 }
4133 }
4134
4135 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4136 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4137
4138 This and the previous function should be the only functions called
4139 to insert an insn once delay slots have been filled since only
4140 they know how to update a SEQUENCE. */
4141
4142 void
4143 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4144 {
4145 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4146 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4147 add_insn_before_nobb (insn, before);
4148
4149 if (!bb
4150 && !BARRIER_P (before)
4151 && !BARRIER_P (insn))
4152 bb = BLOCK_FOR_INSN (before);
4153
4154 if (bb)
4155 {
4156 set_block_for_insn (insn, bb);
4157 if (INSN_P (insn))
4158 df_insn_rescan (insn);
4159 /* Should not happen, as the first insn in the BB is always either a
4160 NOTE or a LABEL. */
4161 gcc_assert (BB_HEAD (bb) != insn
4162 /* Avoid clobbering of structure when creating new BB. */
4163 || BARRIER_P (insn)
4164 || NOTE_INSN_BASIC_BLOCK_P (insn));
4165 }
4166 }
4167
4168 /* Replace INSN with a NOTE_INSN_DELETED note. */
4169
4170 void
4171 set_insn_deleted (rtx insn)
4172 {
4173 if (INSN_P (insn))
4174 df_insn_delete (as_a <rtx_insn *> (insn));
4175 PUT_CODE (insn, NOTE);
4176 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4177 }
4178
4179
4180 /* Unlink INSN from the insn chain.
4181
4182 This function knows how to handle sequences.
4183
4184 This function does not invalidate data flow information associated with
4185 INSN (i.e. does not call df_insn_delete). That makes this function
4186 usable for merely disconnecting an insn from the chain, so that it
4187 can be re-emitted elsewhere later.
4188
4189 To later insert INSN elsewhere in the insn chain via add_insn and
4190 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4191 the caller. Nullifying them here breaks many insn chain walks.
4192
4193 To really delete an insn and related DF information, use delete_insn. */
4194
4195 void
4196 remove_insn (rtx uncast_insn)
4197 {
4198 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4199 rtx_insn *next = NEXT_INSN (insn);
4200 rtx_insn *prev = PREV_INSN (insn);
4201 basic_block bb;
4202
4203 if (prev)
4204 {
4205 SET_NEXT_INSN (prev) = next;
4206 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4207 {
4208 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4209 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4210 }
4211 }
4212 else
4213 {
4214 struct sequence_stack *seq;
4215
4216 for (seq = get_current_sequence (); seq; seq = seq->next)
4217 if (insn == seq->first)
4218 {
4219 seq->first = next;
4220 break;
4221 }
4222
4223 gcc_assert (seq);
4224 }
4225
4226 if (next)
4227 {
4228 SET_PREV_INSN (next) = prev;
4229 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4230 {
4231 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4232 SET_PREV_INSN (sequence->insn (0)) = prev;
4233 }
4234 }
4235 else
4236 {
4237 struct sequence_stack *seq;
4238
4239 for (seq = get_current_sequence (); seq; seq = seq->next)
4240 if (insn == seq->last)
4241 {
4242 seq->last = prev;
4243 break;
4244 }
4245
4246 gcc_assert (seq);
4247 }
4248
4249 /* Fix up basic block boundaries, if necessary. */
4250 if (!BARRIER_P (insn)
4251 && (bb = BLOCK_FOR_INSN (insn)))
4252 {
4253 if (BB_HEAD (bb) == insn)
4254 {
4255 /* Never ever delete the basic block note without deleting whole
4256 basic block. */
4257 gcc_assert (!NOTE_P (insn));
4258 BB_HEAD (bb) = next;
4259 }
4260 if (BB_END (bb) == insn)
4261 BB_END (bb) = prev;
4262 }
4263 }
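
/* For illustration, a sketch of the "unlink and re-emit" pattern this
   function enables (INSN and AFTER are placeholders).  As noted above,
   the caller must clear the chain pointers before re-inserting:

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, after, NULL);
 */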
4264
4265 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4266
4267 void
4268 add_function_usage_to (rtx call_insn, rtx call_fusage)
4269 {
4270 gcc_assert (call_insn && CALL_P (call_insn));
4271
4272 /* Put the register usage information on the CALL. If there is already
4273 some usage information, put ours at the end. */
4274 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4275 {
4276 rtx link;
4277
4278 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4279 link = XEXP (link, 1))
4280 ;
4281
4282 XEXP (link, 1) = call_fusage;
4283 }
4284 else
4285 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4286 }
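
/* For illustration, a sketch of how a caller might build CALL_FUSAGE
   as a chain of EXPR_LISTs of USE expressions (REG is a placeholder):

	rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
					gen_rtx_USE (VOIDmode, reg),
					NULL_RTX);
	add_function_usage_to (call_insn, fusage);
 */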
4287
4288 /* Delete all insns made since FROM.
4289 FROM becomes the new last instruction. */
4290
4291 void
4292 delete_insns_since (rtx_insn *from)
4293 {
4294 if (from == 0)
4295 set_first_insn (0);
4296 else
4297 SET_NEXT_INSN (from) = 0;
4298 set_last_insn (from);
4299 }
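
/* For illustration, the usual checkpoint/rollback idiom built on this
   function (expand_something is a placeholder for any emitter that may
   fail partway through):

	rtx_insn *last = get_last_insn ();
	rtx result = expand_something ();
	if (result == NULL_RTX)
	  delete_insns_since (last);
 */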
4300
4301 /* This function is deprecated; please use sequences instead.
4302
4303 Move a consecutive bunch of insns to a different place in the chain.
4304 The insns to be moved are those between FROM and TO.
4305 They are moved to a new position after the insn AFTER.
4306 AFTER must not be FROM or TO or any insn in between.
4307
4308 This function does not know about SEQUENCEs and hence should not be
4309 called after delay-slot filling has been done. */
4310
4311 void
4312 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4313 {
4314 if (flag_checking)
4315 {
4316 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4317 gcc_assert (after != x);
4318 gcc_assert (after != to);
4319 }
4320
4321 /* Splice this bunch out of where it is now. */
4322 if (PREV_INSN (from))
4323 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4324 if (NEXT_INSN (to))
4325 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4326 if (get_last_insn () == to)
4327 set_last_insn (PREV_INSN (from));
4328 if (get_insns () == from)
4329 set_first_insn (NEXT_INSN (to));
4330
4331 /* Make the new neighbors point to it and it to them. */
4332 if (NEXT_INSN (after))
4333 SET_PREV_INSN (NEXT_INSN (after)) = to;
4334
4335 SET_NEXT_INSN (to) = NEXT_INSN (after);
4336 SET_PREV_INSN (from) = after;
4337 SET_NEXT_INSN (after) = from;
4338 if (after == get_last_insn ())
4339 set_last_insn (to);
4340 }
4341
4342 /* Same as function above, but take care to update BB boundaries. */
4343 void
4344 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4345 {
4346 rtx_insn *prev = PREV_INSN (from);
4347 basic_block bb, bb2;
4348
4349 reorder_insns_nobb (from, to, after);
4350
4351 if (!BARRIER_P (after)
4352 && (bb = BLOCK_FOR_INSN (after)))
4353 {
4354 rtx_insn *x;
4355 df_set_bb_dirty (bb);
4356
4357 if (!BARRIER_P (from)
4358 && (bb2 = BLOCK_FOR_INSN (from)))
4359 {
4360 if (BB_END (bb2) == to)
4361 BB_END (bb2) = prev;
4362 df_set_bb_dirty (bb2);
4363 }
4364
4365 if (BB_END (bb) == after)
4366 BB_END (bb) = to;
4367
4368 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4369 if (!BARRIER_P (x))
4370 df_insn_change_bb (x, bb);
4371 }
4372 }
4373
4374 \f
4375 /* Emit insn(s) of given code and pattern
4376 at a specified place within the doubly-linked list.
4377
4378 All of the emit_foo global entry points accept an object
4379 X which is either an insn list or a PATTERN of a single
4380 instruction.
4381
4382 There are thus a few canonical ways to generate code and
4383 emit it at a specific place in the instruction stream. For
4384 example, consider the instruction named SPOT and the fact that
4385 we would like to emit some instructions before SPOT. We might
4386 do it like this:
4387
4388 start_sequence ();
4389 ... emit the new instructions ...
4390 insns_head = get_insns ();
4391 end_sequence ();
4392
4393 emit_insn_before (insns_head, SPOT);
4394
4395 It used to be common to generate SEQUENCE rtl instead, but that
4396 is a relic of the past which no longer occurs. The reason is that
4397 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4398 generated would almost certainly die right after it was created. */
4399
4400 static rtx_insn *
4401 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4402 rtx_insn *(*make_raw) (rtx))
4403 {
4404 rtx_insn *insn;
4405
4406 gcc_assert (before);
4407
4408 if (x == NULL_RTX)
4409 return safe_as_a <rtx_insn *> (last);
4410
4411 switch (GET_CODE (x))
4412 {
4413 case DEBUG_INSN:
4414 case INSN:
4415 case JUMP_INSN:
4416 case CALL_INSN:
4417 case CODE_LABEL:
4418 case BARRIER:
4419 case NOTE:
4420 insn = as_a <rtx_insn *> (x);
4421 while (insn)
4422 {
4423 rtx_insn *next = NEXT_INSN (insn);
4424 add_insn_before (insn, before, bb);
4425 last = insn;
4426 insn = next;
4427 }
4428 break;
4429
4430 #ifdef ENABLE_RTL_CHECKING
4431 case SEQUENCE:
4432 gcc_unreachable ();
4433 break;
4434 #endif
4435
4436 default:
4437 last = (*make_raw) (x);
4438 add_insn_before (last, before, bb);
4439 break;
4440 }
4441
4442 return safe_as_a <rtx_insn *> (last);
4443 }
4444
4445 /* Make X be output before the instruction BEFORE. */
4446
4447 rtx_insn *
4448 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4449 {
4450 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4451 }
4452
4453 /* Make an instruction with body X and code JUMP_INSN
4454 and output it before the instruction BEFORE. */
4455
4456 rtx_jump_insn *
4457 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4458 {
4459 return as_a <rtx_jump_insn *> (
4460 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4461 make_jump_insn_raw));
4462 }
4463
4464 /* Make an instruction with body X and code CALL_INSN
4465 and output it before the instruction BEFORE. */
4466
4467 rtx_insn *
4468 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4469 {
4470 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4471 make_call_insn_raw);
4472 }
4473
4474 /* Make an instruction with body X and code DEBUG_INSN
4475 and output it before the instruction BEFORE. */
4476
4477 rtx_insn *
4478 emit_debug_insn_before_noloc (rtx x, rtx before)
4479 {
4480 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4481 make_debug_insn_raw);
4482 }
4483
4484 /* Make an insn of code BARRIER
4485 and output it before the insn BEFORE. */
4486
4487 rtx_barrier *
4488 emit_barrier_before (rtx before)
4489 {
4490 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4491
4492 INSN_UID (insn) = cur_insn_uid++;
4493
4494 add_insn_before (insn, before, NULL);
4495 return insn;
4496 }
4497
4498 /* Emit the label LABEL before the insn BEFORE. */
4499
4500 rtx_code_label *
4501 emit_label_before (rtx label, rtx_insn *before)
4502 {
4503 gcc_checking_assert (INSN_UID (label) == 0);
4504 INSN_UID (label) = cur_insn_uid++;
4505 add_insn_before (label, before, NULL);
4506 return as_a <rtx_code_label *> (label);
4507 }
4508 \f
4509 /* Helper for emit_insn_after; handles lists of instructions
4510 efficiently. */
4511
4512 static rtx_insn *
4513 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4514 {
4515 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4516 rtx_insn *last;
4517 rtx_insn *after_after;
4518 if (!bb && !BARRIER_P (after))
4519 bb = BLOCK_FOR_INSN (after);
4520
4521 if (bb)
4522 {
4523 df_set_bb_dirty (bb);
4524 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4525 if (!BARRIER_P (last))
4526 {
4527 set_block_for_insn (last, bb);
4528 df_insn_rescan (last);
4529 }
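	  /* The loop above stops when NEXT_INSN (last) is NULL, so it
	     never visits the final insn of the list; process that one
	     here.  */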
4530 if (!BARRIER_P (last))
4531 {
4532 set_block_for_insn (last, bb);
4533 df_insn_rescan (last);
4534 }
4535 if (BB_END (bb) == after)
4536 BB_END (bb) = last;
4537 }
4538 else
4539 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4540 continue;
4541
4542 after_after = NEXT_INSN (after);
4543
4544 SET_NEXT_INSN (after) = first;
4545 SET_PREV_INSN (first) = after;
4546 SET_NEXT_INSN (last) = after_after;
4547 if (after_after)
4548 SET_PREV_INSN (after_after) = last;
4549
4550 if (after == get_last_insn ())
4551 set_last_insn (last);
4552
4553 return last;
4554 }
4555
4556 static rtx_insn *
4557 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4558 rtx_insn *(*make_raw)(rtx))
4559 {
4560 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4561 rtx_insn *last = after;
4562
4563 gcc_assert (after);
4564
4565 if (x == NULL_RTX)
4566 return last;
4567
4568 switch (GET_CODE (x))
4569 {
4570 case DEBUG_INSN:
4571 case INSN:
4572 case JUMP_INSN:
4573 case CALL_INSN:
4574 case CODE_LABEL:
4575 case BARRIER:
4576 case NOTE:
4577 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4578 break;
4579
4580 #ifdef ENABLE_RTL_CHECKING
4581 case SEQUENCE:
4582 gcc_unreachable ();
4583 break;
4584 #endif
4585
4586 default:
4587 last = (*make_raw) (x);
4588 add_insn_after (last, after, bb);
4589 break;
4590 }
4591
4592 return last;
4593 }
4594
4595 /* Make X be output after the insn AFTER and set the BB of insn. If
4596 BB is NULL, an attempt is made to infer the BB from AFTER. */
4597
4598 rtx_insn *
4599 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4600 {
4601 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4602 }
4603
4604
4605 /* Make an insn of code JUMP_INSN with body X
4606 and output it after the insn AFTER. */
4607
4608 rtx_jump_insn *
4609 emit_jump_insn_after_noloc (rtx x, rtx after)
4610 {
4611 return as_a <rtx_jump_insn *> (
4612 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4613 }
4614
4615 /* Make an instruction with body X and code CALL_INSN
4616 and output it after the instruction AFTER. */
4617
4618 rtx_insn *
4619 emit_call_insn_after_noloc (rtx x, rtx after)
4620 {
4621 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4622 }
4623
4624 /* Make an instruction with body X and code DEBUG_INSN
4625 and output it after the instruction AFTER. */
4626
4627 rtx_insn *
4628 emit_debug_insn_after_noloc (rtx x, rtx after)
4629 {
4630 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4631 }
4632
4633 /* Make an insn of code BARRIER
4634 and output it after the insn AFTER. */
4635
4636 rtx_barrier *
4637 emit_barrier_after (rtx after)
4638 {
4639 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4640
4641 INSN_UID (insn) = cur_insn_uid++;
4642
4643 add_insn_after (insn, after, NULL);
4644 return insn;
4645 }
4646
4647 /* Emit the label LABEL after the insn AFTER. */
4648
4649 rtx_insn *
4650 emit_label_after (rtx label, rtx_insn *after)
4651 {
4652 gcc_checking_assert (INSN_UID (label) == 0);
4653 INSN_UID (label) = cur_insn_uid++;
4654 add_insn_after (label, after, NULL);
4655 return as_a <rtx_insn *> (label);
4656 }
4657 \f
4658 /* Notes require a bit of special handling: Some notes need to have their
4659 BLOCK_FOR_INSN set, others should never have it set, and some should
4660 have it set or clear depending on the context. */
4661
4662 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4663 that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if
4664 the caller is asked to emit a note before BB_HEAD or after BB_END. */
4665
4666 static bool
4667 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4668 {
4669 switch (subtype)
4670 {
4671 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4672 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4673 return true;
4674
4675 /* Notes for var tracking and EH region markers can appear between or
4676 inside basic blocks. If the caller is emitting on the basic block
4677 boundary, do not set BLOCK_FOR_INSN on the new note. */
4678 case NOTE_INSN_VAR_LOCATION:
4679 case NOTE_INSN_CALL_ARG_LOCATION:
4680 case NOTE_INSN_EH_REGION_BEG:
4681 case NOTE_INSN_EH_REGION_END:
4682 return on_bb_boundary_p;
4683
4684 /* Otherwise, BLOCK_FOR_INSN must be set. */
4685 default:
4686 return false;
4687 }
4688 }
4689
4690 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4691
4692 rtx_note *
4693 emit_note_after (enum insn_note subtype, rtx_insn *after)
4694 {
4695 rtx_note *note = make_note_raw (subtype);
4696 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4697 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4698
4699 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4700 add_insn_after_nobb (note, after);
4701 else
4702 add_insn_after (note, after, bb);
4703 return note;
4704 }
4705
4706 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4707
4708 rtx_note *
4709 emit_note_before (enum insn_note subtype, rtx_insn *before)
4710 {
4711 rtx_note *note = make_note_raw (subtype);
4712 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4713 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4714
4715 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4716 add_insn_before_nobb (note, before);
4717 else
4718 add_insn_before (note, before, bb);
4719 return note;
4720 }
4721 \f
4722 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4723 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4724
4725 static rtx_insn *
4726 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4727 rtx_insn *(*make_raw) (rtx))
4728 {
4729 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4730 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4731
4732 if (pattern == NULL_RTX || !loc)
4733 return last;
4734
4735 after = NEXT_INSN (after);
4736 while (1)
4737 {
4738 if (active_insn_p (after)
4739 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4740 && !INSN_LOCATION (after))
4741 INSN_LOCATION (after) = loc;
4742 if (after == last)
4743 break;
4744 after = NEXT_INSN (after);
4745 }
4746 return last;
4747 }
4748
4749 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4750 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4751 any DEBUG_INSNs. */
4752
4753 static rtx_insn *
4754 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4755 rtx_insn *(*make_raw) (rtx))
4756 {
4757 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4758 rtx_insn *prev = after;
4759
4760 if (skip_debug_insns)
4761 while (DEBUG_INSN_P (prev))
4762 prev = PREV_INSN (prev);
4763
4764 if (INSN_P (prev))
4765 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4766 make_raw);
4767 else
4768 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4769 }
4770
4771 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4772 rtx_insn *
4773 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4774 {
4775 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4776 }
4777
4778 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4779 rtx_insn *
4780 emit_insn_after (rtx pattern, rtx after)
4781 {
4782 return emit_pattern_after (pattern, after, true, make_insn_raw);
4783 }
4784
4785 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4786 rtx_jump_insn *
4787 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4788 {
4789 return as_a <rtx_jump_insn *> (
4790 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4791 }
4792
4793 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4794 rtx_jump_insn *
4795 emit_jump_insn_after (rtx pattern, rtx after)
4796 {
4797 return as_a <rtx_jump_insn *> (
4798 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4799 }
4800
4801 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4802 rtx_insn *
4803 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4804 {
4805 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4806 }
4807
4808 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4809 rtx_insn *
4810 emit_call_insn_after (rtx pattern, rtx after)
4811 {
4812 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4813 }
4814
4815 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4816 rtx_insn *
4817 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4818 {
4819 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4820 }
4821
4822 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4823 rtx_insn *
4824 emit_debug_insn_after (rtx pattern, rtx after)
4825 {
4826 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4827 }
4828
4829 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4830 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4831 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4832 CALL_INSN, etc. */
4833
4834 static rtx_insn *
4835 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4836 rtx_insn *(*make_raw) (rtx))
4837 {
4838 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4839 rtx_insn *first = PREV_INSN (before);
4840 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4841 insnp ? before : NULL_RTX,
4842 NULL, make_raw);
4843
4844 if (pattern == NULL_RTX || !loc)
4845 return last;
4846
4847 if (!first)
4848 first = get_insns ();
4849 else
4850 first = NEXT_INSN (first);
4851 while (1)
4852 {
4853 if (active_insn_p (first)
4854 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4855 && !INSN_LOCATION (first))
4856 INSN_LOCATION (first) = loc;
4857 if (first == last)
4858 break;
4859 first = NEXT_INSN (first);
4860 }
4861 return last;
4862 }
4863
4864 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4865 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4866 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4867 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4868
4869 static rtx_insn *
4870 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4871 bool insnp, rtx_insn *(*make_raw) (rtx))
4872 {
4873 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4874 rtx_insn *next = before;
4875
4876 if (skip_debug_insns)
4877 while (DEBUG_INSN_P (next))
4878 next = PREV_INSN (next);
4879
4880 if (INSN_P (next))
4881 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4882 insnp, make_raw);
4883 else
4884 return emit_pattern_before_noloc (pattern, before,
4885 insnp ? before : NULL_RTX,
4886 NULL, make_raw);
4887 }
4888
4889 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4890 rtx_insn *
4891 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4892 {
4893 return emit_pattern_before_setloc (pattern, before, loc, true,
4894 make_insn_raw);
4895 }
4896
4897 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4898 rtx_insn *
4899 emit_insn_before (rtx pattern, rtx before)
4900 {
4901 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4902 }
4903
4904 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4905 rtx_jump_insn *
4906 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4907 {
4908 return as_a <rtx_jump_insn *> (
4909 emit_pattern_before_setloc (pattern, before, loc, false,
4910 make_jump_insn_raw));
4911 }
4912
4913 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4914 rtx_jump_insn *
4915 emit_jump_insn_before (rtx pattern, rtx before)
4916 {
4917 return as_a <rtx_jump_insn *> (
4918 emit_pattern_before (pattern, before, true, false,
4919 make_jump_insn_raw));
4920 }
4921
4922 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4923 rtx_insn *
4924 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4925 {
4926 return emit_pattern_before_setloc (pattern, before, loc, false,
4927 make_call_insn_raw);
4928 }
4929
4930 /* Like emit_call_insn_before_noloc,
4931 but set INSN_LOCATION according to BEFORE. */
4932 rtx_insn *
4933 emit_call_insn_before (rtx pattern, rtx_insn *before)
4934 {
4935 return emit_pattern_before (pattern, before, true, false,
4936 make_call_insn_raw);
4937 }
4938
4939 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4940 rtx_insn *
4941 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4942 {
4943 return emit_pattern_before_setloc (pattern, before, loc, false,
4944 make_debug_insn_raw);
4945 }
4946
4947 /* Like emit_debug_insn_before_noloc,
4948 but set INSN_LOCATION according to BEFORE. */
4949 rtx_insn *
4950 emit_debug_insn_before (rtx pattern, rtx_insn *before)
4951 {
4952 return emit_pattern_before (pattern, before, false, false,
4953 make_debug_insn_raw);
4954 }
4955 \f
4956 /* Take X and emit it at the end of the doubly-linked
4957 INSN list.
4958
4959 Returns the last insn emitted. */
4960
4961 rtx_insn *
4962 emit_insn (rtx x)
4963 {
4964 rtx_insn *last = get_last_insn ();
4965 rtx_insn *insn;
4966
4967 if (x == NULL_RTX)
4968 return last;
4969
4970 switch (GET_CODE (x))
4971 {
4972 case DEBUG_INSN:
4973 case INSN:
4974 case JUMP_INSN:
4975 case CALL_INSN:
4976 case CODE_LABEL:
4977 case BARRIER:
4978 case NOTE:
4979 insn = as_a <rtx_insn *> (x);
4980 while (insn)
4981 {
4982 rtx_insn *next = NEXT_INSN (insn);
4983 add_insn (insn);
4984 last = insn;
4985 insn = next;
4986 }
4987 break;
4988
4989 #ifdef ENABLE_RTL_CHECKING
4990 case JUMP_TABLE_DATA:
4991 case SEQUENCE:
4992 gcc_unreachable ();
4993 break;
4994 #endif
4995
4996 default:
4997 last = make_insn_raw (x);
4998 add_insn (last);
4999 break;
5000 }
5001
5002 return last;
5003 }
5004
5005 /* Make an insn of code DEBUG_INSN with pattern X
5006 and add it to the end of the doubly-linked list. */
5007
5008 rtx_insn *
5009 emit_debug_insn (rtx x)
5010 {
5011 rtx_insn *last = get_last_insn ();
5012 rtx_insn *insn;
5013
5014 if (x == NULL_RTX)
5015 return last;
5016
5017 switch (GET_CODE (x))
5018 {
5019 case DEBUG_INSN:
5020 case INSN:
5021 case JUMP_INSN:
5022 case CALL_INSN:
5023 case CODE_LABEL:
5024 case BARRIER:
5025 case NOTE:
5026 insn = as_a <rtx_insn *> (x);
5027 while (insn)
5028 {
5029 rtx_insn *next = NEXT_INSN (insn);
5030 add_insn (insn);
5031 last = insn;
5032 insn = next;
5033 }
5034 break;
5035
5036 #ifdef ENABLE_RTL_CHECKING
5037 case JUMP_TABLE_DATA:
5038 case SEQUENCE:
5039 gcc_unreachable ();
5040 break;
5041 #endif
5042
5043 default:
5044 last = make_debug_insn_raw (x);
5045 add_insn (last);
5046 break;
5047 }
5048
5049 return last;
5050 }
5051
5052 /* Make an insn of code JUMP_INSN with pattern X
5053 and add it to the end of the doubly-linked list. */
5054
5055 rtx_insn *
5056 emit_jump_insn (rtx x)
5057 {
5058 rtx_insn *last = NULL;
5059 rtx_insn *insn;
5060
5061 switch (GET_CODE (x))
5062 {
5063 case DEBUG_INSN:
5064 case INSN:
5065 case JUMP_INSN:
5066 case CALL_INSN:
5067 case CODE_LABEL:
5068 case BARRIER:
5069 case NOTE:
5070 insn = as_a <rtx_insn *> (x);
5071 while (insn)
5072 {
5073 rtx_insn *next = NEXT_INSN (insn);
5074 add_insn (insn);
5075 last = insn;
5076 insn = next;
5077 }
5078 break;
5079
5080 #ifdef ENABLE_RTL_CHECKING
5081 case JUMP_TABLE_DATA:
5082 case SEQUENCE:
5083 gcc_unreachable ();
5084 break;
5085 #endif
5086
5087 default:
5088 last = make_jump_insn_raw (x);
5089 add_insn (last);
5090 break;
5091 }
5092
5093 return last;
5094 }
5095
5096 /* Make an insn of code CALL_INSN with pattern X
5097 and add it to the end of the doubly-linked list. */
5098
5099 rtx_insn *
5100 emit_call_insn (rtx x)
5101 {
5102 rtx_insn *insn;
5103
5104 switch (GET_CODE (x))
5105 {
5106 case DEBUG_INSN:
5107 case INSN:
5108 case JUMP_INSN:
5109 case CALL_INSN:
5110 case CODE_LABEL:
5111 case BARRIER:
5112 case NOTE:
5113 insn = emit_insn (x);
5114 break;
5115
5116 #ifdef ENABLE_RTL_CHECKING
5117 case SEQUENCE:
5118 case JUMP_TABLE_DATA:
5119 gcc_unreachable ();
5120 break;
5121 #endif
5122
5123 default:
5124 insn = make_call_insn_raw (x);
5125 add_insn (insn);
5126 break;
5127 }
5128
5129 return insn;
5130 }
5131
5132 /* Add the label LABEL to the end of the doubly-linked list. */
5133
5134 rtx_code_label *
5135 emit_label (rtx uncast_label)
5136 {
5137 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5138
5139 gcc_checking_assert (INSN_UID (label) == 0);
5140 INSN_UID (label) = cur_insn_uid++;
5141 add_insn (label);
5142 return label;
5143 }
5144
5145 /* Make an insn of code JUMP_TABLE_DATA
5146 and add it to the end of the doubly-linked list. */
5147
5148 rtx_jump_table_data *
5149 emit_jump_table_data (rtx table)
5150 {
5151 rtx_jump_table_data *jump_table_data =
5152 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5153 INSN_UID (jump_table_data) = cur_insn_uid++;
5154 PATTERN (jump_table_data) = table;
5155 BLOCK_FOR_INSN (jump_table_data) = NULL;
5156 add_insn (jump_table_data);
5157 return jump_table_data;
5158 }
5159
5160 /* Make an insn of code BARRIER
5161 and add it to the end of the doubly-linked list. */
5162
5163 rtx_barrier *
5164 emit_barrier (void)
5165 {
5166 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5167 INSN_UID (barrier) = cur_insn_uid++;
5168 add_insn (barrier);
5169 return barrier;
5170 }
5171
5172 /* Emit a copy of note ORIG. */
5173
5174 rtx_note *
5175 emit_note_copy (rtx_note *orig)
5176 {
5177 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5178 rtx_note *note = make_note_raw (kind);
5179 NOTE_DATA (note) = NOTE_DATA (orig);
5180 add_insn (note);
5181 return note;
5182 }
5183
5184 /* Make an insn of code NOTE with kind KIND
5185 and add it to the end of the doubly-linked list. */
5186
5187 rtx_note *
5188 emit_note (enum insn_note kind)
5189 {
5190 rtx_note *note = make_note_raw (kind);
5191 add_insn (note);
5192 return note;
5193 }
5194
5195 /* Emit a clobber of lvalue X. */
5196
5197 rtx_insn *
5198 emit_clobber (rtx x)
5199 {
5200 /* CONCATs should not appear in the insn stream. */
5201 if (GET_CODE (x) == CONCAT)
5202 {
5203 emit_clobber (XEXP (x, 0));
5204 return emit_clobber (XEXP (x, 1));
5205 }
5206 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5207 }
5208
5209 /* Return a sequence of insns to clobber lvalue X. */
5210
5211 rtx_insn *
5212 gen_clobber (rtx x)
5213 {
5214 rtx_insn *seq;
5215
5216 start_sequence ();
5217 emit_clobber (x);
5218 seq = get_insns ();
5219 end_sequence ();
5220 return seq;
5221 }
5222
5223 /* Emit a use of rvalue X. */
5224
5225 rtx_insn *
5226 emit_use (rtx x)
5227 {
5228 /* CONCATs should not appear in the insn stream. */
5229 if (GET_CODE (x) == CONCAT)
5230 {
5231 emit_use (XEXP (x, 0));
5232 return emit_use (XEXP (x, 1));
5233 }
5234 return emit_insn (gen_rtx_USE (VOIDmode, x));
5235 }
5236
5237 /* Return a sequence of insns to use rvalue X. */
5238
5239 rtx_insn *
5240 gen_use (rtx x)
5241 {
5242 rtx_insn *seq;
5243
5244 start_sequence ();
5245 emit_use (x);
5246 seq = get_insns ();
5247 end_sequence ();
5248 return seq;
5249 }
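
/* For illustration (REG and WHERE are placeholders): emit_use keeps a
   value live at the current emission point, while gen_use produces a
   detached sequence that can be spliced in later:

	emit_use (reg);

	rtx_insn *seq = gen_use (reg);
	emit_insn_after (seq, where);
 */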
5250
5251 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5252 Return the set in INSN that such notes describe, or NULL if the notes
5253 have no meaning for INSN. */
5254
5255 rtx
5256 set_for_reg_notes (rtx insn)
5257 {
5258 rtx pat, reg;
5259
5260 if (!INSN_P (insn))
5261 return NULL_RTX;
5262
5263 pat = PATTERN (insn);
5264 if (GET_CODE (pat) == PARALLEL)
5265 {
5266 /* We do not use single_set because that ignores SETs of unused
5267 registers. REG_EQUAL and REG_EQUIV notes really do require the
5268 PARALLEL to have a single SET. */
5269 if (multiple_sets (insn))
5270 return NULL_RTX;
5271 pat = XVECEXP (pat, 0, 0);
5272 }
5273
5274 if (GET_CODE (pat) != SET)
5275 return NULL_RTX;
5276
5277 reg = SET_DEST (pat);
5278
5279 /* Notes apply to the contents of a STRICT_LOW_PART. */
5280 if (GET_CODE (reg) == STRICT_LOW_PART
5281 || GET_CODE (reg) == ZERO_EXTRACT)
5282 reg = XEXP (reg, 0);
5283
5284 /* Check that we have a register. */
5285 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5286 return NULL_RTX;
5287
5288 return pat;
5289 }
5290
5291 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5292 note of this type already exists, remove it first. */
5293
5294 rtx
5295 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5296 {
5297 rtx note = find_reg_note (insn, kind, NULL_RTX);
5298
5299 switch (kind)
5300 {
5301 case REG_EQUAL:
5302 case REG_EQUIV:
5303 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5304 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5305 return NULL_RTX;
5306
5307 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5308 It serves no useful purpose and breaks eliminate_regs. */
5309 if (GET_CODE (datum) == ASM_OPERANDS)
5310 return NULL_RTX;
5311
5312 /* Notes with side effects are dangerous. Even if the side-effect
5313 initially mirrors one in PATTERN (INSN), later optimizations
5314 might alter the way that the final register value is calculated
5315 and so move or alter the side-effect in some way. The note would
5316 then no longer be a valid substitution for SET_SRC. */
5317 if (side_effects_p (datum))
5318 return NULL_RTX;
5319 break;
5320
5321 default:
5322 break;
5323 }
5324
5325 if (note)
5326 XEXP (note, 0) = datum;
5327 else
5328 {
5329 add_reg_note (insn, kind, datum);
5330 note = REG_NOTES (insn);
5331 }
5332
5333 switch (kind)
5334 {
5335 case REG_EQUAL:
5336 case REG_EQUIV:
5337 df_notes_rescan (as_a <rtx_insn *> (insn));
5338 break;
5339 default:
5340 break;
5341 }
5342
5343 return note;
5344 }
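
/* For illustration (SRC is a placeholder): record that the destination
   of INSN is known to equal an expression, replacing any existing note
   of the same kind:

	set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
 */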
5345
5346 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5347 rtx
5348 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5349 {
5350 rtx set = set_for_reg_notes (insn);
5351
5352 if (set && SET_DEST (set) == dst)
5353 return set_unique_reg_note (insn, kind, datum);
5354 return NULL_RTX;
5355 }
5356 \f
5357 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5358 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5359 is true.
5360
5361 If X is a label, it is simply added into the insn chain. */
5362
5363 rtx_insn *
5364 emit (rtx x, bool allow_barrier_p)
5365 {
5366 enum rtx_code code = classify_insn (x);
5367
5368 switch (code)
5369 {
5370 case CODE_LABEL:
5371 return emit_label (x);
5372 case INSN:
5373 return emit_insn (x);
5374 case JUMP_INSN:
5375 {
5376 rtx_insn *insn = emit_jump_insn (x);
5377 if (allow_barrier_p
5378 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5379 return emit_barrier ();
5380 return insn;
5381 }
5382 case CALL_INSN:
5383 return emit_call_insn (x);
5384 case DEBUG_INSN:
5385 return emit_debug_insn (x);
5386 default:
5387 gcc_unreachable ();
5388 }
5389 }
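
/* For illustration (LABEL is a placeholder): emit dispatches on
   classify_insn, so an unconditional jump pattern is routed through
   emit_jump_insn and, with ALLOW_BARRIER_P true, followed by a barrier:

	emit (gen_rtx_SET (pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)),
	      true);
 */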
5390 \f
5391 /* Space for free sequence stack entries. */
5392 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5393
5394 /* Begin emitting insns to a sequence. If this sequence will contain
5395 something that might cause the compiler to pop arguments to function
5396 calls (because those pops have previously been deferred; see
5397 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5398 before calling this function. That will ensure that the deferred
5399 pops are not accidentally emitted in the middle of this sequence. */
5400
5401 void
5402 start_sequence (void)
5403 {
5404 struct sequence_stack *tem;
5405
5406 if (free_sequence_stack != NULL)
5407 {
5408 tem = free_sequence_stack;
5409 free_sequence_stack = tem->next;
5410 }
5411 else
5412 tem = ggc_alloc<sequence_stack> ();
5413
5414 tem->next = get_current_sequence ()->next;
5415 tem->first = get_insns ();
5416 tem->last = get_last_insn ();
5417 get_current_sequence ()->next = tem;
5418
5419 set_first_insn (0);
5420 set_last_insn (0);
5421 }
5422
5423 /* Set up the insn chain starting with FIRST as the current sequence,
5424 saving the previously current one. See the documentation for
5425 start_sequence for more information about how to use this function. */
5426
5427 void
5428 push_to_sequence (rtx_insn *first)
5429 {
5430 rtx_insn *last;
5431
5432 start_sequence ();
5433
5434 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5435 ;
5436
5437 set_first_insn (first);
5438 set_last_insn (last);
5439 }
5440
5441 /* Like push_to_sequence, but take the last insn as an argument to avoid
5442 looping through the list. */
5443
5444 void
5445 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5446 {
5447 start_sequence ();
5448
5449 set_first_insn (first);
5450 set_last_insn (last);
5451 }
5452
5453 /* Set up the outer-level insn chain
5454 as the current sequence, saving the previously current one. */
5455
5456 void
5457 push_topmost_sequence (void)
5458 {
5459 struct sequence_stack *top;
5460
5461 start_sequence ();
5462
5463 top = get_topmost_sequence ();
5464 set_first_insn (top->first);
5465 set_last_insn (top->last);
5466 }
5467
5468 /* After emitting to the outer-level insn chain, update the outer-level
5469 insn chain, and restore the previous saved state. */
5470
5471 void
5472 pop_topmost_sequence (void)
5473 {
5474 struct sequence_stack *top;
5475
5476 top = get_topmost_sequence ();
5477 top->first = get_insns ();
5478 top->last = get_last_insn ();
5479
5480 end_sequence ();
5481 }
5482
5483 /* After emitting to a sequence, restore previous saved state.
5484
5485 To get the contents of the sequence just made, you must call
5486 `get_insns' *before* calling here.
5487
5488 If the compiler might have deferred popping arguments while
5489 generating this sequence, and this sequence will not be immediately
5490 inserted into the instruction stream, use do_pending_stack_adjust
5491 before calling get_insns. That will ensure that the deferred
5492 pops are inserted into this sequence, and not into some random
5493 location in the instruction stream. See INHIBIT_DEFER_POP for more
5494 information about deferred popping of arguments. */
5495
5496 void
5497 end_sequence (void)
5498 {
5499 struct sequence_stack *tem = get_current_sequence ()->next;
5500
5501 set_first_insn (tem->first);
5502 set_last_insn (tem->last);
5503 get_current_sequence ()->next = tem->next;
5504
5505 memset (tem, 0, sizeof (*tem));
5506 tem->next = free_sequence_stack;
5507 free_sequence_stack = tem;
5508 }
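
/* For illustration: sequences nest, so an expander may open an inner
   sequence while another is already active (DEST and SRC are
   placeholders):

	start_sequence ();		// outer sequence
	start_sequence ();		// inner sequence
	emit_move_insn (dest, src);
	rtx_insn *inner = get_insns ();
	end_sequence ();		// back to the outer sequence
	emit_insn (inner);		// splice inner insns into it
	rtx_insn *outer = get_insns ();
	end_sequence ();
 */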
5509
5510 /* Return 1 if currently emitting into a sequence. */
5511
5512 int
5513 in_sequence_p (void)
5514 {
5515 return get_current_sequence ()->next != 0;
5516 }
5517 \f
5518 /* Put the various virtual registers into REGNO_REG_RTX. */
5519
5520 static void
5521 init_virtual_regs (void)
5522 {
5523 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5524 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5525 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5526 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5527 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5528 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5529 = virtual_preferred_stack_boundary_rtx;
5530 }
5531
5532 \f
5533 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5534 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5535 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5536 static int copy_insn_n_scratches;
5537
5538 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5539 copied an ASM_OPERANDS.
5540 In that case, it is the original input-operand vector. */
5541 static rtvec orig_asm_operands_vector;
5542
5543 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5544 copied an ASM_OPERANDS.
5545 In that case, it is the copied input-operand vector. */
5546 static rtvec copy_asm_operands_vector;
5547
5548 /* Likewise for the constraints vector. */
5549 static rtvec orig_asm_constraints_vector;
5550 static rtvec copy_asm_constraints_vector;
5551
5552 /* Recursively create a new copy of an rtx for copy_insn.
5553 This function differs from copy_rtx in that it handles SCRATCHes and
5554 ASM_OPERANDs properly.
5555 Normally, this function is not used directly; use copy_insn as front end.
5556 However, you could first copy an insn pattern with copy_insn and then use
5557 this function afterwards to properly copy any REG_NOTEs containing
5558 SCRATCHes. */
5559
5560 rtx
5561 copy_insn_1 (rtx orig)
5562 {
5563 rtx copy;
5564 int i, j;
5565 RTX_CODE code;
5566 const char *format_ptr;
5567
5568 if (orig == NULL)
5569 return NULL;
5570
5571 code = GET_CODE (orig);
5572
5573 switch (code)
5574 {
5575 case REG:
5576 case DEBUG_EXPR:
5577 CASE_CONST_ANY:
5578 case SYMBOL_REF:
5579 case CODE_LABEL:
5580 case PC:
5581 case CC0:
5582 case RETURN:
5583 case SIMPLE_RETURN:
5584 return orig;
5585 case CLOBBER:
5586 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5587 clobbers or clobbers of hard registers that originated as pseudos.
5588 This is needed to allow safe register renaming. */
5589 if (REG_P (XEXP (orig, 0))
5590 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5591 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5592 return orig;
5593 break;
5594
5595 case SCRATCH:
5596 for (i = 0; i < copy_insn_n_scratches; i++)
5597 if (copy_insn_scratch_in[i] == orig)
5598 return copy_insn_scratch_out[i];
5599 break;
5600
5601 case CONST:
5602 if (shared_const_p (orig))
5603 return orig;
5604 break;
5605
5606 /* A MEM with a constant address is not sharable. The problem is that
5607 the constant address may need to be reloaded. If the mem is shared,
5608 then reloading one copy of this mem will cause all copies to appear
5609 to have been reloaded. */
5610
5611 default:
5612 break;
5613 }
5614
5615 /* Copy the various flags, fields, and other information. We assume
5616 that all fields need copying, and then clear the fields that should
5617 not be copied. That is the sensible default behavior, and forces
5618 us to explicitly document why we are *not* copying a flag. */
5619 copy = shallow_copy_rtx (orig);
5620
5621 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5622 if (INSN_P (orig))
5623 {
5624 RTX_FLAG (copy, jump) = 0;
5625 RTX_FLAG (copy, call) = 0;
5626 RTX_FLAG (copy, frame_related) = 0;
5627 }
5628
5629 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5630
5631 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5632 switch (*format_ptr++)
5633 {
5634 case 'e':
5635 if (XEXP (orig, i) != NULL)
5636 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5637 break;
5638
5639 case 'E':
5640 case 'V':
5641 if (XVEC (orig, i) == orig_asm_constraints_vector)
5642 XVEC (copy, i) = copy_asm_constraints_vector;
5643 else if (XVEC (orig, i) == orig_asm_operands_vector)
5644 XVEC (copy, i) = copy_asm_operands_vector;
5645 else if (XVEC (orig, i) != NULL)
5646 {
5647 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5648 for (j = 0; j < XVECLEN (copy, i); j++)
5649 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5650 }
5651 break;
5652
5653 case 't':
5654 case 'w':
5655 case 'i':
5656 case 's':
5657 case 'S':
5658 case 'u':
5659 case '0':
5660 /* These are left unchanged. */
5661 break;
5662
5663 default:
5664 gcc_unreachable ();
5665 }
5666
5667 if (code == SCRATCH)
5668 {
5669 i = copy_insn_n_scratches++;
5670 gcc_assert (i < MAX_RECOG_OPERANDS);
5671 copy_insn_scratch_in[i] = orig;
5672 copy_insn_scratch_out[i] = copy;
5673 }
5674 else if (code == ASM_OPERANDS)
5675 {
5676 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5677 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5678 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5679 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5680 }
5681
5682 return copy;
5683 }
5684
5685 /* Create a new copy of an rtx.
5686 This function differs from copy_rtx in that it handles SCRATCHes and
5687 ASM_OPERANDs properly.
5688 INSN doesn't really have to be a full INSN; it could be just the
5689 pattern. */
5690 rtx
5691 copy_insn (rtx insn)
5692 {
5693 copy_insn_n_scratches = 0;
5694 orig_asm_operands_vector = 0;
5695 orig_asm_constraints_vector = 0;
5696 copy_asm_operands_vector = 0;
5697 copy_asm_constraints_vector = 0;
5698 return copy_insn_1 (insn);
5699 }
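
/* For illustration (INSN is a placeholder): the usual way to duplicate
   an insn is to copy its pattern with copy_insn, which keeps SCRATCHes
   and ASM_OPERANDS shared consistently, and emit the copy:

	rtx_insn *copy = emit_insn (copy_insn (PATTERN (insn)));
 */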
5700
5701 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5702 on the assumption that INSN itself remains in its original place. */
5703
5704 rtx_insn *
5705 copy_delay_slot_insn (rtx_insn *insn)
5706 {
5707 /* Copy INSN with its rtx_code, all its notes, location etc. */
5708 insn = as_a <rtx_insn *> (copy_rtx (insn));
5709 INSN_UID (insn) = cur_insn_uid++;
5710 return insn;
5711 }
5712
5713 /* Initialize data structures and variables in this file
5714 before generating rtl for each function. */
5715
5716 void
5717 init_emit (void)
5718 {
5719 set_first_insn (NULL);
5720 set_last_insn (NULL);
5721 if (MIN_NONDEBUG_INSN_UID)
5722 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5723 else
5724 cur_insn_uid = 1;
5725 cur_debug_insn_uid = 1;
5726 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5727 first_label_num = label_num;
5728 get_current_sequence ()->next = NULL;
5729
5730 /* Init the tables that describe all the pseudo regs. */
5731
5732 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5733
5734 crtl->emit.regno_pointer_align
5735 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5736
5737 regno_reg_rtx
5738 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5739
5740 /* Put copies of all the hard registers into regno_reg_rtx. */
5741 memcpy (regno_reg_rtx,
5742 initial_regno_reg_rtx,
5743 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5744
5745 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5746 init_virtual_regs ();
5747
5748 /* Indicate that the virtual registers and stack locations are
5749 all pointers. */
5750 REG_POINTER (stack_pointer_rtx) = 1;
5751 REG_POINTER (frame_pointer_rtx) = 1;
5752 REG_POINTER (hard_frame_pointer_rtx) = 1;
5753 REG_POINTER (arg_pointer_rtx) = 1;
5754
5755 REG_POINTER (virtual_incoming_args_rtx) = 1;
5756 REG_POINTER (virtual_stack_vars_rtx) = 1;
5757 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5758 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5759 REG_POINTER (virtual_cfa_rtx) = 1;
5760
5761 #ifdef STACK_BOUNDARY
5762 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5763 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5764 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5765 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5766
5767 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5768 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5769 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5770 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5771
5772 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5773 #endif
5774
5775 #ifdef INIT_EXPANDERS
5776 INIT_EXPANDERS;
5777 #endif
5778 }
5779
5780 /* Return true if X is a valid element for a duplicated vector constant
5781 of the given mode. */
5782
5783 bool
5784 valid_for_const_vec_duplicate_p (machine_mode, rtx x)
5785 {
5786 return (CONST_SCALAR_INT_P (x)
5787 || CONST_DOUBLE_AS_FLOAT_P (x)
5788 || CONST_FIXED_P (x));
5789 }
5790
5791 /* Like gen_const_vec_duplicate, but ignore const_tiny_rtx. */
5792
5793 static rtx
5794 gen_const_vec_duplicate_1 (machine_mode mode, rtx el)
5795 {
5796 int nunits = GET_MODE_NUNITS (mode);
5797 rtvec v = rtvec_alloc (nunits);
5798 for (int i = 0; i < nunits; ++i)
5799 RTVEC_ELT (v, i) = el;
5800 return gen_rtx_raw_CONST_VECTOR (mode, v);
5801 }
5802
5803 /* Generate a vector constant of mode MODE in which every element has
5804 value ELT. */
5805
5806 rtx
5807 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5808 {
5809 scalar_mode inner_mode = GET_MODE_INNER (mode);
5810 if (elt == CONST0_RTX (inner_mode))
5811 return CONST0_RTX (mode);
5812 else if (elt == CONST1_RTX (inner_mode))
5813 return CONST1_RTX (mode);
5814 else if (elt == CONSTM1_RTX (inner_mode))
5815 return CONSTM1_RTX (mode);
5816
5817 return gen_const_vec_duplicate_1 (mode, elt);
5818 }
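
/* For illustration (assuming the target provides V4SImode): duplicating
   a tiny constant reuses the cached vector, while any other element
   builds a fresh CONST_VECTOR:

	rtx ones = gen_const_vec_duplicate (V4SImode, const1_rtx);
	   // == CONST1_RTX (V4SImode)
	rtx fives = gen_const_vec_duplicate (V4SImode, GEN_INT (5));
 */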
5819
5820 /* Return a vector rtx of mode MODE in which every element has value X.
5821 The result will be a constant if X is constant. */
5822
5823 rtx
5824 gen_vec_duplicate (machine_mode mode, rtx x)
5825 {
5826 if (valid_for_const_vec_duplicate_p (mode, x))
5827 return gen_const_vec_duplicate (mode, x);
5828 return gen_rtx_VEC_DUPLICATE (mode, x);
5829 }
5830
5831 /* A subroutine of const_vec_series_p that handles the case in which
5832 X is known to be an integer CONST_VECTOR. */
5833
5834 bool
5835 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5836 {
5837 unsigned int nelts = CONST_VECTOR_NUNITS (x);
5838 if (nelts < 2)
5839 return false;
5840
5841 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
5842 rtx base = CONST_VECTOR_ELT (x, 0);
5843 rtx step = simplify_binary_operation (MINUS, inner,
5844 CONST_VECTOR_ELT (x, 1), base);
5845 if (rtx_equal_p (step, CONST0_RTX (inner)))
5846 return false;
5847
5848 for (unsigned int i = 2; i < nelts; ++i)
5849 {
5850 rtx diff = simplify_binary_operation (MINUS, inner,
5851 CONST_VECTOR_ELT (x, i),
5852 CONST_VECTOR_ELT (x, i - 1));
5853 if (!rtx_equal_p (step, diff))
5854 return false;
5855 }
5856
5857 *base_out = base;
5858 *step_out = step;
5859 return true;
5860 }
5861
5862 /* Generate a vector constant of mode MODE in which element I has
5863 the value BASE + I * STEP. */
5864
5865 rtx
5866 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
5867 {
5868 gcc_assert (CONSTANT_P (base) && CONSTANT_P (step));
5869
5870 int nunits = GET_MODE_NUNITS (mode);
5871 rtvec v = rtvec_alloc (nunits);
5872 scalar_mode inner_mode = GET_MODE_INNER (mode);
5873 RTVEC_ELT (v, 0) = base;
5874 for (int i = 1; i < nunits; ++i)
5875 RTVEC_ELT (v, i) = simplify_gen_binary (PLUS, inner_mode,
5876 RTVEC_ELT (v, i - 1), step);
5877 return gen_rtx_raw_CONST_VECTOR (mode, v);
5878 }
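/* Sketch for an assumed V4SImode call: element 0 is BASE and each
   later element is simplify_gen_binary (PLUS, ...) of its predecessor
   and STEP, so

     rtx iota = gen_const_vec_series (V4SImode, const0_rtx, const1_rtx);

   builds the constant {0, 1, 2, 3}.  */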
5879
5880 /* Generate a vector of mode MODE in which element I has the value
5881 BASE + I * STEP. The result will be a constant if BASE and STEP
5882 are both constants. */
5883
5884 rtx
5885 gen_vec_series (machine_mode mode, rtx base, rtx step)
5886 {
5887 if (step == const0_rtx)
5888 return gen_vec_duplicate (mode, base);
5889 if (CONSTANT_P (base) && CONSTANT_P (step))
5890 return gen_const_vec_series (mode, base, step);
5891 return gen_rtx_VEC_SERIES (mode, base, step);
5892 }
5893
5894 /* Generate a new vector constant for mode MODE and constant value
5895 CONSTANT. */
5896
5897 static rtx
5898 gen_const_vector (machine_mode mode, int constant)
5899 {
5900 machine_mode inner = GET_MODE_INNER (mode);
5901
5902 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5903
5904 rtx el = const_tiny_rtx[constant][(int) inner];
5905 gcc_assert (el);
5906
5907 return gen_const_vec_duplicate_1 (mode, el);
5908 }
5909
5910 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but reuse a shared
5911 constant vector (all zeros, all ones or all minus ones) when the elements allow it. */
5912 rtx
5913 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5914 {
5915 gcc_assert (GET_MODE_NUNITS (mode) == GET_NUM_ELEM (v));
5916
5917 /* If the values are all the same, check to see if we can use one of the
5918 standard constant vectors. */
5919 if (rtvec_all_equal_p (v))
5920 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
5921
5922 return gen_rtx_raw_CONST_VECTOR (mode, v);
5923 }
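/* For example, an rtvec holding (const_int 0, const_int 0) in an
   assumed V2SImode does not allocate anything new: rtvec_all_equal_p
   spots the repetition and gen_const_vec_duplicate hands back the
   shared CONST0_RTX (V2SImode).  Mixed elements such as
   (const_int 0, const_int 1) take the raw path.  */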
5924
5925 /* Initialize global register information required by all functions. */
5926
5927 void
5928 init_emit_regs (void)
5929 {
5930 int i;
5931 machine_mode mode;
5932 mem_attrs *attrs;
5933
5934 /* Reset the register attribute hash table. */
5935 reg_attrs_htab->empty ();
5936
5937 /* We need reg_raw_mode, so initialize the modes now. */
5938 init_reg_modes_target ();
5939
5940 /* Assign register numbers to the globally defined register rtx. */
5941 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5942 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5943 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5944 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5945 virtual_incoming_args_rtx =
5946 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5947 virtual_stack_vars_rtx =
5948 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5949 virtual_stack_dynamic_rtx =
5950 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5951 virtual_outgoing_args_rtx =
5952 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5953 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5954 virtual_preferred_stack_boundary_rtx =
5955 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5956
5957 /* Initialize RTL for commonly used hard registers. These are
5958 copied into regno_reg_rtx as we begin to compile each function. */
5959 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5960 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5961
5962 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5963 return_address_pointer_rtx
5964 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5965 #endif
5966
5967 pic_offset_table_rtx = NULL_RTX;
5968 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5969 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5970
5971 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5972 {
5973 mode = (machine_mode) i;
5974 attrs = ggc_cleared_alloc<mem_attrs> ();
5975 attrs->align = BITS_PER_UNIT;
5976 attrs->addrspace = ADDR_SPACE_GENERIC;
5977 if (mode != BLKmode)
5978 {
5979 attrs->size_known_p = true;
5980 attrs->size = GET_MODE_SIZE (mode);
5981 if (STRICT_ALIGNMENT)
5982 attrs->align = GET_MODE_ALIGNMENT (mode);
5983 }
5984 mode_mem_attrs[i] = attrs;
5985 }
5986
5987 split_branch_probability = profile_probability::uninitialized ();
5988 }
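/* After this runs, mode_mem_attrs supplies the default attributes for
   a MEM of each mode: e.g. mode_mem_attrs[(int) SImode]->size is the
   4-byte size of SImode, while BLKmode entries keep size_known_p
   false.  */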
5989
5990 /* Initialize global machine_mode variables. */
5991
5992 void
5993 init_derived_machine_modes (void)
5994 {
5995 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
5996 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
5997 {
5998 scalar_int_mode mode = mode_iter.require ();
5999
6000 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6001 && !opt_byte_mode.exists ())
6002 opt_byte_mode = mode;
6003
6004 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6005 && !opt_word_mode.exists ())
6006 opt_word_mode = mode;
6007 }
6008
6009 byte_mode = opt_byte_mode.require ();
6010 word_mode = opt_word_mode.require ();
6011 ptr_mode = as_a <scalar_int_mode>
6012 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6013 }
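/* On a hypothetical LP64 target with 8-bit units and 64-bit words and
   pointers, this would resolve byte_mode to QImode and both word_mode
   and ptr_mode to DImode; the actual results are entirely
   target-dependent.  */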
6014
6015 /* Create some permanent unique rtl objects shared between all functions. */
6016
6017 void
6018 init_emit_once (void)
6019 {
6020 int i;
6021 machine_mode mode;
6022 scalar_float_mode double_mode;
6023 opt_scalar_mode smode_iter;
6024
6025 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6026 CONST_FIXED, and register attribute hash tables. */
6027 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6028
6029 #if TARGET_SUPPORTS_WIDE_INT
6030 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6031 #endif
6032 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6033
6034 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6035
6036 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6037
6038 #ifdef INIT_EXPANDERS
6039 /* This is to initialize {init|mark|free}_machine_status before the first
6040 call to push_function_context_to. This is needed by the Chill front
6041 end, which calls push_function_context_to before the first call to
6042 init_function_start. */
6043 INIT_EXPANDERS;
6044 #endif
6045
6046 /* Create the unique rtx's for certain rtx codes and operand values. */
6047
6048 /* Process stack-limiting command-line options. */
6049 if (opt_fstack_limit_symbol_arg != NULL)
6050 stack_limit_rtx
6051 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6052 if (opt_fstack_limit_register_no >= 0)
6053 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6054
6055 /* Don't use gen_rtx_CONST_INT here: it would try to look the values up
6056 in const_int_rtx, the very table this loop is filling in. */
6057 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6058 const_int_rtx[i + MAX_SAVED_CONST_INT] =
6059 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
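/* From here on, GEN_INT on a value inside the saved range returns the
   cached entry; const0_rtx is defined to be one of those entries, so
   for example GEN_INT (0) == const0_rtx holds as a pointer
   comparison.  */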
6060
6061 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6062 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6063 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6064 else
6065 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6066
6067 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6068
6069 real_from_integer (&dconst0, double_mode, 0, SIGNED);
6070 real_from_integer (&dconst1, double_mode, 1, SIGNED);
6071 real_from_integer (&dconst2, double_mode, 2, SIGNED);
6072
6073 dconstm1 = dconst1;
6074 dconstm1.sign = 1;
6075
6076 dconsthalf = dconst1;
6077 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6078
6079 for (i = 0; i < 3; i++)
6080 {
6081 const REAL_VALUE_TYPE *const r =
6082 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6083
6084 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6085 const_tiny_rtx[i][(int) mode] =
6086 const_double_from_real_value (*r, mode);
6087
6088 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6089 const_tiny_rtx[i][(int) mode] =
6090 const_double_from_real_value (*r, mode);
6091
6092 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6093
6094 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6095 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6096
6097 for (mode = MIN_MODE_PARTIAL_INT;
6098 mode <= MAX_MODE_PARTIAL_INT;
6099 mode = (machine_mode)((int)(mode) + 1))
6100 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6101 }
6102
6103 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6104
6105 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6106 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6107
6108 for (mode = MIN_MODE_PARTIAL_INT;
6109 mode <= MAX_MODE_PARTIAL_INT;
6110 mode = (machine_mode)((int)(mode) + 1))
6111 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6112
6113 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6114 {
6115 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6116 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6117 }
6118
6119 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6120 {
6121 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6122 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6123 }
6124
6125 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6126 {
6127 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6128 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6129 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6130 }
6131
6132 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6133 {
6134 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6135 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6136 }
6137
6138 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6139 {
6140 scalar_mode smode = smode_iter.require ();
6141 FCONST0 (smode).data.high = 0;
6142 FCONST0 (smode).data.low = 0;
6143 FCONST0 (smode).mode = smode;
6144 const_tiny_rtx[0][(int) smode]
6145 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6146 }
6147
6148 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6149 {
6150 scalar_mode smode = smode_iter.require ();
6151 FCONST0 (smode).data.high = 0;
6152 FCONST0 (smode).data.low = 0;
6153 FCONST0 (smode).mode = smode;
6154 const_tiny_rtx[0][(int) smode]
6155 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6156 }
6157
6158 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6159 {
6160 scalar_mode smode = smode_iter.require ();
6161 FCONST0 (smode).data.high = 0;
6162 FCONST0 (smode).data.low = 0;
6163 FCONST0 (smode).mode = smode;
6164 const_tiny_rtx[0][(int) smode]
6165 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6166
6167 /* Store the value 1.0: the integer 1 shifted up past the fractional bits. */
6168 FCONST1 (smode).data.high = 0;
6169 FCONST1 (smode).data.low = 0;
6170 FCONST1 (smode).mode = smode;
6171 FCONST1 (smode).data
6172 = double_int_one.lshift (GET_MODE_FBIT (smode),
6173 HOST_BITS_PER_DOUBLE_INT,
6174 SIGNED_FIXED_POINT_MODE_P (smode));
6175 const_tiny_rtx[1][(int) smode]
6176 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6177 }
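/* For example, in a hypothetical signed accumulator format with 15
   fractional bits, the lshift above encodes 1.0 as the bit pattern
   1 << 15 = 32768, i.e. the integer 1 placed just above the
   fractional field.  */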
6178
6179 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6180 {
6181 scalar_mode smode = smode_iter.require ();
6182 FCONST0 (smode).data.high = 0;
6183 FCONST0 (smode).data.low = 0;
6184 FCONST0 (smode).mode = smode;
6185 const_tiny_rtx[0][(int) smode]
6186 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6187
6188 /* Store the value 1.0: the integer 1 shifted up past the fractional bits. */
6189 FCONST1 (smode).data.high = 0;
6190 FCONST1 (smode).data.low = 0;
6191 FCONST1 (smode).mode = smode;
6192 FCONST1 (smode).data
6193 = double_int_one.lshift (GET_MODE_FBIT (smode),
6194 HOST_BITS_PER_DOUBLE_INT,
6195 SIGNED_FIXED_POINT_MODE_P (smode));
6196 const_tiny_rtx[1][(int) smode]
6197 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6198 }
6199
6200 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6201 {
6202 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6203 }
6204
6205 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6206 {
6207 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6208 }
6209
6210 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6211 {
6212 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6213 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6214 }
6215
6216 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6217 {
6218 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6219 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6220 }
6221
6222 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6223 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6224 const_tiny_rtx[0][i] = const0_rtx;
6225
6226 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6227 if (STORE_FLAG_VALUE == 1)
6228 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6229
6230 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
6231 {
6232 scalar_mode smode = smode_iter.require ();
6233 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
6234 const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
6235 }
6236
6237 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6238 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6239 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6240 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6241 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6242 /*prev_insn=*/NULL,
6243 /*next_insn=*/NULL,
6244 /*bb=*/NULL,
6245 /*pattern=*/NULL_RTX,
6246 /*location=*/-1,
6247 CODE_FOR_nothing,
6248 /*reg_notes=*/NULL_RTX);
6249 }
6250 \f
6251 /* Produce an exact duplicate of insn INSN after AFTER.
6252 Take care to update libcall regions if present. */
6253
6254 rtx_insn *
6255 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6256 {
6257 rtx_insn *new_rtx;
6258 rtx link;
6259
6260 switch (GET_CODE (insn))
6261 {
6262 case INSN:
6263 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6264 break;
6265
6266 case JUMP_INSN:
6267 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6268 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6269 break;
6270
6271 case DEBUG_INSN:
6272 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6273 break;
6274
6275 case CALL_INSN:
6276 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6277 if (CALL_INSN_FUNCTION_USAGE (insn))
6278 CALL_INSN_FUNCTION_USAGE (new_rtx)
6279 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6280 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6281 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6282 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6283 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6284 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6285 break;
6286
6287 default:
6288 gcc_unreachable ();
6289 }
6290
6291 /* Update LABEL_NUSES. */
6292 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6293
6294 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6295
6296 /* If the old insn is frame related, then so is the new one. This is
6297 primarily needed for IA-64 unwind info, which marks epilogue insns
6298 that may be duplicated by the basic block reordering code. */
6299 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6300
6301 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6302 rtx *ptail = &REG_NOTES (new_rtx);
6303 while (*ptail != NULL_RTX)
6304 ptail = &XEXP (*ptail, 1);
6305
6306 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6307 will make them. REG_LABEL_TARGETs are created there too, but are
6308 supposed to be sticky, so we copy them. */
6309 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6310 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6311 {
6312 *ptail = duplicate_reg_note (link);
6313 ptail = &XEXP (*ptail, 1);
6314 }
6315
6316 INSN_CODE (new_rtx) = INSN_CODE (insn);
6317 return new_rtx;
6318 }
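/* Sketch of a caller (illustrative): after

     rtx_insn *copy = emit_copy_of_insn_after (insn, after);

   COPY carries the same location, frame-relatedness and REG_NOTES
   (minus REG_LABEL_OPERAND) as INSN, emitted right after AFTER.  */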
6319
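/* Cache of (clobber (reg)) expressions, indexed by mode and hard
   register number.  Marked deletable because the garbage collector
   may drop the cache; entries are simply regenerated on demand.  */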
6320 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
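/* Return a CLOBBER of hard register REGNO in MODE, reusing a cached
   rtx when one already exists.  */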
6321 rtx
6322 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6323 {
6324 if (hard_reg_clobbers[mode][regno])
6325 return hard_reg_clobbers[mode][regno];
6326 else
6327 return (hard_reg_clobbers[mode][regno] =
6328 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6329 }
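/* Sketch: repeated calls share a single rtx; word_mode and register
   number 0 are arbitrary choices for illustration:

     rtx c1 = gen_hard_reg_clobber (word_mode, 0);
     rtx c2 = gen_hard_reg_clobber (word_mode, 0);
     gcc_assert (c1 == c2);  */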
6330
6331 location_t prologue_location;
6332 location_t epilogue_location;
6333
6334 /* Hold the current insn location, so that the location data structures
6335 are built lazily, only when insns at a given location are actually
6336 emitted. */
6337 static location_t curr_location;
6338
6339 /* Initialize the insn location data structures. */
6340 void
6341 insn_locations_init (void)
6342 {
6343 prologue_location = epilogue_location = 0;
6344 curr_location = UNKNOWN_LOCATION;
6345 }
6346
6347 /* At the end of the emit stage, record the epilogue location and clear the current location. */
6348 void
6349 insn_locations_finalize (void)
6350 {
6351 epilogue_location = curr_location;
6352 curr_location = UNKNOWN_LOCATION;
6353 }
6354
6355 /* Set current location. */
6356 void
6357 set_curr_insn_location (location_t location)
6358 {
6359 curr_location = location;
6360 }
6361
6362 /* Get current location. */
6363 location_t
6364 curr_insn_location (void)
6365 {
6366 return curr_location;
6367 }
6368
6369 /* Return the lexical scope block that INSN belongs to. */
6370 tree
6371 insn_scope (const rtx_insn *insn)
6372 {
6373 return LOCATION_BLOCK (INSN_LOCATION (insn));
6374 }
6375
6376 /* Return line number of the statement that produced this insn. */
6377 int
6378 insn_line (const rtx_insn *insn)
6379 {
6380 return LOCATION_LINE (INSN_LOCATION (insn));
6381 }
6382
6383 /* Return source file of the statement that produced this insn. */
6384 const char *
6385 insn_file (const rtx_insn *insn)
6386 {
6387 return LOCATION_FILE (INSN_LOCATION (insn));
6388 }
6389
6390 /* Return expanded location of the statement that produced this insn. */
6391 expanded_location
6392 insn_location (const rtx_insn *insn)
6393 {
6394 return expand_location (INSN_LOCATION (insn));
6395 }
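/* Sketch (illustrative): for any insn with a valid location,

     expanded_location xloc = insn_location (insn);

   yields xloc.file and xloc.line agreeing with insn_file and
   insn_line on the same insn.  */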
6396
6397 /* Return true if memory model MODEL requires a pre-operation (release-style)
6398 barrier or a post-operation (acquire-style) barrier. Although not
6399 universal, this function matches the behavior of several targets. */
6400
6401 bool
6402 need_atomic_barrier_p (enum memmodel model, bool pre)
6403 {
6404 switch (model & MEMMODEL_BASE_MASK)
6405 {
6406 case MEMMODEL_RELAXED:
6407 case MEMMODEL_CONSUME:
6408 return false;
6409 case MEMMODEL_RELEASE:
6410 return pre;
6411 case MEMMODEL_ACQUIRE:
6412 return !pre;
6413 case MEMMODEL_ACQ_REL:
6414 case MEMMODEL_SEQ_CST:
6415 return true;
6416 default:
6417 gcc_unreachable ();
6418 }
6419 }
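/* Sketch of a typical expander for an atomic store with memory model
   MODEL (names illustrative): emit a barrier before the operation
   when need_atomic_barrier_p (model, true) holds, and one after it
   when need_atomic_barrier_p (model, false) holds.  MEMMODEL_RELEASE
   makes only the first test true, MEMMODEL_ACQUIRE only the second,
   and MEMMODEL_SEQ_CST both.  */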
6420
6421 /* Initialize fields of rtl_data related to stack alignment. */
6422
6423 void
6424 rtl_data::init_stack_alignment ()
6425 {
6426 stack_alignment_needed = STACK_BOUNDARY;
6427 max_used_stack_slot_alignment = STACK_BOUNDARY;
6428 stack_alignment_estimated = 0;
6429 preferred_stack_boundary = STACK_BOUNDARY;
6430 }
6431
6432 \f
6433 #include "gt-emit-rtl.h"