gcc/emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
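/* As a minimal sketch (hypothetical operands OP0/OP1, not code copied from
   insn-emit.c): a generator for a pattern whose body is
   (plus:SI (match_operand:SI 1 "") (match_operand:SI 2 "")) would build it as

       rtx sum = gen_rtx_fmt_ee (PLUS, SImode, op0, op1);

   so the machine-dependent part is only which codes, modes and operands
   get plugged in.  */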
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "builtins.h"
58 #include "rtl-iter.h"
59 #include "stor-layout.h"
60 #include "opts.h"
61 #include "predict.h"
62 #include "rtx-vector-builder.h"
63 #include "gimple.h"
64 #include "gimple-ssa.h"
65 #include "gimplify.h"
66
67 struct target_rtl default_target_rtl;
68 #if SWITCHABLE_TARGET
69 struct target_rtl *this_target_rtl = &default_target_rtl;
70 #endif
71
72 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
73
74 /* Commonly used modes. */
75
76 scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
77 scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
78 scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
79
80 /* Datastructures maintained for currently processed function in RTL form. */
81
82 struct rtl_data x_rtl;
83
84 /* Indexed by pseudo register number, gives the rtx for that pseudo.
85 Allocated in parallel with regno_pointer_align.
86 FIXME: We could put it into the emit_status struct, but gengtype is not
87 able to deal with a length attribute nested in top-level structures. */
88
89 rtx * regno_reg_rtx;
90
91 /* This is *not* reset after each function. It gives each CODE_LABEL
92 in the entire compilation a unique label number. */
93
94 static GTY(()) int label_num = 1;
95
96 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
97 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
98 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
99 is set only for MODE_INT and MODE_VECTOR_INT modes. */
100
101 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
102
103 rtx const_true_rtx;
104
105 REAL_VALUE_TYPE dconst0;
106 REAL_VALUE_TYPE dconst1;
107 REAL_VALUE_TYPE dconst2;
108 REAL_VALUE_TYPE dconstm1;
109 REAL_VALUE_TYPE dconsthalf;
110
111 /* Record fixed-point constant 0 and 1. */
112 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
113 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
114
115 /* We make one copy of (const_int C) where C is in
116 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
117 to save space during the compilation and simplify comparisons of
118 integers. */
119
120 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
121
122 /* Standard pieces of rtx, to be substituted directly into things. */
123 rtx pc_rtx;
124 rtx ret_rtx;
125 rtx simple_return_rtx;
126 rtx cc0_rtx;
127
128 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
129 this pointer should normally never be dereferenced), but is required to be
130 distinct from NULL_RTX. Currently used by peephole2 pass. */
131 rtx_insn *invalid_insn_rtx;
132
133 /* A hash table storing CONST_INTs whose absolute value is greater
134 than MAX_SAVED_CONST_INT. */
135
136 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
137 {
138 typedef HOST_WIDE_INT compare_type;
139
140 static hashval_t hash (rtx i);
141 static bool equal (rtx i, HOST_WIDE_INT h);
142 };
143
144 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
145
146 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
147 {
148 static hashval_t hash (rtx x);
149 static bool equal (rtx x, rtx y);
150 };
151
152 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
153
154 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
155 {
156 typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
157
158 static hashval_t hash (rtx x);
159 static bool equal (rtx x, const compare_type &y);
160 };
161
162 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
163
164 /* A hash table storing register attribute structures. */
165 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
166 {
167 static hashval_t hash (reg_attrs *x);
168 static bool equal (reg_attrs *a, reg_attrs *b);
169 };
170
171 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
172
173 /* A hash table storing all CONST_DOUBLEs. */
174 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
175 {
176 static hashval_t hash (rtx x);
177 static bool equal (rtx x, rtx y);
178 };
179
180 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
181
182 /* A hash table storing all CONST_FIXEDs. */
183 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
184 {
185 static hashval_t hash (rtx x);
186 static bool equal (rtx x, rtx y);
187 };
188
189 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
190
191 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
192 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
193 #define first_label_num (crtl->emit.x_first_label_num)
194
195 static void set_used_decls (tree);
196 static void mark_label_nuses (rtx);
197 #if TARGET_SUPPORTS_WIDE_INT
198 static rtx lookup_const_wide_int (rtx);
199 #endif
200 static rtx lookup_const_double (rtx);
201 static rtx lookup_const_fixed (rtx);
202 static rtx gen_const_vector (machine_mode, int);
203 static void copy_rtx_if_shared_1 (rtx *orig);
204
205 /* Probability of the conditional branch currently being processed by try_split. */
206 profile_probability split_branch_probability;
207 \f
208 /* Returns a hash code for X (which is really a CONST_INT). */
209
210 hashval_t
211 const_int_hasher::hash (rtx x)
212 {
213 return (hashval_t) INTVAL (x);
214 }
215
216 /* Returns nonzero if the value represented by X (which is really a
217 CONST_INT) is the same as that given by Y (which is really a
218 HOST_WIDE_INT). */
219
220 bool
221 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
222 {
223 return (INTVAL (x) == y);
224 }
225
226 #if TARGET_SUPPORTS_WIDE_INT
227 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
228
229 hashval_t
230 const_wide_int_hasher::hash (rtx x)
231 {
232 int i;
233 unsigned HOST_WIDE_INT hash = 0;
234 const_rtx xr = x;
235
236 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
237 hash += CONST_WIDE_INT_ELT (xr, i);
238
239 return (hashval_t) hash;
240 }
241
242 /* Returns nonzero if the value represented by X (which is really a
243 CONST_WIDE_INT) is the same as that given by Y (which is really a
244 CONST_WIDE_INT). */
245
246 bool
247 const_wide_int_hasher::equal (rtx x, rtx y)
248 {
249 int i;
250 const_rtx xr = x;
251 const_rtx yr = y;
252 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
253 return false;
254
255 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
256 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
257 return false;
258
259 return true;
260 }
261 #endif
262
263 /* Returns a hash code for CONST_POLY_INT X. */
264
265 hashval_t
266 const_poly_int_hasher::hash (rtx x)
267 {
268 inchash::hash h;
269 h.add_int (GET_MODE (x));
270 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
271 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
272 return h.end ();
273 }
274
275 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */
276
277 bool
278 const_poly_int_hasher::equal (rtx x, const compare_type &y)
279 {
280 if (GET_MODE (x) != y.first)
281 return false;
282 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
283 if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
284 return false;
285 return true;
286 }
287
288 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
289 hashval_t
290 const_double_hasher::hash (rtx x)
291 {
292 const_rtx const value = x;
293 hashval_t h;
294
295 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
296 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
297 else
298 {
299 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
300 /* MODE is used in the comparison, so it should be in the hash. */
301 h ^= GET_MODE (value);
302 }
303 return h;
304 }
305
306 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
307 is the same as that represented by Y (really a CONST_DOUBLE). */
308 bool
309 const_double_hasher::equal (rtx x, rtx y)
310 {
311 const_rtx const a = x, b = y;
312
313 if (GET_MODE (a) != GET_MODE (b))
314 return 0;
315 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
316 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
317 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
318 else
319 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
320 CONST_DOUBLE_REAL_VALUE (b));
321 }
322
323 /* Returns a hash code for X (which is really a CONST_FIXED). */
324
325 hashval_t
326 const_fixed_hasher::hash (rtx x)
327 {
328 const_rtx const value = x;
329 hashval_t h;
330
331 h = fixed_hash (CONST_FIXED_VALUE (value));
332 /* MODE is used in the comparison, so it should be in the hash. */
333 h ^= GET_MODE (value);
334 return h;
335 }
336
337 /* Returns nonzero if the value represented by X is the same as that
338 represented by Y. */
339
340 bool
341 const_fixed_hasher::equal (rtx x, rtx y)
342 {
343 const_rtx const a = x, b = y;
344
345 if (GET_MODE (a) != GET_MODE (b))
346 return 0;
347 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
348 }
349
350 /* Return true if the given memory attributes are equal. */
351
352 bool
353 mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
354 {
355 if (p == q)
356 return true;
357 if (!p || !q)
358 return false;
359 return (p->alias == q->alias
360 && p->offset_known_p == q->offset_known_p
361 && (!p->offset_known_p || known_eq (p->offset, q->offset))
362 && p->size_known_p == q->size_known_p
363 && (!p->size_known_p || known_eq (p->size, q->size))
364 && p->align == q->align
365 && p->addrspace == q->addrspace
366 && (p->expr == q->expr
367 || (p->expr != NULL_TREE && q->expr != NULL_TREE
368 && operand_equal_p (p->expr, q->expr, 0))));
369 }
370
371 /* Set MEM's memory attributes so that they are the same as ATTRS. */
372
373 static void
374 set_mem_attrs (rtx mem, mem_attrs *attrs)
375 {
376 /* If everything is the default, we can just clear the attributes. */
377 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
378 {
379 MEM_ATTRS (mem) = 0;
380 return;
381 }
382
383 if (!MEM_ATTRS (mem)
384 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
385 {
386 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
387 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
388 }
389 }
390
391 /* Returns a hash code for X (which is really a reg_attrs *). */
392
393 hashval_t
394 reg_attr_hasher::hash (reg_attrs *x)
395 {
396 const reg_attrs *const p = x;
397
398 inchash::hash h;
399 h.add_ptr (p->decl);
400 h.add_poly_hwi (p->offset);
401 return h.end ();
402 }
403
404 /* Returns nonzero if the value represented by X is the same as that given by
405 Y. */
406
407 bool
408 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
409 {
410 const reg_attrs *const p = x;
411 const reg_attrs *const q = y;
412
413 return (p->decl == q->decl && known_eq (p->offset, q->offset));
414 }
415 /* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
416 into the hash table if one identical to it is not already in the
417 table. */
418
419 static reg_attrs *
420 get_reg_attrs (tree decl, poly_int64 offset)
421 {
422 reg_attrs attrs;
423
424 /* If everything is the default, we can just return zero. */
425 if (decl == 0 && known_eq (offset, 0))
426 return 0;
427
428 attrs.decl = decl;
429 attrs.offset = offset;
430
431 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
432 if (*slot == 0)
433 {
434 *slot = ggc_alloc<reg_attrs> ();
435 memcpy (*slot, &attrs, sizeof (reg_attrs));
436 }
437
438 return *slot;
439 }
440
441
442 #if !HAVE_blockage
443 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
444 and to keep register equivalences from being used across this insn. */
445
446 rtx
447 gen_blockage (void)
448 {
449 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
450 MEM_VOLATILE_P (x) = true;
451 return x;
452 }
453 #endif
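/* Sketch of a typical use: a caller that must keep the scheduler and
   register-equivalence machinery from moving things across a point, such
   as a stack adjustment or epilogue boundary, can simply do

       emit_insn (gen_blockage ());

   whether the insn comes from the fallback above or from a target's own
   "blockage" pattern when HAVE_blockage is defined.  */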
454
455
456 /* Set the mode and register number of X to MODE and REGNO. */
457
458 void
459 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
460 {
461 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
462 ? hard_regno_nregs (regno, mode)
463 : 1);
464 PUT_MODE_RAW (x, mode);
465 set_regno_raw (x, regno, nregs);
466 }
467
468 /* Initialize a fresh REG rtx with mode MODE and register REGNO. */
469
470 rtx
471 init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
472 {
473 set_mode_and_regno (x, mode, regno);
474 REG_ATTRS (x) = NULL;
475 ORIGINAL_REGNO (x) = regno;
476 return x;
477 }
478
479 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
480 don't attempt to share with the various global pieces of rtl (such as
481 frame_pointer_rtx). */
482
483 rtx
484 gen_raw_REG (machine_mode mode, unsigned int regno)
485 {
486 rtx x = rtx_alloc (REG MEM_STAT_INFO);
487 init_raw_REG (x, mode, regno);
488 return x;
489 }
490
491 /* There are some RTL codes that require special attention; the generation
492 functions do the raw handling. If you add to this list, modify
493 special_rtx in gengenrtl.c as well. */
494
495 rtx_expr_list *
496 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
497 {
498 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
499 expr_list));
500 }
501
502 rtx_insn_list *
503 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
504 {
505 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
506 insn_list));
507 }
508
509 rtx_insn *
510 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
511 basic_block bb, rtx pattern, int location, int code,
512 rtx reg_notes)
513 {
514 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
515 prev_insn, next_insn,
516 bb, pattern, location, code,
517 reg_notes));
518 }
519
520 rtx
521 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
522 {
523 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
524 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
525
526 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
527 if (const_true_rtx && arg == STORE_FLAG_VALUE)
528 return const_true_rtx;
529 #endif
530
531 /* Look up the CONST_INT in the hash table. */
532 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
533 INSERT);
534 if (*slot == 0)
535 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
536
537 return *slot;
538 }
539
540 rtx
541 gen_int_mode (poly_int64 c, machine_mode mode)
542 {
543 c = trunc_int_for_mode (c, mode);
544 if (c.is_constant ())
545 return GEN_INT (c.coeffs[0]);
546 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
547 return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
548 }
549
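/* A small illustration of the sharing above: both of

       rtx a = gen_int_mode (42, SImode);
       rtx b = GEN_INT (42);

   return the cached (const_int 42) from const_int_rtx, so A == B by
   pointer; gen_int_mode additionally truncates the value to the mode
   first and so is the safer entry point when the mode is known.  */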
550 /* CONST_DOUBLEs might be created from pairs of integers, or from
551 REAL_VALUE_TYPEs. Also, their length is known only at run time,
552 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
553
554 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
555 hash table. If so, return its counterpart; otherwise add it
556 to the hash table and return it. */
557 static rtx
558 lookup_const_double (rtx real)
559 {
560 rtx *slot = const_double_htab->find_slot (real, INSERT);
561 if (*slot == 0)
562 *slot = real;
563
564 return *slot;
565 }
566
567 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
568 VALUE in mode MODE. */
569 rtx
570 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
571 {
572 rtx real = rtx_alloc (CONST_DOUBLE);
573 PUT_MODE (real, mode);
574
575 real->u.rv = value;
576
577 return lookup_const_double (real);
578 }
579
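/* For example, a sketch of obtaining the shared rtx for 1.0 in DFmode
   using one of the REAL_VALUE_TYPE constants defined above:

       rtx one = const_double_from_real_value (dconst1, DFmode);

   Repeated calls with the same value and mode return the same rtx thanks
   to lookup_const_double.  */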
580 /* Determine whether FIXED, a CONST_FIXED, already exists in the
581 hash table. If so, return its counterpart; otherwise add it
582 to the hash table and return it. */
583
584 static rtx
585 lookup_const_fixed (rtx fixed)
586 {
587 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
588 if (*slot == 0)
589 *slot = fixed;
590
591 return *slot;
592 }
593
594 /* Return a CONST_FIXED rtx for a fixed-point value specified by
595 VALUE in mode MODE. */
596
597 rtx
598 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
599 {
600 rtx fixed = rtx_alloc (CONST_FIXED);
601 PUT_MODE (fixed, mode);
602
603 fixed->u.fv = value;
604
605 return lookup_const_fixed (fixed);
606 }
607
608 #if TARGET_SUPPORTS_WIDE_INT == 0
609 /* Constructs double_int from rtx CST. */
610
611 double_int
612 rtx_to_double_int (const_rtx cst)
613 {
614 double_int r;
615
616 if (CONST_INT_P (cst))
617 r = double_int::from_shwi (INTVAL (cst));
618 else if (CONST_DOUBLE_AS_INT_P (cst))
619 {
620 r.low = CONST_DOUBLE_LOW (cst);
621 r.high = CONST_DOUBLE_HIGH (cst);
622 }
623 else
624 gcc_unreachable ();
625
626 return r;
627 }
628 #endif
629
630 #if TARGET_SUPPORTS_WIDE_INT
631 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
632 If so, return its counterpart; otherwise add it to the hash table and
633 return it. */
634
635 static rtx
636 lookup_const_wide_int (rtx wint)
637 {
638 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
639 if (*slot == 0)
640 *slot = wint;
641
642 return *slot;
643 }
644 #endif
645
646 /* Return an rtx constant for V, given that the constant has mode MODE.
647 The returned rtx will be a CONST_INT if V fits, otherwise it will be
648 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
649 (if TARGET_SUPPORTS_WIDE_INT). */
650
651 static rtx
652 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
653 {
654 unsigned int len = v.get_len ();
655 /* Not scalar_int_mode because we also allow pointer bound modes. */
656 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
657
658 /* Allow truncation but not extension since we do not know if the
659 number is signed or unsigned. */
660 gcc_assert (prec <= v.get_precision ());
661
662 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
663 return gen_int_mode (v.elt (0), mode);
664
665 #if TARGET_SUPPORTS_WIDE_INT
666 {
667 unsigned int i;
668 rtx value;
669 unsigned int blocks_needed
670 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
671
672 if (len > blocks_needed)
673 len = blocks_needed;
674
675 value = const_wide_int_alloc (len);
676
677 /* It is so tempting to just put the mode in here. Must control
678 myself ... */
679 PUT_MODE (value, VOIDmode);
680 CWI_PUT_NUM_ELEM (value, len);
681
682 for (i = 0; i < len; i++)
683 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
684
685 return lookup_const_wide_int (value);
686 }
687 #else
688 return immed_double_const (v.elt (0), v.elt (1), mode);
689 #endif
690 }
691
692 #if TARGET_SUPPORTS_WIDE_INT == 0
693 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
694 of ints: I0 is the low-order word and I1 is the high-order word.
695 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
696 implied upper bits are copies of the high bit of i1. The value
697 itself is neither signed nor unsigned. Do not use this routine for
698 non-integer modes; convert to REAL_VALUE_TYPE and use
699 const_double_from_real_value. */
700
701 rtx
702 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
703 {
704 rtx value;
705 unsigned int i;
706
707 /* There are the following cases (note that there are no modes with
708 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
709
710 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
711 gen_int_mode.
712 2) If the value of the integer fits into HOST_WIDE_INT anyway
713 (i.e., i1 consists only from copies of the sign bit, and sign
714 of i0 and i1 are the same), then we return a CONST_INT for i0.
715 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
716 scalar_mode smode;
717 if (is_a <scalar_mode> (mode, &smode)
718 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
719 return gen_int_mode (i0, mode);
720
721 /* If this integer fits in one word, return a CONST_INT. */
722 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
723 return GEN_INT (i0);
724
725 /* We use VOIDmode for integers. */
726 value = rtx_alloc (CONST_DOUBLE);
727 PUT_MODE (value, VOIDmode);
728
729 CONST_DOUBLE_LOW (value) = i0;
730 CONST_DOUBLE_HIGH (value) = i1;
731
732 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
733 XWINT (value, i) = 0;
734
735 return lookup_const_double (value);
736 }
737 #endif
738
739 /* Return an rtx representation of C in mode MODE. */
740
741 rtx
742 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
743 {
744 if (c.is_constant ())
745 return immed_wide_int_const_1 (c.coeffs[0], mode);
746
747 /* Not scalar_int_mode because we also allow pointer bound modes. */
748 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
749
750 /* Allow truncation but not extension since we do not know if the
751 number is signed or unsigned. */
752 gcc_assert (prec <= c.coeffs[0].get_precision ());
753 poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
754
755 /* See whether we already have an rtx for this constant. */
756 inchash::hash h;
757 h.add_int (mode);
758 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
759 h.add_wide_int (newc.coeffs[i]);
760 const_poly_int_hasher::compare_type typed_value (mode, newc);
761 rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
762 h.end (), INSERT);
763 rtx x = *slot;
764 if (x)
765 return x;
766
767 /* Create a new rtx. There's a choice to be made here between installing
768 the actual mode of the rtx or leaving it as VOIDmode (for consistency
769 with CONST_INT). In practice the handling of the codes is different
770 enough that we get no benefit from using VOIDmode, and various places
771 assume that VOIDmode implies CONST_INT. Using the real mode seems like
772 the right long-term direction anyway. */
773 typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
774 size_t extra_size = twi::extra_size (prec);
775 x = rtx_alloc_v (CONST_POLY_INT,
776 sizeof (struct const_poly_int_def) + extra_size);
777 PUT_MODE (x, mode);
778 CONST_POLY_INT_COEFFS (x).set_precision (prec);
779 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
780 CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
781
782 *slot = x;
783 return x;
784 }
785
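/* A hedged sketch of the usual entry point from tree land: expanding an
   INTEGER_CST EXP is typically done roughly as

       rtx c = immed_wide_int_const (wi::to_wide (exp),
                                     TYPE_MODE (TREE_TYPE (exp)));

   which lands in the CONST_INT, CONST_WIDE_INT or CONST_DOUBLE handling
   above depending on the value and on TARGET_SUPPORTS_WIDE_INT.  */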
786 rtx
787 gen_rtx_REG (machine_mode mode, unsigned int regno)
788 {
789 /* In case the MD file explicitly references the frame pointer, have
790 all such references point to the same frame pointer. This is
791 used during frame pointer elimination to distinguish the explicit
792 references to these registers from pseudos that happened to be
793 assigned to them.
794
795 If we have eliminated the frame pointer or arg pointer, we will
796 be using it as a normal register, for example as a spill
797 register. In such cases, we might be accessing it in a mode that
798 is not Pmode and therefore cannot use the pre-allocated rtx.
799
800 Also don't do this when we are making new REGs in reload, since
801 we don't want to get confused with the real pointers. */
802
803 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
804 {
805 if (regno == FRAME_POINTER_REGNUM
806 && (!reload_completed || frame_pointer_needed))
807 return frame_pointer_rtx;
808
809 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
810 && regno == HARD_FRAME_POINTER_REGNUM
811 && (!reload_completed || frame_pointer_needed))
812 return hard_frame_pointer_rtx;
813 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
814 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
815 && regno == ARG_POINTER_REGNUM)
816 return arg_pointer_rtx;
817 #endif
818 #ifdef RETURN_ADDRESS_POINTER_REGNUM
819 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
820 return return_address_pointer_rtx;
821 #endif
822 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
823 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
824 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
825 return pic_offset_table_rtx;
826 if (regno == STACK_POINTER_REGNUM)
827 return stack_pointer_rtx;
828 }
829
830 #if 0
831 /* If the per-function register table has been set up, try to re-use
832 an existing entry in that table to avoid useless generation of RTL.
833
834 This code is disabled for now until we can fix the various backends
835 which depend on having non-shared hard registers in some cases. Long
836 term we want to re-enable this code as it can significantly cut down
837 on the amount of useless RTL that gets generated.
838
839 We'll also need to fix some code that runs after reload that wants to
840 set ORIGINAL_REGNO. */
841
842 if (cfun
843 && cfun->emit
844 && regno_reg_rtx
845 && regno < FIRST_PSEUDO_REGISTER
846 && reg_raw_mode[regno] == mode)
847 return regno_reg_rtx[regno];
848 #endif
849
850 return gen_raw_REG (mode, regno);
851 }
852
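/* For instance, before reload a request such as

       rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);

   hands back the shared frame_pointer_rtx rather than a fresh REG, so
   explicit frame-pointer references can be recognized by simple pointer
   comparison.  */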
853 rtx
854 gen_rtx_MEM (machine_mode mode, rtx addr)
855 {
856 rtx rt = gen_rtx_raw_MEM (mode, addr);
857
858 /* This field is not cleared by the mere allocation of the rtx, so
859 we clear it here. */
860 MEM_ATTRS (rt) = 0;
861
862 return rt;
863 }
864
865 /* Generate a MEM referring to non-trapping constant memory. */
866
867 rtx
868 gen_const_mem (machine_mode mode, rtx addr)
869 {
870 rtx mem = gen_rtx_MEM (mode, addr);
871 MEM_READONLY_P (mem) = 1;
872 MEM_NOTRAP_P (mem) = 1;
873 return mem;
874 }
875
876 /* Generate a MEM referring to fixed portions of the frame, e.g., register
877 save areas. */
878
879 rtx
880 gen_frame_mem (machine_mode mode, rtx addr)
881 {
882 rtx mem = gen_rtx_MEM (mode, addr);
883 MEM_NOTRAP_P (mem) = 1;
884 set_mem_alias_set (mem, get_frame_alias_set ());
885 return mem;
886 }
887
888 /* Generate a MEM referring to a temporary use of the stack, not part
889 of the fixed stack frame. For example, something which is pushed
890 by a target splitter. */
891 rtx
892 gen_tmp_stack_mem (machine_mode mode, rtx addr)
893 {
894 rtx mem = gen_rtx_MEM (mode, addr);
895 MEM_NOTRAP_P (mem) = 1;
896 if (!cfun->calls_alloca)
897 set_mem_alias_set (mem, get_frame_alias_set ());
898 return mem;
899 }
900
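/* Illustrative only: a target splitter that spills a value through a
   transient stack slot might wrap the address as, say,

       rtx slot = gen_tmp_stack_mem (Pmode,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -8));

   picking up MEM_NOTRAP_P and (when the function does not call alloca)
   the frame alias set, whereas a bare gen_rtx_MEM would carry no such
   attributes.  */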
901 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
902 this construct would be valid, and false otherwise. */
903
904 bool
905 validate_subreg (machine_mode omode, machine_mode imode,
906 const_rtx reg, poly_uint64 offset)
907 {
908 poly_uint64 isize = GET_MODE_SIZE (imode);
909 poly_uint64 osize = GET_MODE_SIZE (omode);
910
911 /* The sizes must be ordered, so that we know whether the subreg
912 is partial, paradoxical or complete. */
913 if (!ordered_p (isize, osize))
914 return false;
915
916 /* All subregs must be aligned. */
917 if (!multiple_p (offset, osize))
918 return false;
919
920 /* The subreg offset cannot be outside the inner object. */
921 if (maybe_ge (offset, isize))
922 return false;
923
924 poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
925
926 /* ??? This should not be here. Temporarily continue to allow word_mode
927 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
928 Generally, backends are doing something sketchy but it'll take time to
929 fix them all. */
930 if (omode == word_mode)
931 ;
932 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
933 is the culprit here, and not the backends. */
934 else if (known_ge (osize, regsize) && known_ge (isize, osize))
935 ;
936 /* Allow component subregs of complex and vector. Though given the below
937 extraction rules, it's not always clear what that means. */
938 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
939 && GET_MODE_INNER (imode) == omode)
940 ;
941 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
942 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
943 represent this. It's questionable if this ought to be represented at
944 all -- why can't this all be hidden in post-reload splitters that make
945 arbitrarily mode changes to the registers themselves. */
946 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
947 ;
948 /* Subregs involving floating point modes are not allowed to
949 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
950 (subreg:SI (reg:DF) 0) isn't. */
951 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
952 {
953 if (! (known_eq (isize, osize)
954 /* LRA can use subreg to store a floating point value in
955 an integer mode. Although the floating point and the
956 integer modes need the same number of hard registers,
957 the size of floating point mode can be less than the
958 integer mode. LRA also uses subregs for a register
959 should be used in different mode in on insn. */
960 || lra_in_progress))
961 return false;
962 }
963
964 /* Paradoxical subregs must have offset zero. */
965 if (maybe_gt (osize, isize))
966 return known_eq (offset, 0U);
967
968 /* This is a normal subreg. Verify that the offset is representable. */
969
970 /* For hard registers, we already have most of these rules collected in
971 subreg_offset_representable_p. */
972 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
973 {
974 unsigned int regno = REGNO (reg);
975
976 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
977 && GET_MODE_INNER (imode) == omode)
978 ;
979 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
980 return false;
981
982 return subreg_offset_representable_p (regno, imode, offset, omode);
983 }
984
985 /* The outer size must be ordered wrt the register size, otherwise
986 we wouldn't know at compile time how many registers the outer
987 mode occupies. */
988 if (!ordered_p (osize, regsize))
989 return false;
990
991 /* For pseudo registers, we want most of the same checks. Namely:
992
993 Assume that the pseudo register will be allocated to hard registers
994 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
995 the remainder must correspond to the lowpart of the containing hard
996 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
997 otherwise it is at the lowest offset.
998
999 Given that we've already checked the mode and offset alignment,
1000 we only have to check subblock subregs here. */
1001 if (maybe_lt (osize, regsize)
1002 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
1003 {
1004 /* It is invalid for the target to pick a register size for a mode
1005 that isn't ordered wrt to the size of that mode. */
1006 poly_uint64 block_size = ordered_min (isize, regsize);
1007 unsigned int start_reg;
1008 poly_uint64 offset_within_reg;
1009 if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
1010 || (BYTES_BIG_ENDIAN
1011 ? maybe_ne (offset_within_reg, block_size - osize)
1012 : maybe_ne (offset_within_reg, 0U)))
1013 return false;
1014 }
1015 return true;
1016 }
1017
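/* Two quick worked examples of the rules above: (subreg:SI (reg:DI R) 0)
   is the standard lowpart reference on a little-endian target, while
   (subreg:SI (reg:DF R) 0) changes size within a floating-point mode and
   is rejected unless SImode happens to be word_mode (the temporary
   exception above) or LRA is in progress.  */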
1018 rtx
1019 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
1020 {
1021 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
1022 return gen_rtx_raw_SUBREG (mode, reg, offset);
1023 }
1024
1025 /* Generate a SUBREG representing the least-significant part of REG if MODE
1026 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
1027
1028 rtx
1029 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1030 {
1031 machine_mode inmode;
1032
1033 inmode = GET_MODE (reg);
1034 if (inmode == VOIDmode)
1035 inmode = mode;
1036 return gen_rtx_SUBREG (mode, reg,
1037 subreg_lowpart_offset (mode, inmode));
1038 }
1039
1040 rtx
1041 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1042 enum var_init_status status)
1043 {
1044 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1045 PAT_VAR_LOCATION_STATUS (x) = status;
1046 return x;
1047 }
1048 \f
1049
1050 /* Create an rtvec and store within it the RTXen passed in the arguments. */
1051
1052 rtvec
1053 gen_rtvec (int n, ...)
1054 {
1055 int i;
1056 rtvec rt_val;
1057 va_list p;
1058
1059 va_start (p, n);
1060
1061 /* Don't allocate an empty rtvec... */
1062 if (n == 0)
1063 {
1064 va_end (p);
1065 return NULL_RTVEC;
1066 }
1067
1068 rt_val = rtvec_alloc (n);
1069
1070 for (i = 0; i < n; i++)
1071 rt_val->elem[i] = va_arg (p, rtx);
1072
1073 va_end (p);
1074 return rt_val;
1075 }
1076
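/* For example, a two-element vector for a PARALLEL can be built as a
   sketch like

       rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));

   where SET0 and SET1 stand for previously constructed SET rtxes
   (hypothetical names here).  */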
1077 rtvec
1078 gen_rtvec_v (int n, rtx *argp)
1079 {
1080 int i;
1081 rtvec rt_val;
1082
1083 /* Don't allocate an empty rtvec... */
1084 if (n == 0)
1085 return NULL_RTVEC;
1086
1087 rt_val = rtvec_alloc (n);
1088
1089 for (i = 0; i < n; i++)
1090 rt_val->elem[i] = *argp++;
1091
1092 return rt_val;
1093 }
1094
1095 rtvec
1096 gen_rtvec_v (int n, rtx_insn **argp)
1097 {
1098 int i;
1099 rtvec rt_val;
1100
1101 /* Don't allocate an empty rtvec... */
1102 if (n == 0)
1103 return NULL_RTVEC;
1104
1105 rt_val = rtvec_alloc (n);
1106
1107 for (i = 0; i < n; i++)
1108 rt_val->elem[i] = *argp++;
1109
1110 return rt_val;
1111 }
1112
1113 \f
1114 /* Return the number of bytes between the start of an OUTER_MODE
1115 in-memory value and the start of an INNER_MODE in-memory value,
1116 given that the former is a lowpart of the latter. It may be a
1117 paradoxical lowpart, in which case the offset will be negative
1118 on big-endian targets. */
1119
1120 poly_int64
1121 byte_lowpart_offset (machine_mode outer_mode,
1122 machine_mode inner_mode)
1123 {
1124 if (paradoxical_subreg_p (outer_mode, inner_mode))
1125 return -subreg_lowpart_offset (inner_mode, outer_mode);
1126 else
1127 return subreg_lowpart_offset (outer_mode, inner_mode);
1128 }
1129
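/* Worked example: with 8-byte DImode and 4-byte SImode,
   byte_lowpart_offset (SImode, DImode) is 4 on a big-endian target and 0
   on a little-endian one, while the paradoxical direction
   byte_lowpart_offset (DImode, SImode) yields -4 and 0 respectively.  */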
1130 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1131 from address X. For paradoxical big-endian subregs this is a
1132 negative value, otherwise it's the same as OFFSET. */
1133
1134 poly_int64
1135 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1136 poly_uint64 offset)
1137 {
1138 if (paradoxical_subreg_p (outer_mode, inner_mode))
1139 {
1140 gcc_assert (known_eq (offset, 0U));
1141 return -subreg_lowpart_offset (inner_mode, outer_mode);
1142 }
1143 return offset;
1144 }
1145
1146 /* As above, but return the offset that existing subreg X would have
1147 if SUBREG_REG (X) were stored in memory. The only significant thing
1148 about the current SUBREG_REG is its mode. */
1149
1150 poly_int64
1151 subreg_memory_offset (const_rtx x)
1152 {
1153 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1154 SUBREG_BYTE (x));
1155 }
1156 \f
1157 /* Generate a REG rtx for a new pseudo register of mode MODE.
1158 This pseudo is assigned the next sequential register number. */
1159
1160 rtx
1161 gen_reg_rtx (machine_mode mode)
1162 {
1163 rtx val;
1164 unsigned int align = GET_MODE_ALIGNMENT (mode);
1165
1166 gcc_assert (can_create_pseudo_p ());
1167
1168 /* If a virtual register with bigger mode alignment is generated,
1169 increase stack alignment estimation because it might be spilled
1170 to stack later. */
1171 if (SUPPORTS_STACK_ALIGNMENT
1172 && crtl->stack_alignment_estimated < align
1173 && !crtl->stack_realign_processed)
1174 {
1175 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1176 if (crtl->stack_alignment_estimated < min_align)
1177 crtl->stack_alignment_estimated = min_align;
1178 }
1179
1180 if (generating_concat_p
1181 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1182 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1183 {
1184 /* For complex modes, don't make a single pseudo.
1185 Instead, make a CONCAT of two pseudos.
1186 This allows noncontiguous allocation of the real and imaginary parts,
1187 which makes much better code. Besides, allocating DCmode
1188 pseudos overstrains reload on some machines like the 386. */
1189 rtx realpart, imagpart;
1190 machine_mode partmode = GET_MODE_INNER (mode);
1191
1192 realpart = gen_reg_rtx (partmode);
1193 imagpart = gen_reg_rtx (partmode);
1194 return gen_rtx_CONCAT (mode, realpart, imagpart);
1195 }
1196
1197 /* Do not call gen_reg_rtx with uninitialized crtl. */
1198 gcc_assert (crtl->emit.regno_pointer_align_length);
1199
1200 crtl->emit.ensure_regno_capacity ();
1201 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1202
1203 val = gen_raw_REG (mode, reg_rtx_no);
1204 regno_reg_rtx[reg_rtx_no++] = val;
1205 return val;
1206 }
1207
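/* The common idiom during expansion, shown as a sketch: grab a fresh
   pseudo and move a value into it, e.g.

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, src);

   where SRC stands for any SImode rtx (hypothetical here); complex modes
   transparently come back as a CONCAT of two pseudos as described
   above.  */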
1208 /* Make sure m_regno_pointer_align and regno_reg_rtx are large
1209 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1210
1211 void
1212 emit_status::ensure_regno_capacity ()
1213 {
1214 int old_size = regno_pointer_align_length;
1215
1216 if (reg_rtx_no < old_size)
1217 return;
1218
1219 int new_size = old_size * 2;
1220 while (reg_rtx_no >= new_size)
1221 new_size *= 2;
1222
1223 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1224 memset (tmp + old_size, 0, new_size - old_size);
1225 regno_pointer_align = (unsigned char *) tmp;
1226
1227 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1228 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1229 regno_reg_rtx = new1;
1230
1231 crtl->emit.regno_pointer_align_length = new_size;
1232 }
1233
1234 /* Return TRUE if REG corresponds to a PARM_DECL, FALSE otherwise. */
1235
1236 bool
1237 reg_is_parm_p (rtx reg)
1238 {
1239 tree decl;
1240
1241 gcc_assert (REG_P (reg));
1242 decl = REG_EXPR (reg);
1243 return (decl && TREE_CODE (decl) == PARM_DECL);
1244 }
1245
1246 /* Update NEW with the same attributes as REG, but with OFFSET added
1247 to the REG_OFFSET. */
1248
1249 static void
1250 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1251 {
1252 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1253 REG_OFFSET (reg) + offset);
1254 }
1255
1256 /* Generate a register with same attributes as REG, but with OFFSET
1257 added to the REG_OFFSET. */
1258
1259 rtx
1260 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1261 poly_int64 offset)
1262 {
1263 rtx new_rtx = gen_rtx_REG (mode, regno);
1264
1265 update_reg_offset (new_rtx, reg, offset);
1266 return new_rtx;
1267 }
1268
1269 /* Generate a new pseudo-register with the same attributes as REG, but
1270 with OFFSET added to the REG_OFFSET. */
1271
1272 rtx
1273 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1274 {
1275 rtx new_rtx = gen_reg_rtx (mode);
1276
1277 update_reg_offset (new_rtx, reg, offset);
1278 return new_rtx;
1279 }
1280
1281 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1282 new register is a (possibly paradoxical) lowpart of the old one. */
1283
1284 void
1285 adjust_reg_mode (rtx reg, machine_mode mode)
1286 {
1287 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1288 PUT_MODE (reg, mode);
1289 }
1290
1291 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1292 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1293
1294 void
1295 set_reg_attrs_from_value (rtx reg, rtx x)
1296 {
1297 poly_int64 offset;
1298 bool can_be_reg_pointer = true;
1299
1300 /* Don't call mark_reg_pointer for incompatible pointer sign
1301 extension. */
1302 while (GET_CODE (x) == SIGN_EXTEND
1303 || GET_CODE (x) == ZERO_EXTEND
1304 || GET_CODE (x) == TRUNCATE
1305 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1306 {
1307 #if defined(POINTERS_EXTEND_UNSIGNED)
1308 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1309 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1310 || (paradoxical_subreg_p (x)
1311 && ! (SUBREG_PROMOTED_VAR_P (x)
1312 && SUBREG_CHECK_PROMOTED_SIGN (x,
1313 POINTERS_EXTEND_UNSIGNED))))
1314 && !targetm.have_ptr_extend ())
1315 can_be_reg_pointer = false;
1316 #endif
1317 x = XEXP (x, 0);
1318 }
1319
1320 /* Hard registers can be reused for multiple purposes within the same
1321 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1322 on them is wrong. */
1323 if (HARD_REGISTER_P (reg))
1324 return;
1325
1326 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1327 if (MEM_P (x))
1328 {
1329 if (MEM_OFFSET_KNOWN_P (x))
1330 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1331 MEM_OFFSET (x) + offset);
1332 if (can_be_reg_pointer && MEM_POINTER (x))
1333 mark_reg_pointer (reg, 0);
1334 }
1335 else if (REG_P (x))
1336 {
1337 if (REG_ATTRS (x))
1338 update_reg_offset (reg, x, offset);
1339 if (can_be_reg_pointer && REG_POINTER (x))
1340 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1341 }
1342 }
1343
1344 /* Generate a REG rtx for a new pseudo register, copying the mode
1345 and attributes from X. */
1346
1347 rtx
1348 gen_reg_rtx_and_attrs (rtx x)
1349 {
1350 rtx reg = gen_reg_rtx (GET_MODE (x));
1351 set_reg_attrs_from_value (reg, x);
1352 return reg;
1353 }
1354
1355 /* Set the register attributes for registers contained in PARM_RTX.
1356 Use needed values from memory attributes of MEM. */
1357
1358 void
1359 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1360 {
1361 if (REG_P (parm_rtx))
1362 set_reg_attrs_from_value (parm_rtx, mem);
1363 else if (GET_CODE (parm_rtx) == PARALLEL)
1364 {
1365 /* Check for a NULL entry in the first slot, used to indicate that the
1366 parameter goes both on the stack and in registers. */
1367 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1368 for (; i < XVECLEN (parm_rtx, 0); i++)
1369 {
1370 rtx x = XVECEXP (parm_rtx, 0, i);
1371 if (REG_P (XEXP (x, 0)))
1372 REG_ATTRS (XEXP (x, 0))
1373 = get_reg_attrs (MEM_EXPR (mem),
1374 INTVAL (XEXP (x, 1)));
1375 }
1376 }
1377 }
1378
1379 /* Set the REG_ATTRS for registers in value X, given that X represents
1380 decl T. */
1381
1382 void
1383 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1384 {
1385 if (!t)
1386 return;
1387 tree tdecl = t;
1388 if (GET_CODE (x) == SUBREG)
1389 {
1390 gcc_assert (subreg_lowpart_p (x));
1391 x = SUBREG_REG (x);
1392 }
1393 if (REG_P (x))
1394 REG_ATTRS (x)
1395 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1396 DECL_P (tdecl)
1397 ? DECL_MODE (tdecl)
1398 : TYPE_MODE (TREE_TYPE (tdecl))));
1399 if (GET_CODE (x) == CONCAT)
1400 {
1401 if (REG_P (XEXP (x, 0)))
1402 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1403 if (REG_P (XEXP (x, 1)))
1404 REG_ATTRS (XEXP (x, 1))
1405 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1406 }
1407 if (GET_CODE (x) == PARALLEL)
1408 {
1409 int i, start;
1410
1411 /* Check for a NULL entry, used to indicate that the parameter goes
1412 both on the stack and in registers. */
1413 if (XEXP (XVECEXP (x, 0, 0), 0))
1414 start = 0;
1415 else
1416 start = 1;
1417
1418 for (i = start; i < XVECLEN (x, 0); i++)
1419 {
1420 rtx y = XVECEXP (x, 0, i);
1421 if (REG_P (XEXP (y, 0)))
1422 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1423 }
1424 }
1425 }
1426
1427 /* Assign the RTX X to declaration T. */
1428
1429 void
1430 set_decl_rtl (tree t, rtx x)
1431 {
1432 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1433 if (x)
1434 set_reg_attrs_for_decl_rtl (t, x);
1435 }
1436
1437 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1438 if the ABI requires the parameter to be passed by reference. */
1439
1440 void
1441 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1442 {
1443 DECL_INCOMING_RTL (t) = x;
1444 if (x && !by_reference_p)
1445 set_reg_attrs_for_decl_rtl (t, x);
1446 }
1447
1448 /* Identify REG (which may be a CONCAT) as a user register. */
1449
1450 void
1451 mark_user_reg (rtx reg)
1452 {
1453 if (GET_CODE (reg) == CONCAT)
1454 {
1455 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1456 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1457 }
1458 else
1459 {
1460 gcc_assert (REG_P (reg));
1461 REG_USERVAR_P (reg) = 1;
1462 }
1463 }
1464
1465 /* Identify REG as a probable pointer register and show its alignment
1466 as ALIGN, if nonzero. */
1467
1468 void
1469 mark_reg_pointer (rtx reg, int align)
1470 {
1471 if (! REG_POINTER (reg))
1472 {
1473 REG_POINTER (reg) = 1;
1474
1475 if (align)
1476 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1477 }
1478 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1479 /* We can no longer be sure just how aligned this pointer is. */
1480 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1481 }
1482
1483 /* Return 1 plus largest pseudo reg number used in the current function. */
1484
1485 int
1486 max_reg_num (void)
1487 {
1488 return reg_rtx_no;
1489 }
1490
1491 /* Return 1 + the largest label number used so far in the current function. */
1492
1493 int
1494 max_label_num (void)
1495 {
1496 return label_num;
1497 }
1498
1499 /* Return first label number used in this function (if any were used). */
1500
1501 int
1502 get_first_label_num (void)
1503 {
1504 return first_label_num;
1505 }
1506
1507 /* If the rtx for label was created during the expansion of a nested
1508 function, then first_label_num won't include this label number.
1509 Fix this now so that array indices work later. */
1510
1511 void
1512 maybe_set_first_label_num (rtx_code_label *x)
1513 {
1514 if (CODE_LABEL_NUMBER (x) < first_label_num)
1515 first_label_num = CODE_LABEL_NUMBER (x);
1516 }
1517
1518 /* For use by the RTL function loader, when mingling with normal
1519 functions.
1520 Ensure that label_num is greater than the label num of X, to avoid
1521 duplicate labels in the generated assembler. */
1522
1523 void
1524 maybe_set_max_label_num (rtx_code_label *x)
1525 {
1526 if (CODE_LABEL_NUMBER (x) >= label_num)
1527 label_num = CODE_LABEL_NUMBER (x) + 1;
1528 }
1529
1530 \f
1531 /* Return a value representing some low-order bits of X, where the number
1532 of low-order bits is given by MODE. Note that no conversion is done
1533 between floating-point and fixed-point values, rather, the bit
1534 representation is returned.
1535
1536 This function handles the cases in common between gen_lowpart, below,
1537 and two variants in cse.c and combine.c. These are the cases that can
1538 be safely handled at all points in the compilation.
1539
1540 If this is not a case we can handle, return 0. */
1541
1542 rtx
1543 gen_lowpart_common (machine_mode mode, rtx x)
1544 {
1545 poly_uint64 msize = GET_MODE_SIZE (mode);
1546 machine_mode innermode;
1547
1548 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1549 so we have to make one up. Yuk. */
1550 innermode = GET_MODE (x);
1551 if (CONST_INT_P (x)
1552 && known_le (msize * BITS_PER_UNIT,
1553 (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
1554 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1555 else if (innermode == VOIDmode)
1556 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1557
1558 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1559
1560 if (innermode == mode)
1561 return x;
1562
1563 /* The size of the outer and inner modes must be ordered. */
1564 poly_uint64 xsize = GET_MODE_SIZE (innermode);
1565 if (!ordered_p (msize, xsize))
1566 return 0;
1567
1568 if (SCALAR_FLOAT_MODE_P (mode))
1569 {
1570 /* Don't allow paradoxical FLOAT_MODE subregs. */
1571 if (maybe_gt (msize, xsize))
1572 return 0;
1573 }
1574 else
1575 {
1576 /* MODE must occupy no more of the underlying registers than X. */
1577 poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1578 unsigned int mregs, xregs;
1579 if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1580 || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1581 || mregs > xregs)
1582 return 0;
1583 }
1584
1585 scalar_int_mode int_mode, int_innermode, from_mode;
1586 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1587 && is_a <scalar_int_mode> (mode, &int_mode)
1588 && is_a <scalar_int_mode> (innermode, &int_innermode)
1589 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1590 {
1591 /* If we are getting the low-order part of something that has been
1592 sign- or zero-extended, we can either just use the object being
1593 extended or make a narrower extension. If we want an even smaller
1594 piece than the size of the object being extended, call ourselves
1595 recursively.
1596
1597 This case is used mostly by combine and cse. */
1598
1599 if (from_mode == int_mode)
1600 return XEXP (x, 0);
1601 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1602 return gen_lowpart_common (int_mode, XEXP (x, 0));
1603 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1604 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1605 }
1606 else if (GET_CODE (x) == SUBREG || REG_P (x)
1607 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1608 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1609 || CONST_POLY_INT_P (x))
1610 return lowpart_subreg (mode, x, innermode);
1611
1612 /* Otherwise, we can't do this. */
1613 return 0;
1614 }
1615 \f
1616 rtx
1617 gen_highpart (machine_mode mode, rtx x)
1618 {
1619 poly_uint64 msize = GET_MODE_SIZE (mode);
1620 rtx result;
1621
1622 /* This case loses if X is a subreg. To catch bugs early,
1623 complain if an invalid MODE is used even in other cases. */
1624 gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1625 || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
1626
1627 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1628 subreg_highpart_offset (mode, GET_MODE (x)));
1629 gcc_assert (result);
1630
1631 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1632 the target if we have a MEM. gen_highpart must return a valid operand,
1633 emitting code if necessary to do so. */
1634 if (MEM_P (result))
1635 {
1636 result = validize_mem (result);
1637 gcc_assert (result);
1638 }
1639
1640 return result;
1641 }
1642
1643 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1644 can be a VOIDmode constant. */
1645 rtx
1646 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1647 {
1648 if (GET_MODE (exp) != VOIDmode)
1649 {
1650 gcc_assert (GET_MODE (exp) == innermode);
1651 return gen_highpart (outermode, exp);
1652 }
1653 return simplify_gen_subreg (outermode, exp, innermode,
1654 subreg_highpart_offset (outermode, innermode));
1655 }
1656
1657 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1658 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1659
1660 poly_uint64
1661 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1662 {
1663 gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1664 if (maybe_gt (outer_bytes, inner_bytes))
1665 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1666 return 0;
1667
1668 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1669 return inner_bytes - outer_bytes;
1670 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1671 return 0;
1672 else
1673 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1674 }
1675
1676 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1677 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1678
1679 poly_uint64
1680 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1681 {
1682 gcc_assert (known_ge (inner_bytes, outer_bytes));
1683
1684 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1685 return 0;
1686 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1687 return inner_bytes - outer_bytes;
1688 else
1689 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1690 (inner_bytes - outer_bytes)
1691 * BITS_PER_UNIT);
1692 }
1693
1694 /* Return 1 iff X, assumed to be a SUBREG,
1695 refers to the least significant part of its containing reg.
1696 If X is not a SUBREG, always return 1 (it is its own low part!). */
1697
1698 int
1699 subreg_lowpart_p (const_rtx x)
1700 {
1701 if (GET_CODE (x) != SUBREG)
1702 return 1;
1703 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1704 return 0;
1705
1706 return known_eq (subreg_lowpart_offset (GET_MODE (x),
1707 GET_MODE (SUBREG_REG (x))),
1708 SUBREG_BYTE (x));
1709 }
1710 \f
1711 /* Return subword OFFSET of operand OP.
1712 The word number, OFFSET, is interpreted as the word number starting
1713 at the low-order address. OFFSET 0 is the low-order word if not
1714 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1715
1716 If we cannot extract the required word, we return zero. Otherwise,
1717 an rtx corresponding to the requested word will be returned.
1718
1719 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1720 reload has completed, a valid address will always be returned. After
1721 reload, if a valid address cannot be returned, we return zero.
1722
1723 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1724 it is the responsibility of the caller.
1725
1726 MODE is the mode of OP in case it is a CONST_INT.
1727
1728 ??? This is still rather broken for some cases. The problem for the
1729 moment is that all callers of this thing provide no 'goal mode' to
1730 tell us to work with. This exists because all callers were written
1731 in a word based SUBREG world.
1732 Now use of this function can be deprecated by simplify_subreg in most
1733 cases.
1734 */
1735
1736 rtx
1737 operand_subword (rtx op, poly_uint64 offset, int validate_address,
1738 machine_mode mode)
1739 {
1740 if (mode == VOIDmode)
1741 mode = GET_MODE (op);
1742
1743 gcc_assert (mode != VOIDmode);
1744
1745 /* If OP is narrower than a word, fail. */
1746 if (mode != BLKmode
1747 && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1748 return 0;
1749
1750 /* If we want a word outside OP, return zero. */
1751 if (mode != BLKmode
1752 && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1753 return const0_rtx;
1754
1755 /* Form a new MEM at the requested address. */
1756 if (MEM_P (op))
1757 {
1758 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1759
1760 if (! validate_address)
1761 return new_rtx;
1762
1763 else if (reload_completed)
1764 {
1765 if (! strict_memory_address_addr_space_p (word_mode,
1766 XEXP (new_rtx, 0),
1767 MEM_ADDR_SPACE (op)))
1768 return 0;
1769 }
1770 else
1771 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1772 }
1773
1774 /* Rest can be handled by simplify_subreg. */
1775 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1776 }
1777
1778 /* Similar to `operand_subword', but never return 0. If we can't
1779 extract the required subword, put OP into a register and try again.
1780 The second attempt must succeed. We always validate the address in
1781 this case.
1782
1783 MODE is the mode of OP, in case it is CONST_INT. */
1784
1785 rtx
1786 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1787 {
1788 rtx result = operand_subword (op, offset, 1, mode);
1789
1790 if (result)
1791 return result;
1792
1793 if (mode != BLKmode && mode != VOIDmode)
1794 {
1795 /* If this is a register which cannot be accessed by words, copy it
1796 to a pseudo register. */
1797 if (REG_P (op))
1798 op = copy_to_reg (op);
1799 else
1800 op = force_reg (mode, op);
1801 }
1802
1803 result = operand_subword (op, offset, 1, mode);
1804 gcc_assert (result);
1805
1806 return result;
1807 }
1808 \f
1809 mem_attrs::mem_attrs ()
1810 : expr (NULL_TREE),
1811 offset (0),
1812 size (0),
1813 alias (0),
1814 align (0),
1815 addrspace (ADDR_SPACE_GENERIC),
1816 offset_known_p (false),
1817 size_known_p (false)
1818 {}
1819
1820 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1821 and 0 otherwise. */
1822
1823 int
1824 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1825 {
1826 if (expr1 == expr2)
1827 return 1;
1828
1829 if (! expr1 || ! expr2)
1830 return 0;
1831
1832 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1833 return 0;
1834
1835 return operand_equal_p (expr1, expr2, 0);
1836 }
1837
1838 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1839 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1840 -1 if not known. */
1841
1842 int
1843 get_mem_align_offset (rtx mem, unsigned int align)
1844 {
1845 tree expr;
1846 poly_uint64 offset;
1847
1848 /* This function can't use
1849 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1850 || (MAX (MEM_ALIGN (mem),
1851 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1852 < align))
1853 return -1;
1854 else
1855 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1856 for two reasons:
1857 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1858 for <variable>. get_inner_reference doesn't handle it and
1859 even if it did, the alignment in that case needs to be determined
1860 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1861 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1862 isn't sufficiently aligned, the object it is in might be. */
1863 gcc_assert (MEM_P (mem));
1864 expr = MEM_EXPR (mem);
1865 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1866 return -1;
1867
1868 offset = MEM_OFFSET (mem);
1869 if (DECL_P (expr))
1870 {
1871 if (DECL_ALIGN (expr) < align)
1872 return -1;
1873 }
1874 else if (INDIRECT_REF_P (expr))
1875 {
1876 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1877 return -1;
1878 }
1879 else if (TREE_CODE (expr) == COMPONENT_REF)
1880 {
1881 while (1)
1882 {
1883 tree inner = TREE_OPERAND (expr, 0);
1884 tree field = TREE_OPERAND (expr, 1);
1885 tree byte_offset = component_ref_field_offset (expr);
1886 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1887
1888 poly_uint64 suboffset;
1889 if (!byte_offset
1890 || !poly_int_tree_p (byte_offset, &suboffset)
1891 || !tree_fits_uhwi_p (bit_offset))
1892 return -1;
1893
1894 offset += suboffset;
1895 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1896
1897 if (inner == NULL_TREE)
1898 {
1899 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1900 < (unsigned int) align)
1901 return -1;
1902 break;
1903 }
1904 else if (DECL_P (inner))
1905 {
1906 if (DECL_ALIGN (inner) < align)
1907 return -1;
1908 break;
1909 }
1910 else if (TREE_CODE (inner) != COMPONENT_REF)
1911 return -1;
1912 expr = inner;
1913 }
1914 }
1915 else
1916 return -1;
1917
1918 HOST_WIDE_INT misalign;
1919 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1920 return -1;
1921 return misalign;
1922 }
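
/* Illustrative sketch: get_mem_align_offset answers questions such as
   "is the address of MEM, minus some small byte offset, 32-bit aligned?":

     int off = get_mem_align_offset (mem, 32);

   If OFF is nonnegative, XEXP (mem, 0) - OFF is known to be 32-bit
   aligned; otherwise nothing is known.  */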
1923
1924 /* Given REF (a MEM) and T, either the type of X or the expression
1925 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1926 if we are making a new object of this type. BITPOS is nonzero if
1927 there is an offset outstanding on T that will be applied later. */
1928
1929 void
1930 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1931 poly_int64 bitpos)
1932 {
1933 poly_int64 apply_bitpos = 0;
1934 tree type;
1935 class mem_attrs attrs, *defattrs, *refattrs;
1936 addr_space_t as;
1937
1938 /* It can happen that type_for_mode was given a mode for which there
1939 is no language-level type. In which case it returns NULL, which
1940 we can see here. */
1941 if (t == NULL_TREE)
1942 return;
1943
1944 type = TYPE_P (t) ? t : TREE_TYPE (t);
1945 if (type == error_mark_node)
1946 return;
1947
1948 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1949 wrong answer, as it assumes that DECL_RTL already has the right alias
1950 info. Callers should not set DECL_RTL until after the call to
1951 set_mem_attributes. */
1952 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1953
1954 /* Get the alias set from the expression or type (perhaps using a
1955 front-end routine) and use it. */
1956 attrs.alias = get_alias_set (t);
1957
1958 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1959 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1960
1961 /* Default values from pre-existing memory attributes if present. */
1962 refattrs = MEM_ATTRS (ref);
1963 if (refattrs)
1964 {
1965 /* ??? Can this ever happen? Calling this routine on a MEM that
1966 already carries memory attributes should probably be invalid. */
1967 attrs.expr = refattrs->expr;
1968 attrs.offset_known_p = refattrs->offset_known_p;
1969 attrs.offset = refattrs->offset;
1970 attrs.size_known_p = refattrs->size_known_p;
1971 attrs.size = refattrs->size;
1972 attrs.align = refattrs->align;
1973 }
1974
1975 /* Otherwise, default values from the mode of the MEM reference. */
1976 else
1977 {
1978 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1979 gcc_assert (!defattrs->expr);
1980 gcc_assert (!defattrs->offset_known_p);
1981
1982 /* Respect mode size. */
1983 attrs.size_known_p = defattrs->size_known_p;
1984 attrs.size = defattrs->size;
1985 /* ??? Is this really necessary? We probably should always get
1986 the size from the type below. */
1987
1988 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1989 if T is an object, always compute the object alignment below. */
1990 if (TYPE_P (t))
1991 attrs.align = defattrs->align;
1992 else
1993 attrs.align = BITS_PER_UNIT;
1994 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1995 e.g. if the type carries an alignment attribute. Should we be
1996 able to simply always use TYPE_ALIGN? */
1997 }
1998
1999 /* We can set the alignment from the type if we are making an object or if
2000 this is an INDIRECT_REF. */
2001 if (objectp || TREE_CODE (t) == INDIRECT_REF)
2002 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
2003
2004 /* If the size is known, we can set that. */
2005 tree new_size = TYPE_SIZE_UNIT (type);
2006
2007 /* The address-space is that of the type. */
2008 as = TYPE_ADDR_SPACE (type);
2009
2010 /* If T is not a type, we may be able to deduce some more information about
2011 the expression. */
2012 if (! TYPE_P (t))
2013 {
2014 tree base;
2015
2016 if (TREE_THIS_VOLATILE (t))
2017 MEM_VOLATILE_P (ref) = 1;
2018
2019 /* Now remove any conversions: they don't change what the underlying
2020 object is. Likewise for SAVE_EXPR. */
2021 while (CONVERT_EXPR_P (t)
2022 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2023 || TREE_CODE (t) == SAVE_EXPR)
2024 t = TREE_OPERAND (t, 0);
2025
2026 /* Note whether this expression can trap. */
2027 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2028
2029 base = get_base_address (t);
2030 if (base)
2031 {
2032 if (DECL_P (base)
2033 && TREE_READONLY (base)
2034 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2035 && !TREE_THIS_VOLATILE (base))
2036 MEM_READONLY_P (ref) = 1;
2037
2038 /* Mark static const strings readonly as well. */
2039 if (TREE_CODE (base) == STRING_CST
2040 && TREE_READONLY (base)
2041 && TREE_STATIC (base))
2042 MEM_READONLY_P (ref) = 1;
2043
2044 /* Address-space information is on the base object. */
2045 if (TREE_CODE (base) == MEM_REF
2046 || TREE_CODE (base) == TARGET_MEM_REF)
2047 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2048 0))));
2049 else
2050 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2051 }
2052
2053 /* If this expression uses its parent's alias set, mark it such
2054 that we won't change it. */
2055 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2056 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2057
2058 /* If this is a decl, set the attributes of the MEM from it. */
2059 if (DECL_P (t))
2060 {
2061 attrs.expr = t;
2062 attrs.offset_known_p = true;
2063 attrs.offset = 0;
2064 apply_bitpos = bitpos;
2065 new_size = DECL_SIZE_UNIT (t);
2066 }
2067
2068 /* ??? If we end up with a constant here do record a MEM_EXPR. */
2069 else if (CONSTANT_CLASS_P (t))
2070 ;
2071
2072 /* If this is a field reference, record it. */
2073 else if (TREE_CODE (t) == COMPONENT_REF)
2074 {
2075 attrs.expr = t;
2076 attrs.offset_known_p = true;
2077 attrs.offset = 0;
2078 apply_bitpos = bitpos;
2079 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2080 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2081 }
2082
2083 /* If this is an array reference, look for an outer field reference. */
2084 else if (TREE_CODE (t) == ARRAY_REF)
2085 {
2086 tree off_tree = size_zero_node;
2087 /* We can't modify t, because we use it at the end of the
2088 function. */
2089 tree t2 = t;
2090
2091 do
2092 {
2093 tree index = TREE_OPERAND (t2, 1);
2094 tree low_bound = array_ref_low_bound (t2);
2095 tree unit_size = array_ref_element_size (t2);
2096
2097 /* We assume all arrays have sizes that are a multiple of a byte.
2098 First subtract the lower bound, if any, in the type of the
2099 index, then convert to sizetype and multiply by the size of
2100 the array element. */
2101 if (! integer_zerop (low_bound))
2102 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
2103 index, low_bound);
2104
2105 off_tree = size_binop (PLUS_EXPR,
2106 size_binop (MULT_EXPR,
2107 fold_convert (sizetype,
2108 index),
2109 unit_size),
2110 off_tree);
2111 t2 = TREE_OPERAND (t2, 0);
2112 }
2113 while (TREE_CODE (t2) == ARRAY_REF);
2114
2115 if (DECL_P (t2)
2116 || (TREE_CODE (t2) == COMPONENT_REF
2117 /* For trailing arrays t2 doesn't have a size that
2118 covers all valid accesses. */
2119 && ! array_at_struct_end_p (t)))
2120 {
2121 attrs.expr = t2;
2122 attrs.offset_known_p = false;
2123 if (poly_int_tree_p (off_tree, &attrs.offset))
2124 {
2125 attrs.offset_known_p = true;
2126 apply_bitpos = bitpos;
2127 }
2128 }
2129 /* Else do not record a MEM_EXPR. */
2130 }
2131
2132 /* If this is an indirect reference, record it. */
2133 else if (TREE_CODE (t) == MEM_REF
2134 || TREE_CODE (t) == TARGET_MEM_REF)
2135 {
2136 attrs.expr = t;
2137 attrs.offset_known_p = true;
2138 attrs.offset = 0;
2139 apply_bitpos = bitpos;
2140 }
2141
2142 /* If this is a reference based on a partitioned decl, replace the
2143 base with a MEM_REF of the pointer representative we created
2144 during stack slot partitioning. */
2145 if (attrs.expr
2146 && VAR_P (base)
2147 && ! is_global_var (base)
2148 && cfun->gimple_df->decls_to_pointers != NULL)
2149 {
2150 tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2151 if (namep)
2152 {
2153 attrs.expr = unshare_expr (attrs.expr);
2154 tree *orig_base = &attrs.expr;
2155 while (handled_component_p (*orig_base))
2156 orig_base = &TREE_OPERAND (*orig_base, 0);
2157 tree aptrt = reference_alias_ptr_type (*orig_base);
2158 *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2159 build_int_cst (aptrt, 0));
2160 }
2161 }
2162
2163 /* Compute the alignment. */
2164 unsigned int obj_align;
2165 unsigned HOST_WIDE_INT obj_bitpos;
2166 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2167 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2168 if (diff_align != 0)
2169 obj_align = MIN (obj_align, diff_align);
2170 attrs.align = MAX (attrs.align, obj_align);
2171 }
2172
2173 poly_uint64 const_size;
2174 if (poly_int_tree_p (new_size, &const_size))
2175 {
2176 attrs.size_known_p = true;
2177 attrs.size = const_size;
2178 }
2179
2180 /* If we modified OFFSET based on T, then subtract the outstanding
2181 bit position offset. Similarly, increase the size of the accessed
2182 object to contain the negative offset. */
2183 if (maybe_ne (apply_bitpos, 0))
2184 {
2185 gcc_assert (attrs.offset_known_p);
2186 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2187 attrs.offset -= bytepos;
2188 if (attrs.size_known_p)
2189 attrs.size += bytepos;
2190 }
2191
2192 /* Now set the attributes we computed above. */
2193 attrs.addrspace = as;
2194 set_mem_attrs (ref, &attrs);
2195 }
2196
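/* Set the memory attributes of REF from T, as for
   set_mem_attributes_minus_bitpos above, with no outstanding bit
   position to apply.  */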
2197 void
2198 set_mem_attributes (rtx ref, tree t, int objectp)
2199 {
2200 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2201 }
2202
2203 /* Set the alias set of MEM to SET. */
2204
2205 void
2206 set_mem_alias_set (rtx mem, alias_set_type set)
2207 {
2208 /* If the new and old alias sets don't conflict, something is wrong. */
2209 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2210 mem_attrs attrs (*get_mem_attrs (mem));
2211 attrs.alias = set;
2212 set_mem_attrs (mem, &attrs);
2213 }
2214
2215 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2216
2217 void
2218 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2219 {
2220 mem_attrs attrs (*get_mem_attrs (mem));
2221 attrs.addrspace = addrspace;
2222 set_mem_attrs (mem, &attrs);
2223 }
2224
2225 /* Set the alignment of MEM to ALIGN bits. */
2226
2227 void
2228 set_mem_align (rtx mem, unsigned int align)
2229 {
2230 mem_attrs attrs (*get_mem_attrs (mem));
2231 attrs.align = align;
2232 set_mem_attrs (mem, &attrs);
2233 }
2234
2235 /* Set the expr for MEM to EXPR. */
2236
2237 void
2238 set_mem_expr (rtx mem, tree expr)
2239 {
2240 mem_attrs attrs (*get_mem_attrs (mem));
2241 attrs.expr = expr;
2242 set_mem_attrs (mem, &attrs);
2243 }
2244
2245 /* Set the offset of MEM to OFFSET. */
2246
2247 void
2248 set_mem_offset (rtx mem, poly_int64 offset)
2249 {
2250 mem_attrs attrs (*get_mem_attrs (mem));
2251 attrs.offset_known_p = true;
2252 attrs.offset = offset;
2253 set_mem_attrs (mem, &attrs);
2254 }
2255
2256 /* Clear the offset of MEM. */
2257
2258 void
2259 clear_mem_offset (rtx mem)
2260 {
2261 mem_attrs attrs (*get_mem_attrs (mem));
2262 attrs.offset_known_p = false;
2263 set_mem_attrs (mem, &attrs);
2264 }
2265
2266 /* Set the size of MEM to SIZE. */
2267
2268 void
2269 set_mem_size (rtx mem, poly_int64 size)
2270 {
2271 mem_attrs attrs (*get_mem_attrs (mem));
2272 attrs.size_known_p = true;
2273 attrs.size = size;
2274 set_mem_attrs (mem, &attrs);
2275 }
2276
2277 /* Clear the size of MEM. */
2278
2279 void
2280 clear_mem_size (rtx mem)
2281 {
2282 mem_attrs attrs (*get_mem_attrs (mem));
2283 attrs.size_known_p = false;
2284 set_mem_attrs (mem, &attrs);
2285 }
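
/* Illustrative sketch of the accessors above (not part of the compiler
   itself): a caller that has narrowed a reference to a region it knows
   to be 16 bytes long and 64-bit aligned might update the attributes as

     set_mem_align (mem, 64);
     set_mem_size (mem, 16);
     set_mem_offset (mem, 0);

   whereas a pass that can no longer track the extent would instead use
   clear_mem_size and clear_mem_offset.  */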
2286 \f
2287 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2288 and its address changed to ADDR. (VOIDmode means don't change the mode.
2289 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2290 returned memory location is required to be valid. INPLACE is true if any
2291 changes can be made directly to MEMREF or false if MEMREF must be treated
2292 as immutable.
2293
2294 The memory attributes are not changed. */
2295
2296 static rtx
2297 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2298 bool inplace)
2299 {
2300 addr_space_t as;
2301 rtx new_rtx;
2302
2303 gcc_assert (MEM_P (memref));
2304 as = MEM_ADDR_SPACE (memref);
2305 if (mode == VOIDmode)
2306 mode = GET_MODE (memref);
2307 if (addr == 0)
2308 addr = XEXP (memref, 0);
2309 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2310 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2311 return memref;
2312
2313 /* Don't validate address for LRA. LRA can make the address valid
2314 by itself in most efficient way. */
2315 if (validate && !lra_in_progress)
2316 {
2317 if (reload_in_progress || reload_completed)
2318 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2319 else
2320 addr = memory_address_addr_space (mode, addr, as);
2321 }
2322
2323 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2324 return memref;
2325
2326 if (inplace)
2327 {
2328 XEXP (memref, 0) = addr;
2329 return memref;
2330 }
2331
2332 new_rtx = gen_rtx_MEM (mode, addr);
2333 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2334 return new_rtx;
2335 }
2336
2337 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2338 way we are changing MEMREF, so we only preserve the alias set. */
2339
2340 rtx
2341 change_address (rtx memref, machine_mode mode, rtx addr)
2342 {
2343 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2344 machine_mode mmode = GET_MODE (new_rtx);
2345 class mem_attrs *defattrs;
2346
2347 mem_attrs attrs (*get_mem_attrs (memref));
2348 defattrs = mode_mem_attrs[(int) mmode];
2349 attrs.expr = NULL_TREE;
2350 attrs.offset_known_p = false;
2351 attrs.size_known_p = defattrs->size_known_p;
2352 attrs.size = defattrs->size;
2353 attrs.align = defattrs->align;
2354
2355 /* If there are no changes, just return the original memory reference. */
2356 if (new_rtx == memref)
2357 {
2358 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2359 return new_rtx;
2360
2361 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2362 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2363 }
2364
2365 set_mem_attrs (new_rtx, &attrs);
2366 return new_rtx;
2367 }
2368
2369 /* Return a memory reference like MEMREF, but with its mode changed
2370 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2371 nonzero, the memory address is forced to be valid.
2372 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2373 and the caller is responsible for adjusting MEMREF base register.
2374 If ADJUST_OBJECT is zero, the underlying object associated with the
2375 memory reference is left unchanged and the caller is responsible for
2376 dealing with it. Otherwise, if the new memory reference is outside
2377 the underlying object, even partially, then the object is dropped.
2378 SIZE, if nonzero, is the size of an access in cases where MODE
2379 has no inherent size. */
2380
2381 rtx
2382 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2383 int validate, int adjust_address, int adjust_object,
2384 poly_int64 size)
2385 {
2386 rtx addr = XEXP (memref, 0);
2387 rtx new_rtx;
2388 scalar_int_mode address_mode;
2389 class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2390 unsigned HOST_WIDE_INT max_align;
2391 #ifdef POINTERS_EXTEND_UNSIGNED
2392 scalar_int_mode pointer_mode
2393 = targetm.addr_space.pointer_mode (attrs.addrspace);
2394 #endif
2395
2396 /* VOIDmode means no mode change for change_address_1. */
2397 if (mode == VOIDmode)
2398 mode = GET_MODE (memref);
2399
2400 /* Take the size of non-BLKmode accesses from the mode. */
2401 defattrs = mode_mem_attrs[(int) mode];
2402 if (defattrs->size_known_p)
2403 size = defattrs->size;
2404
2405 /* If there are no changes, just return the original memory reference. */
2406 if (mode == GET_MODE (memref)
2407 && known_eq (offset, 0)
2408 && (known_eq (size, 0)
2409 || (attrs.size_known_p && known_eq (attrs.size, size)))
2410 && (!validate || memory_address_addr_space_p (mode, addr,
2411 attrs.addrspace)))
2412 return memref;
2413
2414 /* ??? Prefer to create garbage instead of creating shared rtl.
2415 This may happen even if offset is nonzero -- consider
2416 (plus (plus reg reg) const_int) -- so do this always. */
2417 addr = copy_rtx (addr);
2418
2419 /* Convert a possibly large offset to a signed value within the
2420 range of the target address space. */
2421 address_mode = get_address_mode (memref);
2422 offset = trunc_int_for_mode (offset, address_mode);
2423
2424 if (adjust_address)
2425 {
2426 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2427 object, we can merge it into the LO_SUM. */
2428 if (GET_MODE (memref) != BLKmode
2429 && GET_CODE (addr) == LO_SUM
2430 && known_in_range_p (offset,
2431 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2432 / BITS_PER_UNIT)))
2433 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2434 plus_constant (address_mode,
2435 XEXP (addr, 1), offset));
2436 #ifdef POINTERS_EXTEND_UNSIGNED
2437 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2438 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2439 the fact that pointers are not allowed to overflow. */
2440 else if (POINTERS_EXTEND_UNSIGNED > 0
2441 && GET_CODE (addr) == ZERO_EXTEND
2442 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2443 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2444 addr = gen_rtx_ZERO_EXTEND (address_mode,
2445 plus_constant (pointer_mode,
2446 XEXP (addr, 0), offset));
2447 #endif
2448 else
2449 addr = plus_constant (address_mode, addr, offset);
2450 }
2451
2452 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2453
2454 /* If the address is a REG, change_address_1 rightfully returns memref,
2455 but this would destroy memref's MEM_ATTRS. */
2456 if (new_rtx == memref && maybe_ne (offset, 0))
2457 new_rtx = copy_rtx (new_rtx);
2458
2459 /* Conservatively drop the object if we don't know where we start from. */
2460 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2461 {
2462 attrs.expr = NULL_TREE;
2463 attrs.alias = 0;
2464 }
2465
2466 /* Compute the new values of the memory attributes due to this adjustment.
2467 We add the offsets and update the alignment. */
2468 if (attrs.offset_known_p)
2469 {
2470 attrs.offset += offset;
2471
2472 /* Drop the object if the new left end is not within its bounds. */
2473 if (adjust_object && maybe_lt (attrs.offset, 0))
2474 {
2475 attrs.expr = NULL_TREE;
2476 attrs.alias = 0;
2477 }
2478 }
2479
2480 /* Compute the new alignment by taking the MIN of the alignment and the
2481 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2482 is zero. */
2483 if (maybe_ne (offset, 0))
2484 {
2485 max_align = known_alignment (offset) * BITS_PER_UNIT;
2486 attrs.align = MIN (attrs.align, max_align);
2487 }
2488
2489 if (maybe_ne (size, 0))
2490 {
2491 /* Drop the object if the new right end is not within its bounds. */
2492 if (adjust_object && maybe_gt (offset + size, attrs.size))
2493 {
2494 attrs.expr = NULL_TREE;
2495 attrs.alias = 0;
2496 }
2497 attrs.size_known_p = true;
2498 attrs.size = size;
2499 }
2500 else if (attrs.size_known_p)
2501 {
2502 gcc_assert (!adjust_object);
2503 attrs.size -= offset;
2504 /* ??? The store_by_pieces machinery generates negative sizes,
2505 so don't assert for that here. */
2506 }
2507
2508 set_mem_attrs (new_rtx, &attrs);
2509
2510 return new_rtx;
2511 }
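
/* Illustrative sketch: most callers reach adjust_address_1 through the
   adjust_address and adjust_address_nv wrappers in emit-rtl.h, which fix
   the VALIDATE/ADJUST_* arguments.  For example, referring to the second
   SImode word of a wider MEM:

     rtx word1 = adjust_address (memref, SImode, 4);

   adjusts both the address and the recorded MEM_ATTRS by 4 bytes.  */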
2512
2513 /* Return a memory reference like MEMREF, but with its mode changed
2514 to MODE and its address changed to ADDR, which is assumed to be
2515 MEMREF offset by OFFSET bytes. If VALIDATE is
2516 nonzero, the memory address is forced to be valid. */
2517
2518 rtx
2519 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2520 poly_int64 offset, int validate)
2521 {
2522 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2523 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2524 }
2525
2526 /* Return a memory reference like MEMREF, but whose address is changed by
2527 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2528 known to be in OFFSET (possibly 1). */
2529
2530 rtx
2531 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2532 {
2533 rtx new_rtx, addr = XEXP (memref, 0);
2534 machine_mode address_mode;
2535 class mem_attrs *defattrs;
2536
2537 mem_attrs attrs (*get_mem_attrs (memref));
2538 address_mode = get_address_mode (memref);
2539 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2540
2541 /* At this point we don't know _why_ the address is invalid. It
2542 could have secondary memory references, multiplies or anything.
2543
2544 However, if we did go and rearrange things, we can wind up not
2545 being able to recognize the magic around pic_offset_table_rtx.
2546 This stuff is fragile, and is yet another example of why it is
2547 bad to expose PIC machinery too early. */
2548 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2549 attrs.addrspace)
2550 && GET_CODE (addr) == PLUS
2551 && XEXP (addr, 0) == pic_offset_table_rtx)
2552 {
2553 addr = force_reg (GET_MODE (addr), addr);
2554 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2555 }
2556
2557 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2558 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2559
2560 /* If there are no changes, just return the original memory reference. */
2561 if (new_rtx == memref)
2562 return new_rtx;
2563
2564 /* Update the alignment to reflect the offset. Reset the offset, which
2565 we don't know. */
2566 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2567 attrs.offset_known_p = false;
2568 attrs.size_known_p = defattrs->size_known_p;
2569 attrs.size = defattrs->size;
2570 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2571 set_mem_attrs (new_rtx, &attrs);
2572 return new_rtx;
2573 }
2574
2575 /* Return a memory reference like MEMREF, but with its address changed to
2576 ADDR. The caller is asserting that the actual piece of memory pointed
2577 to is the same, just the form of the address is being changed, such as
2578 by putting something into a register. INPLACE is true if any changes
2579 can be made directly to MEMREF or false if MEMREF must be treated as
2580 immutable. */
2581
2582 rtx
2583 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2584 {
2585 /* change_address_1 copies the memory attribute structure without change
2586 and that's exactly what we want here. */
2587 update_temp_slot_address (XEXP (memref, 0), addr);
2588 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2589 }
2590
2591 /* Likewise, but the reference is not required to be valid. */
2592
2593 rtx
2594 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2595 {
2596 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2597 }
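
/* Illustrative sketch: forcing the address of MEMREF into a register
   while keeping all of its memory attributes, asserting that the same
   memory is still being referenced:

     rtx reg_addr = force_reg (Pmode, XEXP (memref, 0));
     memref = replace_equiv_address (memref, reg_addr, false);  */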
2598
2599 /* Return a memory reference like MEMREF, but with its mode widened to
2600 MODE and offset by OFFSET. This would be used by targets that e.g.
2601 cannot issue QImode memory operations and have to use SImode memory
2602 operations plus masking logic. */
2603
2604 rtx
2605 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2606 {
2607 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2608 poly_uint64 size = GET_MODE_SIZE (mode);
2609
2610 /* If there are no changes, just return the original memory reference. */
2611 if (new_rtx == memref)
2612 return new_rtx;
2613
2614 mem_attrs attrs (*get_mem_attrs (new_rtx));
2615
2616 /* If we don't know what offset we were at within the expression, then
2617 we can't know if we've overstepped the bounds. */
2618 if (! attrs.offset_known_p)
2619 attrs.expr = NULL_TREE;
2620
2621 while (attrs.expr)
2622 {
2623 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2624 {
2625 tree field = TREE_OPERAND (attrs.expr, 1);
2626 tree offset = component_ref_field_offset (attrs.expr);
2627
2628 if (! DECL_SIZE_UNIT (field))
2629 {
2630 attrs.expr = NULL_TREE;
2631 break;
2632 }
2633
2634 /* Is the field at least as large as the access? If so, ok,
2635 otherwise strip back to the containing structure. */
2636 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2637 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2638 && known_ge (attrs.offset, 0))
2639 break;
2640
2641 poly_uint64 suboffset;
2642 if (!poly_int_tree_p (offset, &suboffset))
2643 {
2644 attrs.expr = NULL_TREE;
2645 break;
2646 }
2647
2648 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2649 attrs.offset += suboffset;
2650 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2651 / BITS_PER_UNIT);
2652 }
2653 /* Similarly for the decl. */
2654 else if (DECL_P (attrs.expr)
2655 && DECL_SIZE_UNIT (attrs.expr)
2656 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2657 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2658 size)
2659 && known_ge (attrs.offset, 0))
2660 break;
2661 else
2662 {
2663 /* The widened memory access overflows the expression, which means
2664 that it could alias another expression. Zap it. */
2665 attrs.expr = NULL_TREE;
2666 break;
2667 }
2668 }
2669
2670 if (! attrs.expr)
2671 attrs.offset_known_p = false;
2672
2673 /* The widened memory may alias other stuff, so zap the alias set. */
2674 /* ??? Maybe use get_alias_set on any remaining expression. */
2675 attrs.alias = 0;
2676 attrs.size_known_p = true;
2677 attrs.size = size;
2678 set_mem_attrs (new_rtx, &attrs);
2679 return new_rtx;
2680 }
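
/* Illustrative sketch: a target that cannot issue QImode loads might
   widen a byte reference to a full SImode word and mask afterwards:

     rtx wide = widen_memory_access (memref, SImode, 0);

   The widened MEM deliberately gets alias set 0, since it may now
   overlap neighbouring objects.  */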
2681 \f
2682 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2683 static GTY(()) tree spill_slot_decl;
2684
2685 tree
2686 get_spill_slot_decl (bool force_build_p)
2687 {
2688 tree d = spill_slot_decl;
2689 rtx rd;
2690
2691 if (d || !force_build_p)
2692 return d;
2693
2694 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2695 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2696 DECL_ARTIFICIAL (d) = 1;
2697 DECL_IGNORED_P (d) = 1;
2698 TREE_USED (d) = 1;
2699 spill_slot_decl = d;
2700
2701 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2702 MEM_NOTRAP_P (rd) = 1;
2703 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2704 attrs.alias = new_alias_set ();
2705 attrs.expr = d;
2706 set_mem_attrs (rd, &attrs);
2707 SET_DECL_RTL (d, rd);
2708
2709 return d;
2710 }
2711
2712 /* Given MEM, a result from assign_stack_local, fill in the memory
2713 attributes as appropriate for a register allocator spill slot.
2714 These slots are not aliasable by other memory. We arrange for
2715 them all to use a single MEM_EXPR, so that the aliasing code can
2716 work properly in the case of shared spill slots. */
2717
2718 void
2719 set_mem_attrs_for_spill (rtx mem)
2720 {
2721 rtx addr;
2722
2723 mem_attrs attrs (*get_mem_attrs (mem));
2724 attrs.expr = get_spill_slot_decl (true);
2725 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2726 attrs.addrspace = ADDR_SPACE_GENERIC;
2727
2728 /* We expect the incoming memory to be of the form:
2729 (mem:MODE (plus (reg sfp) (const_int offset)))
2730 with perhaps the plus missing for offset = 0. */
2731 addr = XEXP (mem, 0);
2732 attrs.offset_known_p = true;
2733 strip_offset (addr, &attrs.offset);
2734
2735 set_mem_attrs (mem, &attrs);
2736 MEM_NOTRAP_P (mem) = 1;
2737 }
2738 \f
2739 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2740
2741 rtx_code_label *
2742 gen_label_rtx (void)
2743 {
2744 return as_a <rtx_code_label *> (
2745 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2746 NULL, label_num++, NULL));
2747 }
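
/* Illustrative sketch: code labels created here only become part of the
   insn stream once emit_label is called on them:

     rtx_code_label *done = gen_label_rtx ();
     ... emit code that may branch to DONE ...
     emit_label (done);  */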
2748 \f
2749 /* For procedure integration. */
2750
2751 /* Install new pointers to the first and last insns in the chain.
2752 Also, set cur_insn_uid to one higher than the last in use.
2753 Used for an inline-procedure after copying the insn chain. */
2754
2755 void
2756 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2757 {
2758 rtx_insn *insn;
2759
2760 set_first_insn (first);
2761 set_last_insn (last);
2762 cur_insn_uid = 0;
2763
2764 if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
2765 {
2766 int debug_count = 0;
2767
2768 cur_insn_uid = param_min_nondebug_insn_uid - 1;
2769 cur_debug_insn_uid = 0;
2770
2771 for (insn = first; insn; insn = NEXT_INSN (insn))
2772 if (INSN_UID (insn) < param_min_nondebug_insn_uid)
2773 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2774 else
2775 {
2776 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2777 if (DEBUG_INSN_P (insn))
2778 debug_count++;
2779 }
2780
2781 if (debug_count)
2782 cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
2783 else
2784 cur_debug_insn_uid++;
2785 }
2786 else
2787 for (insn = first; insn; insn = NEXT_INSN (insn))
2788 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2789
2790 cur_insn_uid++;
2791 }
2792 \f
2793 /* Go through all the RTL insn bodies and copy any invalid shared
2794 structure. This routine should only be called once. */
2795
2796 static void
2797 unshare_all_rtl_1 (rtx_insn *insn)
2798 {
2799 /* Unshare just about everything else. */
2800 unshare_all_rtl_in_chain (insn);
2801
2802 /* Make sure the addresses of stack slots found outside the insn chain
2803 (such as, in DECL_RTL of a variable) are not shared
2804 with the insn chain.
2805
2806 This special care is necessary when the stack slot MEM does not
2807 actually appear in the insn chain. If it does appear, its address
2808 is unshared from all else at that point. */
2809 unsigned int i;
2810 rtx temp;
2811 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2812 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2813 }
2814
2815 /* Go through all the RTL insn bodies and copy any invalid shared
2816 structure, again. This is a fairly expensive thing to do so it
2817 should be done sparingly. */
2818
2819 void
2820 unshare_all_rtl_again (rtx_insn *insn)
2821 {
2822 rtx_insn *p;
2823 tree decl;
2824
2825 for (p = insn; p; p = NEXT_INSN (p))
2826 if (INSN_P (p))
2827 {
2828 reset_used_flags (PATTERN (p));
2829 reset_used_flags (REG_NOTES (p));
2830 if (CALL_P (p))
2831 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2832 }
2833
2834 /* Make sure that virtual stack slots are not shared. */
2835 set_used_decls (DECL_INITIAL (cfun->decl));
2836
2837 /* Make sure that virtual parameters are not shared. */
2838 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2839 set_used_flags (DECL_RTL (decl));
2840
2841 rtx temp;
2842 unsigned int i;
2843 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2844 reset_used_flags (temp);
2845
2846 unshare_all_rtl_1 (insn);
2847 }
2848
2849 unsigned int
2850 unshare_all_rtl (void)
2851 {
2852 unshare_all_rtl_1 (get_insns ());
2853
2854 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2855 {
2856 if (DECL_RTL_SET_P (decl))
2857 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2858 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2859 }
2860
2861 return 0;
2862 }
2863
2864
2865 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2866 Recursively do the same for subexpressions. */
2867
2868 static void
2869 verify_rtx_sharing (rtx orig, rtx insn)
2870 {
2871 rtx x = orig;
2872 int i;
2873 enum rtx_code code;
2874 const char *format_ptr;
2875
2876 if (x == 0)
2877 return;
2878
2879 code = GET_CODE (x);
2880
2881 /* These types may be freely shared. */
2882
2883 switch (code)
2884 {
2885 case REG:
2886 case DEBUG_EXPR:
2887 case VALUE:
2888 CASE_CONST_ANY:
2889 case SYMBOL_REF:
2890 case LABEL_REF:
2891 case CODE_LABEL:
2892 case PC:
2893 case CC0:
2894 case RETURN:
2895 case SIMPLE_RETURN:
2896 case SCRATCH:
2897 /* SCRATCHes must be shared because they represent distinct values. */
2898 return;
2899 case CLOBBER:
2900 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2901 clobbers or clobbers of hard registers that originated as pseudos.
2902 This is needed to allow safe register renaming. */
2903 if (REG_P (XEXP (x, 0))
2904 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2905 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2906 return;
2907 break;
2908
2909 case CONST:
2910 if (shared_const_p (orig))
2911 return;
2912 break;
2913
2914 case MEM:
2915 /* A MEM is allowed to be shared if its address is constant. */
2916 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2917 || reload_completed || reload_in_progress)
2918 return;
2919
2920 break;
2921
2922 default:
2923 break;
2924 }
2925
2926 /* This rtx may not be shared. If it has already been seen,
2927 report the invalid sharing. */
2928 if (flag_checking && RTX_FLAG (x, used))
2929 {
2930 error ("invalid rtl sharing found in the insn");
2931 debug_rtx (insn);
2932 error ("shared rtx");
2933 debug_rtx (x);
2934 internal_error ("internal consistency failure");
2935 }
2936 gcc_assert (!RTX_FLAG (x, used));
2937
2938 RTX_FLAG (x, used) = 1;
2939
2940 /* Now scan the subexpressions recursively. */
2941
2942 format_ptr = GET_RTX_FORMAT (code);
2943
2944 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2945 {
2946 switch (*format_ptr++)
2947 {
2948 case 'e':
2949 verify_rtx_sharing (XEXP (x, i), insn);
2950 break;
2951
2952 case 'E':
2953 if (XVEC (x, i) != NULL)
2954 {
2955 int j;
2956 int len = XVECLEN (x, i);
2957
2958 for (j = 0; j < len; j++)
2959 {
2960 /* We allow sharing of ASM_OPERANDS inside a single
2961 instruction. */
2962 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2963 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2964 == ASM_OPERANDS))
2965 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2966 else
2967 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2968 }
2969 }
2970 break;
2971 }
2972 }
2973 return;
2974 }
2975
2976 /* Reset used-flags for INSN. */
2977
2978 static void
2979 reset_insn_used_flags (rtx insn)
2980 {
2981 gcc_assert (INSN_P (insn));
2982 reset_used_flags (PATTERN (insn));
2983 reset_used_flags (REG_NOTES (insn));
2984 if (CALL_P (insn))
2985 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2986 }
2987
2988 /* Go through all the RTL insn bodies and clear all the USED bits. */
2989
2990 static void
2991 reset_all_used_flags (void)
2992 {
2993 rtx_insn *p;
2994
2995 for (p = get_insns (); p; p = NEXT_INSN (p))
2996 if (INSN_P (p))
2997 {
2998 rtx pat = PATTERN (p);
2999 if (GET_CODE (pat) != SEQUENCE)
3000 reset_insn_used_flags (p);
3001 else
3002 {
3003 gcc_assert (REG_NOTES (p) == NULL);
3004 for (int i = 0; i < XVECLEN (pat, 0); i++)
3005 {
3006 rtx insn = XVECEXP (pat, 0, i);
3007 if (INSN_P (insn))
3008 reset_insn_used_flags (insn);
3009 }
3010 }
3011 }
3012 }
3013
3014 /* Verify sharing in INSN. */
3015
3016 static void
3017 verify_insn_sharing (rtx insn)
3018 {
3019 gcc_assert (INSN_P (insn));
3020 verify_rtx_sharing (PATTERN (insn), insn);
3021 verify_rtx_sharing (REG_NOTES (insn), insn);
3022 if (CALL_P (insn))
3023 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
3024 }
3025
3026 /* Go through all the RTL insn bodies and check that there is no unexpected
3027 sharing in between the subexpressions. */
3028
3029 DEBUG_FUNCTION void
3030 verify_rtl_sharing (void)
3031 {
3032 rtx_insn *p;
3033
3034 timevar_push (TV_VERIFY_RTL_SHARING);
3035
3036 reset_all_used_flags ();
3037
3038 for (p = get_insns (); p; p = NEXT_INSN (p))
3039 if (INSN_P (p))
3040 {
3041 rtx pat = PATTERN (p);
3042 if (GET_CODE (pat) != SEQUENCE)
3043 verify_insn_sharing (p);
3044 else
3045 for (int i = 0; i < XVECLEN (pat, 0); i++)
3046 {
3047 rtx insn = XVECEXP (pat, 0, i);
3048 if (INSN_P (insn))
3049 verify_insn_sharing (insn);
3050 }
3051 }
3052
3053 reset_all_used_flags ();
3054
3055 timevar_pop (TV_VERIFY_RTL_SHARING);
3056 }
3057
3058 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3059 Assumes the mark bits are cleared at entry. */
3060
3061 void
3062 unshare_all_rtl_in_chain (rtx_insn *insn)
3063 {
3064 for (; insn; insn = NEXT_INSN (insn))
3065 if (INSN_P (insn))
3066 {
3067 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3068 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3069 if (CALL_P (insn))
3070 CALL_INSN_FUNCTION_USAGE (insn)
3071 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3072 }
3073 }
3074
3075 /* Go through all virtual stack slots of a function and mark them as
3076 shared. We never replace the DECL_RTLs themselves with a copy,
3077 but expressions mentioned in a DECL_RTL cannot be shared with
3078 expressions in the instruction stream.
3079
3080 Note that reload may convert pseudo registers into memories in-place.
3081 Pseudo registers are always shared, but MEMs never are. Thus if we
3082 reset the used flags on MEMs in the instruction stream, we must set
3083 them again on MEMs that appear in DECL_RTLs. */
3084
3085 static void
3086 set_used_decls (tree blk)
3087 {
3088 tree t;
3089
3090 /* Mark decls. */
3091 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3092 if (DECL_RTL_SET_P (t))
3093 set_used_flags (DECL_RTL (t));
3094
3095 /* Now process sub-blocks. */
3096 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3097 set_used_decls (t);
3098 }
3099
3100 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3101 Recursively does the same for subexpressions. Uses
3102 copy_rtx_if_shared_1 to reduce stack space. */
3103
3104 rtx
3105 copy_rtx_if_shared (rtx orig)
3106 {
3107 copy_rtx_if_shared_1 (&orig);
3108 return orig;
3109 }
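
/* Illustrative use, as in unshare_all_rtl_in_chain above:

     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
     REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));  */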
3110
3111 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3112 use. Recursively does the same for subexpressions. */
3113
3114 static void
3115 copy_rtx_if_shared_1 (rtx *orig1)
3116 {
3117 rtx x;
3118 int i;
3119 enum rtx_code code;
3120 rtx *last_ptr;
3121 const char *format_ptr;
3122 int copied = 0;
3123 int length;
3124
3125 /* Repeat is used to turn tail-recursion into iteration. */
3126 repeat:
3127 x = *orig1;
3128
3129 if (x == 0)
3130 return;
3131
3132 code = GET_CODE (x);
3133
3134 /* These types may be freely shared. */
3135
3136 switch (code)
3137 {
3138 case REG:
3139 case DEBUG_EXPR:
3140 case VALUE:
3141 CASE_CONST_ANY:
3142 case SYMBOL_REF:
3143 case LABEL_REF:
3144 case CODE_LABEL:
3145 case PC:
3146 case CC0:
3147 case RETURN:
3148 case SIMPLE_RETURN:
3149 case SCRATCH:
3150 /* SCRATCHes must be shared because they represent distinct values. */
3151 return;
3152 case CLOBBER:
3153 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3154 clobbers or clobbers of hard registers that originated as pseudos.
3155 This is needed to allow safe register renaming. */
3156 if (REG_P (XEXP (x, 0))
3157 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3158 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3159 return;
3160 break;
3161
3162 case CONST:
3163 if (shared_const_p (x))
3164 return;
3165 break;
3166
3167 case DEBUG_INSN:
3168 case INSN:
3169 case JUMP_INSN:
3170 case CALL_INSN:
3171 case NOTE:
3172 case BARRIER:
3173 /* The chain of insns is not being copied. */
3174 return;
3175
3176 default:
3177 break;
3178 }
3179
3180 /* This rtx may not be shared. If it has already been seen,
3181 replace it with a copy of itself. */
3182
3183 if (RTX_FLAG (x, used))
3184 {
3185 x = shallow_copy_rtx (x);
3186 copied = 1;
3187 }
3188 RTX_FLAG (x, used) = 1;
3189
3190 /* Now scan the subexpressions recursively.
3191 We can store any replaced subexpressions directly into X
3192 since we know X is not shared! Any vectors in X
3193 must be copied if X was copied. */
3194
3195 format_ptr = GET_RTX_FORMAT (code);
3196 length = GET_RTX_LENGTH (code);
3197 last_ptr = NULL;
3198
3199 for (i = 0; i < length; i++)
3200 {
3201 switch (*format_ptr++)
3202 {
3203 case 'e':
3204 if (last_ptr)
3205 copy_rtx_if_shared_1 (last_ptr);
3206 last_ptr = &XEXP (x, i);
3207 break;
3208
3209 case 'E':
3210 if (XVEC (x, i) != NULL)
3211 {
3212 int j;
3213 int len = XVECLEN (x, i);
3214
3215 /* Copy the vector iff we copied the rtx and the length
3216 is nonzero. */
3217 if (copied && len > 0)
3218 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3219
3220 /* Call recursively on all inside the vector. */
3221 for (j = 0; j < len; j++)
3222 {
3223 if (last_ptr)
3224 copy_rtx_if_shared_1 (last_ptr);
3225 last_ptr = &XVECEXP (x, i, j);
3226 }
3227 }
3228 break;
3229 }
3230 }
3231 *orig1 = x;
3232 if (last_ptr)
3233 {
3234 orig1 = last_ptr;
3235 goto repeat;
3236 }
3237 return;
3238 }
3239
3240 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3241
3242 static void
3243 mark_used_flags (rtx x, int flag)
3244 {
3245 int i, j;
3246 enum rtx_code code;
3247 const char *format_ptr;
3248 int length;
3249
3250 /* Repeat is used to turn tail-recursion into iteration. */
3251 repeat:
3252 if (x == 0)
3253 return;
3254
3255 code = GET_CODE (x);
3256
3257 /* These types may be freely shared so we needn't do any resetting
3258 for them. */
3259
3260 switch (code)
3261 {
3262 case REG:
3263 case DEBUG_EXPR:
3264 case VALUE:
3265 CASE_CONST_ANY:
3266 case SYMBOL_REF:
3267 case CODE_LABEL:
3268 case PC:
3269 case CC0:
3270 case RETURN:
3271 case SIMPLE_RETURN:
3272 return;
3273
3274 case DEBUG_INSN:
3275 case INSN:
3276 case JUMP_INSN:
3277 case CALL_INSN:
3278 case NOTE:
3279 case LABEL_REF:
3280 case BARRIER:
3281 /* The chain of insns is not being copied. */
3282 return;
3283
3284 default:
3285 break;
3286 }
3287
3288 RTX_FLAG (x, used) = flag;
3289
3290 format_ptr = GET_RTX_FORMAT (code);
3291 length = GET_RTX_LENGTH (code);
3292
3293 for (i = 0; i < length; i++)
3294 {
3295 switch (*format_ptr++)
3296 {
3297 case 'e':
3298 if (i == length-1)
3299 {
3300 x = XEXP (x, i);
3301 goto repeat;
3302 }
3303 mark_used_flags (XEXP (x, i), flag);
3304 break;
3305
3306 case 'E':
3307 for (j = 0; j < XVECLEN (x, i); j++)
3308 mark_used_flags (XVECEXP (x, i, j), flag);
3309 break;
3310 }
3311 }
3312 }
3313
3314 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3315 to look for shared sub-parts. */
3316
3317 void
3318 reset_used_flags (rtx x)
3319 {
3320 mark_used_flags (x, 0);
3321 }
3322
3323 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3324 to look for shared sub-parts. */
3325
3326 void
3327 set_used_flags (rtx x)
3328 {
3329 mark_used_flags (x, 1);
3330 }
3331 \f
3332 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3333 Return X or the rtx for the pseudo reg the value of X was copied into.
3334 OTHER must be valid as a SET_DEST. */
3335
3336 rtx
3337 make_safe_from (rtx x, rtx other)
3338 {
3339 while (1)
3340 switch (GET_CODE (other))
3341 {
3342 case SUBREG:
3343 other = SUBREG_REG (other);
3344 break;
3345 case STRICT_LOW_PART:
3346 case SIGN_EXTEND:
3347 case ZERO_EXTEND:
3348 other = XEXP (other, 0);
3349 break;
3350 default:
3351 goto done;
3352 }
3353 done:
3354 if ((MEM_P (other)
3355 && ! CONSTANT_P (x)
3356 && !REG_P (x)
3357 && GET_CODE (x) != SUBREG)
3358 || (REG_P (other)
3359 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3360 || reg_mentioned_p (other, x))))
3361 {
3362 rtx temp = gen_reg_rtx (GET_MODE (x));
3363 emit_move_insn (temp, x);
3364 return temp;
3365 }
3366 return x;
3367 }
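
/* Illustrative sketch: before emitting a store into TARGET that must not
   clobber a still-needed value X, a caller can do

     x = make_safe_from (x, target);

   after which X is either unchanged or a fresh pseudo holding a copy.  */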
3368 \f
3369 /* Emission of insns (adding them to the doubly-linked list). */
3370
3371 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3372
3373 rtx_insn *
3374 get_last_insn_anywhere (void)
3375 {
3376 struct sequence_stack *seq;
3377 for (seq = get_current_sequence (); seq; seq = seq->next)
3378 if (seq->last != 0)
3379 return seq->last;
3380 return 0;
3381 }
3382
3383 /* Return the first nonnote insn emitted in current sequence or current
3384 function. This routine looks inside SEQUENCEs. */
3385
3386 rtx_insn *
3387 get_first_nonnote_insn (void)
3388 {
3389 rtx_insn *insn = get_insns ();
3390
3391 if (insn)
3392 {
3393 if (NOTE_P (insn))
3394 for (insn = next_insn (insn);
3395 insn && NOTE_P (insn);
3396 insn = next_insn (insn))
3397 continue;
3398 else
3399 {
3400 if (NONJUMP_INSN_P (insn)
3401 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3402 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3403 }
3404 }
3405
3406 return insn;
3407 }
3408
3409 /* Return the last nonnote insn emitted in current sequence or current
3410 function. This routine looks inside SEQUENCEs. */
3411
3412 rtx_insn *
3413 get_last_nonnote_insn (void)
3414 {
3415 rtx_insn *insn = get_last_insn ();
3416
3417 if (insn)
3418 {
3419 if (NOTE_P (insn))
3420 for (insn = previous_insn (insn);
3421 insn && NOTE_P (insn);
3422 insn = previous_insn (insn))
3423 continue;
3424 else
3425 {
3426 if (NONJUMP_INSN_P (insn))
3427 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3428 insn = seq->insn (seq->len () - 1);
3429 }
3430 }
3431
3432 return insn;
3433 }
3434
3435 /* Return the number of actual (non-debug) insns emitted in this
3436 function. */
3437
3438 int
3439 get_max_insn_count (void)
3440 {
3441 int n = cur_insn_uid;
3442
3443 /* The table size must be stable across -g, to avoid codegen
3444 differences due to debug insns, and not be affected by
3445 -fmin-insn-uid, to avoid excessive table size and to simplify
3446 debugging of -fcompare-debug failures. */
3447 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
3448 n -= cur_debug_insn_uid;
3449 else
3450 n -= param_min_nondebug_insn_uid;
3451
3452 return n;
3453 }
3454
3455 \f
3456 /* Return the next insn. If it is a SEQUENCE, return the first insn
3457 of the sequence. */
3458
3459 rtx_insn *
3460 next_insn (rtx_insn *insn)
3461 {
3462 if (insn)
3463 {
3464 insn = NEXT_INSN (insn);
3465 if (insn && NONJUMP_INSN_P (insn)
3466 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3467 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3468 }
3469
3470 return insn;
3471 }
3472
3473 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3474 of the sequence. */
3475
3476 rtx_insn *
3477 previous_insn (rtx_insn *insn)
3478 {
3479 if (insn)
3480 {
3481 insn = PREV_INSN (insn);
3482 if (insn && NONJUMP_INSN_P (insn))
3483 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3484 insn = seq->insn (seq->len () - 1);
3485 }
3486
3487 return insn;
3488 }
3489
3490 /* Return the next insn after INSN that is not a NOTE. This routine does not
3491 look inside SEQUENCEs. */
3492
3493 rtx_insn *
3494 next_nonnote_insn (rtx_insn *insn)
3495 {
3496 while (insn)
3497 {
3498 insn = NEXT_INSN (insn);
3499 if (insn == 0 || !NOTE_P (insn))
3500 break;
3501 }
3502
3503 return insn;
3504 }
3505
3506 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3507 routine does not look inside SEQUENCEs. */
3508
3509 rtx_insn *
3510 next_nondebug_insn (rtx_insn *insn)
3511 {
3512 while (insn)
3513 {
3514 insn = NEXT_INSN (insn);
3515 if (insn == 0 || !DEBUG_INSN_P (insn))
3516 break;
3517 }
3518
3519 return insn;
3520 }
3521
3522 /* Return the previous insn before INSN that is not a NOTE. This routine does
3523 not look inside SEQUENCEs. */
3524
3525 rtx_insn *
3526 prev_nonnote_insn (rtx_insn *insn)
3527 {
3528 while (insn)
3529 {
3530 insn = PREV_INSN (insn);
3531 if (insn == 0 || !NOTE_P (insn))
3532 break;
3533 }
3534
3535 return insn;
3536 }
3537
3538 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3539 This routine does not look inside SEQUENCEs. */
3540
3541 rtx_insn *
3542 prev_nondebug_insn (rtx_insn *insn)
3543 {
3544 while (insn)
3545 {
3546 insn = PREV_INSN (insn);
3547 if (insn == 0 || !DEBUG_INSN_P (insn))
3548 break;
3549 }
3550
3551 return insn;
3552 }
3553
3554 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3555 This routine does not look inside SEQUENCEs. */
3556
3557 rtx_insn *
3558 next_nonnote_nondebug_insn (rtx_insn *insn)
3559 {
3560 while (insn)
3561 {
3562 insn = NEXT_INSN (insn);
3563 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3564 break;
3565 }
3566
3567 return insn;
3568 }
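
/* Illustrative sketch: stepping through the "real" instructions of the
   current function, skipping notes and debug insns (PROCESS is a
   hypothetical callback; the first insn may still be a NOTE, since only
   the step expression filters):

     for (rtx_insn *insn = get_insns (); insn;
	  insn = next_nonnote_nondebug_insn (insn))
       process (insn);  */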
3569
3570 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3571 but stop the search before we enter another basic block. This
3572 routine does not look inside SEQUENCEs. */
3573
3574 rtx_insn *
3575 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3576 {
3577 while (insn)
3578 {
3579 insn = NEXT_INSN (insn);
3580 if (insn == 0)
3581 break;
3582 if (DEBUG_INSN_P (insn))
3583 continue;
3584 if (!NOTE_P (insn))
3585 break;
3586 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3587 return NULL;
3588 }
3589
3590 return insn;
3591 }
3592
3593 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3594 This routine does not look inside SEQUENCEs. */
3595
3596 rtx_insn *
3597 prev_nonnote_nondebug_insn (rtx_insn *insn)
3598 {
3599 while (insn)
3600 {
3601 insn = PREV_INSN (insn);
3602 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3603 break;
3604 }
3605
3606 return insn;
3607 }
3608
3609 /* Return the previous insn before INSN that is not a NOTE nor
3610 DEBUG_INSN, but stop the search before we enter another basic
3611 block. This routine does not look inside SEQUENCEs. */
3612
3613 rtx_insn *
3614 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3615 {
3616 while (insn)
3617 {
3618 insn = PREV_INSN (insn);
3619 if (insn == 0)
3620 break;
3621 if (DEBUG_INSN_P (insn))
3622 continue;
3623 if (!NOTE_P (insn))
3624 break;
3625 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3626 return NULL;
3627 }
3628
3629 return insn;
3630 }
3631
3632 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3633 or 0, if there is none. This routine does not look inside
3634 SEQUENCEs. */
3635
3636 rtx_insn *
3637 next_real_insn (rtx_insn *insn)
3638 {
3639 while (insn)
3640 {
3641 insn = NEXT_INSN (insn);
3642 if (insn == 0 || INSN_P (insn))
3643 break;
3644 }
3645
3646 return insn;
3647 }
3648
3649 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3650 or 0, if there is none. This routine does not look inside
3651 SEQUENCEs. */
3652
3653 rtx_insn *
3654 prev_real_insn (rtx_insn *insn)
3655 {
3656 while (insn)
3657 {
3658 insn = PREV_INSN (insn);
3659 if (insn == 0 || INSN_P (insn))
3660 break;
3661 }
3662
3663 return insn;
3664 }
3665
3666 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3667 or 0, if there is none. This routine does not look inside
3668 SEQUENCEs. */
3669
3670 rtx_insn *
3671 next_real_nondebug_insn (rtx uncast_insn)
3672 {
3673 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3674
3675 while (insn)
3676 {
3677 insn = NEXT_INSN (insn);
3678 if (insn == 0 || NONDEBUG_INSN_P (insn))
3679 break;
3680 }
3681
3682 return insn;
3683 }
3684
3685 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3686 or 0, if there is none. This routine does not look inside
3687 SEQUENCEs. */
3688
3689 rtx_insn *
3690 prev_real_nondebug_insn (rtx_insn *insn)
3691 {
3692 while (insn)
3693 {
3694 insn = PREV_INSN (insn);
3695 if (insn == 0 || NONDEBUG_INSN_P (insn))
3696 break;
3697 }
3698
3699 return insn;
3700 }
3701
3702 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3703 This routine does not look inside SEQUENCEs. */
3704
3705 rtx_call_insn *
3706 last_call_insn (void)
3707 {
3708 rtx_insn *insn;
3709
3710 for (insn = get_last_insn ();
3711 insn && !CALL_P (insn);
3712 insn = PREV_INSN (insn))
3713 ;
3714
3715 return safe_as_a <rtx_call_insn *> (insn);
3716 }
3717
3718 /* Return nonzero if INSN really does something: it is a CALL_INSN, JUMP_INSN,
3719 JUMP_TABLE_DATA, or a NONJUMP_INSN whose pattern, after reload, is not just
3720 a standalone USE or CLOBBER. */
3721
3722 int
3723 active_insn_p (const rtx_insn *insn)
3724 {
3725 return (CALL_P (insn) || JUMP_P (insn)
3726 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3727 || (NONJUMP_INSN_P (insn)
3728 && (! reload_completed
3729 || (GET_CODE (PATTERN (insn)) != USE
3730 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3731 }
3732
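/* Return the next insn after INSN that satisfies active_insn_p, or 0 if
   there is none.  This routine does not look inside SEQUENCEs.  */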
3733 rtx_insn *
3734 next_active_insn (rtx_insn *insn)
3735 {
3736 while (insn)
3737 {
3738 insn = NEXT_INSN (insn);
3739 if (insn == 0 || active_insn_p (insn))
3740 break;
3741 }
3742
3743 return insn;
3744 }
3745
3746 /* Find the last insn before INSN that really does something. This routine
3747 does not look inside SEQUENCEs. After reload this also skips over
3748 standalone USE and CLOBBER insns. */
3749
3750 rtx_insn *
3751 prev_active_insn (rtx_insn *insn)
3752 {
3753 while (insn)
3754 {
3755 insn = PREV_INSN (insn);
3756 if (insn == 0 || active_insn_p (insn))
3757 break;
3758 }
3759
3760 return insn;
3761 }
3762 \f
3763 /* Return the next insn that uses CC0 after INSN, which is assumed to
3764 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3765 applied to the result of this function should yield INSN).
3766
3767 Normally, this is simply the next insn. However, if a REG_CC_USER note
3768 is present, it contains the insn that uses CC0.
3769
3770 Return 0 if we can't find the insn. */
3771
3772 rtx_insn *
3773 next_cc0_user (rtx_insn *insn)
3774 {
3775 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3776
3777 if (note)
3778 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3779
3780 insn = next_nonnote_insn (insn);
3781 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3782 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3783
3784 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3785 return insn;
3786
3787 return 0;
3788 }
3789
3790 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3791 note, it is the previous insn. */
3792
3793 rtx_insn *
3794 prev_cc0_setter (rtx_insn *insn)
3795 {
3796 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3797
3798 if (note)
3799 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3800
3801 insn = prev_nonnote_insn (insn);
3802 gcc_assert (sets_cc0_p (PATTERN (insn)));
3803
3804 return insn;
3805 }
3806
3807 /* Return true if X contains an RTX_AUTOINC class rtx that modifies REG. */
3808
3809 static int
3810 find_auto_inc (const_rtx x, const_rtx reg)
3811 {
3812 subrtx_iterator::array_type array;
3813 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3814 {
3815 const_rtx x = *iter;
3816 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3817 && rtx_equal_p (reg, XEXP (x, 0)))
3818 return true;
3819 }
3820 return false;
3821 }
3822
3823 /* Increment the label use counts (LABEL_NUSES) for all labels present in X. */
3824
3825 static void
3826 mark_label_nuses (rtx x)
3827 {
3828 enum rtx_code code;
3829 int i, j;
3830 const char *fmt;
3831
3832 code = GET_CODE (x);
3833 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3834 LABEL_NUSES (label_ref_label (x))++;
3835
3836 fmt = GET_RTX_FORMAT (code);
3837 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3838 {
3839 if (fmt[i] == 'e')
3840 mark_label_nuses (XEXP (x, i));
3841 else if (fmt[i] == 'E')
3842 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3843 mark_label_nuses (XVECEXP (x, i, j));
3844 }
3845 }
3846
3847 \f
3848 /* Try splitting insns that can be split for better scheduling.
3849 PAT is the pattern which might be split.
3850 TRIAL is the insn providing PAT.
3851 LAST is nonzero if we should return the last insn of the sequence produced.
3852
3853 If this routine succeeds in splitting, it returns the first or last
3854 replacement insn depending on the value of LAST. Otherwise, it
3855 returns TRIAL. If the insn to be returned can be split, it will be. */
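/* A minimal usage sketch (hedged; INSN simply names some existing insn in
the chain, as in the recursive call near the end of this function):

     rtx_insn *last = try_split (PATTERN (insn), insn, 1);

If no splitter in the machine description matched, LAST is simply INSN
again; otherwise INSN has been replaced by the split sequence and LAST is
that sequence's final insn. */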
3856
3857 rtx_insn *
3858 try_split (rtx pat, rtx_insn *trial, int last)
3859 {
3860 rtx_insn *before, *after;
3861 rtx note;
3862 rtx_insn *seq, *tem;
3863 profile_probability probability;
3864 rtx_insn *insn_last, *insn;
3865 int njumps = 0;
3866 rtx_insn *call_insn = NULL;
3867
3868 /* We're not good at redistributing frame information. */
3869 if (RTX_FRAME_RELATED_P (trial))
3870 return trial;
3871
3872 if (any_condjump_p (trial)
3873 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3874 split_branch_probability
3875 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3876 else
3877 split_branch_probability = profile_probability::uninitialized ();
3878
3879 probability = split_branch_probability;
3880
3881 seq = split_insns (pat, trial);
3882
3883 split_branch_probability = profile_probability::uninitialized ();
3884
3885 if (!seq)
3886 return trial;
3887
3888 /* Avoid infinite loop if any insn of the result matches
3889 the original pattern. */
3890 insn_last = seq;
3891 while (1)
3892 {
3893 if (INSN_P (insn_last)
3894 && rtx_equal_p (PATTERN (insn_last), pat))
3895 return trial;
3896 if (!NEXT_INSN (insn_last))
3897 break;
3898 insn_last = NEXT_INSN (insn_last);
3899 }
3900
3901 /* We will be adding the new sequence to the function. The splitters
3902 may have introduced invalid RTL sharing, so unshare the sequence now. */
3903 unshare_all_rtl_in_chain (seq);
3904
3905 /* Mark labels and copy flags. */
3906 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3907 {
3908 if (JUMP_P (insn))
3909 {
3910 if (JUMP_P (trial))
3911 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3912 mark_jump_label (PATTERN (insn), insn, 0);
3913 njumps++;
3914 if (probability.initialized_p ()
3915 && any_condjump_p (insn)
3916 && !find_reg_note (insn, REG_BR_PROB, 0))
3917 {
3918 /* We can preserve the REG_BR_PROB notes only if exactly
3919 one jump is created, otherwise the machine description
3920 is responsible for this step using
3921 split_branch_probability variable. */
3922 gcc_assert (njumps == 1);
3923 add_reg_br_prob_note (insn, probability);
3924 }
3925 }
3926 }
3927
3928 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3929 in SEQ and copy any additional information across. */
3930 if (CALL_P (trial))
3931 {
3932 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3933 if (CALL_P (insn))
3934 {
3935 gcc_assert (call_insn == NULL_RTX);
3936 call_insn = insn;
3937
3938 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3939 target may have explicitly specified. */
3940 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3941 while (*p)
3942 p = &XEXP (*p, 1);
3943 *p = CALL_INSN_FUNCTION_USAGE (trial);
3944
3945 /* If the old call was a sibling call, the new one must
3946 be too. */
3947 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3948 }
3949 }
3950
3951 /* Copy notes, particularly those related to the CFG. */
3952 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3953 {
3954 switch (REG_NOTE_KIND (note))
3955 {
3956 case REG_EH_REGION:
3957 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3958 break;
3959
3960 case REG_NORETURN:
3961 case REG_SETJMP:
3962 case REG_TM:
3963 case REG_CALL_NOCF_CHECK:
3964 case REG_CALL_ARG_LOCATION:
3965 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3966 {
3967 if (CALL_P (insn))
3968 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3969 }
3970 break;
3971
3972 case REG_NON_LOCAL_GOTO:
3973 case REG_LABEL_TARGET:
3974 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3975 {
3976 if (JUMP_P (insn))
3977 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3978 }
3979 break;
3980
3981 case REG_INC:
3982 if (!AUTO_INC_DEC)
3983 break;
3984
3985 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3986 {
3987 rtx reg = XEXP (note, 0);
3988 if (!FIND_REG_INC_NOTE (insn, reg)
3989 && find_auto_inc (PATTERN (insn), reg))
3990 add_reg_note (insn, REG_INC, reg);
3991 }
3992 break;
3993
3994 case REG_ARGS_SIZE:
3995 fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3996 break;
3997
3998 case REG_CALL_DECL:
3999 gcc_assert (call_insn != NULL_RTX);
4000 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
4001 break;
4002
4003 default:
4004 break;
4005 }
4006 }
4007
4008 /* If there are LABELS inside the split insns, increment the
4009 usage count so we don't delete the label. */
4010 if (INSN_P (trial))
4011 {
4012 insn = insn_last;
4013 while (insn != NULL_RTX)
4014 {
4015 /* JUMP_P insns have already been "marked" above. */
4016 if (NONJUMP_INSN_P (insn))
4017 mark_label_nuses (PATTERN (insn));
4018
4019 insn = PREV_INSN (insn);
4020 }
4021 }
4022
4023 before = PREV_INSN (trial);
4024 after = NEXT_INSN (trial);
4025
4026 emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
4027
4028 delete_insn (trial);
4029
4030 /* Recursively call try_split for each new insn created; by the
4031 time control returns here that insn will be fully split, so
4032 set LAST and continue from the insn after the one returned.
4033 We can't use next_active_insn here since AFTER may be a note.
4034 Ignore deleted insns, which can occur if not optimizing. */
4035 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
4036 if (! tem->deleted () && INSN_P (tem))
4037 tem = try_split (PATTERN (tem), tem, 1);
4038
4039 /* Return either the first or the last insn, depending on which was
4040 requested. */
4041 return last
4042 ? (after ? PREV_INSN (after) : get_last_insn ())
4043 : NEXT_INSN (before);
4044 }
4045 \f
4046 /* Make and return an INSN rtx, initializing all its slots.
4047 Store PATTERN in the pattern slots. */
4048
4049 rtx_insn *
4050 make_insn_raw (rtx pattern)
4051 {
4052 rtx_insn *insn;
4053
4054 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
4055
4056 INSN_UID (insn) = cur_insn_uid++;
4057 PATTERN (insn) = pattern;
4058 INSN_CODE (insn) = -1;
4059 REG_NOTES (insn) = NULL;
4060 INSN_LOCATION (insn) = curr_insn_location ();
4061 BLOCK_FOR_INSN (insn) = NULL;
4062
4063 #ifdef ENABLE_RTL_CHECKING
4064 if (insn
4065 && INSN_P (insn)
4066 && (returnjump_p (insn)
4067 || (GET_CODE (insn) == SET
4068 && SET_DEST (insn) == pc_rtx)))
4069 {
4070 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
4071 debug_rtx (insn);
4072 }
4073 #endif
4074
4075 return insn;
4076 }
4077
4078 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4079
4080 static rtx_insn *
4081 make_debug_insn_raw (rtx pattern)
4082 {
4083 rtx_debug_insn *insn;
4084
4085 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4086 INSN_UID (insn) = cur_debug_insn_uid++;
4087 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
4088 INSN_UID (insn) = cur_insn_uid++;
4089
4090 PATTERN (insn) = pattern;
4091 INSN_CODE (insn) = -1;
4092 REG_NOTES (insn) = NULL;
4093 INSN_LOCATION (insn) = curr_insn_location ();
4094 BLOCK_FOR_INSN (insn) = NULL;
4095
4096 return insn;
4097 }
4098
4099 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4100
4101 static rtx_insn *
4102 make_jump_insn_raw (rtx pattern)
4103 {
4104 rtx_jump_insn *insn;
4105
4106 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4107 INSN_UID (insn) = cur_insn_uid++;
4108
4109 PATTERN (insn) = pattern;
4110 INSN_CODE (insn) = -1;
4111 REG_NOTES (insn) = NULL;
4112 JUMP_LABEL (insn) = NULL;
4113 INSN_LOCATION (insn) = curr_insn_location ();
4114 BLOCK_FOR_INSN (insn) = NULL;
4115
4116 return insn;
4117 }
4118
4119 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4120
4121 static rtx_insn *
4122 make_call_insn_raw (rtx pattern)
4123 {
4124 rtx_call_insn *insn;
4125
4126 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4127 INSN_UID (insn) = cur_insn_uid++;
4128
4129 PATTERN (insn) = pattern;
4130 INSN_CODE (insn) = -1;
4131 REG_NOTES (insn) = NULL;
4132 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4133 INSN_LOCATION (insn) = curr_insn_location ();
4134 BLOCK_FOR_INSN (insn) = NULL;
4135
4136 return insn;
4137 }
4138
4139 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4140
4141 static rtx_note *
4142 make_note_raw (enum insn_note subtype)
4143 {
4144 /* Some notes are never created this way at all. These notes are
4145 only created by patching out insns. */
4146 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4147 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4148
4149 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4150 INSN_UID (note) = cur_insn_uid++;
4151 NOTE_KIND (note) = subtype;
4152 BLOCK_FOR_INSN (note) = NULL;
4153 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4154 return note;
4155 }
4156 \f
4157 /* Link INSN into the doubly-linked insn chain between PREV and NEXT.
4158 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4159 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4160
4161 static inline void
4162 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4163 {
4164 SET_PREV_INSN (insn) = prev;
4165 SET_NEXT_INSN (insn) = next;
4166 if (prev != NULL)
4167 {
4168 SET_NEXT_INSN (prev) = insn;
4169 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4170 {
4171 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4172 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4173 }
4174 }
4175 if (next != NULL)
4176 {
4177 SET_PREV_INSN (next) = insn;
4178 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4179 {
4180 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4181 SET_PREV_INSN (sequence->insn (0)) = insn;
4182 }
4183 }
4184
4185 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4186 {
4187 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4188 SET_PREV_INSN (sequence->insn (0)) = prev;
4189 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4190 }
4191 }
4192
4193 /* Add INSN to the end of the doubly-linked list.
4194 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4195
4196 void
4197 add_insn (rtx_insn *insn)
4198 {
4199 rtx_insn *prev = get_last_insn ();
4200 link_insn_into_chain (insn, prev, NULL);
4201 if (get_insns () == NULL)
4202 set_first_insn (insn);
4203 set_last_insn (insn);
4204 }
4205
4206 /* Add INSN into the doubly-linked list after insn AFTER. */
4207
4208 static void
4209 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4210 {
4211 rtx_insn *next = NEXT_INSN (after);
4212
4213 gcc_assert (!optimize || !after->deleted ());
4214
4215 link_insn_into_chain (insn, after, next);
4216
4217 if (next == NULL)
4218 {
4219 struct sequence_stack *seq;
4220
4221 for (seq = get_current_sequence (); seq; seq = seq->next)
4222 if (after == seq->last)
4223 {
4224 seq->last = insn;
4225 break;
4226 }
4227 }
4228 }
4229
4230 /* Add INSN into the doubly-linked list before insn BEFORE. */
4231
4232 static void
4233 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4234 {
4235 rtx_insn *prev = PREV_INSN (before);
4236
4237 gcc_assert (!optimize || !before->deleted ());
4238
4239 link_insn_into_chain (insn, prev, before);
4240
4241 if (prev == NULL)
4242 {
4243 struct sequence_stack *seq;
4244
4245 for (seq = get_current_sequence (); seq; seq = seq->next)
4246 if (before == seq->first)
4247 {
4248 seq->first = insn;
4249 break;
4250 }
4251
4252 gcc_assert (seq);
4253 }
4254 }
4255
4256 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4257 If BB is NULL, an attempt is made to infer the bb from AFTER.
4258
4259 This and the next function should be the only functions called
4260 to insert an insn once delay slots have been filled since only
4261 they know how to update a SEQUENCE. */
4262
4263 void
4264 add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
4265 {
4266 add_insn_after_nobb (insn, after);
4267 if (!BARRIER_P (after)
4268 && !BARRIER_P (insn)
4269 && (bb = BLOCK_FOR_INSN (after)))
4270 {
4271 set_block_for_insn (insn, bb);
4272 if (INSN_P (insn))
4273 df_insn_rescan (insn);
4274 /* Should not happen as first in the BB is always
4275 either NOTE or LABEL. */
4276 if (BB_END (bb) == after
4277 /* Avoid clobbering of structure when creating new BB. */
4278 && !BARRIER_P (insn)
4279 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4280 BB_END (bb) = insn;
4281 }
4282 }
4283
4284 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4285 If BB is NULL, an attempt is made to infer the bb from before.
4286
4287 This and the previous function should be the only functions called
4288 to insert an insn once delay slots have been filled since only
4289 they know how to update a SEQUENCE. */
4290
4291 void
4292 add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
4293 {
4294 add_insn_before_nobb (insn, before);
4295
4296 if (!bb
4297 && !BARRIER_P (before)
4298 && !BARRIER_P (insn))
4299 bb = BLOCK_FOR_INSN (before);
4300
4301 if (bb)
4302 {
4303 set_block_for_insn (insn, bb);
4304 if (INSN_P (insn))
4305 df_insn_rescan (insn);
4306 /* Should not happen as first in the BB is always either NOTE or
4307 LABEL. */
4308 gcc_assert (BB_HEAD (bb) != insn
4309 /* Avoid clobbering of structure when creating new BB. */
4310 || BARRIER_P (insn)
4311 || NOTE_INSN_BASIC_BLOCK_P (insn));
4312 }
4313 }
4314
4315 /* Replace INSN with a deleted instruction note. */
4316
4317 void
4318 set_insn_deleted (rtx_insn *insn)
4319 {
4320 if (INSN_P (insn))
4321 df_insn_delete (insn);
4322 PUT_CODE (insn, NOTE);
4323 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4324 }
4325
4326
4327 /* Unlink INSN from the insn chain.
4328
4329 This function knows how to handle sequences.
4330
4331 This function does not invalidate data flow information associated with
4332 INSN (i.e. does not call df_insn_delete). That makes this function
4333 usable for only disconnecting an insn from the chain and re-emitting it
4334 elsewhere later.
4335
4336 To later insert INSN elsewhere in the insn chain via add_insn and
4337 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4338 the caller. Nullifying them here breaks many insn chain walks.
4339
4340 To really delete an insn and related DF information, use delete_insn. */
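/* A hedged sketch of the disconnect-and-re-emit pattern described above
(INSN and AFTER are assumed to be existing insns in the chain):

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, after, NULL);  */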
4341
4342 void
4343 remove_insn (rtx_insn *insn)
4344 {
4345 rtx_insn *next = NEXT_INSN (insn);
4346 rtx_insn *prev = PREV_INSN (insn);
4347 basic_block bb;
4348
4349 if (prev)
4350 {
4351 SET_NEXT_INSN (prev) = next;
4352 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4353 {
4354 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4355 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4356 }
4357 }
4358 else
4359 {
4360 struct sequence_stack *seq;
4361
4362 for (seq = get_current_sequence (); seq; seq = seq->next)
4363 if (insn == seq->first)
4364 {
4365 seq->first = next;
4366 break;
4367 }
4368
4369 gcc_assert (seq);
4370 }
4371
4372 if (next)
4373 {
4374 SET_PREV_INSN (next) = prev;
4375 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4376 {
4377 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4378 SET_PREV_INSN (sequence->insn (0)) = prev;
4379 }
4380 }
4381 else
4382 {
4383 struct sequence_stack *seq;
4384
4385 for (seq = get_current_sequence (); seq; seq = seq->next)
4386 if (insn == seq->last)
4387 {
4388 seq->last = prev;
4389 break;
4390 }
4391
4392 gcc_assert (seq);
4393 }
4394
4395 /* Fix up basic block boundaries, if necessary. */
4396 if (!BARRIER_P (insn)
4397 && (bb = BLOCK_FOR_INSN (insn)))
4398 {
4399 if (BB_HEAD (bb) == insn)
4400 {
4401 /* Never ever delete the basic block note without deleting whole
4402 basic block. */
4403 gcc_assert (!NOTE_P (insn));
4404 BB_HEAD (bb) = next;
4405 }
4406 if (BB_END (bb) == insn)
4407 BB_END (bb) = prev;
4408 }
4409 }
4410
4411 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4412
4413 void
4414 add_function_usage_to (rtx call_insn, rtx call_fusage)
4415 {
4416 gcc_assert (call_insn && CALL_P (call_insn));
4417
4418 /* Put the register usage information on the CALL. If there is already
4419 some usage information, put ours at the end. */
4420 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4421 {
4422 rtx link;
4423
4424 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4425 link = XEXP (link, 1))
4426 ;
4427
4428 XEXP (link, 1) = call_fusage;
4429 }
4430 else
4431 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4432 }
4433
4434 /* Delete all insns made since FROM.
4435 FROM becomes the new last instruction. */
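/* A hedged sketch of the usual backtracking idiom built on this function
(FAILED is a hypothetical condition, not anything defined here):

     rtx_insn *last = get_last_insn ();
     ... emit some tentative insns ...
     if (failed)
       delete_insns_since (last);  */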
4436
4437 void
4438 delete_insns_since (rtx_insn *from)
4439 {
4440 if (from == 0)
4441 set_first_insn (0);
4442 else
4443 SET_NEXT_INSN (from) = 0;
4444 set_last_insn (from);
4445 }
4446
4447 /* This function is deprecated; please use sequences instead.
4448
4449 Move a consecutive bunch of insns to a different place in the chain.
4450 The insns to be moved are those between FROM and TO.
4451 They are moved to a new position after the insn AFTER.
4452 AFTER must not be FROM or TO or any insn in between.
4453
4454 This function does not know about SEQUENCEs and hence should not be
4455 called after delay-slot filling has been done. */
4456
4457 void
4458 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4459 {
4460 if (flag_checking)
4461 {
4462 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4463 gcc_assert (after != x);
4464 gcc_assert (after != to);
4465 }
4466
4467 /* Splice this bunch out of where it is now. */
4468 if (PREV_INSN (from))
4469 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4470 if (NEXT_INSN (to))
4471 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4472 if (get_last_insn () == to)
4473 set_last_insn (PREV_INSN (from));
4474 if (get_insns () == from)
4475 set_first_insn (NEXT_INSN (to));
4476
4477 /* Make the new neighbors point to it and it to them. */
4478 if (NEXT_INSN (after))
4479 SET_PREV_INSN (NEXT_INSN (after)) = to;
4480
4481 SET_NEXT_INSN (to) = NEXT_INSN (after);
4482 SET_PREV_INSN (from) = after;
4483 SET_NEXT_INSN (after) = from;
4484 if (after == get_last_insn ())
4485 set_last_insn (to);
4486 }
4487
4488 /* Same as the function above, but take care to update BB boundaries. */
4489 void
4490 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4491 {
4492 rtx_insn *prev = PREV_INSN (from);
4493 basic_block bb, bb2;
4494
4495 reorder_insns_nobb (from, to, after);
4496
4497 if (!BARRIER_P (after)
4498 && (bb = BLOCK_FOR_INSN (after)))
4499 {
4500 rtx_insn *x;
4501 df_set_bb_dirty (bb);
4502
4503 if (!BARRIER_P (from)
4504 && (bb2 = BLOCK_FOR_INSN (from)))
4505 {
4506 if (BB_END (bb2) == to)
4507 BB_END (bb2) = prev;
4508 df_set_bb_dirty (bb2);
4509 }
4510
4511 if (BB_END (bb) == after)
4512 BB_END (bb) = to;
4513
4514 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4515 if (!BARRIER_P (x))
4516 df_insn_change_bb (x, bb);
4517 }
4518 }
4519
4520 \f
4521 /* Emit insn(s) of given code and pattern
4522 at a specified place within the doubly-linked list.
4523
4524 All of the emit_foo global entry points accept an object
4525 X which is either an insn list or a PATTERN of a single
4526 instruction.
4527
4528 There are thus a few canonical ways to generate code and
4529 emit it at a specific place in the instruction stream. For
4530 example, consider the instruction named SPOT and the fact that
4531 we would like to emit some instructions before SPOT. We might
4532 do it like this:
4533
4534 start_sequence ();
4535 ... emit the new instructions ...
4536 insns_head = get_insns ();
4537 end_sequence ();
4538
4539 emit_insn_before (insns_head, SPOT);
4540
4541 It used to be common to generate SEQUENCE rtl instead, but that
4542 is a relic of the past which no longer occurs. The reason is that
4543 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4544 generated would almost certainly die right after it was created. */
4545
4546 static rtx_insn *
4547 emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4548 basic_block bb,
4549 rtx_insn *(*make_raw) (rtx))
4550 {
4551 rtx_insn *insn;
4552
4553 gcc_assert (before);
4554
4555 if (x == NULL_RTX)
4556 return last;
4557
4558 switch (GET_CODE (x))
4559 {
4560 case DEBUG_INSN:
4561 case INSN:
4562 case JUMP_INSN:
4563 case CALL_INSN:
4564 case CODE_LABEL:
4565 case BARRIER:
4566 case NOTE:
4567 insn = as_a <rtx_insn *> (x);
4568 while (insn)
4569 {
4570 rtx_insn *next = NEXT_INSN (insn);
4571 add_insn_before (insn, before, bb);
4572 last = insn;
4573 insn = next;
4574 }
4575 break;
4576
4577 #ifdef ENABLE_RTL_CHECKING
4578 case SEQUENCE:
4579 gcc_unreachable ();
4580 break;
4581 #endif
4582
4583 default:
4584 last = (*make_raw) (x);
4585 add_insn_before (last, before, bb);
4586 break;
4587 }
4588
4589 return last;
4590 }
4591
4592 /* Make X be output before the instruction BEFORE. */
4593
4594 rtx_insn *
4595 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4596 {
4597 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4598 }
4599
4600 /* Make an instruction with body X and code JUMP_INSN
4601 and output it before the instruction BEFORE. */
4602
4603 rtx_jump_insn *
4604 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4605 {
4606 return as_a <rtx_jump_insn *> (
4607 emit_pattern_before_noloc (x, before, NULL, NULL,
4608 make_jump_insn_raw));
4609 }
4610
4611 /* Make an instruction with body X and code CALL_INSN
4612 and output it before the instruction BEFORE. */
4613
4614 rtx_insn *
4615 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4616 {
4617 return emit_pattern_before_noloc (x, before, NULL, NULL,
4618 make_call_insn_raw);
4619 }
4620
4621 /* Make an instruction with body X and code DEBUG_INSN
4622 and output it before the instruction BEFORE. */
4623
4624 rtx_insn *
4625 emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
4626 {
4627 return emit_pattern_before_noloc (x, before, NULL, NULL,
4628 make_debug_insn_raw);
4629 }
4630
4631 /* Make an insn of code BARRIER
4632 and output it before the insn BEFORE. */
4633
4634 rtx_barrier *
4635 emit_barrier_before (rtx_insn *before)
4636 {
4637 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4638
4639 INSN_UID (insn) = cur_insn_uid++;
4640
4641 add_insn_before (insn, before, NULL);
4642 return insn;
4643 }
4644
4645 /* Emit the label LABEL before the insn BEFORE. */
4646
4647 rtx_code_label *
4648 emit_label_before (rtx_code_label *label, rtx_insn *before)
4649 {
4650 gcc_checking_assert (INSN_UID (label) == 0);
4651 INSN_UID (label) = cur_insn_uid++;
4652 add_insn_before (label, before, NULL);
4653 return label;
4654 }
4655 \f
4656 /* Helper for emit_insn_after, handles lists of instructions
4657 efficiently. */
4658
4659 static rtx_insn *
4660 emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
4661 {
4662 rtx_insn *last;
4663 rtx_insn *after_after;
4664 if (!bb && !BARRIER_P (after))
4665 bb = BLOCK_FOR_INSN (after);
4666
4667 if (bb)
4668 {
4669 df_set_bb_dirty (bb);
4670 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4671 if (!BARRIER_P (last))
4672 {
4673 set_block_for_insn (last, bb);
4674 df_insn_rescan (last);
4675 }
4676 if (!BARRIER_P (last))
4677 {
4678 set_block_for_insn (last, bb);
4679 df_insn_rescan (last);
4680 }
4681 if (BB_END (bb) == after)
4682 BB_END (bb) = last;
4683 }
4684 else
4685 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4686 continue;
4687
4688 after_after = NEXT_INSN (after);
4689
4690 SET_NEXT_INSN (after) = first;
4691 SET_PREV_INSN (first) = after;
4692 SET_NEXT_INSN (last) = after_after;
4693 if (after_after)
4694 SET_PREV_INSN (after_after) = last;
4695
4696 if (after == get_last_insn ())
4697 set_last_insn (last);
4698
4699 return last;
4700 }
4701
4702 static rtx_insn *
4703 emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
4704 rtx_insn *(*make_raw)(rtx))
4705 {
4706 rtx_insn *last = after;
4707
4708 gcc_assert (after);
4709
4710 if (x == NULL_RTX)
4711 return last;
4712
4713 switch (GET_CODE (x))
4714 {
4715 case DEBUG_INSN:
4716 case INSN:
4717 case JUMP_INSN:
4718 case CALL_INSN:
4719 case CODE_LABEL:
4720 case BARRIER:
4721 case NOTE:
4722 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4723 break;
4724
4725 #ifdef ENABLE_RTL_CHECKING
4726 case SEQUENCE:
4727 gcc_unreachable ();
4728 break;
4729 #endif
4730
4731 default:
4732 last = (*make_raw) (x);
4733 add_insn_after (last, after, bb);
4734 break;
4735 }
4736
4737 return last;
4738 }
4739
4740 /* Make X be output after the insn AFTER and set the BB of the new insn. If
4741 BB is NULL, an attempt is made to infer the BB from AFTER. */
4742
4743 rtx_insn *
4744 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
4745 {
4746 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4747 }
4748
4749
4750 /* Make an insn of code JUMP_INSN with body X
4751 and output it after the insn AFTER. */
4752
4753 rtx_jump_insn *
4754 emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
4755 {
4756 return as_a <rtx_jump_insn *> (
4757 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4758 }
4759
4760 /* Make an instruction with body X and code CALL_INSN
4761 and output it after the instruction AFTER. */
4762
4763 rtx_insn *
4764 emit_call_insn_after_noloc (rtx x, rtx_insn *after)
4765 {
4766 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4767 }
4768
4769 /* Make an instruction with body X and code DEBUG_INSN
4770 and output it after the instruction AFTER. */
4771
4772 rtx_insn *
4773 emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
4774 {
4775 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4776 }
4777
4778 /* Make an insn of code BARRIER
4779 and output it after the insn AFTER. */
4780
4781 rtx_barrier *
4782 emit_barrier_after (rtx_insn *after)
4783 {
4784 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4785
4786 INSN_UID (insn) = cur_insn_uid++;
4787
4788 add_insn_after (insn, after, NULL);
4789 return insn;
4790 }
4791
4792 /* Emit the label LABEL after the insn AFTER. */
4793
4794 rtx_insn *
4795 emit_label_after (rtx_insn *label, rtx_insn *after)
4796 {
4797 gcc_checking_assert (INSN_UID (label) == 0);
4798 INSN_UID (label) = cur_insn_uid++;
4799 add_insn_after (label, after, NULL);
4800 return label;
4801 }
4802 \f
4803 /* Notes require a bit of special handling: Some notes need to have their
4804 BLOCK_FOR_INSN set, others should never have it set, and some should
4805 have it set or clear depending on the context. */
4806
4807 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4808 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4809 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4810
4811 static bool
4812 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4813 {
4814 switch (subtype)
4815 {
4816 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4817 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4818 return true;
4819
4820 /* Notes for var tracking and EH region markers can appear between or
4821 inside basic blocks. If the caller is emitting on the basic block
4822 boundary, do not set BLOCK_FOR_INSN on the new note. */
4823 case NOTE_INSN_VAR_LOCATION:
4824 case NOTE_INSN_EH_REGION_BEG:
4825 case NOTE_INSN_EH_REGION_END:
4826 return on_bb_boundary_p;
4827
4828 /* Otherwise, BLOCK_FOR_INSN must be set. */
4829 default:
4830 return false;
4831 }
4832 }
4833
4834 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4835
4836 rtx_note *
4837 emit_note_after (enum insn_note subtype, rtx_insn *after)
4838 {
4839 rtx_note *note = make_note_raw (subtype);
4840 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4841 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4842
4843 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4844 add_insn_after_nobb (note, after);
4845 else
4846 add_insn_after (note, after, bb);
4847 return note;
4848 }
4849
4850 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4851
4852 rtx_note *
4853 emit_note_before (enum insn_note subtype, rtx_insn *before)
4854 {
4855 rtx_note *note = make_note_raw (subtype);
4856 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4857 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4858
4859 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4860 add_insn_before_nobb (note, before);
4861 else
4862 add_insn_before (note, before, bb);
4863 return note;
4864 }
4865 \f
4866 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4867 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4868
4869 static rtx_insn *
4870 emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
4871 rtx_insn *(*make_raw) (rtx))
4872 {
4873 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4874
4875 if (pattern == NULL_RTX || !loc)
4876 return last;
4877
4878 after = NEXT_INSN (after);
4879 while (1)
4880 {
4881 if (active_insn_p (after)
4882 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4883 && !INSN_LOCATION (after))
4884 INSN_LOCATION (after) = loc;
4885 if (after == last)
4886 break;
4887 after = NEXT_INSN (after);
4888 }
4889 return last;
4890 }
4891
4892 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4893 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4894 any DEBUG_INSNs. */
4895
4896 static rtx_insn *
4897 emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
4898 rtx_insn *(*make_raw) (rtx))
4899 {
4900 rtx_insn *prev = after;
4901
4902 if (skip_debug_insns)
4903 while (DEBUG_INSN_P (prev))
4904 prev = PREV_INSN (prev);
4905
4906 if (INSN_P (prev))
4907 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4908 make_raw);
4909 else
4910 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4911 }
4912
4913 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4914 rtx_insn *
4915 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4916 {
4917 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4918 }
4919
4920 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4921 rtx_insn *
4922 emit_insn_after (rtx pattern, rtx_insn *after)
4923 {
4924 return emit_pattern_after (pattern, after, true, make_insn_raw);
4925 }
4926
4927 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4928 rtx_jump_insn *
4929 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4930 {
4931 return as_a <rtx_jump_insn *> (
4932 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4933 }
4934
4935 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4936 rtx_jump_insn *
4937 emit_jump_insn_after (rtx pattern, rtx_insn *after)
4938 {
4939 return as_a <rtx_jump_insn *> (
4940 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4941 }
4942
4943 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4944 rtx_insn *
4945 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4946 {
4947 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4948 }
4949
4950 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4951 rtx_insn *
4952 emit_call_insn_after (rtx pattern, rtx_insn *after)
4953 {
4954 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4955 }
4956
4957 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4958 rtx_insn *
4959 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4960 {
4961 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4962 }
4963
4964 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4965 rtx_insn *
4966 emit_debug_insn_after (rtx pattern, rtx_insn *after)
4967 {
4968 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4969 }
4970
4971 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4972 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4973 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4974 CALL_INSN, etc. */
4975
4976 static rtx_insn *
4977 emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
4978 bool insnp, rtx_insn *(*make_raw) (rtx))
4979 {
4980 rtx_insn *first = PREV_INSN (before);
4981 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4982 insnp ? before : NULL,
4983 NULL, make_raw);
4984
4985 if (pattern == NULL_RTX || !loc)
4986 return last;
4987
4988 if (!first)
4989 first = get_insns ();
4990 else
4991 first = NEXT_INSN (first);
4992 while (1)
4993 {
4994 if (active_insn_p (first)
4995 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4996 && !INSN_LOCATION (first))
4997 INSN_LOCATION (first) = loc;
4998 if (first == last)
4999 break;
5000 first = NEXT_INSN (first);
5001 }
5002 return last;
5003 }
5004
5005 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
5006 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
5007 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
5008 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
5009
5010 static rtx_insn *
5011 emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
5012 bool insnp, rtx_insn *(*make_raw) (rtx))
5013 {
5014 rtx_insn *next = before;
5015
5016 if (skip_debug_insns)
5017 while (DEBUG_INSN_P (next))
5018 next = PREV_INSN (next);
5019
5020 if (INSN_P (next))
5021 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
5022 insnp, make_raw);
5023 else
5024 return emit_pattern_before_noloc (pattern, before,
5025 insnp ? before : NULL,
5026 NULL, make_raw);
5027 }
5028
5029 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5030 rtx_insn *
5031 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5032 {
5033 return emit_pattern_before_setloc (pattern, before, loc, true,
5034 make_insn_raw);
5035 }
5036
5037 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
5038 rtx_insn *
5039 emit_insn_before (rtx pattern, rtx_insn *before)
5040 {
5041 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
5042 }
5043
5044 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5045 rtx_jump_insn *
5046 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5047 {
5048 return as_a <rtx_jump_insn *> (
5049 emit_pattern_before_setloc (pattern, before, loc, false,
5050 make_jump_insn_raw));
5051 }
5052
5053 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
5054 rtx_jump_insn *
5055 emit_jump_insn_before (rtx pattern, rtx_insn *before)
5056 {
5057 return as_a <rtx_jump_insn *> (
5058 emit_pattern_before (pattern, before, true, false,
5059 make_jump_insn_raw));
5060 }
5061
5062 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5063 rtx_insn *
5064 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5065 {
5066 return emit_pattern_before_setloc (pattern, before, loc, false,
5067 make_call_insn_raw);
5068 }
5069
5070 /* Like emit_call_insn_before_noloc,
5071 but set INSN_LOCATION according to BEFORE. */
5072 rtx_insn *
5073 emit_call_insn_before (rtx pattern, rtx_insn *before)
5074 {
5075 return emit_pattern_before (pattern, before, true, false,
5076 make_call_insn_raw);
5077 }
5078
5079 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5080 rtx_insn *
5081 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5082 {
5083 return emit_pattern_before_setloc (pattern, before, loc, false,
5084 make_debug_insn_raw);
5085 }
5086
5087 /* Like emit_debug_insn_before_noloc,
5088 but set INSN_LOCATION according to BEFORE. */
5089 rtx_insn *
5090 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5091 {
5092 return emit_pattern_before (pattern, before, false, false,
5093 make_debug_insn_raw);
5094 }
5095 \f
5096 /* Take X and emit it at the end of the doubly-linked
5097 INSN list.
5098
5099 Returns the last insn emitted. */
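/* For example (a hedged sketch; DEST and SRC are assumed to be rtxes built
elsewhere), appending a simple register copy to the current chain:

     emit_insn (gen_rtx_SET (dest, src));  */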
5100
5101 rtx_insn *
5102 emit_insn (rtx x)
5103 {
5104 rtx_insn *last = get_last_insn ();
5105 rtx_insn *insn;
5106
5107 if (x == NULL_RTX)
5108 return last;
5109
5110 switch (GET_CODE (x))
5111 {
5112 case DEBUG_INSN:
5113 case INSN:
5114 case JUMP_INSN:
5115 case CALL_INSN:
5116 case CODE_LABEL:
5117 case BARRIER:
5118 case NOTE:
5119 insn = as_a <rtx_insn *> (x);
5120 while (insn)
5121 {
5122 rtx_insn *next = NEXT_INSN (insn);
5123 add_insn (insn);
5124 last = insn;
5125 insn = next;
5126 }
5127 break;
5128
5129 #ifdef ENABLE_RTL_CHECKING
5130 case JUMP_TABLE_DATA:
5131 case SEQUENCE:
5132 gcc_unreachable ();
5133 break;
5134 #endif
5135
5136 default:
5137 last = make_insn_raw (x);
5138 add_insn (last);
5139 break;
5140 }
5141
5142 return last;
5143 }
5144
5145 /* Make an insn of code DEBUG_INSN with pattern X
5146 and add it to the end of the doubly-linked list. */
5147
5148 rtx_insn *
5149 emit_debug_insn (rtx x)
5150 {
5151 rtx_insn *last = get_last_insn ();
5152 rtx_insn *insn;
5153
5154 if (x == NULL_RTX)
5155 return last;
5156
5157 switch (GET_CODE (x))
5158 {
5159 case DEBUG_INSN:
5160 case INSN:
5161 case JUMP_INSN:
5162 case CALL_INSN:
5163 case CODE_LABEL:
5164 case BARRIER:
5165 case NOTE:
5166 insn = as_a <rtx_insn *> (x);
5167 while (insn)
5168 {
5169 rtx_insn *next = NEXT_INSN (insn);
5170 add_insn (insn);
5171 last = insn;
5172 insn = next;
5173 }
5174 break;
5175
5176 #ifdef ENABLE_RTL_CHECKING
5177 case JUMP_TABLE_DATA:
5178 case SEQUENCE:
5179 gcc_unreachable ();
5180 break;
5181 #endif
5182
5183 default:
5184 last = make_debug_insn_raw (x);
5185 add_insn (last);
5186 break;
5187 }
5188
5189 return last;
5190 }
5191
5192 /* Make an insn of code JUMP_INSN with pattern X
5193 and add it to the end of the doubly-linked list. */
5194
5195 rtx_insn *
5196 emit_jump_insn (rtx x)
5197 {
5198 rtx_insn *last = NULL;
5199 rtx_insn *insn;
5200
5201 switch (GET_CODE (x))
5202 {
5203 case DEBUG_INSN:
5204 case INSN:
5205 case JUMP_INSN:
5206 case CALL_INSN:
5207 case CODE_LABEL:
5208 case BARRIER:
5209 case NOTE:
5210 insn = as_a <rtx_insn *> (x);
5211 while (insn)
5212 {
5213 rtx_insn *next = NEXT_INSN (insn);
5214 add_insn (insn);
5215 last = insn;
5216 insn = next;
5217 }
5218 break;
5219
5220 #ifdef ENABLE_RTL_CHECKING
5221 case JUMP_TABLE_DATA:
5222 case SEQUENCE:
5223 gcc_unreachable ();
5224 break;
5225 #endif
5226
5227 default:
5228 last = make_jump_insn_raw (x);
5229 add_insn (last);
5230 break;
5231 }
5232
5233 return last;
5234 }
5235
5236 /* Make an insn of code CALL_INSN with pattern X
5237 and add it to the end of the doubly-linked list. */
5238
5239 rtx_insn *
5240 emit_call_insn (rtx x)
5241 {
5242 rtx_insn *insn;
5243
5244 switch (GET_CODE (x))
5245 {
5246 case DEBUG_INSN:
5247 case INSN:
5248 case JUMP_INSN:
5249 case CALL_INSN:
5250 case CODE_LABEL:
5251 case BARRIER:
5252 case NOTE:
5253 insn = emit_insn (x);
5254 break;
5255
5256 #ifdef ENABLE_RTL_CHECKING
5257 case SEQUENCE:
5258 case JUMP_TABLE_DATA:
5259 gcc_unreachable ();
5260 break;
5261 #endif
5262
5263 default:
5264 insn = make_call_insn_raw (x);
5265 add_insn (insn);
5266 break;
5267 }
5268
5269 return insn;
5270 }
5271
5272 /* Add the label LABEL to the end of the doubly-linked list. */
5273
5274 rtx_code_label *
5275 emit_label (rtx uncast_label)
5276 {
5277 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5278
5279 gcc_checking_assert (INSN_UID (label) == 0);
5280 INSN_UID (label) = cur_insn_uid++;
5281 add_insn (label);
5282 return label;
5283 }
5284
5285 /* Make an insn of code JUMP_TABLE_DATA
5286 and add it to the end of the doubly-linked list. */
5287
5288 rtx_jump_table_data *
5289 emit_jump_table_data (rtx table)
5290 {
5291 rtx_jump_table_data *jump_table_data =
5292 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5293 INSN_UID (jump_table_data) = cur_insn_uid++;
5294 PATTERN (jump_table_data) = table;
5295 BLOCK_FOR_INSN (jump_table_data) = NULL;
5296 add_insn (jump_table_data);
5297 return jump_table_data;
5298 }
5299
5300 /* Make an insn of code BARRIER
5301 and add it to the end of the doubly-linked list. */
5302
5303 rtx_barrier *
5304 emit_barrier (void)
5305 {
5306 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5307 INSN_UID (barrier) = cur_insn_uid++;
5308 add_insn (barrier);
5309 return barrier;
5310 }
5311
5312 /* Emit a copy of note ORIG. */
5313
5314 rtx_note *
5315 emit_note_copy (rtx_note *orig)
5316 {
5317 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5318 rtx_note *note = make_note_raw (kind);
5319 NOTE_DATA (note) = NOTE_DATA (orig);
5320 add_insn (note);
5321 return note;
5322 }
5323
5324 /* Make an insn of code NOTE with kind KIND
5325 and add it to the end of the doubly-linked list. */
5326
5327 rtx_note *
5328 emit_note (enum insn_note kind)
5329 {
5330 rtx_note *note = make_note_raw (kind);
5331 add_insn (note);
5332 return note;
5333 }
5334
5335 /* Emit a clobber of lvalue X. */
5336
5337 rtx_insn *
5338 emit_clobber (rtx x)
5339 {
5340 /* CONCATs should not appear in the insn stream. */
5341 if (GET_CODE (x) == CONCAT)
5342 {
5343 emit_clobber (XEXP (x, 0));
5344 return emit_clobber (XEXP (x, 1));
5345 }
5346 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5347 }
5348
5349 /* Return a sequence of insns to clobber lvalue X. */
5350
5351 rtx_insn *
5352 gen_clobber (rtx x)
5353 {
5354 rtx_insn *seq;
5355
5356 start_sequence ();
5357 emit_clobber (x);
5358 seq = get_insns ();
5359 end_sequence ();
5360 return seq;
5361 }
5362
5363 /* Emit a use of rvalue X. */
5364
5365 rtx_insn *
5366 emit_use (rtx x)
5367 {
5368 /* CONCATs should not appear in the insn stream. */
5369 if (GET_CODE (x) == CONCAT)
5370 {
5371 emit_use (XEXP (x, 0));
5372 return emit_use (XEXP (x, 1));
5373 }
5374 return emit_insn (gen_rtx_USE (VOIDmode, x));
5375 }
5376
5377 /* Return a sequence of insns to use rvalue X. */
5378
5379 rtx_insn *
5380 gen_use (rtx x)
5381 {
5382 rtx_insn *seq;
5383
5384 start_sequence ();
5385 emit_use (x);
5386 seq = get_insns ();
5387 end_sequence ();
5388 return seq;
5389 }
5390
5391 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5392 Return the set in INSN that such notes describe, or NULL if the notes
5393 have no meaning for INSN. */
5394
5395 rtx
5396 set_for_reg_notes (rtx insn)
5397 {
5398 rtx pat, reg;
5399
5400 if (!INSN_P (insn))
5401 return NULL_RTX;
5402
5403 pat = PATTERN (insn);
5404 if (GET_CODE (pat) == PARALLEL)
5405 {
5406 /* We do not use single_set because that ignores SETs of unused
5407 registers. REG_EQUAL and REG_EQUIV notes really do require the
5408 PARALLEL to have a single SET. */
5409 if (multiple_sets (insn))
5410 return NULL_RTX;
5411 pat = XVECEXP (pat, 0, 0);
5412 }
5413
5414 if (GET_CODE (pat) != SET)
5415 return NULL_RTX;
5416
5417 reg = SET_DEST (pat);
5418
5419 /* Notes apply to the contents of a STRICT_LOW_PART. */
5420 if (GET_CODE (reg) == STRICT_LOW_PART
5421 || GET_CODE (reg) == ZERO_EXTRACT)
5422 reg = XEXP (reg, 0);
5423
5424 /* Check that we have a register. */
5425 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5426 return NULL_RTX;
5427
5428 return pat;
5429 }
5430
5431 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5432 note of this type already exists, remove it first. */
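/* For instance (a hedged sketch; INSN is assumed to be an existing
single-set insn), recording that its destination is known to be zero:

     set_unique_reg_note (insn, REG_EQUAL, const0_rtx);  */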
5433
5434 rtx
5435 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5436 {
5437 rtx note = find_reg_note (insn, kind, NULL_RTX);
5438
5439 switch (kind)
5440 {
5441 case REG_EQUAL:
5442 case REG_EQUIV:
5443 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5444 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5445 return NULL_RTX;
5446
5447 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5448 It serves no useful purpose and breaks eliminate_regs. */
5449 if (GET_CODE (datum) == ASM_OPERANDS)
5450 return NULL_RTX;
5451
5452 /* Notes with side effects are dangerous. Even if the side-effect
5453 initially mirrors one in PATTERN (INSN), later optimizations
5454 might alter the way that the final register value is calculated
5455 and so move or alter the side-effect in some way. The note would
5456 then no longer be a valid substitution for SET_SRC. */
5457 if (side_effects_p (datum))
5458 return NULL_RTX;
5459 break;
5460
5461 default:
5462 break;
5463 }
5464
5465 if (note)
5466 XEXP (note, 0) = datum;
5467 else
5468 {
5469 add_reg_note (insn, kind, datum);
5470 note = REG_NOTES (insn);
5471 }
5472
5473 switch (kind)
5474 {
5475 case REG_EQUAL:
5476 case REG_EQUIV:
5477 df_notes_rescan (as_a <rtx_insn *> (insn));
5478 break;
5479 default:
5480 break;
5481 }
5482
5483 return note;
5484 }
5485
5486 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5487 rtx
5488 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5489 {
5490 rtx set = set_for_reg_notes (insn);
5491
5492 if (set && SET_DEST (set) == dst)
5493 return set_unique_reg_note (insn, kind, datum);
5494 return NULL_RTX;
5495 }
5496 \f
5497 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5498 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5499 is true.
5500
5501 If X is a label, it is simply added into the insn chain. */
5502
5503 rtx_insn *
5504 emit (rtx x, bool allow_barrier_p)
5505 {
5506 enum rtx_code code = classify_insn (x);
5507
5508 switch (code)
5509 {
5510 case CODE_LABEL:
5511 return emit_label (x);
5512 case INSN:
5513 return emit_insn (x);
5514 case JUMP_INSN:
5515 {
5516 rtx_insn *insn = emit_jump_insn (x);
5517 if (allow_barrier_p
5518 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5519 return emit_barrier ();
5520 return insn;
5521 }
5522 case CALL_INSN:
5523 return emit_call_insn (x);
5524 case DEBUG_INSN:
5525 return emit_debug_insn (x);
5526 default:
5527 gcc_unreachable ();
5528 }
5529 }
5530 \f
5531 /* Space for free sequence stack entries. */
5532 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5533
5534 /* Begin emitting insns to a sequence. If this sequence will contain
5535 something that might cause the compiler to pop arguments to function
5536 calls (because those pops have previously been deferred; see
5537 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5538 before calling this function. That will ensure that the deferred
5539 pops are not accidentally emitted in the middle of this sequence. */
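/* The canonical pairing with end_sequence (see gen_clobber and gen_use
below for small in-file instances of this pattern):

     start_sequence ();
     ... emit the insns ...
     rtx_insn *seq = get_insns ();
     end_sequence ();  */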
5540
5541 void
5542 start_sequence (void)
5543 {
5544 struct sequence_stack *tem;
5545
5546 if (free_sequence_stack != NULL)
5547 {
5548 tem = free_sequence_stack;
5549 free_sequence_stack = tem->next;
5550 }
5551 else
5552 tem = ggc_alloc<sequence_stack> ();
5553
5554 tem->next = get_current_sequence ()->next;
5555 tem->first = get_insns ();
5556 tem->last = get_last_insn ();
5557 get_current_sequence ()->next = tem;
5558
5559 set_first_insn (0);
5560 set_last_insn (0);
5561 }
5562
5563 /* Set up the insn chain starting with FIRST as the current sequence,
5564 saving the previously current one. See the documentation for
5565 start_sequence for more information about how to use this function. */
5566
5567 void
5568 push_to_sequence (rtx_insn *first)
5569 {
5570 rtx_insn *last;
5571
5572 start_sequence ();
5573
5574 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5575 ;
5576
5577 set_first_insn (first);
5578 set_last_insn (last);
5579 }
5580
5581 /* Like push_to_sequence, but take the last insn as an argument to avoid
5582 looping through the list. */
5583
5584 void
5585 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5586 {
5587 start_sequence ();
5588
5589 set_first_insn (first);
5590 set_last_insn (last);
5591 }
5592
5593 /* Set up the outer-level insn chain
5594 as the current sequence, saving the previously current one. */
5595
5596 void
5597 push_topmost_sequence (void)
5598 {
5599 struct sequence_stack *top;
5600
5601 start_sequence ();
5602
5603 top = get_topmost_sequence ();
5604 set_first_insn (top->first);
5605 set_last_insn (top->last);
5606 }
5607
5608 /* After emitting to the outer-level insn chain, update the outer-level
5609 insn chain, and restore the previous saved state. */
5610
5611 void
5612 pop_topmost_sequence (void)
5613 {
5614 struct sequence_stack *top;
5615
5616 top = get_topmost_sequence ();
5617 top->first = get_insns ();
5618 top->last = get_last_insn ();
5619
5620 end_sequence ();
5621 }
5622
5623 /* After emitting to a sequence, restore previous saved state.
5624
5625 To get the contents of the sequence just made, you must call
5626 `get_insns' *before* calling here.
5627
5628 If the compiler might have deferred popping arguments while
5629 generating this sequence, and this sequence will not be immediately
5630 inserted into the instruction stream, use do_pending_stack_adjust
5631 before calling get_insns. That will ensure that the deferred
5632 pops are inserted into this sequence, and not into some random
5633 location in the instruction stream. See INHIBIT_DEFER_POP for more
5634 information about deferred popping of arguments. */
5635
5636 void
5637 end_sequence (void)
5638 {
5639 struct sequence_stack *tem = get_current_sequence ()->next;
5640
5641 set_first_insn (tem->first);
5642 set_last_insn (tem->last);
5643 get_current_sequence ()->next = tem->next;
5644
5645 memset (tem, 0, sizeof (*tem));
5646 tem->next = free_sequence_stack;
5647 free_sequence_stack = tem;
5648 }
5649
5650 /* Return 1 if currently emitting into a sequence. */
5651
5652 int
5653 in_sequence_p (void)
5654 {
5655 return get_current_sequence ()->next != 0;
5656 }
5657 \f
5658 /* Put the various virtual registers into REGNO_REG_RTX. */
5659
5660 static void
5661 init_virtual_regs (void)
5662 {
5663 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5664 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5665 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5666 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5667 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5668 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5669 = virtual_preferred_stack_boundary_rtx;
5670 }
5671
5672 \f
5673 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5674 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5675 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5676 static int copy_insn_n_scratches;
5677
5678 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5679 copied an ASM_OPERANDS.
5680 In that case, it is the original input-operand vector. */
5681 static rtvec orig_asm_operands_vector;
5682
5683 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5684 copied an ASM_OPERANDS.
5685 In that case, it is the copied input-operand vector. */
5686 static rtvec copy_asm_operands_vector;
5687
5688 /* Likewise for the constraints vector. */
5689 static rtvec orig_asm_constraints_vector;
5690 static rtvec copy_asm_constraints_vector;
5691
5692 /* Recursively create a new copy of an rtx for copy_insn.
5693 This function differs from copy_rtx in that it handles SCRATCHes and
5694 ASM_OPERANDs properly.
5695 Normally, this function is not used directly; use copy_insn as front end.
5696 However, you could first copy an insn pattern with copy_insn and then use
5697 this function afterwards to properly copy any REG_NOTEs containing
5698 SCRATCHes. */
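/* A hedged sketch of that two-step use (INSN is assumed to be an existing
insn whose pattern and notes are being copied together):

     rtx pat = copy_insn (PATTERN (insn));
     rtx notes = copy_insn_1 (REG_NOTES (insn));

Because copy_insn resets the SCRATCH bookkeeping and copy_insn_1 reuses it,
SCRATCHes shared between the pattern and the notes stay shared in the
copies. */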
5699
5700 rtx
5701 copy_insn_1 (rtx orig)
5702 {
5703 rtx copy;
5704 int i, j;
5705 RTX_CODE code;
5706 const char *format_ptr;
5707
5708 if (orig == NULL)
5709 return NULL;
5710
5711 code = GET_CODE (orig);
5712
5713 switch (code)
5714 {
5715 case REG:
5716 case DEBUG_EXPR:
5717 CASE_CONST_ANY:
5718 case SYMBOL_REF:
5719 case CODE_LABEL:
5720 case PC:
5721 case CC0:
5722 case RETURN:
5723 case SIMPLE_RETURN:
5724 return orig;
5725 case CLOBBER:
5726 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5727 clobbers or clobbers of hard registers that originated as pseudos.
5728 This is needed to allow safe register renaming. */
5729 if (REG_P (XEXP (orig, 0))
5730 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5731 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5732 return orig;
5733 break;
5734
5735 case SCRATCH:
5736 for (i = 0; i < copy_insn_n_scratches; i++)
5737 if (copy_insn_scratch_in[i] == orig)
5738 return copy_insn_scratch_out[i];
5739 break;
5740
5741 case CONST:
5742 if (shared_const_p (orig))
5743 return orig;
5744 break;
5745
5746 /* A MEM with a constant address is not sharable. The problem is that
5747 the constant address may need to be reloaded. If the mem is shared,
5748 then reloading one copy of this mem will cause all copies to appear
5749 to have been reloaded. */
5750
5751 default:
5752 break;
5753 }
5754
5755 /* Copy the various flags, fields, and other information. We assume
5756 that all fields need copying, and then clear the fields that should
5757 not be copied. That is the sensible default behavior, and forces
5758 us to explicitly document why we are *not* copying a flag. */
5759 copy = shallow_copy_rtx (orig);
5760
5761 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5762 if (INSN_P (orig))
5763 {
5764 RTX_FLAG (copy, jump) = 0;
5765 RTX_FLAG (copy, call) = 0;
5766 RTX_FLAG (copy, frame_related) = 0;
5767 }
5768
5769 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5770
5771 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5772 switch (*format_ptr++)
5773 {
5774 case 'e':
5775 if (XEXP (orig, i) != NULL)
5776 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5777 break;
5778
5779 case 'E':
5780 case 'V':
5781 if (XVEC (orig, i) == orig_asm_constraints_vector)
5782 XVEC (copy, i) = copy_asm_constraints_vector;
5783 else if (XVEC (orig, i) == orig_asm_operands_vector)
5784 XVEC (copy, i) = copy_asm_operands_vector;
5785 else if (XVEC (orig, i) != NULL)
5786 {
5787 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5788 for (j = 0; j < XVECLEN (copy, i); j++)
5789 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5790 }
5791 break;
5792
5793 case 't':
5794 case 'w':
5795 case 'i':
5796 case 'p':
5797 case 's':
5798 case 'S':
5799 case 'u':
5800 case '0':
5801 /* These are left unchanged. */
5802 break;
5803
5804 default:
5805 gcc_unreachable ();
5806 }
5807
5808 if (code == SCRATCH)
5809 {
5810 i = copy_insn_n_scratches++;
5811 gcc_assert (i < MAX_RECOG_OPERANDS);
5812 copy_insn_scratch_in[i] = orig;
5813 copy_insn_scratch_out[i] = copy;
5814 }
5815 else if (code == ASM_OPERANDS)
5816 {
5817 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5818 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5819 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5820 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5821 }
5822
5823 return copy;
5824 }
5825
5826 /* Create a new copy of an rtx.
5827 This function differs from copy_rtx in that it handles SCRATCHes and
5828 ASM_OPERANDs properly.
5829 INSN doesn't really have to be a full INSN; it could be just the
5830 pattern. */
5831 rtx
5832 copy_insn (rtx insn)
5833 {
5834 copy_insn_n_scratches = 0;
5835 orig_asm_operands_vector = 0;
5836 orig_asm_constraints_vector = 0;
5837 copy_asm_operands_vector = 0;
5838 copy_asm_constraints_vector = 0;
5839 return copy_insn_1 (insn);
5840 }
5841
5842 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5843 on the assumption that INSN itself remains in its original place. */
5844
5845 rtx_insn *
5846 copy_delay_slot_insn (rtx_insn *insn)
5847 {
5848 /* Copy INSN with its rtx_code, all its notes, location etc. */
5849 insn = as_a <rtx_insn *> (copy_rtx (insn));
5850 INSN_UID (insn) = cur_insn_uid++;
5851 return insn;
5852 }
5853
5854 /* Initialize data structures and variables in this file
5855 before generating rtl for each function. */
5856
5857 void
5858 init_emit (void)
5859 {
5860 set_first_insn (NULL);
5861 set_last_insn (NULL);
5862 if (param_min_nondebug_insn_uid)
5863 cur_insn_uid = param_min_nondebug_insn_uid;
5864 else
5865 cur_insn_uid = 1;
5866 cur_debug_insn_uid = 1;
5867 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5868 first_label_num = label_num;
5869 get_current_sequence ()->next = NULL;
5870
5871 /* Init the tables that describe all the pseudo regs. */
5872
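/* The slack beyond the virtual registers presumably leaves room for the
   first pseudo registers of the function before the tables have to be
   reallocated.  */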
5873 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5874
5875 crtl->emit.regno_pointer_align
5876 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5877
5878 regno_reg_rtx
5879 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5880
5881 /* Put copies of all the hard registers into regno_reg_rtx. */
5882 memcpy (regno_reg_rtx,
5883 initial_regno_reg_rtx,
5884 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5885
5886 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5887 init_virtual_regs ();
5888
5889 /* Indicate that the virtual registers and stack locations are
5890 all pointers. */
5891 REG_POINTER (stack_pointer_rtx) = 1;
5892 REG_POINTER (frame_pointer_rtx) = 1;
5893 REG_POINTER (hard_frame_pointer_rtx) = 1;
5894 REG_POINTER (arg_pointer_rtx) = 1;
5895
5896 REG_POINTER (virtual_incoming_args_rtx) = 1;
5897 REG_POINTER (virtual_stack_vars_rtx) = 1;
5898 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5899 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5900 REG_POINTER (virtual_cfa_rtx) = 1;
5901
5902 #ifdef STACK_BOUNDARY
5903 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5904 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5905 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5906 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5907
5908 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5909 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5910 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5911 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5912
5913 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5914 #endif
5915
5916 #ifdef INIT_EXPANDERS
5917 INIT_EXPANDERS;
5918 #endif
5919 }
5920
5921 /* Return the value of element I of CONST_VECTOR X as a wide_int. */
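/* For example, a vector with a single stepped pattern encoded as { 0, 1, 2 }
   (npatterns == 1, three encoded elements) represents { 0, 1, 2, 3, ... };
   element 6 is computed from the last two encoded elements as
   2 + (6 - 2) * (2 - 1) == 6.  */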
5922
5923 wide_int
5924 const_vector_int_elt (const_rtx x, unsigned int i)
5925 {
5926 /* First handle elements that are directly encoded. */
5927 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5928 if (i < (unsigned int) XVECLEN (x, 0))
5929 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5930
5931 /* Identify the pattern that contains element I and work out the index of
5932 the last encoded element for that pattern. */
5933 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5934 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5935 unsigned int count = i / npatterns;
5936 unsigned int pattern = i % npatterns;
5937 unsigned int final_i = encoded_nelts - npatterns + pattern;
5938
5939 /* If there are no steps, the final encoded value is the right one. */
5940 if (!CONST_VECTOR_STEPPED_P (x))
5941 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5942
5943 /* Otherwise work out the value from the last two encoded elements. */
5944 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5945 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5946 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5947 rtx_mode_t (v1, elt_mode));
5948 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
5949 }
5950
5951 /* Return the value of element I of CONST_VECTOR X. */
5952
5953 rtx
5954 const_vector_elt (const_rtx x, unsigned int i)
5955 {
5956 /* First handle elements that are directly encoded. */
5957 if (i < (unsigned int) XVECLEN (x, 0))
5958 return CONST_VECTOR_ENCODED_ELT (x, i);
5959
5960 /* If there are no steps, the final encoded value is the right one. */
5961 if (!CONST_VECTOR_STEPPED_P (x))
5962 {
5963 /* Identify the pattern that contains element I and work out the index of
5964 the last encoded element for that pattern. */
5965 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5966 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5967 unsigned int pattern = i % npatterns;
5968 unsigned int final_i = encoded_nelts - npatterns + pattern;
5969 return CONST_VECTOR_ENCODED_ELT (x, final_i);
5970 }
5971
5972 /* Otherwise work out the value from the last two encoded elements. */
5973 return immed_wide_int_const (const_vector_int_elt (x, i),
5974 GET_MODE_INNER (GET_MODE (x)));
5975 }
5976
5977 /* Return true if X is a valid element for a CONST_VECTOR of the given
5978 mode. */
5979
5980 bool
5981 valid_for_const_vector_p (machine_mode, rtx x)
5982 {
5983 return (CONST_SCALAR_INT_P (x)
5984 || CONST_DOUBLE_AS_FLOAT_P (x)
5985 || CONST_FIXED_P (x));
5986 }
5987
5988 /* Generate a vector constant of mode MODE in which every element has
5989 value ELT. */
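/* A single pattern with one element per pattern is the most compact
   encoding of a vector whose elements are all equal.  */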
5990
5991 rtx
5992 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5993 {
5994 rtx_vector_builder builder (mode, 1, 1);
5995 builder.quick_push (elt);
5996 return builder.build ();
5997 }
5998
5999 /* Return a vector rtx of mode MODE in which every element has value X.
6000 The result will be a constant if X is constant. */
6001
6002 rtx
6003 gen_vec_duplicate (machine_mode mode, rtx x)
6004 {
6005 if (valid_for_const_vector_p (mode, x))
6006 return gen_const_vec_duplicate (mode, x);
6007 return gen_rtx_VEC_DUPLICATE (mode, x);
6008 }
6009
6010 /* A subroutine of const_vec_series_p that handles the case in which:
6011
6012 (GET_CODE (X) == CONST_VECTOR
6013 && CONST_VECTOR_NPATTERNS (X) == 1
6014 && !CONST_VECTOR_DUPLICATE_P (X))
6015
6016 is known to hold. */
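/* For example, an integer vector constant encoded as the single stepped
   pattern { 1, 3, 5 } represents the series { 1, 3, 5, 7, ... } and would
   be recognized here with base 1 and step 2.  */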
6017
6018 bool
6019 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
6020 {
6021 /* Stepped sequences are only defined for integers, to avoid specifying
6022 rounding behavior. */
6023 if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
6024 return false;
6025
6026 /* A non-duplicated vector with two elements can always be seen as a
6027 series with a nonzero step. Longer vectors must have a stepped
6028 encoding. */
6029 if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
6030 && !CONST_VECTOR_STEPPED_P (x))
6031 return false;
6032
6033 /* Calculate the step between the first and second elements. */
6034 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
6035 rtx base = CONST_VECTOR_ELT (x, 0);
6036 rtx step = simplify_binary_operation (MINUS, inner,
6037 CONST_VECTOR_ENCODED_ELT (x, 1), base);
6038 if (rtx_equal_p (step, CONST0_RTX (inner)))
6039 return false;
6040
6041 /* If we have a stepped encoding, check that the step between the
6042 second and third elements is the same as STEP. */
6043 if (CONST_VECTOR_STEPPED_P (x))
6044 {
6045 rtx diff = simplify_binary_operation (MINUS, inner,
6046 CONST_VECTOR_ENCODED_ELT (x, 2),
6047 CONST_VECTOR_ENCODED_ELT (x, 1));
6048 if (!rtx_equal_p (step, diff))
6049 return false;
6050 }
6051
6052 *base_out = base;
6053 *step_out = step;
6054 return true;
6055 }
6056
6057 /* Generate a vector constant of mode MODE in which element I has
6058 the value BASE + I * STEP. */
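/* For example, on a target that provides V4SImode,
     gen_const_vec_series (V4SImode, const1_rtx, GEN_INT (2))
   yields the constant vector { 1, 3, 5, 7 }.  */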
6059
6060 rtx
6061 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6062 {
6063 gcc_assert (valid_for_const_vector_p (mode, base)
6064 && valid_for_const_vector_p (mode, step));
6065
6066 rtx_vector_builder builder (mode, 1, 3);
6067 builder.quick_push (base);
6068 for (int i = 1; i < 3; ++i)
6069 builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6070 builder[i - 1], step));
6071 return builder.build ();
6072 }
6073
6074 /* Generate a vector of mode MODE in which element I has the value
6075 BASE + I * STEP. The result will be a constant if BASE and STEP
6076 are both constants. */
6077
6078 rtx
6079 gen_vec_series (machine_mode mode, rtx base, rtx step)
6080 {
6081 if (step == const0_rtx)
6082 return gen_vec_duplicate (mode, base);
6083 if (valid_for_const_vector_p (mode, base)
6084 && valid_for_const_vector_p (mode, step))
6085 return gen_const_vec_series (mode, base, step);
6086 return gen_rtx_VEC_SERIES (mode, base, step);
6087 }
6088
6089 /* Generate a new vector constant for mode MODE and constant value
6090 CONSTANT. */
6091
6092 static rtx
6093 gen_const_vector (machine_mode mode, int constant)
6094 {
6095 machine_mode inner = GET_MODE_INNER (mode);
6096
6097 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6098
6099 rtx el = const_tiny_rtx[constant][(int) inner];
6100 gcc_assert (el);
6101
6102 return gen_const_vec_duplicate (mode, el);
6103 }
6104
6105 /* Generate a vector like gen_rtx_raw_CONST_VEC, but canonicalize vectors
6106 whose elements are all equal, such as the all-zero and all-one vectors. */
6107 rtx
6108 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6109 {
6110 gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6111
6112 /* If the values are all the same, check to see if we can use one of the
6113 standard constant vectors. */
6114 if (rtvec_all_equal_p (v))
6115 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6116
6117 unsigned int nunits = GET_NUM_ELEM (v);
6118 rtx_vector_builder builder (mode, nunits, 1);
6119 for (unsigned int i = 0; i < nunits; ++i)
6120 builder.quick_push (RTVEC_ELT (v, i));
6121 return builder.build (v);
6122 }
6123
6124 /* Initialize global register information required by all functions. */
6125
6126 void
6127 init_emit_regs (void)
6128 {
6129 int i;
6130 machine_mode mode;
6131 mem_attrs *attrs;
6132
6133 /* Reset register attributes. */
6134 reg_attrs_htab->empty ();
6135
6136 /* We need reg_raw_mode, so initialize the modes now. */
6137 init_reg_modes_target ();
6138
6139 /* Assign register numbers to the globally defined register rtx. */
6140 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6141 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6142 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6143 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6144 virtual_incoming_args_rtx =
6145 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6146 virtual_stack_vars_rtx =
6147 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6148 virtual_stack_dynamic_rtx =
6149 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6150 virtual_outgoing_args_rtx =
6151 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6152 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6153 virtual_preferred_stack_boundary_rtx =
6154 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6155
6156 /* Initialize RTL for commonly used hard registers. These are
6157 copied into regno_reg_rtx as we begin to compile each function. */
6158 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6159 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6160
6161 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6162 return_address_pointer_rtx
6163 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6164 #endif
6165
6166 pic_offset_table_rtx = NULL_RTX;
6167 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6168 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6169
6170 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6171 {
6172 mode = (machine_mode) i;
6173 attrs = ggc_cleared_alloc<mem_attrs> ();
6174 attrs->align = BITS_PER_UNIT;
6175 attrs->addrspace = ADDR_SPACE_GENERIC;
6176 if (mode != BLKmode && mode != VOIDmode)
6177 {
6178 attrs->size_known_p = true;
6179 attrs->size = GET_MODE_SIZE (mode);
6180 if (STRICT_ALIGNMENT)
6181 attrs->align = GET_MODE_ALIGNMENT (mode);
6182 }
6183 mode_mem_attrs[i] = attrs;
6184 }
6185
6186 split_branch_probability = profile_probability::uninitialized ();
6187 }
6188
6189 /* Initialize global machine_mode variables. */
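/* On a typical 64-bit target with BITS_PER_UNIT == 8 and
   BITS_PER_WORD == 64, this selects QImode for byte_mode and DImode
   for word_mode.  */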
6190
6191 void
6192 init_derived_machine_modes (void)
6193 {
6194 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6195 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6196 {
6197 scalar_int_mode mode = mode_iter.require ();
6198
6199 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6200 && !opt_byte_mode.exists ())
6201 opt_byte_mode = mode;
6202
6203 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6204 && !opt_word_mode.exists ())
6205 opt_word_mode = mode;
6206 }
6207
6208 byte_mode = opt_byte_mode.require ();
6209 word_mode = opt_word_mode.require ();
6210 ptr_mode = as_a <scalar_int_mode>
6211 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6212 }
6213
6214 /* Create some permanent unique rtl objects shared between all functions. */
6215
6216 void
6217 init_emit_once (void)
6218 {
6219 int i;
6220 machine_mode mode;
6221 scalar_float_mode double_mode;
6222 opt_scalar_mode smode_iter;
6223
6224 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6225 CONST_FIXED, and memory attribute hash tables. */
6226 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6227
6228 #if TARGET_SUPPORTS_WIDE_INT
6229 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6230 #endif
6231 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6232
6233 if (NUM_POLY_INT_COEFFS > 1)
6234 const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6235
6236 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6237
6238 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6239
6240 #ifdef INIT_EXPANDERS
6241 /* This is to initialize {init|mark|free}_machine_status before the first
6242 call to push_function_context_to. This is needed by the Chill front
6243 end which calls push_function_context_to before the first call to
6244 init_function_start. */
6245 INIT_EXPANDERS;
6246 #endif
6247
6248 /* Create the unique rtx's for certain rtx codes and operand values. */
6249
6250 /* Process stack-limiting command-line options. */
6251 if (opt_fstack_limit_symbol_arg != NULL)
6252 stack_limit_rtx
6253 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6254 if (opt_fstack_limit_register_no >= 0)
6255 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6256
6257 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
6258 tries to use these variables. */
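/* (const0_rtx, const1_rtx, const2_rtx and constm1_rtx are simply aliases
   for entries of this array.)  */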
6259 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6260 const_int_rtx[i + MAX_SAVED_CONST_INT] =
6261 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6262
6263 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6264 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6265 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6266 else
6267 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6268
6269 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6270
6271 real_from_integer (&dconst0, double_mode, 0, SIGNED);
6272 real_from_integer (&dconst1, double_mode, 1, SIGNED);
6273 real_from_integer (&dconst2, double_mode, 2, SIGNED);
6274
6275 dconstm1 = dconst1;
6276 dconstm1.sign = 1;
6277
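/* 0.5 is obtained by halving 1.0, i.e. by decrementing its binary
   exponent.  */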
6278 dconsthalf = dconst1;
6279 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6280
6281 for (i = 0; i < 3; i++)
6282 {
6283 const REAL_VALUE_TYPE *const r =
6284 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6285
6286 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6287 const_tiny_rtx[i][(int) mode] =
6288 const_double_from_real_value (*r, mode);
6289
6290 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6291 const_tiny_rtx[i][(int) mode] =
6292 const_double_from_real_value (*r, mode);
6293
6294 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6295
6296 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6297 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6298
6299 for (mode = MIN_MODE_PARTIAL_INT;
6300 mode <= MAX_MODE_PARTIAL_INT;
6301 mode = (machine_mode)((int)(mode) + 1))
6302 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6303 }
6304
6305 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6306
6307 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6308 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6309
6310 /* For BImode, 1 and -1 are unsigned and signed interpretations
6311 of the same value. */
6312 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6313 const_tiny_rtx[1][(int) BImode] = const_true_rtx;
6314 const_tiny_rtx[3][(int) BImode] = const_true_rtx;
6315
6316 for (mode = MIN_MODE_PARTIAL_INT;
6317 mode <= MAX_MODE_PARTIAL_INT;
6318 mode = (machine_mode)((int)(mode) + 1))
6319 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6320
6321 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6322 {
6323 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6324 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6325 }
6326
6327 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6328 {
6329 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6330 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6331 }
6332
6333 /* As for BImode, "all 1" and "all -1" are unsigned and signed
6334 interpretations of the same value. */
6335 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
6336 {
6337 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6338 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6339 const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
6340 }
6341
6342 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6343 {
6344 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6345 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6346 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6347 }
6348
6349 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6350 {
6351 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6352 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6353 }
6354
6355 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6356 {
6357 scalar_mode smode = smode_iter.require ();
6358 FCONST0 (smode).data.high = 0;
6359 FCONST0 (smode).data.low = 0;
6360 FCONST0 (smode).mode = smode;
6361 const_tiny_rtx[0][(int) smode]
6362 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6363 }
6364
6365 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6366 {
6367 scalar_mode smode = smode_iter.require ();
6368 FCONST0 (smode).data.high = 0;
6369 FCONST0 (smode).data.low = 0;
6370 FCONST0 (smode).mode = smode;
6371 const_tiny_rtx[0][(int) smode]
6372 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6373 }
6374
6375 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6376 {
6377 scalar_mode smode = smode_iter.require ();
6378 FCONST0 (smode).data.high = 0;
6379 FCONST0 (smode).data.low = 0;
6380 FCONST0 (smode).mode = smode;
6381 const_tiny_rtx[0][(int) smode]
6382 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6383
6384 /* We store the value 1. */
6385 FCONST1 (smode).data.high = 0;
6386 FCONST1 (smode).data.low = 0;
6387 FCONST1 (smode).mode = smode;
6388 FCONST1 (smode).data
6389 = double_int_one.lshift (GET_MODE_FBIT (smode),
6390 HOST_BITS_PER_DOUBLE_INT,
6391 SIGNED_FIXED_POINT_MODE_P (smode));
6392 const_tiny_rtx[1][(int) smode]
6393 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6394 }
6395
6396 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6397 {
6398 scalar_mode smode = smode_iter.require ();
6399 FCONST0 (smode).data.high = 0;
6400 FCONST0 (smode).data.low = 0;
6401 FCONST0 (smode).mode = smode;
6402 const_tiny_rtx[0][(int) smode]
6403 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6404
6405 /* We store the value 1. */
6406 FCONST1 (smode).data.high = 0;
6407 FCONST1 (smode).data.low = 0;
6408 FCONST1 (smode).mode = smode;
6409 FCONST1 (smode).data
6410 = double_int_one.lshift (GET_MODE_FBIT (smode),
6411 HOST_BITS_PER_DOUBLE_INT,
6412 SIGNED_FIXED_POINT_MODE_P (smode));
6413 const_tiny_rtx[1][(int) smode]
6414 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6415 }
6416
6417 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6418 {
6419 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6420 }
6421
6422 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6423 {
6424 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6425 }
6426
6427 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6428 {
6429 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6430 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6431 }
6432
6433 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6434 {
6435 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6436 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6437 }
6438
6439 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6440 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6441 const_tiny_rtx[0][i] = const0_rtx;
6442
6443 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6444 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6445 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6446 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6447 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6448 /*prev_insn=*/NULL,
6449 /*next_insn=*/NULL,
6450 /*bb=*/NULL,
6451 /*pattern=*/NULL_RTX,
6452 /*location=*/-1,
6453 CODE_FOR_nothing,
6454 /*reg_notes=*/NULL_RTX);
6455 }
6456 \f
6457 /* Produce an exact duplicate of insn INSN after AFTER.
6458 Take care of updating libcall regions if present. */
6459
6460 rtx_insn *
6461 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6462 {
6463 rtx_insn *new_rtx;
6464 rtx link;
6465
6466 switch (GET_CODE (insn))
6467 {
6468 case INSN:
6469 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6470 break;
6471
6472 case JUMP_INSN:
6473 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6474 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6475 break;
6476
6477 case DEBUG_INSN:
6478 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6479 break;
6480
6481 case CALL_INSN:
6482 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6483 if (CALL_INSN_FUNCTION_USAGE (insn))
6484 CALL_INSN_FUNCTION_USAGE (new_rtx)
6485 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6486 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6487 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6488 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6489 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6490 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6491 break;
6492
6493 default:
6494 gcc_unreachable ();
6495 }
6496
6497 /* Update LABEL_NUSES. */
6498 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6499
6500 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6501
6502 /* If the old insn is frame related, then so is the new one. This is
6503 primarily needed for IA-64 unwind info which marks epilogue insns,
6504 which may be duplicated by the basic block reordering code. */
6505 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6506
6507 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6508 rtx *ptail = &REG_NOTES (new_rtx);
6509 while (*ptail != NULL_RTX)
6510 ptail = &XEXP (*ptail, 1);
6511
6512 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6513 will make them. REG_LABEL_TARGETs are created there too, but are
6514 supposed to be sticky, so we copy them. */
6515 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6516 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6517 {
6518 *ptail = duplicate_reg_note (link);
6519 ptail = &XEXP (*ptail, 1);
6520 }
6521
6522 INSN_CODE (new_rtx) = INSN_CODE (insn);
6523 return new_rtx;
6524 }
6525
6526 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
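/* Return a CLOBBER of hard register REGNO in mode MODE, creating the rtx
   on first use and returning the cached copy thereafter.  */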
6527 rtx
6528 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6529 {
6530 if (hard_reg_clobbers[mode][regno])
6531 return hard_reg_clobbers[mode][regno];
6532 else
6533 return (hard_reg_clobbers[mode][regno] =
6534 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6535 }
6536
6537 location_t prologue_location;
6538 location_t epilogue_location;
6539
6540 /* Hold current location information and last location information, so that
6541 the data structures are built lazily, only when instructions at a given
6542 place actually need them. */
6543 static location_t curr_location;
6544
6545 /* Allocate the insn location data structure. */
6546 void
6547 insn_locations_init (void)
6548 {
6549 prologue_location = epilogue_location = 0;
6550 curr_location = UNKNOWN_LOCATION;
6551 }
6552
6553 /* At the end of emit stage, clear current location. */
6554 void
6555 insn_locations_finalize (void)
6556 {
6557 epilogue_location = curr_location;
6558 curr_location = UNKNOWN_LOCATION;
6559 }
6560
6561 /* Set current location. */
6562 void
6563 set_curr_insn_location (location_t location)
6564 {
6565 curr_location = location;
6566 }
6567
6568 /* Get current location. */
6569 location_t
6570 curr_insn_location (void)
6571 {
6572 return curr_location;
6573 }
6574
6575 /* Set the location of the insn chain starting at INSN to LOC. */
6576 void
6577 set_insn_locations (rtx_insn *insn, location_t loc)
6578 {
6579 while (insn)
6580 {
6581 if (INSN_P (insn))
6582 INSN_LOCATION (insn) = loc;
6583 insn = NEXT_INSN (insn);
6584 }
6585 }
6586
6587 /* Return the lexical scope block that INSN belongs to. */
6588 tree
6589 insn_scope (const rtx_insn *insn)
6590 {
6591 return LOCATION_BLOCK (INSN_LOCATION (insn));
6592 }
6593
6594 /* Return line number of the statement that produced this insn. */
6595 int
6596 insn_line (const rtx_insn *insn)
6597 {
6598 return LOCATION_LINE (INSN_LOCATION (insn));
6599 }
6600
6601 /* Return source file of the statement that produced this insn. */
6602 const char *
6603 insn_file (const rtx_insn *insn)
6604 {
6605 return LOCATION_FILE (INSN_LOCATION (insn));
6606 }
6607
6608 /* Return expanded location of the statement that produced this insn. */
6609 expanded_location
6610 insn_location (const rtx_insn *insn)
6611 {
6612 return expand_location (INSN_LOCATION (insn));
6613 }
6614
6615 /* Return true if memory model MODEL requires a pre-operation (release-style)
6616 barrier or a post-operation (acquire-style) barrier. While not universal,
6617 this function matches the behavior of several targets. */
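/* For example, a release operation needs a barrier before the access
   (PRE is true) but not after it, while an acquire operation needs a
   barrier only after the access.  */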
6618
6619 bool
6620 need_atomic_barrier_p (enum memmodel model, bool pre)
6621 {
6622 switch (model & MEMMODEL_BASE_MASK)
6623 {
6624 case MEMMODEL_RELAXED:
6625 case MEMMODEL_CONSUME:
6626 return false;
6627 case MEMMODEL_RELEASE:
6628 return pre;
6629 case MEMMODEL_ACQUIRE:
6630 return !pre;
6631 case MEMMODEL_ACQ_REL:
6632 case MEMMODEL_SEQ_CST:
6633 return true;
6634 default:
6635 gcc_unreachable ();
6636 }
6637 }
6638
6639 /* Return a constant shift amount for shifting a value of mode MODE
6640 by VALUE bits. */
6641
6642 rtx
6643 gen_int_shift_amount (machine_mode, poly_int64 value)
6644 {
6645 /* Use a 64-bit mode, to avoid any truncation.
6646
6647 ??? Perhaps this should be automatically derived from the .md files
6648 instead, or perhaps have a target hook. */
6649 scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6650 ? DImode
6651 : int_mode_for_size (64, 0).require ());
6652 return gen_int_mode (value, shift_mode);
6653 }
6654
6655 /* Initialize fields of rtl_data related to stack alignment. */
6656
6657 void
6658 rtl_data::init_stack_alignment ()
6659 {
6660 stack_alignment_needed = STACK_BOUNDARY;
6661 max_used_stack_slot_alignment = STACK_BOUNDARY;
6662 stack_alignment_estimated = 0;
6663 preferred_stack_boundary = STACK_BOUNDARY;
6664 }
6665
6666 \f
6667 #include "gt-emit-rtl.h"