1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "params.h"
58 #include "builtins.h"
59 #include "rtl-iter.h"
60 #include "stor-layout.h"
61 #include "opts.h"
62 #include "predict.h"
63 #include "rtx-vector-builder.h"
64
65 struct target_rtl default_target_rtl;
66 #if SWITCHABLE_TARGET
67 struct target_rtl *this_target_rtl = &default_target_rtl;
68 #endif
69
70 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
71
72 /* Commonly used modes. */
73
74 scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
75 scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
76 scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
77
78 /* Datastructures maintained for currently processed function in RTL form. */
79
80 struct rtl_data x_rtl;
81
82 /* Indexed by pseudo register number, gives the rtx for that pseudo.
83 Allocated in parallel with regno_pointer_align.
84 FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
85 with a length attribute nested in top-level structures. */
86
87 rtx * regno_reg_rtx;
88
89 /* This is *not* reset after each function. It gives each CODE_LABEL
90 in the entire compilation a unique label number. */
91
92 static GTY(()) int label_num = 1;
93
94 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
95 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
96 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
97 is set only for MODE_INT and MODE_VECTOR_INT modes. */
98
99 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
100
101 rtx const_true_rtx;
102
103 REAL_VALUE_TYPE dconst0;
104 REAL_VALUE_TYPE dconst1;
105 REAL_VALUE_TYPE dconst2;
106 REAL_VALUE_TYPE dconstm1;
107 REAL_VALUE_TYPE dconsthalf;
108
109 /* Record fixed-point constant 0 and 1. */
110 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
111 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
112
113 /* We make one copy of (const_int C) where C is in
114 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
115 to save space during the compilation and simplify comparisons of
116 integers. */
117
118 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
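
/* For example, the standard shorthands const0_rtx, const1_rtx, const2_rtx
   and constm1_rtx are simply the cached entries of this array for the
   values 0, 1, 2 and -1, so CONST_INTs in the saved range can be compared
   with pointer equality.  */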
119
120 /* Standard pieces of rtx, to be substituted directly into things. */
121 rtx pc_rtx;
122 rtx ret_rtx;
123 rtx simple_return_rtx;
124 rtx cc0_rtx;
125
126 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
127 this pointer should normally never be dereferenced), but is required to be
128 distinct from NULL_RTX. Currently used by peephole2 pass. */
129 rtx_insn *invalid_insn_rtx;
130
131 /* A hash table storing CONST_INTs whose absolute value is greater
132 than MAX_SAVED_CONST_INT. */
133
134 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
135 {
136 typedef HOST_WIDE_INT compare_type;
137
138 static hashval_t hash (rtx i);
139 static bool equal (rtx i, HOST_WIDE_INT h);
140 };
141
142 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
143
144 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
145 {
146 static hashval_t hash (rtx x);
147 static bool equal (rtx x, rtx y);
148 };
149
150 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
151
152 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
153 {
154 typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
155
156 static hashval_t hash (rtx x);
157 static bool equal (rtx x, const compare_type &y);
158 };
159
160 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
161
162 /* A hash table storing register attribute structures. */
163 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
164 {
165 static hashval_t hash (reg_attrs *x);
166 static bool equal (reg_attrs *a, reg_attrs *b);
167 };
168
169 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
170
171 /* A hash table storing all CONST_DOUBLEs. */
172 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
173 {
174 static hashval_t hash (rtx x);
175 static bool equal (rtx x, rtx y);
176 };
177
178 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
179
180 /* A hash table storing all CONST_FIXEDs. */
181 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
182 {
183 static hashval_t hash (rtx x);
184 static bool equal (rtx x, rtx y);
185 };
186
187 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
188
189 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
190 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
191 #define first_label_num (crtl->emit.x_first_label_num)
192
193 static void set_used_decls (tree);
194 static void mark_label_nuses (rtx);
195 #if TARGET_SUPPORTS_WIDE_INT
196 static rtx lookup_const_wide_int (rtx);
197 #endif
198 static rtx lookup_const_double (rtx);
199 static rtx lookup_const_fixed (rtx);
200 static rtx gen_const_vector (machine_mode, int);
201 static void copy_rtx_if_shared_1 (rtx *orig);
202
203 /* Probability of the conditional branch currently being processed by try_split. */
204 profile_probability split_branch_probability;
205 \f
206 /* Returns a hash code for X (which is really a CONST_INT). */
207
208 hashval_t
209 const_int_hasher::hash (rtx x)
210 {
211 return (hashval_t) INTVAL (x);
212 }
213
214 /* Returns nonzero if the value represented by X (which is really a
215 CONST_INT) is the same as that given by Y (which is really a
216 HOST_WIDE_INT *). */
217
218 bool
219 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
220 {
221 return (INTVAL (x) == y);
222 }
223
224 #if TARGET_SUPPORTS_WIDE_INT
225 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
226
227 hashval_t
228 const_wide_int_hasher::hash (rtx x)
229 {
230 int i;
231 unsigned HOST_WIDE_INT hash = 0;
232 const_rtx xr = x;
233
234 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
235 hash += CONST_WIDE_INT_ELT (xr, i);
236
237 return (hashval_t) hash;
238 }
239
240 /* Returns nonzero if the value represented by X (which is really a
241 CONST_WIDE_INT) is the same as that given by Y (which is really a
242 CONST_WIDE_INT). */
243
244 bool
245 const_wide_int_hasher::equal (rtx x, rtx y)
246 {
247 int i;
248 const_rtx xr = x;
249 const_rtx yr = y;
250 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
251 return false;
252
253 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
254 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
255 return false;
256
257 return true;
258 }
259 #endif
260
261 /* Returns a hash code for CONST_POLY_INT X. */
262
263 hashval_t
264 const_poly_int_hasher::hash (rtx x)
265 {
266 inchash::hash h;
267 h.add_int (GET_MODE (x));
268 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
269 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
270 return h.end ();
271 }
272
273 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */
274
275 bool
276 const_poly_int_hasher::equal (rtx x, const compare_type &y)
277 {
278 if (GET_MODE (x) != y.first)
279 return false;
280 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
281 if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
282 return false;
283 return true;
284 }
285
286 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
287 hashval_t
288 const_double_hasher::hash (rtx x)
289 {
290 const_rtx const value = x;
291 hashval_t h;
292
293 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
294 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
295 else
296 {
297 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
298 /* MODE is used in the comparison, so it should be in the hash. */
299 h ^= GET_MODE (value);
300 }
301 return h;
302 }
303
304 /* Returns nonzero if the value represented by X (really a ...)
305 is the same as that represented by Y (really a ...) */
306 bool
307 const_double_hasher::equal (rtx x, rtx y)
308 {
309 const_rtx const a = x, b = y;
310
311 if (GET_MODE (a) != GET_MODE (b))
312 return 0;
313 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
314 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
315 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
316 else
317 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
318 CONST_DOUBLE_REAL_VALUE (b));
319 }
320
321 /* Returns a hash code for X (which is really a CONST_FIXED). */
322
323 hashval_t
324 const_fixed_hasher::hash (rtx x)
325 {
326 const_rtx const value = x;
327 hashval_t h;
328
329 h = fixed_hash (CONST_FIXED_VALUE (value));
330 /* MODE is used in the comparison, so it should be in the hash. */
331 h ^= GET_MODE (value);
332 return h;
333 }
334
335 /* Returns nonzero if the value represented by X is the same as that
336 represented by Y. */
337
338 bool
339 const_fixed_hasher::equal (rtx x, rtx y)
340 {
341 const_rtx const a = x, b = y;
342
343 if (GET_MODE (a) != GET_MODE (b))
344 return 0;
345 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
346 }
347
348 /* Return true if the given memory attributes are equal. */
349
350 bool
351 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
352 {
353 if (p == q)
354 return true;
355 if (!p || !q)
356 return false;
357 return (p->alias == q->alias
358 && p->offset_known_p == q->offset_known_p
359 && (!p->offset_known_p || known_eq (p->offset, q->offset))
360 && p->size_known_p == q->size_known_p
361 && (!p->size_known_p || known_eq (p->size, q->size))
362 && p->align == q->align
363 && p->addrspace == q->addrspace
364 && (p->expr == q->expr
365 || (p->expr != NULL_TREE && q->expr != NULL_TREE
366 && operand_equal_p (p->expr, q->expr, 0))));
367 }
368
369 /* Set MEM's memory attributes so that they are the same as ATTRS. */
370
371 static void
372 set_mem_attrs (rtx mem, mem_attrs *attrs)
373 {
374 /* If everything is the default, we can just clear the attributes. */
375 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
376 {
377 MEM_ATTRS (mem) = 0;
378 return;
379 }
380
381 if (!MEM_ATTRS (mem)
382 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
383 {
384 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
385 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
386 }
387 }
388
389 /* Returns a hash code for X (which is really a reg_attrs *). */
390
391 hashval_t
392 reg_attr_hasher::hash (reg_attrs *x)
393 {
394 const reg_attrs *const p = x;
395
396 inchash::hash h;
397 h.add_ptr (p->decl);
398 h.add_poly_hwi (p->offset);
399 return h.end ();
400 }
401
402 /* Returns nonzero if the value represented by X is the same as that given by
403 Y. */
404
405 bool
406 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
407 {
408 const reg_attrs *const p = x;
409 const reg_attrs *const q = y;
410
411 return (p->decl == q->decl && known_eq (p->offset, q->offset));
412 }
413 /* Allocate a new reg_attrs structure for the given DECL and OFFSET and
414 insert it into the hash table if one identical to it is not already
415 in the table. */
416
417 static reg_attrs *
418 get_reg_attrs (tree decl, poly_int64 offset)
419 {
420 reg_attrs attrs;
421
422 /* If everything is the default, we can just return zero. */
423 if (decl == 0 && known_eq (offset, 0))
424 return 0;
425
426 attrs.decl = decl;
427 attrs.offset = offset;
428
429 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
430 if (*slot == 0)
431 {
432 *slot = ggc_alloc<reg_attrs> ();
433 memcpy (*slot, &attrs, sizeof (reg_attrs));
434 }
435
436 return *slot;
437 }
438
439
440 #if !HAVE_blockage
441 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
442 and to block register equivalences from being seen across this insn. */
443
444 rtx
445 gen_blockage (void)
446 {
447 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
448 MEM_VOLATILE_P (x) = true;
449 return x;
450 }
451 #endif
452
453
454 /* Set the mode and register number of X to MODE and REGNO. */
455
456 void
457 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
458 {
459 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
460 ? hard_regno_nregs (regno, mode)
461 : 1);
462 PUT_MODE_RAW (x, mode);
463 set_regno_raw (x, regno, nregs);
464 }
465
466 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
467 don't attempt to share with the various global pieces of rtl (such as
468 frame_pointer_rtx). */
469
470 rtx
471 gen_raw_REG (machine_mode mode, unsigned int regno)
472 {
473 rtx x = rtx_alloc (REG MEM_STAT_INFO);
474 set_mode_and_regno (x, mode, regno);
475 REG_ATTRS (x) = NULL;
476 ORIGINAL_REGNO (x) = regno;
477 return x;
478 }
479
480 /* There are some RTL codes that require special attention; the generation
481 functions do the raw handling. If you add to this list, modify
482 special_rtx in gengenrtl.c as well. */
483
484 rtx_expr_list *
485 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
486 {
487 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
488 expr_list));
489 }
490
491 rtx_insn_list *
492 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
493 {
494 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
495 insn_list));
496 }
497
498 rtx_insn *
499 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
500 basic_block bb, rtx pattern, int location, int code,
501 rtx reg_notes)
502 {
503 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
504 prev_insn, next_insn,
505 bb, pattern, location, code,
506 reg_notes));
507 }
508
509 rtx
510 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
511 {
512 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
513 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
514
515 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
516 if (const_true_rtx && arg == STORE_FLAG_VALUE)
517 return const_true_rtx;
518 #endif
519
520 /* Look up the CONST_INT in the hash table. */
521 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
522 INSERT);
523 if (*slot == 0)
524 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
525
526 return *slot;
527 }
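
/* An illustrative use: because of the caching above, repeated requests for
   the same value always return the same node, e.g.

	GEN_INT (0) == const0_rtx
	GEN_INT (12345) == GEN_INT (12345)	(interned via const_int_htab)

   so CONST_INTs of equal value may be compared with ==.  */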
528
529 rtx
530 gen_int_mode (poly_int64 c, machine_mode mode)
531 {
532 c = trunc_int_for_mode (c, mode);
533 if (c.is_constant ())
534 return GEN_INT (c.coeffs[0]);
535 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
536 return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
537 }
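
/* A small example: assuming the usual 8-bit QImode, the call

	rtx x = gen_int_mode (0xff, QImode);

   truncates and sign-extends the value for QImode, so X is constm1_rtx
   rather than (const_int 255).  */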
538
539 /* CONST_DOUBLEs might be created from pairs of integers, or from
540 REAL_VALUE_TYPEs. Also, their length is known only at run time,
541 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
542
543 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
544 hash table. If so, return its counterpart; otherwise add it
545 to the hash table and return it. */
546 static rtx
547 lookup_const_double (rtx real)
548 {
549 rtx *slot = const_double_htab->find_slot (real, INSERT);
550 if (*slot == 0)
551 *slot = real;
552
553 return *slot;
554 }
555
556 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
557 VALUE in mode MODE. */
558 rtx
559 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
560 {
561 rtx real = rtx_alloc (CONST_DOUBLE);
562 PUT_MODE (real, mode);
563
564 real->u.rv = value;
565
566 return lookup_const_double (real);
567 }
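
/* For instance, a caller wanting the DFmode constant 1.0 might do

	rtx one = const_double_from_real_value (dconst1, DFmode);

   and, thanks to lookup_const_double, every such call returns the same
   shared CONST_DOUBLE node.  */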
568
569 /* Determine whether FIXED, a CONST_FIXED, already exists in the
570 hash table. If so, return its counterpart; otherwise add it
571 to the hash table and return it. */
572
573 static rtx
574 lookup_const_fixed (rtx fixed)
575 {
576 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
577 if (*slot == 0)
578 *slot = fixed;
579
580 return *slot;
581 }
582
583 /* Return a CONST_FIXED rtx for a fixed-point value specified by
584 VALUE in mode MODE. */
585
586 rtx
587 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
588 {
589 rtx fixed = rtx_alloc (CONST_FIXED);
590 PUT_MODE (fixed, mode);
591
592 fixed->u.fv = value;
593
594 return lookup_const_fixed (fixed);
595 }
596
597 #if TARGET_SUPPORTS_WIDE_INT == 0
598 /* Constructs double_int from rtx CST. */
599
600 double_int
601 rtx_to_double_int (const_rtx cst)
602 {
603 double_int r;
604
605 if (CONST_INT_P (cst))
606 r = double_int::from_shwi (INTVAL (cst));
607 else if (CONST_DOUBLE_AS_INT_P (cst))
608 {
609 r.low = CONST_DOUBLE_LOW (cst);
610 r.high = CONST_DOUBLE_HIGH (cst);
611 }
612 else
613 gcc_unreachable ();
614
615 return r;
616 }
617 #endif
618
619 #if TARGET_SUPPORTS_WIDE_INT
620 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
621 If so, return its counterpart; otherwise add it to the hash table and
622 return it. */
623
624 static rtx
625 lookup_const_wide_int (rtx wint)
626 {
627 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
628 if (*slot == 0)
629 *slot = wint;
630
631 return *slot;
632 }
633 #endif
634
635 /* Return an rtx constant for V, given that the constant has mode MODE.
636 The returned rtx will be a CONST_INT if V fits, otherwise it will be
637 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
638 (if TARGET_SUPPORTS_WIDE_INT). */
639
640 static rtx
641 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
642 {
643 unsigned int len = v.get_len ();
644 /* Not scalar_int_mode because we also allow pointer bound modes. */
645 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
646
647 /* Allow truncation but not extension since we do not know if the
648 number is signed or unsigned. */
649 gcc_assert (prec <= v.get_precision ());
650
651 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
652 return gen_int_mode (v.elt (0), mode);
653
654 #if TARGET_SUPPORTS_WIDE_INT
655 {
656 unsigned int i;
657 rtx value;
658 unsigned int blocks_needed
659 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
660
661 if (len > blocks_needed)
662 len = blocks_needed;
663
664 value = const_wide_int_alloc (len);
665
666 /* It is so tempting to just put the mode in here. Must control
667 myself ... */
668 PUT_MODE (value, VOIDmode);
669 CWI_PUT_NUM_ELEM (value, len);
670
671 for (i = 0; i < len; i++)
672 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
673
674 return lookup_const_wide_int (value);
675 }
676 #else
677 return immed_double_const (v.elt (0), v.elt (1), mode);
678 #endif
679 }
680
681 #if TARGET_SUPPORTS_WIDE_INT == 0
682 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
683 of ints: I0 is the low-order word and I1 is the high-order word.
684 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
685 implied upper bits are copies of the high bit of i1. The value
686 itself is neither signed nor unsigned. Do not use this routine for
687 non-integer modes; convert to REAL_VALUE_TYPE and use
688 const_double_from_real_value. */
689
690 rtx
691 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
692 {
693 rtx value;
694 unsigned int i;
695
696 /* There are the following cases (note that there are no modes with
697 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
698
699 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
700 gen_int_mode.
701 2) If the value of the integer fits into HOST_WIDE_INT anyway
702 (i.e., i1 consists only from copies of the sign bit, and sign
703 of i0 and i1 are the same), then we return a CONST_INT for i0.
704 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
705 scalar_mode smode;
706 if (is_a <scalar_mode> (mode, &smode)
707 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
708 return gen_int_mode (i0, mode);
709
710 /* If this integer fits in one word, return a CONST_INT. */
711 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
712 return GEN_INT (i0);
713
714 /* We use VOIDmode for integers. */
715 value = rtx_alloc (CONST_DOUBLE);
716 PUT_MODE (value, VOIDmode);
717
718 CONST_DOUBLE_LOW (value) = i0;
719 CONST_DOUBLE_HIGH (value) = i1;
720
721 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
722 XWINT (value, i) = 0;
723
724 return lookup_const_double (value);
725 }
726 #endif
727
728 /* Return an rtx representation of C in mode MODE. */
729
730 rtx
731 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
732 {
733 if (c.is_constant ())
734 return immed_wide_int_const_1 (c.coeffs[0], mode);
735
736 /* Not scalar_int_mode because we also allow pointer bound modes. */
737 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
738
739 /* Allow truncation but not extension since we do not know if the
740 number is signed or unsigned. */
741 gcc_assert (prec <= c.coeffs[0].get_precision ());
742 poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
743
744 /* See whether we already have an rtx for this constant. */
745 inchash::hash h;
746 h.add_int (mode);
747 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
748 h.add_wide_int (newc.coeffs[i]);
749 const_poly_int_hasher::compare_type typed_value (mode, newc);
750 rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
751 h.end (), INSERT);
752 rtx x = *slot;
753 if (x)
754 return x;
755
756 /* Create a new rtx. There's a choice to be made here between installing
757 the actual mode of the rtx or leaving it as VOIDmode (for consistency
758 with CONST_INT). In practice the handling of the codes is different
759 enough that we get no benefit from using VOIDmode, and various places
760 assume that VOIDmode implies CONST_INT. Using the real mode seems like
761 the right long-term direction anyway. */
762 typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
763 size_t extra_size = twi::extra_size (prec);
764 x = rtx_alloc_v (CONST_POLY_INT,
765 sizeof (struct const_poly_int_def) + extra_size);
766 PUT_MODE (x, mode);
767 CONST_POLY_INT_COEFFS (x).set_precision (prec);
768 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
769 CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
770
771 *slot = x;
772 return x;
773 }
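
/* As an illustration, a TImode constant whose value happens to fit in a
   single HOST_WIDE_INT still comes back as a (shared) CONST_INT, while a
   genuinely wide value becomes a CONST_WIDE_INT (or, without
   TARGET_SUPPORTS_WIDE_INT, a VOIDmode CONST_DOUBLE).  A CONST_POLY_INT is
   built only when the value is not a compile-time constant, e.g. when it
   depends on the runtime vector length.  */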
774
775 rtx
776 gen_rtx_REG (machine_mode mode, unsigned int regno)
777 {
778 /* In case the MD file explicitly references the frame pointer, have
779 all such references point to the same frame pointer. This is
780 used during frame pointer elimination to distinguish the explicit
781 references to these registers from pseudos that happened to be
782 assigned to them.
783
784 If we have eliminated the frame pointer or arg pointer, we will
785 be using it as a normal register, for example as a spill
786 register. In such cases, we might be accessing it in a mode that
787 is not Pmode and therefore cannot use the pre-allocated rtx.
788
789 Also don't do this when we are making new REGs in reload, since
790 we don't want to get confused with the real pointers. */
791
792 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
793 {
794 if (regno == FRAME_POINTER_REGNUM
795 && (!reload_completed || frame_pointer_needed))
796 return frame_pointer_rtx;
797
798 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
799 && regno == HARD_FRAME_POINTER_REGNUM
800 && (!reload_completed || frame_pointer_needed))
801 return hard_frame_pointer_rtx;
802 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
803 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
804 && regno == ARG_POINTER_REGNUM)
805 return arg_pointer_rtx;
806 #endif
807 #ifdef RETURN_ADDRESS_POINTER_REGNUM
808 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
809 return return_address_pointer_rtx;
810 #endif
811 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
812 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
813 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
814 return pic_offset_table_rtx;
815 if (regno == STACK_POINTER_REGNUM)
816 return stack_pointer_rtx;
817 }
818
819 #if 0
820 /* If the per-function register table has been set up, try to re-use
821 an existing entry in that table to avoid useless generation of RTL.
822
823 This code is disabled for now until we can fix the various backends
824 which depend on having non-shared hard registers in some cases. Long
825 term we want to re-enable this code as it can significantly cut down
826 on the amount of useless RTL that gets generated.
827
828 We'll also need to fix some code that runs after reload that wants to
829 set ORIGINAL_REGNO. */
830
831 if (cfun
832 && cfun->emit
833 && regno_reg_rtx
834 && regno < FIRST_PSEUDO_REGISTER
835 && reg_raw_mode[regno] == mode)
836 return regno_reg_rtx[regno];
837 #endif
838
839 return gen_raw_REG (mode, regno);
840 }
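
/* For example, outside of reload a request such as

	gen_rtx_REG (Pmode, STACK_POINTER_REGNUM)

   hands back the shared stack_pointer_rtx rather than a fresh REG, which
   is what lets later passes compare these well-known registers by
   pointer equality.  */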
841
842 rtx
843 gen_rtx_MEM (machine_mode mode, rtx addr)
844 {
845 rtx rt = gen_rtx_raw_MEM (mode, addr);
846
847 /* This field is not cleared by the mere allocation of the rtx, so
848 we clear it here. */
849 MEM_ATTRS (rt) = 0;
850
851 return rt;
852 }
853
854 /* Generate a memory referring to non-trapping constant memory. */
855
856 rtx
857 gen_const_mem (machine_mode mode, rtx addr)
858 {
859 rtx mem = gen_rtx_MEM (mode, addr);
860 MEM_READONLY_P (mem) = 1;
861 MEM_NOTRAP_P (mem) = 1;
862 return mem;
863 }
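
/* A typical use is a load from the constant pool, e.g.

	rtx mem = gen_const_mem (SImode, addr);

   where ADDR is whatever address rtx the caller already has; the
   resulting MEM is marked MEM_READONLY_P and MEM_NOTRAP_P.  */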
864
865 /* Generate a MEM referring to fixed portions of the frame, e.g., register
866 save areas. */
867
868 rtx
869 gen_frame_mem (machine_mode mode, rtx addr)
870 {
871 rtx mem = gen_rtx_MEM (mode, addr);
872 MEM_NOTRAP_P (mem) = 1;
873 set_mem_alias_set (mem, get_frame_alias_set ());
874 return mem;
875 }
876
877 /* Generate a MEM referring to a temporary use of the stack, not part
878 of the fixed stack frame. For example, something which is pushed
879 by a target splitter. */
880 rtx
881 gen_tmp_stack_mem (machine_mode mode, rtx addr)
882 {
883 rtx mem = gen_rtx_MEM (mode, addr);
884 MEM_NOTRAP_P (mem) = 1;
885 if (!cfun->calls_alloca)
886 set_mem_alias_set (mem, get_frame_alias_set ());
887 return mem;
888 }
889
890 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
891 this construct would be valid, and false otherwise. */
892
893 bool
894 validate_subreg (machine_mode omode, machine_mode imode,
895 const_rtx reg, poly_uint64 offset)
896 {
897 unsigned int isize = GET_MODE_SIZE (imode);
898 unsigned int osize = GET_MODE_SIZE (omode);
899
900 /* All subregs must be aligned. */
901 if (!multiple_p (offset, osize))
902 return false;
903
904 /* The subreg offset cannot be outside the inner object. */
905 if (maybe_ge (offset, isize))
906 return false;
907
908 unsigned int regsize = REGMODE_NATURAL_SIZE (imode);
909
910 /* ??? This should not be here. Temporarily continue to allow word_mode
911 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
912 Generally, backends are doing something sketchy but it'll take time to
913 fix them all. */
914 if (omode == word_mode)
915 ;
916 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
917 is the culprit here, and not the backends. */
918 else if (osize >= regsize && isize >= osize)
919 ;
920 /* Allow component subregs of complex and vector. Though given the below
921 extraction rules, it's not always clear what that means. */
922 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
923 && GET_MODE_INNER (imode) == omode)
924 ;
925 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
926 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
927 represent this. It's questionable if this ought to be represented at
928 all -- why can't this all be hidden in post-reload splitters that make
929 arbitrary mode changes to the registers themselves. */
930 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
931 ;
932 /* Subregs involving floating point modes are not allowed to
933 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
934 (subreg:SI (reg:DF) 0) isn't. */
935 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
936 {
937 if (! (isize == osize
938 /* LRA can use subreg to store a floating point value in
939 an integer mode. Although the floating point and the
940 integer modes need the same number of hard registers,
941 the size of floating point mode can be less than the
942 integer mode. LRA also uses subregs for a register that
943 should be used in different modes in one insn. */
944 || lra_in_progress))
945 return false;
946 }
947
948 /* Paradoxical subregs must have offset zero. */
949 if (osize > isize)
950 return known_eq (offset, 0U);
951
952 /* This is a normal subreg. Verify that the offset is representable. */
953
954 /* For hard registers, we already have most of these rules collected in
955 subreg_offset_representable_p. */
956 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
957 {
958 unsigned int regno = REGNO (reg);
959
960 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
961 && GET_MODE_INNER (imode) == omode)
962 ;
963 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
964 return false;
965
966 return subreg_offset_representable_p (regno, imode, offset, omode);
967 }
968
969 /* For pseudo registers, we want most of the same checks. Namely:
970
971 Assume that the pseudo register will be allocated to hard registers
972 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
973 the remainder must correspond to the lowpart of the containing hard
974 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
975 otherwise it is at the lowest offset.
976
977 Given that we've already checked the mode and offset alignment,
978 we only have to check subblock subregs here. */
979 if (osize < regsize
980 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
981 {
982 poly_uint64 block_size = MIN (isize, regsize);
983 unsigned int start_reg;
984 poly_uint64 offset_within_reg;
985 if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
986 || (BYTES_BIG_ENDIAN
987 ? maybe_ne (offset_within_reg, block_size - osize)
988 : maybe_ne (offset_within_reg, 0U)))
989 return false;
990 }
991 return true;
992 }
993
994 rtx
995 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
996 {
997 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
998 return gen_rtx_raw_SUBREG (mode, reg, offset);
999 }
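
/* By way of example, on a 32-bit target (4-byte SImode word_mode, 8-byte
   DImode) both

	(subreg:SI (reg:DI R) 0)   and   (subreg:SI (reg:DI R) 4)

   pass validate_subreg, since the offset is a multiple of the outer size
   and lies within the inner mode, while the paradoxical
   (subreg:DI (reg:SI R) X) is only accepted with X == 0.  */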
1000
1001 /* Generate a SUBREG representing the least-significant part of REG if MODE
1002 is smaller than mode of REG, otherwise paradoxical SUBREG. */
1003
1004 rtx
1005 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1006 {
1007 machine_mode inmode;
1008
1009 inmode = GET_MODE (reg);
1010 if (inmode == VOIDmode)
1011 inmode = mode;
1012 return gen_rtx_SUBREG (mode, reg,
1013 subreg_lowpart_offset (mode, inmode));
1014 }
1015
1016 rtx
1017 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1018 enum var_init_status status)
1019 {
1020 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1021 PAT_VAR_LOCATION_STATUS (x) = status;
1022 return x;
1023 }
1024 \f
1025
1026 /* Create an rtvec and store within it the RTXen passed in the arguments. */
1027
1028 rtvec
1029 gen_rtvec (int n, ...)
1030 {
1031 int i;
1032 rtvec rt_val;
1033 va_list p;
1034
1035 va_start (p, n);
1036
1037 /* Don't allocate an empty rtvec... */
1038 if (n == 0)
1039 {
1040 va_end (p);
1041 return NULL_RTVEC;
1042 }
1043
1044 rt_val = rtvec_alloc (n);
1045
1046 for (i = 0; i < n; i++)
1047 rt_val->elem[i] = va_arg (p, rtx);
1048
1049 va_end (p);
1050 return rt_val;
1051 }
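
/* A common pattern is building the vector for a PARALLEL, e.g.

	gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2))

   where SET1 and SET2 are rtxes the caller already has; note that
   gen_rtvec (0) deliberately yields NULL_RTVEC instead of an empty
   vector.  */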
1052
1053 rtvec
1054 gen_rtvec_v (int n, rtx *argp)
1055 {
1056 int i;
1057 rtvec rt_val;
1058
1059 /* Don't allocate an empty rtvec... */
1060 if (n == 0)
1061 return NULL_RTVEC;
1062
1063 rt_val = rtvec_alloc (n);
1064
1065 for (i = 0; i < n; i++)
1066 rt_val->elem[i] = *argp++;
1067
1068 return rt_val;
1069 }
1070
1071 rtvec
1072 gen_rtvec_v (int n, rtx_insn **argp)
1073 {
1074 int i;
1075 rtvec rt_val;
1076
1077 /* Don't allocate an empty rtvec... */
1078 if (n == 0)
1079 return NULL_RTVEC;
1080
1081 rt_val = rtvec_alloc (n);
1082
1083 for (i = 0; i < n; i++)
1084 rt_val->elem[i] = *argp++;
1085
1086 return rt_val;
1087 }
1088
1089 \f
1090 /* Return the number of bytes between the start of an OUTER_MODE
1091 in-memory value and the start of an INNER_MODE in-memory value,
1092 given that the former is a lowpart of the latter. It may be a
1093 paradoxical lowpart, in which case the offset will be negative
1094 on big-endian targets. */
1095
1096 poly_int64
1097 byte_lowpart_offset (machine_mode outer_mode,
1098 machine_mode inner_mode)
1099 {
1100 if (paradoxical_subreg_p (outer_mode, inner_mode))
1101 return -subreg_lowpart_offset (inner_mode, outer_mode);
1102 else
1103 return subreg_lowpart_offset (outer_mode, inner_mode);
1104 }
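
/* For instance, for the SImode lowpart of a DImode value in memory this
   returns 0 on a little-endian target and 4 when both BYTES_BIG_ENDIAN
   and WORDS_BIG_ENDIAN are set, matching where the least significant
   four bytes actually live.  */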
1105
1106 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1107 from address X. For paradoxical big-endian subregs this is a
1108 negative value, otherwise it's the same as OFFSET. */
1109
1110 poly_int64
1111 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1112 poly_uint64 offset)
1113 {
1114 if (paradoxical_subreg_p (outer_mode, inner_mode))
1115 {
1116 gcc_assert (known_eq (offset, 0U));
1117 return -subreg_lowpart_offset (inner_mode, outer_mode);
1118 }
1119 return offset;
1120 }
1121
1122 /* As above, but return the offset that existing subreg X would have
1123 if SUBREG_REG (X) were stored in memory. The only significant thing
1124 about the current SUBREG_REG is its mode. */
1125
1126 poly_int64
1127 subreg_memory_offset (const_rtx x)
1128 {
1129 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1130 SUBREG_BYTE (x));
1131 }
1132 \f
1133 /* Generate a REG rtx for a new pseudo register of mode MODE.
1134 This pseudo is assigned the next sequential register number. */
1135
1136 rtx
1137 gen_reg_rtx (machine_mode mode)
1138 {
1139 rtx val;
1140 unsigned int align = GET_MODE_ALIGNMENT (mode);
1141
1142 gcc_assert (can_create_pseudo_p ());
1143
1144 /* If a virtual register with bigger mode alignment is generated,
1145 increase stack alignment estimation because it might be spilled
1146 to stack later. */
1147 if (SUPPORTS_STACK_ALIGNMENT
1148 && crtl->stack_alignment_estimated < align
1149 && !crtl->stack_realign_processed)
1150 {
1151 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1152 if (crtl->stack_alignment_estimated < min_align)
1153 crtl->stack_alignment_estimated = min_align;
1154 }
1155
1156 if (generating_concat_p
1157 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1158 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1159 {
1160 /* For complex modes, don't make a single pseudo.
1161 Instead, make a CONCAT of two pseudos.
1162 This allows noncontiguous allocation of the real and imaginary parts,
1163 which makes much better code. Besides, allocating DCmode
1164 pseudos overstrains reload on some machines like the 386. */
1165 rtx realpart, imagpart;
1166 machine_mode partmode = GET_MODE_INNER (mode);
1167
1168 realpart = gen_reg_rtx (partmode);
1169 imagpart = gen_reg_rtx (partmode);
1170 return gen_rtx_CONCAT (mode, realpart, imagpart);
1171 }
1172
1173 /* Do not call gen_reg_rtx with uninitialized crtl. */
1174 gcc_assert (crtl->emit.regno_pointer_align_length);
1175
1176 crtl->emit.ensure_regno_capacity ();
1177 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1178
1179 val = gen_raw_REG (mode, reg_rtx_no);
1180 regno_reg_rtx[reg_rtx_no++] = val;
1181 return val;
1182 }
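
/* In its simplest form a pass just asks for a scratch pseudo, e.g.

	rtx tmp = gen_reg_rtx (SImode);

   which allocates the next pseudo register number; for complex modes the
   CONCAT of two pseudos described above is returned instead.  */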
1183
1184 /* Make sure m_regno_pointer_align and regno_reg_rtx are large
1185 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1186
1187 void
1188 emit_status::ensure_regno_capacity ()
1189 {
1190 int old_size = regno_pointer_align_length;
1191
1192 if (reg_rtx_no < old_size)
1193 return;
1194
1195 int new_size = old_size * 2;
1196 while (reg_rtx_no >= new_size)
1197 new_size *= 2;
1198
1199 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1200 memset (tmp + old_size, 0, new_size - old_size);
1201 regno_pointer_align = (unsigned char *) tmp;
1202
1203 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1204 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1205 regno_reg_rtx = new1;
1206
1207 crtl->emit.regno_pointer_align_length = new_size;
1208 }
1209
1210 /* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise. */
1211
1212 bool
1213 reg_is_parm_p (rtx reg)
1214 {
1215 tree decl;
1216
1217 gcc_assert (REG_P (reg));
1218 decl = REG_EXPR (reg);
1219 return (decl && TREE_CODE (decl) == PARM_DECL);
1220 }
1221
1222 /* Update NEW with the same attributes as REG, but with OFFSET added
1223 to the REG_OFFSET. */
1224
1225 static void
1226 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1227 {
1228 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1229 REG_OFFSET (reg) + offset);
1230 }
1231
1232 /* Generate a register with same attributes as REG, but with OFFSET
1233 added to the REG_OFFSET. */
1234
1235 rtx
1236 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1237 poly_int64 offset)
1238 {
1239 rtx new_rtx = gen_rtx_REG (mode, regno);
1240
1241 update_reg_offset (new_rtx, reg, offset);
1242 return new_rtx;
1243 }
1244
1245 /* Generate a new pseudo-register with the same attributes as REG, but
1246 with OFFSET added to the REG_OFFSET. */
1247
1248 rtx
1249 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1250 {
1251 rtx new_rtx = gen_reg_rtx (mode);
1252
1253 update_reg_offset (new_rtx, reg, offset);
1254 return new_rtx;
1255 }
1256
1257 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1258 new register is a (possibly paradoxical) lowpart of the old one. */
1259
1260 void
1261 adjust_reg_mode (rtx reg, machine_mode mode)
1262 {
1263 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1264 PUT_MODE (reg, mode);
1265 }
1266
1267 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1268 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1269
1270 void
1271 set_reg_attrs_from_value (rtx reg, rtx x)
1272 {
1273 poly_int64 offset;
1274 bool can_be_reg_pointer = true;
1275
1276 /* Don't call mark_reg_pointer for incompatible pointer sign
1277 extension. */
1278 while (GET_CODE (x) == SIGN_EXTEND
1279 || GET_CODE (x) == ZERO_EXTEND
1280 || GET_CODE (x) == TRUNCATE
1281 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1282 {
1283 #if defined(POINTERS_EXTEND_UNSIGNED)
1284 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1285 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1286 || (paradoxical_subreg_p (x)
1287 && ! (SUBREG_PROMOTED_VAR_P (x)
1288 && SUBREG_CHECK_PROMOTED_SIGN (x,
1289 POINTERS_EXTEND_UNSIGNED))))
1290 && !targetm.have_ptr_extend ())
1291 can_be_reg_pointer = false;
1292 #endif
1293 x = XEXP (x, 0);
1294 }
1295
1296 /* Hard registers can be reused for multiple purposes within the same
1297 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1298 on them is wrong. */
1299 if (HARD_REGISTER_P (reg))
1300 return;
1301
1302 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1303 if (MEM_P (x))
1304 {
1305 if (MEM_OFFSET_KNOWN_P (x))
1306 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1307 MEM_OFFSET (x) + offset);
1308 if (can_be_reg_pointer && MEM_POINTER (x))
1309 mark_reg_pointer (reg, 0);
1310 }
1311 else if (REG_P (x))
1312 {
1313 if (REG_ATTRS (x))
1314 update_reg_offset (reg, x, offset);
1315 if (can_be_reg_pointer && REG_POINTER (x))
1316 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1317 }
1318 }
1319
1320 /* Generate a REG rtx for a new pseudo register, copying the mode
1321 and attributes from X. */
1322
1323 rtx
1324 gen_reg_rtx_and_attrs (rtx x)
1325 {
1326 rtx reg = gen_reg_rtx (GET_MODE (x));
1327 set_reg_attrs_from_value (reg, x);
1328 return reg;
1329 }
1330
1331 /* Set the register attributes for registers contained in PARM_RTX.
1332 Use needed values from memory attributes of MEM. */
1333
1334 void
1335 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1336 {
1337 if (REG_P (parm_rtx))
1338 set_reg_attrs_from_value (parm_rtx, mem);
1339 else if (GET_CODE (parm_rtx) == PARALLEL)
1340 {
1341 /* Check for a NULL entry in the first slot, used to indicate that the
1342 parameter goes both on the stack and in registers. */
1343 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1344 for (; i < XVECLEN (parm_rtx, 0); i++)
1345 {
1346 rtx x = XVECEXP (parm_rtx, 0, i);
1347 if (REG_P (XEXP (x, 0)))
1348 REG_ATTRS (XEXP (x, 0))
1349 = get_reg_attrs (MEM_EXPR (mem),
1350 INTVAL (XEXP (x, 1)));
1351 }
1352 }
1353 }
1354
1355 /* Set the REG_ATTRS for registers in value X, given that X represents
1356 decl T. */
1357
1358 void
1359 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1360 {
1361 if (!t)
1362 return;
1363 tree tdecl = t;
1364 if (GET_CODE (x) == SUBREG)
1365 {
1366 gcc_assert (subreg_lowpart_p (x));
1367 x = SUBREG_REG (x);
1368 }
1369 if (REG_P (x))
1370 REG_ATTRS (x)
1371 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1372 DECL_P (tdecl)
1373 ? DECL_MODE (tdecl)
1374 : TYPE_MODE (TREE_TYPE (tdecl))));
1375 if (GET_CODE (x) == CONCAT)
1376 {
1377 if (REG_P (XEXP (x, 0)))
1378 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1379 if (REG_P (XEXP (x, 1)))
1380 REG_ATTRS (XEXP (x, 1))
1381 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1382 }
1383 if (GET_CODE (x) == PARALLEL)
1384 {
1385 int i, start;
1386
1387 /* Check for a NULL entry, used to indicate that the parameter goes
1388 both on the stack and in registers. */
1389 if (XEXP (XVECEXP (x, 0, 0), 0))
1390 start = 0;
1391 else
1392 start = 1;
1393
1394 for (i = start; i < XVECLEN (x, 0); i++)
1395 {
1396 rtx y = XVECEXP (x, 0, i);
1397 if (REG_P (XEXP (y, 0)))
1398 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1399 }
1400 }
1401 }
1402
1403 /* Assign the RTX X to declaration T. */
1404
1405 void
1406 set_decl_rtl (tree t, rtx x)
1407 {
1408 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1409 if (x)
1410 set_reg_attrs_for_decl_rtl (t, x);
1411 }
1412
1413 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1414 if the ABI requires the parameter to be passed by reference. */
1415
1416 void
1417 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1418 {
1419 DECL_INCOMING_RTL (t) = x;
1420 if (x && !by_reference_p)
1421 set_reg_attrs_for_decl_rtl (t, x);
1422 }
1423
1424 /* Identify REG (which may be a CONCAT) as a user register. */
1425
1426 void
1427 mark_user_reg (rtx reg)
1428 {
1429 if (GET_CODE (reg) == CONCAT)
1430 {
1431 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1432 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1433 }
1434 else
1435 {
1436 gcc_assert (REG_P (reg));
1437 REG_USERVAR_P (reg) = 1;
1438 }
1439 }
1440
1441 /* Identify REG as a probable pointer register and show its alignment
1442 as ALIGN, if nonzero. */
1443
1444 void
1445 mark_reg_pointer (rtx reg, int align)
1446 {
1447 if (! REG_POINTER (reg))
1448 {
1449 REG_POINTER (reg) = 1;
1450
1451 if (align)
1452 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1453 }
1454 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1455 /* We can no longer be sure just how aligned this pointer is. */
1456 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1457 }
1458
1459 /* Return 1 plus largest pseudo reg number used in the current function. */
1460
1461 int
1462 max_reg_num (void)
1463 {
1464 return reg_rtx_no;
1465 }
1466
1467 /* Return 1 + the largest label number used so far in the current function. */
1468
1469 int
1470 max_label_num (void)
1471 {
1472 return label_num;
1473 }
1474
1475 /* Return first label number used in this function (if any were used). */
1476
1477 int
1478 get_first_label_num (void)
1479 {
1480 return first_label_num;
1481 }
1482
1483 /* If the rtx for label was created during the expansion of a nested
1484 function, then first_label_num won't include this label number.
1485 Fix this now so that array indices work later. */
1486
1487 void
1488 maybe_set_first_label_num (rtx_code_label *x)
1489 {
1490 if (CODE_LABEL_NUMBER (x) < first_label_num)
1491 first_label_num = CODE_LABEL_NUMBER (x);
1492 }
1493
1494 /* For use by the RTL function loader, when mingling with normal
1495 functions.
1496 Ensure that label_num is greater than the label num of X, to avoid
1497 duplicate labels in the generated assembler. */
1498
1499 void
1500 maybe_set_max_label_num (rtx_code_label *x)
1501 {
1502 if (CODE_LABEL_NUMBER (x) >= label_num)
1503 label_num = CODE_LABEL_NUMBER (x) + 1;
1504 }
1505
1506 \f
1507 /* Return a value representing some low-order bits of X, where the number
1508 of low-order bits is given by MODE. Note that no conversion is done
1509 between floating-point and fixed-point values, rather, the bit
1510 representation is returned.
1511
1512 This function handles the cases in common between gen_lowpart, below,
1513 and two variants in cse.c and combine.c. These are the cases that can
1514 be safely handled at all points in the compilation.
1515
1516 If this is not a case we can handle, return 0. */
1517
1518 rtx
1519 gen_lowpart_common (machine_mode mode, rtx x)
1520 {
1521 int msize = GET_MODE_SIZE (mode);
1522 int xsize;
1523 machine_mode innermode;
1524
1525 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1526 so we have to make one up. Yuk. */
1527 innermode = GET_MODE (x);
1528 if (CONST_INT_P (x)
1529 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1530 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1531 else if (innermode == VOIDmode)
1532 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1533
1534 xsize = GET_MODE_SIZE (innermode);
1535
1536 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1537
1538 if (innermode == mode)
1539 return x;
1540
1541 if (SCALAR_FLOAT_MODE_P (mode))
1542 {
1543 /* Don't allow paradoxical FLOAT_MODE subregs. */
1544 if (msize > xsize)
1545 return 0;
1546 }
1547 else
1548 {
1549 /* MODE must occupy no more of the underlying registers than X. */
1550 unsigned int regsize = REGMODE_NATURAL_SIZE (innermode);
1551 unsigned int mregs = CEIL (msize, regsize);
1552 unsigned int xregs = CEIL (xsize, regsize);
1553 if (mregs > xregs)
1554 return 0;
1555 }
1556
1557 scalar_int_mode int_mode, int_innermode, from_mode;
1558 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1559 && is_a <scalar_int_mode> (mode, &int_mode)
1560 && is_a <scalar_int_mode> (innermode, &int_innermode)
1561 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1562 {
1563 /* If we are getting the low-order part of something that has been
1564 sign- or zero-extended, we can either just use the object being
1565 extended or make a narrower extension. If we want an even smaller
1566 piece than the size of the object being extended, call ourselves
1567 recursively.
1568
1569 This case is used mostly by combine and cse. */
1570
1571 if (from_mode == int_mode)
1572 return XEXP (x, 0);
1573 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1574 return gen_lowpart_common (int_mode, XEXP (x, 0));
1575 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1576 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1577 }
1578 else if (GET_CODE (x) == SUBREG || REG_P (x)
1579 || GET_CODE (x) == CONCAT || const_vec_p (x)
1580 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1581 || CONST_POLY_INT_P (x))
1582 return lowpart_subreg (mode, x, innermode);
1583
1584 /* Otherwise, we can't do this. */
1585 return 0;
1586 }
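
/* As an example of the extension case above,

	gen_lowpart_common (SImode, (zero_extend:DI (reg:SI R)))

   simply returns (reg:SI R), while asking for a narrower QImode lowpart
   of the same rtx recurses into the extended operand.  */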
1587 \f
1588 rtx
1589 gen_highpart (machine_mode mode, rtx x)
1590 {
1591 unsigned int msize = GET_MODE_SIZE (mode);
1592 rtx result;
1593
1594 /* This case loses if X is a subreg. To catch bugs early,
1595 complain if an invalid MODE is used even in other cases. */
1596 gcc_assert (msize <= UNITS_PER_WORD
1597 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1598
1599 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1600 subreg_highpart_offset (mode, GET_MODE (x)));
1601 gcc_assert (result);
1602
1603 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1604 the target if we have a MEM. gen_highpart must return a valid operand,
1605 emitting code if necessary to do so. */
1606 if (MEM_P (result))
1607 {
1608 result = validize_mem (result);
1609 gcc_assert (result);
1610 }
1611
1612 return result;
1613 }
1614
1615 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1616 can be a VOIDmode constant. */
1617 rtx
1618 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1619 {
1620 if (GET_MODE (exp) != VOIDmode)
1621 {
1622 gcc_assert (GET_MODE (exp) == innermode);
1623 return gen_highpart (outermode, exp);
1624 }
1625 return simplify_gen_subreg (outermode, exp, innermode,
1626 subreg_highpart_offset (outermode, innermode));
1627 }
1628
1629 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1630 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1631
1632 poly_uint64
1633 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1634 {
1635 gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1636 if (maybe_gt (outer_bytes, inner_bytes))
1637 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1638 return 0;
1639
1640 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1641 return inner_bytes - outer_bytes;
1642 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1643 return 0;
1644 else
1645 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1646 }
1647
1648 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1649 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1650
1651 poly_uint64
1652 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1653 {
1654 gcc_assert (known_ge (inner_bytes, outer_bytes));
1655
1656 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1657 return 0;
1658 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1659 return inner_bytes - outer_bytes;
1660 else
1661 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1662 (inner_bytes - outer_bytes)
1663 * BITS_PER_UNIT);
1664 }
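
/* A quick numeric check: with OUTER_BYTES == 4 and INNER_BYTES == 8 the
   lowpart offset is 0 and the highpart offset is 4 on a little-endian
   target, and the two values swap when BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN are both set.  */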
1665
1666 /* Return 1 iff X, assumed to be a SUBREG,
1667 refers to the least significant part of its containing reg.
1668 If X is not a SUBREG, always return 1 (it is its own low part!). */
1669
1670 int
1671 subreg_lowpart_p (const_rtx x)
1672 {
1673 if (GET_CODE (x) != SUBREG)
1674 return 1;
1675 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1676 return 0;
1677
1678 return known_eq (subreg_lowpart_offset (GET_MODE (x),
1679 GET_MODE (SUBREG_REG (x))),
1680 SUBREG_BYTE (x));
1681 }
1682 \f
1683 /* Return subword OFFSET of operand OP.
1684 The word number, OFFSET, is interpreted as the word number starting
1685 at the low-order address. OFFSET 0 is the low-order word if not
1686 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1687
1688 If we cannot extract the required word, we return zero. Otherwise,
1689 an rtx corresponding to the requested word will be returned.
1690
1691 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1692 reload has completed, a valid address will always be returned. After
1693 reload, if a valid address cannot be returned, we return zero.
1694
1695 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1696 it is the responsibility of the caller.
1697
1698 MODE is the mode of OP in case it is a CONST_INT.
1699
1700 ??? This is still rather broken for some cases. The problem for the
1701 moment is that all callers of this thing provide no 'goal mode' to
1702 tell us to work with. This exists because all callers were written
1703 in a word based SUBREG world.
1704 Now use of this function can be deprecated by simplify_subreg in most
1705 cases.
1706 */
1707
1708 rtx
1709 operand_subword (rtx op, poly_uint64 offset, int validate_address,
1710 machine_mode mode)
1711 {
1712 if (mode == VOIDmode)
1713 mode = GET_MODE (op);
1714
1715 gcc_assert (mode != VOIDmode);
1716
1717 /* If OP is narrower than a word, fail. */
1718 if (mode != BLKmode
1719 && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1720 return 0;
1721
1722 /* If we want a word outside OP, return zero. */
1723 if (mode != BLKmode
1724 && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1725 return const0_rtx;
1726
1727 /* Form a new MEM at the requested address. */
1728 if (MEM_P (op))
1729 {
1730 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1731
1732 if (! validate_address)
1733 return new_rtx;
1734
1735 else if (reload_completed)
1736 {
1737 if (! strict_memory_address_addr_space_p (word_mode,
1738 XEXP (new_rtx, 0),
1739 MEM_ADDR_SPACE (op)))
1740 return 0;
1741 }
1742 else
1743 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1744 }
1745
1746 /* Rest can be handled by simplify_subreg. */
1747 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1748 }
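
/* For example, splitting a DImode operand OP into word_mode halves on a
   !WORDS_BIG_ENDIAN target might look like

	rtx lo = operand_subword (op, 0, 1, DImode);
	rtx hi = operand_subword (op, 1, 1, DImode);

   either of which can be zero if a valid word cannot be extracted (see
   operand_subword_force below for the never-failing variant).  */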
1749
1750 /* Similar to `operand_subword', but never return 0. If we can't
1751 extract the required subword, put OP into a register and try again.
1752 The second attempt must succeed. We always validate the address in
1753 this case.
1754
1755 MODE is the mode of OP, in case it is CONST_INT. */
1756
1757 rtx
1758 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1759 {
1760 rtx result = operand_subword (op, offset, 1, mode);
1761
1762 if (result)
1763 return result;
1764
1765 if (mode != BLKmode && mode != VOIDmode)
1766 {
1767 /* If this is a register which cannot be accessed by words, copy it
1768 to a pseudo register. */
1769 if (REG_P (op))
1770 op = copy_to_reg (op);
1771 else
1772 op = force_reg (mode, op);
1773 }
1774
1775 result = operand_subword (op, offset, 1, mode);
1776 gcc_assert (result);
1777
1778 return result;
1779 }
1780 \f
1781 mem_attrs::mem_attrs ()
1782 : expr (NULL_TREE),
1783 offset (0),
1784 size (0),
1785 alias (0),
1786 align (0),
1787 addrspace (ADDR_SPACE_GENERIC),
1788 offset_known_p (false),
1789 size_known_p (false)
1790 {}
1791
1792 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1793 and 0 otherwise. */
1794
1795 int
1796 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1797 {
1798 if (expr1 == expr2)
1799 return 1;
1800
1801 if (! expr1 || ! expr2)
1802 return 0;
1803
1804 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1805 return 0;
1806
1807 return operand_equal_p (expr1, expr2, 0);
1808 }
1809
1810 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1811 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1812 -1 if not known. */
1813
1814 int
1815 get_mem_align_offset (rtx mem, unsigned int align)
1816 {
1817 tree expr;
1818 poly_uint64 offset;
1819
1820 /* This function can't use
1821 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1822 || (MAX (MEM_ALIGN (mem),
1823 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1824 < align))
1825 return -1;
1826 else
1827 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1828 for two reasons:
1829 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1830 for <variable>. get_inner_reference doesn't handle it and
1831 even if it did, the alignment in that case needs to be determined
1832 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1833 - it would do a suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1834 isn't sufficiently aligned, the object it is in might be. */
1835 gcc_assert (MEM_P (mem));
1836 expr = MEM_EXPR (mem);
1837 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1838 return -1;
1839
1840 offset = MEM_OFFSET (mem);
1841 if (DECL_P (expr))
1842 {
1843 if (DECL_ALIGN (expr) < align)
1844 return -1;
1845 }
1846 else if (INDIRECT_REF_P (expr))
1847 {
1848 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1849 return -1;
1850 }
1851 else if (TREE_CODE (expr) == COMPONENT_REF)
1852 {
1853 while (1)
1854 {
1855 tree inner = TREE_OPERAND (expr, 0);
1856 tree field = TREE_OPERAND (expr, 1);
1857 tree byte_offset = component_ref_field_offset (expr);
1858 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1859
1860 poly_uint64 suboffset;
1861 if (!byte_offset
1862 || !poly_int_tree_p (byte_offset, &suboffset)
1863 || !tree_fits_uhwi_p (bit_offset))
1864 return -1;
1865
1866 offset += suboffset;
1867 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1868
1869 if (inner == NULL_TREE)
1870 {
1871 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1872 < (unsigned int) align)
1873 return -1;
1874 break;
1875 }
1876 else if (DECL_P (inner))
1877 {
1878 if (DECL_ALIGN (inner) < align)
1879 return -1;
1880 break;
1881 }
1882 else if (TREE_CODE (inner) != COMPONENT_REF)
1883 return -1;
1884 expr = inner;
1885 }
1886 }
1887 else
1888 return -1;
1889
1890 HOST_WIDE_INT misalign;
1891 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1892 return -1;
1893 return misalign;
1894 }
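
/* Illustrative sketch: if MEM's attributes say it refers to a 16-byte
   aligned declaration at a known offset of 12 bytes, asking for 128-bit
   alignment returns 12, i.e. XEXP (mem, 0) minus 12 is known to be 16-byte
   aligned.  MEM and ADDR are assumed to be set up by the caller, with MEM
   carrying attributes from set_mem_attributes:

     int off = get_mem_align_offset (mem, 128);
     if (off >= 0)
       addr = plus_constant (get_address_mode (mem), XEXP (mem, 0), -off);
*/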
1895
1896 /* Given REF (a MEM) and T, either the type of REF or the expression
1897 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1898 if we are making a new object of this type. BITPOS is nonzero if
1899 there is an offset outstanding on T that will be applied later. */
1900
1901 void
1902 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1903 poly_int64 bitpos)
1904 {
1905 poly_int64 apply_bitpos = 0;
1906 tree type;
1907 struct mem_attrs attrs, *defattrs, *refattrs;
1908 addr_space_t as;
1909
1910 /* It can happen that type_for_mode was given a mode for which there
1911 is no language-level type. In which case it returns NULL, which
1912 we can see here. */
1913 if (t == NULL_TREE)
1914 return;
1915
1916 type = TYPE_P (t) ? t : TREE_TYPE (t);
1917 if (type == error_mark_node)
1918 return;
1919
1920 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1921 wrong answer, as it assumes that DECL_RTL already has the right alias
1922 info. Callers should not set DECL_RTL until after the call to
1923 set_mem_attributes. */
1924 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1925
1926 /* Get the alias set from the expression or type (perhaps using a
1927 front-end routine) and use it. */
1928 attrs.alias = get_alias_set (t);
1929
1930 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1931 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1932
1933 /* Default values from pre-existing memory attributes if present. */
1934 refattrs = MEM_ATTRS (ref);
1935 if (refattrs)
1936 {
1937 /* ??? Can this ever happen? Calling this routine on a MEM that
1938 already carries memory attributes should probably be invalid. */
1939 attrs.expr = refattrs->expr;
1940 attrs.offset_known_p = refattrs->offset_known_p;
1941 attrs.offset = refattrs->offset;
1942 attrs.size_known_p = refattrs->size_known_p;
1943 attrs.size = refattrs->size;
1944 attrs.align = refattrs->align;
1945 }
1946
1947 /* Otherwise, default values from the mode of the MEM reference. */
1948 else
1949 {
1950 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1951 gcc_assert (!defattrs->expr);
1952 gcc_assert (!defattrs->offset_known_p);
1953
1954 /* Respect mode size. */
1955 attrs.size_known_p = defattrs->size_known_p;
1956 attrs.size = defattrs->size;
1957 /* ??? Is this really necessary? We probably should always get
1958 the size from the type below. */
1959
1960 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1961 if T is an object, always compute the object alignment below. */
1962 if (TYPE_P (t))
1963 attrs.align = defattrs->align;
1964 else
1965 attrs.align = BITS_PER_UNIT;
1966 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1967 e.g. if the type carries an alignment attribute. Should we be
1968 able to simply always use TYPE_ALIGN? */
1969 }
1970
1971 /* We can set the alignment from the type if we are making an object or if
1972 this is an INDIRECT_REF. */
1973 if (objectp || TREE_CODE (t) == INDIRECT_REF)
1974 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1975
1976 /* If the size is known, we can set that. */
1977 tree new_size = TYPE_SIZE_UNIT (type);
1978
1979 /* The address-space is that of the type. */
1980 as = TYPE_ADDR_SPACE (type);
1981
1982 /* If T is not a type, we may be able to deduce some more information about
1983 the expression. */
1984 if (! TYPE_P (t))
1985 {
1986 tree base;
1987
1988 if (TREE_THIS_VOLATILE (t))
1989 MEM_VOLATILE_P (ref) = 1;
1990
1991 /* Now remove any conversions: they don't change what the underlying
1992 object is. Likewise for SAVE_EXPR. */
1993 while (CONVERT_EXPR_P (t)
1994 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1995 || TREE_CODE (t) == SAVE_EXPR)
1996 t = TREE_OPERAND (t, 0);
1997
1998 /* Note whether this expression can trap. */
1999 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2000
2001 base = get_base_address (t);
2002 if (base)
2003 {
2004 if (DECL_P (base)
2005 && TREE_READONLY (base)
2006 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2007 && !TREE_THIS_VOLATILE (base))
2008 MEM_READONLY_P (ref) = 1;
2009
2010 /* Mark static const strings readonly as well. */
2011 if (TREE_CODE (base) == STRING_CST
2012 && TREE_READONLY (base)
2013 && TREE_STATIC (base))
2014 MEM_READONLY_P (ref) = 1;
2015
2016 /* Address-space information is on the base object. */
2017 if (TREE_CODE (base) == MEM_REF
2018 || TREE_CODE (base) == TARGET_MEM_REF)
2019 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2020 0))));
2021 else
2022 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2023 }
2024
2025 /* If this expression uses its parent's alias set, mark it such
2026 that we won't change it. */
2027 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2028 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2029
2030 /* If this is a decl, set the attributes of the MEM from it. */
2031 if (DECL_P (t))
2032 {
2033 attrs.expr = t;
2034 attrs.offset_known_p = true;
2035 attrs.offset = 0;
2036 apply_bitpos = bitpos;
2037 new_size = DECL_SIZE_UNIT (t);
2038 }
2039
2040 /* ??? If we end up with a constant here do record a MEM_EXPR. */
2041 else if (CONSTANT_CLASS_P (t))
2042 ;
2043
2044 /* If this is a field reference, record it. */
2045 else if (TREE_CODE (t) == COMPONENT_REF)
2046 {
2047 attrs.expr = t;
2048 attrs.offset_known_p = true;
2049 attrs.offset = 0;
2050 apply_bitpos = bitpos;
2051 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2052 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2053 }
2054
2055 /* If this is an array reference, look for an outer field reference. */
2056 else if (TREE_CODE (t) == ARRAY_REF)
2057 {
2058 tree off_tree = size_zero_node;
2059 /* We can't modify t, because we use it at the end of the
2060 function. */
2061 tree t2 = t;
2062
2063 do
2064 {
2065 tree index = TREE_OPERAND (t2, 1);
2066 tree low_bound = array_ref_low_bound (t2);
2067 tree unit_size = array_ref_element_size (t2);
2068
2069 /* We assume all arrays have sizes that are a multiple of a byte.
2070 First subtract the lower bound, if any, in the type of the
2071 index, then convert to sizetype and multiply by the size of
2072 the array element. */
2073 if (! integer_zerop (low_bound))
2074 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
2075 index, low_bound);
2076
2077 off_tree = size_binop (PLUS_EXPR,
2078 size_binop (MULT_EXPR,
2079 fold_convert (sizetype,
2080 index),
2081 unit_size),
2082 off_tree);
2083 t2 = TREE_OPERAND (t2, 0);
2084 }
2085 while (TREE_CODE (t2) == ARRAY_REF);
2086
2087 if (DECL_P (t2)
2088 || (TREE_CODE (t2) == COMPONENT_REF
2089 /* For trailing arrays t2 doesn't have a size that
2090 covers all valid accesses. */
2091 && ! array_at_struct_end_p (t)))
2092 {
2093 attrs.expr = t2;
2094 attrs.offset_known_p = false;
2095 if (poly_int_tree_p (off_tree, &attrs.offset))
2096 {
2097 attrs.offset_known_p = true;
2098 apply_bitpos = bitpos;
2099 }
2100 }
2101 /* Else do not record a MEM_EXPR. */
2102 }
2103
2104 /* If this is an indirect reference, record it. */
2105 else if (TREE_CODE (t) == MEM_REF
2106 || TREE_CODE (t) == TARGET_MEM_REF)
2107 {
2108 attrs.expr = t;
2109 attrs.offset_known_p = true;
2110 attrs.offset = 0;
2111 apply_bitpos = bitpos;
2112 }
2113
2114 /* Compute the alignment. */
2115 unsigned int obj_align;
2116 unsigned HOST_WIDE_INT obj_bitpos;
2117 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2118 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2119 if (diff_align != 0)
2120 obj_align = MIN (obj_align, diff_align);
2121 attrs.align = MAX (attrs.align, obj_align);
2122 }
2123
2124 poly_uint64 const_size;
2125 if (poly_int_tree_p (new_size, &const_size))
2126 {
2127 attrs.size_known_p = true;
2128 attrs.size = const_size;
2129 }
2130
2131 /* If we modified OFFSET based on T, then subtract the outstanding
2132 bit position offset. Similarly, increase the size of the accessed
2133 object to contain the negative offset. */
2134 if (maybe_ne (apply_bitpos, 0))
2135 {
2136 gcc_assert (attrs.offset_known_p);
2137 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2138 attrs.offset -= bytepos;
2139 if (attrs.size_known_p)
2140 attrs.size += bytepos;
2141 }
2142
2143 /* Now set the attributes we computed above. */
2144 attrs.addrspace = as;
2145 set_mem_attrs (ref, &attrs);
2146 }
2147
2148 void
2149 set_mem_attributes (rtx ref, tree t, int objectp)
2150 {
2151 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2152 }
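
/* Illustrative sketch of the usual pattern when expanding a reference to a
   declaration (DECL and ADDR are assumed to come from the caller):

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   Afterwards MEM_EXPR, MEM_ALIGN, MEM_SIZE and the alias set of MEM reflect
   DECL rather than just the bare address.  */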
2153
2154 /* Set the alias set of MEM to SET. */
2155
2156 void
2157 set_mem_alias_set (rtx mem, alias_set_type set)
2158 {
2159 /* If the new and old alias sets don't conflict, something is wrong. */
2160 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2161 mem_attrs attrs (*get_mem_attrs (mem));
2162 attrs.alias = set;
2163 set_mem_attrs (mem, &attrs);
2164 }
2165
2166 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2167
2168 void
2169 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2170 {
2171 mem_attrs attrs (*get_mem_attrs (mem));
2172 attrs.addrspace = addrspace;
2173 set_mem_attrs (mem, &attrs);
2174 }
2175
2176 /* Set the alignment of MEM to ALIGN bits. */
2177
2178 void
2179 set_mem_align (rtx mem, unsigned int align)
2180 {
2181 mem_attrs attrs (*get_mem_attrs (mem));
2182 attrs.align = align;
2183 set_mem_attrs (mem, &attrs);
2184 }
2185
2186 /* Set the expr for MEM to EXPR. */
2187
2188 void
2189 set_mem_expr (rtx mem, tree expr)
2190 {
2191 mem_attrs attrs (*get_mem_attrs (mem));
2192 attrs.expr = expr;
2193 set_mem_attrs (mem, &attrs);
2194 }
2195
2196 /* Set the offset of MEM to OFFSET. */
2197
2198 void
2199 set_mem_offset (rtx mem, poly_int64 offset)
2200 {
2201 mem_attrs attrs (*get_mem_attrs (mem));
2202 attrs.offset_known_p = true;
2203 attrs.offset = offset;
2204 set_mem_attrs (mem, &attrs);
2205 }
2206
2207 /* Clear the offset of MEM. */
2208
2209 void
2210 clear_mem_offset (rtx mem)
2211 {
2212 mem_attrs attrs (*get_mem_attrs (mem));
2213 attrs.offset_known_p = false;
2214 set_mem_attrs (mem, &attrs);
2215 }
2216
2217 /* Set the size of MEM to SIZE. */
2218
2219 void
2220 set_mem_size (rtx mem, poly_int64 size)
2221 {
2222 mem_attrs attrs (*get_mem_attrs (mem));
2223 attrs.size_known_p = true;
2224 attrs.size = size;
2225 set_mem_attrs (mem, &attrs);
2226 }
2227
2228 /* Clear the size of MEM. */
2229
2230 void
2231 clear_mem_size (rtx mem)
2232 {
2233 mem_attrs attrs (*get_mem_attrs (mem));
2234 attrs.size_known_p = false;
2235 set_mem_attrs (mem, &attrs);
2236 }
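
/* Illustrative sketch of these setters on a hand-built stack slot (SLOT is
   a hypothetical SImode temporary; assign_stack_local already fills in
   reasonable attributes, so the calls below merely restate them):

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
     set_mem_align (slot, GET_MODE_ALIGNMENT (SImode));
     set_mem_size (slot, GET_MODE_SIZE (SImode));
     clear_mem_offset (slot);

   Each setter copies the current mem_attrs, changes one field and
   reinstalls the block through set_mem_attrs.  */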
2237 \f
2238 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2239 and its address changed to ADDR. (VOIDmode means don't change the mode.
2240 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2241 returned memory location is required to be valid. INPLACE is true if any
2242 changes can be made directly to MEMREF or false if MEMREF must be treated
2243 as immutable.
2244
2245 The memory attributes are not changed. */
2246
2247 static rtx
2248 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2249 bool inplace)
2250 {
2251 addr_space_t as;
2252 rtx new_rtx;
2253
2254 gcc_assert (MEM_P (memref));
2255 as = MEM_ADDR_SPACE (memref);
2256 if (mode == VOIDmode)
2257 mode = GET_MODE (memref);
2258 if (addr == 0)
2259 addr = XEXP (memref, 0);
2260 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2261 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2262 return memref;
2263
2264 /* Don't validate the address for LRA. LRA can make the address valid
2265 by itself in the most efficient way. */
2266 if (validate && !lra_in_progress)
2267 {
2268 if (reload_in_progress || reload_completed)
2269 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2270 else
2271 addr = memory_address_addr_space (mode, addr, as);
2272 }
2273
2274 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2275 return memref;
2276
2277 if (inplace)
2278 {
2279 XEXP (memref, 0) = addr;
2280 return memref;
2281 }
2282
2283 new_rtx = gen_rtx_MEM (mode, addr);
2284 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2285 return new_rtx;
2286 }
2287
2288 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2289 way we are changing MEMREF, so we only preserve the alias set. */
2290
2291 rtx
2292 change_address (rtx memref, machine_mode mode, rtx addr)
2293 {
2294 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2295 machine_mode mmode = GET_MODE (new_rtx);
2296 struct mem_attrs *defattrs;
2297
2298 mem_attrs attrs (*get_mem_attrs (memref));
2299 defattrs = mode_mem_attrs[(int) mmode];
2300 attrs.expr = NULL_TREE;
2301 attrs.offset_known_p = false;
2302 attrs.size_known_p = defattrs->size_known_p;
2303 attrs.size = defattrs->size;
2304 attrs.align = defattrs->align;
2305
2306 /* If there are no changes, just return the original memory reference. */
2307 if (new_rtx == memref)
2308 {
2309 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2310 return new_rtx;
2311
2312 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2313 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2314 }
2315
2316 set_mem_attrs (new_rtx, &attrs);
2317 return new_rtx;
2318 }
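
/* Illustrative sketch: reading one word out of a BLKmode reference by
   giving it a new mode and address (BLK_MEM is assumed to be an existing
   BLKmode MEM):

     rtx addr = plus_constant (get_address_mode (blk_mem),
                               XEXP (blk_mem, 0), 4);
     rtx word = change_address (blk_mem, word_mode, addr);

   Only the alias set survives; the expr, offset and size attributes are
   reset, as the code above shows.  */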
2319
2320 /* Return a memory reference like MEMREF, but with its mode changed
2321 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2322 nonzero, the memory address is forced to be valid.
2323 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2324 and the caller is responsible for adjusting MEMREF base register.
2325 If ADJUST_OBJECT is zero, the underlying object associated with the
2326 memory reference is left unchanged and the caller is responsible for
2327 dealing with it. Otherwise, if the new memory reference is outside
2328 the underlying object, even partially, then the object is dropped.
2329 SIZE, if nonzero, is the size of an access in cases where MODE
2330 has no inherent size. */
2331
2332 rtx
2333 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2334 int validate, int adjust_address, int adjust_object,
2335 poly_int64 size)
2336 {
2337 rtx addr = XEXP (memref, 0);
2338 rtx new_rtx;
2339 scalar_int_mode address_mode;
2340 struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2341 unsigned HOST_WIDE_INT max_align;
2342 #ifdef POINTERS_EXTEND_UNSIGNED
2343 scalar_int_mode pointer_mode
2344 = targetm.addr_space.pointer_mode (attrs.addrspace);
2345 #endif
2346
2347 /* VOIDmode means no mode change for change_address_1. */
2348 if (mode == VOIDmode)
2349 mode = GET_MODE (memref);
2350
2351 /* Take the size of non-BLKmode accesses from the mode. */
2352 defattrs = mode_mem_attrs[(int) mode];
2353 if (defattrs->size_known_p)
2354 size = defattrs->size;
2355
2356 /* If there are no changes, just return the original memory reference. */
2357 if (mode == GET_MODE (memref)
2358 && known_eq (offset, 0)
2359 && (known_eq (size, 0)
2360 || (attrs.size_known_p && known_eq (attrs.size, size)))
2361 && (!validate || memory_address_addr_space_p (mode, addr,
2362 attrs.addrspace)))
2363 return memref;
2364
2365 /* ??? Prefer to create garbage instead of creating shared rtl.
2366 This may happen even if offset is nonzero -- consider
2367 (plus (plus reg reg) const_int) -- so do this always. */
2368 addr = copy_rtx (addr);
2369
2370 /* Convert a possibly large offset to a signed value within the
2371 range of the target address space. */
2372 address_mode = get_address_mode (memref);
2373 offset = trunc_int_for_mode (offset, address_mode);
2374
2375 if (adjust_address)
2376 {
2377 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2378 object, we can merge it into the LO_SUM. */
2379 if (GET_MODE (memref) != BLKmode
2380 && GET_CODE (addr) == LO_SUM
2381 && known_in_range_p (offset,
2382 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2383 / BITS_PER_UNIT)))
2384 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2385 plus_constant (address_mode,
2386 XEXP (addr, 1), offset));
2387 #ifdef POINTERS_EXTEND_UNSIGNED
2388 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2389 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2390 the fact that pointers are not allowed to overflow. */
2391 else if (POINTERS_EXTEND_UNSIGNED > 0
2392 && GET_CODE (addr) == ZERO_EXTEND
2393 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2394 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2395 addr = gen_rtx_ZERO_EXTEND (address_mode,
2396 plus_constant (pointer_mode,
2397 XEXP (addr, 0), offset));
2398 #endif
2399 else
2400 addr = plus_constant (address_mode, addr, offset);
2401 }
2402
2403 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2404
2405 /* If the address is a REG, change_address_1 rightfully returns memref,
2406 but this would destroy memref's MEM_ATTRS. */
2407 if (new_rtx == memref && maybe_ne (offset, 0))
2408 new_rtx = copy_rtx (new_rtx);
2409
2410 /* Conservatively drop the object if we don't know where we start from. */
2411 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2412 {
2413 attrs.expr = NULL_TREE;
2414 attrs.alias = 0;
2415 }
2416
2417 /* Compute the new values of the memory attributes due to this adjustment.
2418 We add the offsets and update the alignment. */
2419 if (attrs.offset_known_p)
2420 {
2421 attrs.offset += offset;
2422
2423 /* Drop the object if the new left end is not within its bounds. */
2424 if (adjust_object && maybe_lt (attrs.offset, 0))
2425 {
2426 attrs.expr = NULL_TREE;
2427 attrs.alias = 0;
2428 }
2429 }
2430
2431 /* Compute the new alignment by taking the MIN of the alignment and the
2432 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2433 is zero. */
2434 if (maybe_ne (offset, 0))
2435 {
2436 max_align = known_alignment (offset) * BITS_PER_UNIT;
2437 attrs.align = MIN (attrs.align, max_align);
2438 }
2439
2440 if (maybe_ne (size, 0))
2441 {
2442 /* Drop the object if the new right end is not within its bounds. */
2443 if (adjust_object && maybe_gt (offset + size, attrs.size))
2444 {
2445 attrs.expr = NULL_TREE;
2446 attrs.alias = 0;
2447 }
2448 attrs.size_known_p = true;
2449 attrs.size = size;
2450 }
2451 else if (attrs.size_known_p)
2452 {
2453 gcc_assert (!adjust_object);
2454 attrs.size -= offset;
2455 /* ??? The store_by_pieces machinery generates negative sizes,
2456 so don't assert for that here. */
2457 }
2458
2459 set_mem_attrs (new_rtx, &attrs);
2460
2461 return new_rtx;
2462 }
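
/* Most callers go through the adjust_address and adjust_address_nv
   wrappers (see emit-rtl.h) rather than calling this function directly.
   An illustrative sketch, splitting a DImode memory reference into its two
   SImode halves on a 32-bit target (DIMEM is assumed to be an existing
   DImode MEM):

     rtx lo = adjust_address (dimem, SImode, 0);
     rtx hi = adjust_address (dimem, SImode, 4);

   Which half is more significant depends on the target's byte and word
   endianness.  */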
2463
2464 /* Return a memory reference like MEMREF, but with its mode changed
2465 to MODE and its address changed to ADDR, which is assumed to be
2466 MEMREF offset by OFFSET bytes. If VALIDATE is
2467 nonzero, the memory address is forced to be valid. */
2468
2469 rtx
2470 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2471 poly_int64 offset, int validate)
2472 {
2473 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2474 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2475 }
2476
2477 /* Return a memory reference like MEMREF, but whose address is changed by
2478 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2479 known to be in OFFSET (possibly 1). */
2480
2481 rtx
2482 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2483 {
2484 rtx new_rtx, addr = XEXP (memref, 0);
2485 machine_mode address_mode;
2486 struct mem_attrs *defattrs;
2487
2488 mem_attrs attrs (*get_mem_attrs (memref));
2489 address_mode = get_address_mode (memref);
2490 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2491
2492 /* At this point we don't know _why_ the address is invalid. It
2493 could have secondary memory references, multiplies or anything.
2494
2495 However, if we did go and rearrange things, we can wind up not
2496 being able to recognize the magic around pic_offset_table_rtx.
2497 This stuff is fragile, and is yet another example of why it is
2498 bad to expose PIC machinery too early. */
2499 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2500 attrs.addrspace)
2501 && GET_CODE (addr) == PLUS
2502 && XEXP (addr, 0) == pic_offset_table_rtx)
2503 {
2504 addr = force_reg (GET_MODE (addr), addr);
2505 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2506 }
2507
2508 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2509 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2510
2511 /* If there are no changes, just return the original memory reference. */
2512 if (new_rtx == memref)
2513 return new_rtx;
2514
2515 /* Update the alignment to reflect the offset. Reset the offset, which
2516 we don't know. */
2517 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2518 attrs.offset_known_p = false;
2519 attrs.size_known_p = defattrs->size_known_p;
2520 attrs.size = defattrs->size;
2521 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2522 set_mem_attrs (new_rtx, &attrs);
2523 return new_rtx;
2524 }
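
/* Illustrative sketch: addressing element IDX of an array of 8-byte
   elements held in ARRAY_MEM (both assumed to exist already).  POW2 is 8
   because the computed byte offset is always a multiple of 8:

     machine_mode amode = get_address_mode (array_mem);
     rtx byte_off = simplify_gen_binary (MULT, amode, idx, GEN_INT (8));
     rtx elt = offset_address (array_mem, byte_off, 8);
*/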
2525
2526 /* Return a memory reference like MEMREF, but with its address changed to
2527 ADDR. The caller is asserting that the actual piece of memory pointed
2528 to is the same, just the form of the address is being changed, such as
2529 by putting something into a register. INPLACE is true if any changes
2530 can be made directly to MEMREF or false if MEMREF must be treated as
2531 immutable. */
2532
2533 rtx
2534 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2535 {
2536 /* change_address_1 copies the memory attribute structure without change
2537 and that's exactly what we want here. */
2538 update_temp_slot_address (XEXP (memref, 0), addr);
2539 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2540 }
2541
2542 /* Likewise, but the reference is not required to be valid. */
2543
2544 rtx
2545 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2546 {
2547 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2548 }
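
/* Illustrative sketch of the most common use of replace_equiv_address:
   forcing an address that some pattern cannot accept into a register while
   keeping the memory attributes (MEM is assumed to be an existing MEM
   whose address turned out to be invalid for the insn being built):

     rtx addr = force_reg (get_address_mode (mem), XEXP (mem, 0));
     mem = replace_equiv_address (mem, addr);
*/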
2549
2550 /* Return a memory reference like MEMREF, but with its mode widened to
2551 MODE and offset by OFFSET. This would be used by targets that e.g.
2552 cannot issue QImode memory operations and have to use SImode memory
2553 operations plus masking logic. */
2554
2555 rtx
2556 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2557 {
2558 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2559 unsigned int size = GET_MODE_SIZE (mode);
2560
2561 /* If there are no changes, just return the original memory reference. */
2562 if (new_rtx == memref)
2563 return new_rtx;
2564
2565 mem_attrs attrs (*get_mem_attrs (new_rtx));
2566
2567 /* If we don't know what offset we were at within the expression, then
2568 we can't know if we've overstepped the bounds. */
2569 if (! attrs.offset_known_p)
2570 attrs.expr = NULL_TREE;
2571
2572 while (attrs.expr)
2573 {
2574 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2575 {
2576 tree field = TREE_OPERAND (attrs.expr, 1);
2577 tree offset = component_ref_field_offset (attrs.expr);
2578
2579 if (! DECL_SIZE_UNIT (field))
2580 {
2581 attrs.expr = NULL_TREE;
2582 break;
2583 }
2584
2585 /* Is the field at least as large as the access? If so, ok,
2586 otherwise strip back to the containing structure. */
2587 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2588 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2589 && known_ge (attrs.offset, 0))
2590 break;
2591
2592 poly_uint64 suboffset;
2593 if (!poly_int_tree_p (offset, &suboffset))
2594 {
2595 attrs.expr = NULL_TREE;
2596 break;
2597 }
2598
2599 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2600 attrs.offset += suboffset;
2601 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2602 / BITS_PER_UNIT);
2603 }
2604 /* Similarly for the decl. */
2605 else if (DECL_P (attrs.expr)
2606 && DECL_SIZE_UNIT (attrs.expr)
2607 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2608 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2609 size)
2610 && known_ge (attrs.offset, 0))
2611 break;
2612 else
2613 {
2614 /* The widened memory access overflows the expression, which means
2615 that it could alias another expression. Zap it. */
2616 attrs.expr = NULL_TREE;
2617 break;
2618 }
2619 }
2620
2621 if (! attrs.expr)
2622 attrs.offset_known_p = false;
2623
2624 /* The widened memory may alias other stuff, so zap the alias set. */
2625 /* ??? Maybe use get_alias_set on any remaining expression. */
2626 attrs.alias = 0;
2627 attrs.size_known_p = true;
2628 attrs.size = size;
2629 set_mem_attrs (new_rtx, &attrs);
2630 return new_rtx;
2631 }
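
/* Illustrative sketch for a target that cannot do byte loads directly:
   widen a QImode reference to SImode and extract the byte afterwards
   (BYTE_MEM is assumed to be an existing QImode MEM whose address is
   aligned enough for the wider access):

     rtx wide = widen_memory_access (byte_mem, SImode, 0);
     rtx tmp = copy_to_reg (wide);

   The desired byte would then be isolated with shifts and masks; note that
   the widened MEM deliberately gets alias set 0, since it may overlap
   neighbouring objects.  */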
2632 \f
2633 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2634 static GTY(()) tree spill_slot_decl;
2635
2636 tree
2637 get_spill_slot_decl (bool force_build_p)
2638 {
2639 tree d = spill_slot_decl;
2640 rtx rd;
2641
2642 if (d || !force_build_p)
2643 return d;
2644
2645 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2646 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2647 DECL_ARTIFICIAL (d) = 1;
2648 DECL_IGNORED_P (d) = 1;
2649 TREE_USED (d) = 1;
2650 spill_slot_decl = d;
2651
2652 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2653 MEM_NOTRAP_P (rd) = 1;
2654 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2655 attrs.alias = new_alias_set ();
2656 attrs.expr = d;
2657 set_mem_attrs (rd, &attrs);
2658 SET_DECL_RTL (d, rd);
2659
2660 return d;
2661 }
2662
2663 /* Given MEM, a result from assign_stack_local, fill in the memory
2664 attributes as appropriate for a register allocator spill slot.
2665 These slots are not aliasable by other memory. We arrange for
2666 them all to use a single MEM_EXPR, so that the aliasing code can
2667 work properly in the case of shared spill slots. */
2668
2669 void
2670 set_mem_attrs_for_spill (rtx mem)
2671 {
2672 rtx addr;
2673
2674 mem_attrs attrs (*get_mem_attrs (mem));
2675 attrs.expr = get_spill_slot_decl (true);
2676 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2677 attrs.addrspace = ADDR_SPACE_GENERIC;
2678
2679 /* We expect the incoming memory to be of the form:
2680 (mem:MODE (plus (reg sfp) (const_int offset)))
2681 with perhaps the plus missing for offset = 0. */
2682 addr = XEXP (mem, 0);
2683 attrs.offset_known_p = true;
2684 strip_offset (addr, &attrs.offset);
2685
2686 set_mem_attrs (mem, &attrs);
2687 MEM_NOTRAP_P (mem) = 1;
2688 }
2689 \f
2690 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2691
2692 rtx_code_label *
2693 gen_label_rtx (void)
2694 {
2695 return as_a <rtx_code_label *> (
2696 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2697 NULL, label_num++, NULL));
2698 }
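
/* Illustrative sketch: the label returned here only becomes part of the
   insn stream once it is emitted, typically bracketing conditionally
   executed code:

     rtx_code_label *label = gen_label_rtx ();
     emit_jump (label);
     ...                        (insns being skipped)
     emit_label (label);
*/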
2699 \f
2700 /* For procedure integration. */
2701
2702 /* Install new pointers to the first and last insns in the chain.
2703 Also, set cur_insn_uid to one higher than the last in use.
2704 Used for an inline-procedure after copying the insn chain. */
2705
2706 void
2707 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2708 {
2709 rtx_insn *insn;
2710
2711 set_first_insn (first);
2712 set_last_insn (last);
2713 cur_insn_uid = 0;
2714
2715 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2716 {
2717 int debug_count = 0;
2718
2719 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2720 cur_debug_insn_uid = 0;
2721
2722 for (insn = first; insn; insn = NEXT_INSN (insn))
2723 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2724 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2725 else
2726 {
2727 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2728 if (DEBUG_INSN_P (insn))
2729 debug_count++;
2730 }
2731
2732 if (debug_count)
2733 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2734 else
2735 cur_debug_insn_uid++;
2736 }
2737 else
2738 for (insn = first; insn; insn = NEXT_INSN (insn))
2739 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2740
2741 cur_insn_uid++;
2742 }
2743 \f
2744 /* Go through all the RTL insn bodies and copy any invalid shared
2745 structure. This routine should only be called once. */
2746
2747 static void
2748 unshare_all_rtl_1 (rtx_insn *insn)
2749 {
2750 /* Unshare just about everything else. */
2751 unshare_all_rtl_in_chain (insn);
2752
2753 /* Make sure the addresses of stack slots found outside the insn chain
2754 (such as, in DECL_RTL of a variable) are not shared
2755 with the insn chain.
2756
2757 This special care is necessary when the stack slot MEM does not
2758 actually appear in the insn chain. If it does appear, its address
2759 is unshared from all else at that point. */
2760 unsigned int i;
2761 rtx temp;
2762 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2763 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2764 }
2765
2766 /* Go through all the RTL insn bodies and copy any invalid shared
2767 structure, again. This is a fairly expensive thing to do so it
2768 should be done sparingly. */
2769
2770 void
2771 unshare_all_rtl_again (rtx_insn *insn)
2772 {
2773 rtx_insn *p;
2774 tree decl;
2775
2776 for (p = insn; p; p = NEXT_INSN (p))
2777 if (INSN_P (p))
2778 {
2779 reset_used_flags (PATTERN (p));
2780 reset_used_flags (REG_NOTES (p));
2781 if (CALL_P (p))
2782 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2783 }
2784
2785 /* Make sure that virtual stack slots are not shared. */
2786 set_used_decls (DECL_INITIAL (cfun->decl));
2787
2788 /* Make sure that virtual parameters are not shared. */
2789 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2790 set_used_flags (DECL_RTL (decl));
2791
2792 rtx temp;
2793 unsigned int i;
2794 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2795 reset_used_flags (temp);
2796
2797 unshare_all_rtl_1 (insn);
2798 }
2799
2800 unsigned int
2801 unshare_all_rtl (void)
2802 {
2803 unshare_all_rtl_1 (get_insns ());
2804
2805 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2806 {
2807 if (DECL_RTL_SET_P (decl))
2808 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2809 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2810 }
2811
2812 return 0;
2813 }
2814
2815
2816 /* Check that ORIG is not marked when it should not be, and mark ORIG as
2817 in use. Recursively do the same for subexpressions. */
2818
2819 static void
2820 verify_rtx_sharing (rtx orig, rtx insn)
2821 {
2822 rtx x = orig;
2823 int i;
2824 enum rtx_code code;
2825 const char *format_ptr;
2826
2827 if (x == 0)
2828 return;
2829
2830 code = GET_CODE (x);
2831
2832 /* These types may be freely shared. */
2833
2834 switch (code)
2835 {
2836 case REG:
2837 case DEBUG_EXPR:
2838 case VALUE:
2839 CASE_CONST_ANY:
2840 case SYMBOL_REF:
2841 case LABEL_REF:
2842 case CODE_LABEL:
2843 case PC:
2844 case CC0:
2845 case RETURN:
2846 case SIMPLE_RETURN:
2847 case SCRATCH:
2848 /* SCRATCH must be shared because each one represents a distinct value. */
2849 return;
2850 case CLOBBER:
2851 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2852 clobbers or clobbers of hard registers that originated as pseudos.
2853 This is needed to allow safe register renaming. */
2854 if (REG_P (XEXP (x, 0))
2855 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2856 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2857 return;
2858 break;
2859
2860 case CONST:
2861 if (shared_const_p (orig))
2862 return;
2863 break;
2864
2865 case MEM:
2866 /* A MEM is allowed to be shared if its address is constant. */
2867 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2868 || reload_completed || reload_in_progress)
2869 return;
2870
2871 break;
2872
2873 default:
2874 break;
2875 }
2876
2877 /* This rtx may not be shared. If it has already been seen,
2878 replace it with a copy of itself. */
2879 if (flag_checking && RTX_FLAG (x, used))
2880 {
2881 error ("invalid rtl sharing found in the insn");
2882 debug_rtx (insn);
2883 error ("shared rtx");
2884 debug_rtx (x);
2885 internal_error ("internal consistency failure");
2886 }
2887 gcc_assert (!RTX_FLAG (x, used));
2888
2889 RTX_FLAG (x, used) = 1;
2890
2891 /* Now scan the subexpressions recursively. */
2892
2893 format_ptr = GET_RTX_FORMAT (code);
2894
2895 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2896 {
2897 switch (*format_ptr++)
2898 {
2899 case 'e':
2900 verify_rtx_sharing (XEXP (x, i), insn);
2901 break;
2902
2903 case 'E':
2904 if (XVEC (x, i) != NULL)
2905 {
2906 int j;
2907 int len = XVECLEN (x, i);
2908
2909 for (j = 0; j < len; j++)
2910 {
2911 /* We allow sharing of ASM_OPERANDS inside a single
2912 instruction. */
2913 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2914 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2915 == ASM_OPERANDS))
2916 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2917 else
2918 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2919 }
2920 }
2921 break;
2922 }
2923 }
2924 return;
2925 }
2926
2927 /* Reset used-flags for INSN. */
2928
2929 static void
2930 reset_insn_used_flags (rtx insn)
2931 {
2932 gcc_assert (INSN_P (insn));
2933 reset_used_flags (PATTERN (insn));
2934 reset_used_flags (REG_NOTES (insn));
2935 if (CALL_P (insn))
2936 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2937 }
2938
2939 /* Go through all the RTL insn bodies and clear all the USED bits. */
2940
2941 static void
2942 reset_all_used_flags (void)
2943 {
2944 rtx_insn *p;
2945
2946 for (p = get_insns (); p; p = NEXT_INSN (p))
2947 if (INSN_P (p))
2948 {
2949 rtx pat = PATTERN (p);
2950 if (GET_CODE (pat) != SEQUENCE)
2951 reset_insn_used_flags (p);
2952 else
2953 {
2954 gcc_assert (REG_NOTES (p) == NULL);
2955 for (int i = 0; i < XVECLEN (pat, 0); i++)
2956 {
2957 rtx insn = XVECEXP (pat, 0, i);
2958 if (INSN_P (insn))
2959 reset_insn_used_flags (insn);
2960 }
2961 }
2962 }
2963 }
2964
2965 /* Verify sharing in INSN. */
2966
2967 static void
2968 verify_insn_sharing (rtx insn)
2969 {
2970 gcc_assert (INSN_P (insn));
2971 verify_rtx_sharing (PATTERN (insn), insn);
2972 verify_rtx_sharing (REG_NOTES (insn), insn);
2973 if (CALL_P (insn))
2974 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2975 }
2976
2977 /* Go through all the RTL insn bodies and check that there is no unexpected
2978 sharing in between the subexpressions. */
2979
2980 DEBUG_FUNCTION void
2981 verify_rtl_sharing (void)
2982 {
2983 rtx_insn *p;
2984
2985 timevar_push (TV_VERIFY_RTL_SHARING);
2986
2987 reset_all_used_flags ();
2988
2989 for (p = get_insns (); p; p = NEXT_INSN (p))
2990 if (INSN_P (p))
2991 {
2992 rtx pat = PATTERN (p);
2993 if (GET_CODE (pat) != SEQUENCE)
2994 verify_insn_sharing (p);
2995 else
2996 for (int i = 0; i < XVECLEN (pat, 0); i++)
2997 {
2998 rtx insn = XVECEXP (pat, 0, i);
2999 if (INSN_P (insn))
3000 verify_insn_sharing (insn);
3001 }
3002 }
3003
3004 reset_all_used_flags ();
3005
3006 timevar_pop (TV_VERIFY_RTL_SHARING);
3007 }
3008
3009 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3010 Assumes the mark bits are cleared at entry. */
3011
3012 void
3013 unshare_all_rtl_in_chain (rtx_insn *insn)
3014 {
3015 for (; insn; insn = NEXT_INSN (insn))
3016 if (INSN_P (insn))
3017 {
3018 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3019 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3020 if (CALL_P (insn))
3021 CALL_INSN_FUNCTION_USAGE (insn)
3022 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3023 }
3024 }
3025
3026 /* Go through all virtual stack slots of a function and mark them as
3027 shared. We never replace the DECL_RTLs themselves with a copy,
3028 but expressions mentioned in a DECL_RTL cannot be shared with
3029 expressions in the instruction stream.
3030
3031 Note that reload may convert pseudo registers into memories in-place.
3032 Pseudo registers are always shared, but MEMs never are. Thus if we
3033 reset the used flags on MEMs in the instruction stream, we must set
3034 them again on MEMs that appear in DECL_RTLs. */
3035
3036 static void
3037 set_used_decls (tree blk)
3038 {
3039 tree t;
3040
3041 /* Mark decls. */
3042 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3043 if (DECL_RTL_SET_P (t))
3044 set_used_flags (DECL_RTL (t));
3045
3046 /* Now process sub-blocks. */
3047 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3048 set_used_decls (t);
3049 }
3050
3051 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3052 Recursively does the same for subexpressions. Uses
3053 copy_rtx_if_shared_1 to reduce stack space. */
3054
3055 rtx
3056 copy_rtx_if_shared (rtx orig)
3057 {
3058 copy_rtx_if_shared_1 (&orig);
3059 return orig;
3060 }
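
/* Illustrative sketch of the protocol: clear the used flags first, then
   let copy_rtx_if_shared copy whatever turns out to be referenced more
   than once (INSN is assumed to be an existing insn whose pattern may
   contain shared structure):

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
*/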
3061
3062 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3063 use. Recursively does the same for subexpressions. */
3064
3065 static void
3066 copy_rtx_if_shared_1 (rtx *orig1)
3067 {
3068 rtx x;
3069 int i;
3070 enum rtx_code code;
3071 rtx *last_ptr;
3072 const char *format_ptr;
3073 int copied = 0;
3074 int length;
3075
3076 /* Repeat is used to turn tail-recursion into iteration. */
3077 repeat:
3078 x = *orig1;
3079
3080 if (x == 0)
3081 return;
3082
3083 code = GET_CODE (x);
3084
3085 /* These types may be freely shared. */
3086
3087 switch (code)
3088 {
3089 case REG:
3090 case DEBUG_EXPR:
3091 case VALUE:
3092 CASE_CONST_ANY:
3093 case SYMBOL_REF:
3094 case LABEL_REF:
3095 case CODE_LABEL:
3096 case PC:
3097 case CC0:
3098 case RETURN:
3099 case SIMPLE_RETURN:
3100 case SCRATCH:
3101 /* SCRATCH must be shared because each one represents a distinct value. */
3102 return;
3103 case CLOBBER:
3104 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3105 clobbers or clobbers of hard registers that originated as pseudos.
3106 This is needed to allow safe register renaming. */
3107 if (REG_P (XEXP (x, 0))
3108 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3109 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3110 return;
3111 break;
3112
3113 case CONST:
3114 if (shared_const_p (x))
3115 return;
3116 break;
3117
3118 case DEBUG_INSN:
3119 case INSN:
3120 case JUMP_INSN:
3121 case CALL_INSN:
3122 case NOTE:
3123 case BARRIER:
3124 /* The chain of insns is not being copied. */
3125 return;
3126
3127 default:
3128 break;
3129 }
3130
3131 /* This rtx may not be shared. If it has already been seen,
3132 replace it with a copy of itself. */
3133
3134 if (RTX_FLAG (x, used))
3135 {
3136 x = shallow_copy_rtx (x);
3137 copied = 1;
3138 }
3139 RTX_FLAG (x, used) = 1;
3140
3141 /* Now scan the subexpressions recursively.
3142 We can store any replaced subexpressions directly into X
3143 since we know X is not shared! Any vectors in X
3144 must be copied if X was copied. */
3145
3146 format_ptr = GET_RTX_FORMAT (code);
3147 length = GET_RTX_LENGTH (code);
3148 last_ptr = NULL;
3149
3150 for (i = 0; i < length; i++)
3151 {
3152 switch (*format_ptr++)
3153 {
3154 case 'e':
3155 if (last_ptr)
3156 copy_rtx_if_shared_1 (last_ptr);
3157 last_ptr = &XEXP (x, i);
3158 break;
3159
3160 case 'E':
3161 if (XVEC (x, i) != NULL)
3162 {
3163 int j;
3164 int len = XVECLEN (x, i);
3165
3166 /* Copy the vector iff we copied the rtx and the length
3167 is nonzero. */
3168 if (copied && len > 0)
3169 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3170
3171 /* Call recursively on all inside the vector. */
3172 for (j = 0; j < len; j++)
3173 {
3174 if (last_ptr)
3175 copy_rtx_if_shared_1 (last_ptr);
3176 last_ptr = &XVECEXP (x, i, j);
3177 }
3178 }
3179 break;
3180 }
3181 }
3182 *orig1 = x;
3183 if (last_ptr)
3184 {
3185 orig1 = last_ptr;
3186 goto repeat;
3187 }
3188 return;
3189 }
3190
3191 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3192
3193 static void
3194 mark_used_flags (rtx x, int flag)
3195 {
3196 int i, j;
3197 enum rtx_code code;
3198 const char *format_ptr;
3199 int length;
3200
3201 /* Repeat is used to turn tail-recursion into iteration. */
3202 repeat:
3203 if (x == 0)
3204 return;
3205
3206 code = GET_CODE (x);
3207
3208 /* These types may be freely shared so we needn't do any resetting
3209 for them. */
3210
3211 switch (code)
3212 {
3213 case REG:
3214 case DEBUG_EXPR:
3215 case VALUE:
3216 CASE_CONST_ANY:
3217 case SYMBOL_REF:
3218 case CODE_LABEL:
3219 case PC:
3220 case CC0:
3221 case RETURN:
3222 case SIMPLE_RETURN:
3223 return;
3224
3225 case DEBUG_INSN:
3226 case INSN:
3227 case JUMP_INSN:
3228 case CALL_INSN:
3229 case NOTE:
3230 case LABEL_REF:
3231 case BARRIER:
3232 /* The chain of insns is not being copied. */
3233 return;
3234
3235 default:
3236 break;
3237 }
3238
3239 RTX_FLAG (x, used) = flag;
3240
3241 format_ptr = GET_RTX_FORMAT (code);
3242 length = GET_RTX_LENGTH (code);
3243
3244 for (i = 0; i < length; i++)
3245 {
3246 switch (*format_ptr++)
3247 {
3248 case 'e':
3249 if (i == length-1)
3250 {
3251 x = XEXP (x, i);
3252 goto repeat;
3253 }
3254 mark_used_flags (XEXP (x, i), flag);
3255 break;
3256
3257 case 'E':
3258 for (j = 0; j < XVECLEN (x, i); j++)
3259 mark_used_flags (XVECEXP (x, i, j), flag);
3260 break;
3261 }
3262 }
3263 }
3264
3265 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3266 to look for shared sub-parts. */
3267
3268 void
3269 reset_used_flags (rtx x)
3270 {
3271 mark_used_flags (x, 0);
3272 }
3273
3274 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3275 to look for shared sub-parts. */
3276
3277 void
3278 set_used_flags (rtx x)
3279 {
3280 mark_used_flags (x, 1);
3281 }
3282 \f
3283 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3284 Return X or the rtx for the pseudo reg the value of X was copied into.
3285 OTHER must be valid as a SET_DEST. */
3286
3287 rtx
3288 make_safe_from (rtx x, rtx other)
3289 {
3290 while (1)
3291 switch (GET_CODE (other))
3292 {
3293 case SUBREG:
3294 other = SUBREG_REG (other);
3295 break;
3296 case STRICT_LOW_PART:
3297 case SIGN_EXTEND:
3298 case ZERO_EXTEND:
3299 other = XEXP (other, 0);
3300 break;
3301 default:
3302 goto done;
3303 }
3304 done:
3305 if ((MEM_P (other)
3306 && ! CONSTANT_P (x)
3307 && !REG_P (x)
3308 && GET_CODE (x) != SUBREG)
3309 || (REG_P (other)
3310 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3311 || reg_mentioned_p (other, x))))
3312 {
3313 rtx temp = gen_reg_rtx (GET_MODE (x));
3314 emit_move_insn (temp, x);
3315 return temp;
3316 }
3317 return x;
3318 }
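
/* Illustrative sketch (VALUE and TARGET are assumed to be rtxes built by
   the caller): when TARGET is about to be clobbered before VALUE has been
   consumed, make_safe_from copies VALUE to a fresh pseudo if the two might
   interfere:

     value = make_safe_from (value, target);
     emit_move_insn (target, const0_rtx);
     ...                        (later uses of VALUE remain correct)
*/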
3319 \f
3320 /* Emission of insns (adding them to the doubly-linked list). */
3321
3322 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3323
3324 rtx_insn *
3325 get_last_insn_anywhere (void)
3326 {
3327 struct sequence_stack *seq;
3328 for (seq = get_current_sequence (); seq; seq = seq->next)
3329 if (seq->last != 0)
3330 return seq->last;
3331 return 0;
3332 }
3333
3334 /* Return the first nonnote insn emitted in current sequence or current
3335 function. This routine looks inside SEQUENCEs. */
3336
3337 rtx_insn *
3338 get_first_nonnote_insn (void)
3339 {
3340 rtx_insn *insn = get_insns ();
3341
3342 if (insn)
3343 {
3344 if (NOTE_P (insn))
3345 for (insn = next_insn (insn);
3346 insn && NOTE_P (insn);
3347 insn = next_insn (insn))
3348 continue;
3349 else
3350 {
3351 if (NONJUMP_INSN_P (insn)
3352 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3353 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3354 }
3355 }
3356
3357 return insn;
3358 }
3359
3360 /* Return the last nonnote insn emitted in current sequence or current
3361 function. This routine looks inside SEQUENCEs. */
3362
3363 rtx_insn *
3364 get_last_nonnote_insn (void)
3365 {
3366 rtx_insn *insn = get_last_insn ();
3367
3368 if (insn)
3369 {
3370 if (NOTE_P (insn))
3371 for (insn = previous_insn (insn);
3372 insn && NOTE_P (insn);
3373 insn = previous_insn (insn))
3374 continue;
3375 else
3376 {
3377 if (NONJUMP_INSN_P (insn))
3378 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3379 insn = seq->insn (seq->len () - 1);
3380 }
3381 }
3382
3383 return insn;
3384 }
3385
3386 /* Return the number of actual (non-debug) insns emitted in this
3387 function. */
3388
3389 int
3390 get_max_insn_count (void)
3391 {
3392 int n = cur_insn_uid;
3393
3394 /* The table size must be stable across -g, to avoid codegen
3395 differences due to debug insns, and not be affected by
3396 -fmin-insn-uid, to avoid excessive table size and to simplify
3397 debugging of -fcompare-debug failures. */
3398 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3399 n -= cur_debug_insn_uid;
3400 else
3401 n -= MIN_NONDEBUG_INSN_UID;
3402
3403 return n;
3404 }
3405
3406 \f
3407 /* Return the next insn. If it is a SEQUENCE, return the first insn
3408 of the sequence. */
3409
3410 rtx_insn *
3411 next_insn (rtx_insn *insn)
3412 {
3413 if (insn)
3414 {
3415 insn = NEXT_INSN (insn);
3416 if (insn && NONJUMP_INSN_P (insn)
3417 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3418 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3419 }
3420
3421 return insn;
3422 }
3423
3424 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3425 of the sequence. */
3426
3427 rtx_insn *
3428 previous_insn (rtx_insn *insn)
3429 {
3430 if (insn)
3431 {
3432 insn = PREV_INSN (insn);
3433 if (insn && NONJUMP_INSN_P (insn))
3434 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3435 insn = seq->insn (seq->len () - 1);
3436 }
3437
3438 return insn;
3439 }
3440
3441 /* Return the next insn after INSN that is not a NOTE. This routine does not
3442 look inside SEQUENCEs. */
3443
3444 rtx_insn *
3445 next_nonnote_insn (rtx_insn *insn)
3446 {
3447 while (insn)
3448 {
3449 insn = NEXT_INSN (insn);
3450 if (insn == 0 || !NOTE_P (insn))
3451 break;
3452 }
3453
3454 return insn;
3455 }
3456
3457 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3458 routine does not look inside SEQUENCEs. */
3459
3460 rtx_insn *
3461 next_nondebug_insn (rtx_insn *insn)
3462 {
3463 while (insn)
3464 {
3465 insn = NEXT_INSN (insn);
3466 if (insn == 0 || !DEBUG_INSN_P (insn))
3467 break;
3468 }
3469
3470 return insn;
3471 }
3472
3473 /* Return the previous insn before INSN that is not a NOTE. This routine does
3474 not look inside SEQUENCEs. */
3475
3476 rtx_insn *
3477 prev_nonnote_insn (rtx_insn *insn)
3478 {
3479 while (insn)
3480 {
3481 insn = PREV_INSN (insn);
3482 if (insn == 0 || !NOTE_P (insn))
3483 break;
3484 }
3485
3486 return insn;
3487 }
3488
3489 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3490 This routine does not look inside SEQUENCEs. */
3491
3492 rtx_insn *
3493 prev_nondebug_insn (rtx_insn *insn)
3494 {
3495 while (insn)
3496 {
3497 insn = PREV_INSN (insn);
3498 if (insn == 0 || !DEBUG_INSN_P (insn))
3499 break;
3500 }
3501
3502 return insn;
3503 }
3504
3505 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3506 This routine does not look inside SEQUENCEs. */
3507
3508 rtx_insn *
3509 next_nonnote_nondebug_insn (rtx_insn *insn)
3510 {
3511 while (insn)
3512 {
3513 insn = NEXT_INSN (insn);
3514 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3515 break;
3516 }
3517
3518 return insn;
3519 }
3520
3521 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3522 but stop the search before we enter another basic block. This
3523 routine does not look inside SEQUENCEs. */
3524
3525 rtx_insn *
3526 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3527 {
3528 while (insn)
3529 {
3530 insn = NEXT_INSN (insn);
3531 if (insn == 0)
3532 break;
3533 if (DEBUG_INSN_P (insn))
3534 continue;
3535 if (!NOTE_P (insn))
3536 break;
3537 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3538 return NULL;
3539 }
3540
3541 return insn;
3542 }
3543
3544 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3545 This routine does not look inside SEQUENCEs. */
3546
3547 rtx_insn *
3548 prev_nonnote_nondebug_insn (rtx_insn *insn)
3549 {
3550 while (insn)
3551 {
3552 insn = PREV_INSN (insn);
3553 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3554 break;
3555 }
3556
3557 return insn;
3558 }
3559
3560 /* Return the previous insn before INSN that is not a NOTE nor
3561 DEBUG_INSN, but stop the search before we enter another basic
3562 block. This routine does not look inside SEQUENCEs. */
3563
3564 rtx_insn *
3565 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3566 {
3567 while (insn)
3568 {
3569 insn = PREV_INSN (insn);
3570 if (insn == 0)
3571 break;
3572 if (DEBUG_INSN_P (insn))
3573 continue;
3574 if (!NOTE_P (insn))
3575 break;
3576 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3577 return NULL;
3578 }
3579
3580 return insn;
3581 }
3582
3583 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3584 or 0, if there is none. This routine does not look inside
3585 SEQUENCEs. */
3586
3587 rtx_insn *
3588 next_real_insn (rtx uncast_insn)
3589 {
3590 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3591
3592 while (insn)
3593 {
3594 insn = NEXT_INSN (insn);
3595 if (insn == 0 || INSN_P (insn))
3596 break;
3597 }
3598
3599 return insn;
3600 }
3601
3602 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3603 or 0, if there is none. This routine does not look inside
3604 SEQUENCEs. */
3605
3606 rtx_insn *
3607 prev_real_insn (rtx_insn *insn)
3608 {
3609 while (insn)
3610 {
3611 insn = PREV_INSN (insn);
3612 if (insn == 0 || INSN_P (insn))
3613 break;
3614 }
3615
3616 return insn;
3617 }
3618
3619 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3620 This routine does not look inside SEQUENCEs. */
3621
3622 rtx_call_insn *
3623 last_call_insn (void)
3624 {
3625 rtx_insn *insn;
3626
3627 for (insn = get_last_insn ();
3628 insn && !CALL_P (insn);
3629 insn = PREV_INSN (insn))
3630 ;
3631
3632 return safe_as_a <rtx_call_insn *> (insn);
3633 }
3634
3635 /* Return nonzero if INSN really does something: it is a CALL_INSN,
3636 JUMP_INSN, JUMP_TABLE_DATA or an ordinary INSN, except that after
3637 reload standalone USE and CLOBBER insns do not count. */
3638
3639 int
3640 active_insn_p (const rtx_insn *insn)
3641 {
3642 return (CALL_P (insn) || JUMP_P (insn)
3643 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3644 || (NONJUMP_INSN_P (insn)
3645 && (! reload_completed
3646 || (GET_CODE (PATTERN (insn)) != USE
3647 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3648 }
3649
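/* Return the next insn after INSN that satisfies active_insn_p, or 0 if
   there is none.  This routine does not look inside SEQUENCEs.  */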
3650 rtx_insn *
3651 next_active_insn (rtx_insn *insn)
3652 {
3653 while (insn)
3654 {
3655 insn = NEXT_INSN (insn);
3656 if (insn == 0 || active_insn_p (insn))
3657 break;
3658 }
3659
3660 return insn;
3661 }
3662
3663 /* Find the last insn before INSN that really does something. This routine
3664 does not look inside SEQUENCEs. After reload this also skips over
3665 standalone USE and CLOBBER insns. */
3666
3667 rtx_insn *
3668 prev_active_insn (rtx_insn *insn)
3669 {
3670 while (insn)
3671 {
3672 insn = PREV_INSN (insn);
3673 if (insn == 0 || active_insn_p (insn))
3674 break;
3675 }
3676
3677 return insn;
3678 }
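
/* Illustrative sketch: visiting every active insn of the current function
   with these helpers:

     for (rtx_insn *insn = get_insns (); insn; insn = next_active_insn (insn))
       if (active_insn_p (insn))
         {
           ...                  (process INSN)
         }
*/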
3679 \f
3680 /* Return the next insn that uses CC0 after INSN, which is assumed to
3681 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3682 applied to the result of this function should yield INSN).
3683
3684 Normally, this is simply the next insn. However, if a REG_CC_USER note
3685 is present, it contains the insn that uses CC0.
3686
3687 Return 0 if we can't find the insn. */
3688
3689 rtx_insn *
3690 next_cc0_user (rtx_insn *insn)
3691 {
3692 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3693
3694 if (note)
3695 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3696
3697 insn = next_nonnote_insn (insn);
3698 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3699 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3700
3701 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3702 return insn;
3703
3704 return 0;
3705 }
3706
3707 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3708 note, it is the previous insn. */
3709
3710 rtx_insn *
3711 prev_cc0_setter (rtx_insn *insn)
3712 {
3713 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3714
3715 if (note)
3716 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3717
3718 insn = prev_nonnote_insn (insn);
3719 gcc_assert (sets_cc0_p (PATTERN (insn)));
3720
3721 return insn;
3722 }
3723
3724 /* Return true if X contains an RTX_AUTOINC rtx whose operand matches REG. */
3725
3726 static int
3727 find_auto_inc (const_rtx x, const_rtx reg)
3728 {
3729 subrtx_iterator::array_type array;
3730 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3731 {
3732 const_rtx x = *iter;
3733 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3734 && rtx_equal_p (reg, XEXP (x, 0)))
3735 return true;
3736 }
3737 return false;
3738 }
3739
3740 /* Increment the label uses for all labels present in rtx. */
3741
3742 static void
3743 mark_label_nuses (rtx x)
3744 {
3745 enum rtx_code code;
3746 int i, j;
3747 const char *fmt;
3748
3749 code = GET_CODE (x);
3750 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3751 LABEL_NUSES (label_ref_label (x))++;
3752
3753 fmt = GET_RTX_FORMAT (code);
3754 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3755 {
3756 if (fmt[i] == 'e')
3757 mark_label_nuses (XEXP (x, i));
3758 else if (fmt[i] == 'E')
3759 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3760 mark_label_nuses (XVECEXP (x, i, j));
3761 }
3762 }
3763
3764 \f
3765 /* Try splitting insns that can be split for better scheduling.
3766 PAT is the pattern which might split.
3767 TRIAL is the insn providing PAT.
3768 LAST is nonzero if we should return the last insn of the sequence produced.
3769
3770 If this routine succeeds in splitting, it returns the first or last
3771 replacement insn depending on the value of LAST. Otherwise, it
3772 returns TRIAL. If the insn to be returned can be split, it will be. */
3773
3774 rtx_insn *
3775 try_split (rtx pat, rtx_insn *trial, int last)
3776 {
3777 rtx_insn *before, *after;
3778 rtx note;
3779 rtx_insn *seq, *tem;
3780 profile_probability probability;
3781 rtx_insn *insn_last, *insn;
3782 int njumps = 0;
3783 rtx_insn *call_insn = NULL;
3784
3785 /* We're not good at redistributing frame information. */
3786 if (RTX_FRAME_RELATED_P (trial))
3787 return trial;
3788
3789 if (any_condjump_p (trial)
3790 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3791 split_branch_probability
3792 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3793 else
3794 split_branch_probability = profile_probability::uninitialized ();
3795
3796 probability = split_branch_probability;
3797
3798 seq = split_insns (pat, trial);
3799
3800 split_branch_probability = profile_probability::uninitialized ();
3801
3802 if (!seq)
3803 return trial;
3804
3805 /* Avoid infinite loop if any insn of the result matches
3806 the original pattern. */
3807 insn_last = seq;
3808 while (1)
3809 {
3810 if (INSN_P (insn_last)
3811 && rtx_equal_p (PATTERN (insn_last), pat))
3812 return trial;
3813 if (!NEXT_INSN (insn_last))
3814 break;
3815 insn_last = NEXT_INSN (insn_last);
3816 }
3817
3818 /* We will be adding the new sequence to the function. The splitters
3819 may have introduced invalid RTL sharing, so unshare the sequence now. */
3820 unshare_all_rtl_in_chain (seq);
3821
3822 /* Mark labels and copy flags. */
3823 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3824 {
3825 if (JUMP_P (insn))
3826 {
3827 if (JUMP_P (trial))
3828 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3829 mark_jump_label (PATTERN (insn), insn, 0);
3830 njumps++;
3831 if (probability.initialized_p ()
3832 && any_condjump_p (insn)
3833 && !find_reg_note (insn, REG_BR_PROB, 0))
3834 {
3835 /* We can preserve the REG_BR_PROB notes only if exactly
3836 one jump is created, otherwise the machine description
3837 is responsible for this step using
3838 split_branch_probability variable. */
3839 gcc_assert (njumps == 1);
3840 add_reg_br_prob_note (insn, probability);
3841 }
3842 }
3843 }
3844
3845 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3846 in SEQ and copy any additional information across. */
3847 if (CALL_P (trial))
3848 {
3849 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3850 if (CALL_P (insn))
3851 {
3852 rtx_insn *next;
3853 rtx *p;
3854
3855 gcc_assert (call_insn == NULL_RTX);
3856 call_insn = insn;
3857
3858 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3859 target may have explicitly specified. */
3860 p = &CALL_INSN_FUNCTION_USAGE (insn);
3861 while (*p)
3862 p = &XEXP (*p, 1);
3863 *p = CALL_INSN_FUNCTION_USAGE (trial);
3864
3865 /* If the old call was a sibling call, the new one must
3866 be too. */
3867 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3868
3869 /* If the new call is the last instruction in the sequence,
3870 it will effectively replace the old call in-situ. Otherwise
3871 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3872 so that it comes immediately after the new call. */
3873 if (NEXT_INSN (insn))
3874 for (next = NEXT_INSN (trial);
3875 next && NOTE_P (next);
3876 next = NEXT_INSN (next))
3877 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3878 {
3879 remove_insn (next);
3880 add_insn_after (next, insn, NULL);
3881 break;
3882 }
3883 }
3884 }
3885
3886 /* Copy notes, particularly those related to the CFG. */
3887 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3888 {
3889 switch (REG_NOTE_KIND (note))
3890 {
3891 case REG_EH_REGION:
3892 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3893 break;
3894
3895 case REG_NORETURN:
3896 case REG_SETJMP:
3897 case REG_TM:
3898 case REG_CALL_NOCF_CHECK:
3899 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3900 {
3901 if (CALL_P (insn))
3902 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3903 }
3904 break;
3905
3906 case REG_NON_LOCAL_GOTO:
3907 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3908 {
3909 if (JUMP_P (insn))
3910 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3911 }
3912 break;
3913
3914 case REG_INC:
3915 if (!AUTO_INC_DEC)
3916 break;
3917
3918 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3919 {
3920 rtx reg = XEXP (note, 0);
3921 if (!FIND_REG_INC_NOTE (insn, reg)
3922 && find_auto_inc (PATTERN (insn), reg))
3923 add_reg_note (insn, REG_INC, reg);
3924 }
3925 break;
3926
3927 case REG_ARGS_SIZE:
3928 fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3929 break;
3930
3931 case REG_CALL_DECL:
3932 gcc_assert (call_insn != NULL_RTX);
3933 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3934 break;
3935
3936 default:
3937 break;
3938 }
3939 }
3940
3941 /* If there are LABELs inside the split insns, increment the
3942 usage count so we don't delete the label. */
3943 if (INSN_P (trial))
3944 {
3945 insn = insn_last;
3946 while (insn != NULL_RTX)
3947 {
3948 /* JUMP_P insns have already been "marked" above. */
3949 if (NONJUMP_INSN_P (insn))
3950 mark_label_nuses (PATTERN (insn));
3951
3952 insn = PREV_INSN (insn);
3953 }
3954 }
3955
3956 before = PREV_INSN (trial);
3957 after = NEXT_INSN (trial);
3958
3959 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3960
3961 delete_insn (trial);
3962
3963 /* Recursively call try_split for each new insn created; by the
3964 time control returns here that insn will be fully split, so
3965 set LAST and continue from the insn after the one returned.
3966 We can't use next_active_insn here since AFTER may be a note.
3967 Ignore deleted insns, which can occur if not optimizing. */
3968 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3969 if (! tem->deleted () && INSN_P (tem))
3970 tem = try_split (PATTERN (tem), tem, 1);
3971
3972 /* Return either the first or the last insn, depending on which was
3973 requested. */
3974 return last
3975 ? (after ? PREV_INSN (after) : get_last_insn ())
3976 : NEXT_INSN (before);
3977 }
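
/* A minimal usage sketch, assuming INSN is a hypothetical insn already in
   the chain:

	rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   Passing LAST == 1 asks for the final insn of whatever sequence the
   splitter produced (or INSN itself if nothing was split), which is a
   convenient point from which the caller can keep walking the chain.  */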
3978 \f
3979 /* Make and return an INSN rtx, initializing all its slots.
3980 Store PATTERN in the pattern slot. */
3981
3982 rtx_insn *
3983 make_insn_raw (rtx pattern)
3984 {
3985 rtx_insn *insn;
3986
3987 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3988
3989 INSN_UID (insn) = cur_insn_uid++;
3990 PATTERN (insn) = pattern;
3991 INSN_CODE (insn) = -1;
3992 REG_NOTES (insn) = NULL;
3993 INSN_LOCATION (insn) = curr_insn_location ();
3994 BLOCK_FOR_INSN (insn) = NULL;
3995
3996 #ifdef ENABLE_RTL_CHECKING
3997 if (insn
3998 && INSN_P (insn)
3999 && (returnjump_p (insn)
4000 || (GET_CODE (insn) == SET
4001 && SET_DEST (insn) == pc_rtx)))
4002 {
4003 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
4004 debug_rtx (insn);
4005 }
4006 #endif
4007
4008 return insn;
4009 }
4010
4011 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4012
4013 static rtx_insn *
4014 make_debug_insn_raw (rtx pattern)
4015 {
4016 rtx_debug_insn *insn;
4017
4018 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4019 INSN_UID (insn) = cur_debug_insn_uid++;
4020 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
4021 INSN_UID (insn) = cur_insn_uid++;
4022
4023 PATTERN (insn) = pattern;
4024 INSN_CODE (insn) = -1;
4025 REG_NOTES (insn) = NULL;
4026 INSN_LOCATION (insn) = curr_insn_location ();
4027 BLOCK_FOR_INSN (insn) = NULL;
4028
4029 return insn;
4030 }
4031
4032 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4033
4034 static rtx_insn *
4035 make_jump_insn_raw (rtx pattern)
4036 {
4037 rtx_jump_insn *insn;
4038
4039 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4040 INSN_UID (insn) = cur_insn_uid++;
4041
4042 PATTERN (insn) = pattern;
4043 INSN_CODE (insn) = -1;
4044 REG_NOTES (insn) = NULL;
4045 JUMP_LABEL (insn) = NULL;
4046 INSN_LOCATION (insn) = curr_insn_location ();
4047 BLOCK_FOR_INSN (insn) = NULL;
4048
4049 return insn;
4050 }
4051
4052 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4053
4054 static rtx_insn *
4055 make_call_insn_raw (rtx pattern)
4056 {
4057 rtx_call_insn *insn;
4058
4059 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4060 INSN_UID (insn) = cur_insn_uid++;
4061
4062 PATTERN (insn) = pattern;
4063 INSN_CODE (insn) = -1;
4064 REG_NOTES (insn) = NULL;
4065 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4066 INSN_LOCATION (insn) = curr_insn_location ();
4067 BLOCK_FOR_INSN (insn) = NULL;
4068
4069 return insn;
4070 }
4071
4072 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4073
4074 static rtx_note *
4075 make_note_raw (enum insn_note subtype)
4076 {
4077 /* Some notes are never created this way at all. These notes are
4078 only created by patching out insns. */
4079 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4080 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4081
4082 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4083 INSN_UID (note) = cur_insn_uid++;
4084 NOTE_KIND (note) = subtype;
4085 BLOCK_FOR_INSN (note) = NULL;
4086 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4087 return note;
4088 }
4089 \f
4090 /* Link INSN into the doubly-linked list between PREV and NEXT.
4091 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4092 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4093
4094 static inline void
4095 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4096 {
4097 SET_PREV_INSN (insn) = prev;
4098 SET_NEXT_INSN (insn) = next;
4099 if (prev != NULL)
4100 {
4101 SET_NEXT_INSN (prev) = insn;
4102 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4103 {
4104 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4105 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4106 }
4107 }
4108 if (next != NULL)
4109 {
4110 SET_PREV_INSN (next) = insn;
4111 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4112 {
4113 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4114 SET_PREV_INSN (sequence->insn (0)) = insn;
4115 }
4116 }
4117
4118 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4119 {
4120 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4121 SET_PREV_INSN (sequence->insn (0)) = prev;
4122 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4123 }
4124 }
4125
4126 /* Add INSN to the end of the doubly-linked list.
4127 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4128
4129 void
4130 add_insn (rtx_insn *insn)
4131 {
4132 rtx_insn *prev = get_last_insn ();
4133 link_insn_into_chain (insn, prev, NULL);
4134 if (get_insns () == NULL)
4135 set_first_insn (insn);
4136 set_last_insn (insn);
4137 }
4138
4139 /* Add INSN into the doubly-linked list after insn AFTER. */
4140
4141 static void
4142 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4143 {
4144 rtx_insn *next = NEXT_INSN (after);
4145
4146 gcc_assert (!optimize || !after->deleted ());
4147
4148 link_insn_into_chain (insn, after, next);
4149
4150 if (next == NULL)
4151 {
4152 struct sequence_stack *seq;
4153
4154 for (seq = get_current_sequence (); seq; seq = seq->next)
4155 if (after == seq->last)
4156 {
4157 seq->last = insn;
4158 break;
4159 }
4160 }
4161 }
4162
4163 /* Add INSN into the doubly-linked list before insn BEFORE. */
4164
4165 static void
4166 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4167 {
4168 rtx_insn *prev = PREV_INSN (before);
4169
4170 gcc_assert (!optimize || !before->deleted ());
4171
4172 link_insn_into_chain (insn, prev, before);
4173
4174 if (prev == NULL)
4175 {
4176 struct sequence_stack *seq;
4177
4178 for (seq = get_current_sequence (); seq; seq = seq->next)
4179 if (before == seq->first)
4180 {
4181 seq->first = insn;
4182 break;
4183 }
4184
4185 gcc_assert (seq);
4186 }
4187 }
4188
4189 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4190 If BB is NULL, an attempt is made to infer the bb from AFTER.
4191
4192 This and the next function should be the only functions called
4193 to insert an insn once delay slots have been filled since only
4194 they know how to update a SEQUENCE. */
4195
4196 void
4197 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4198 {
4199 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4200 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4201 add_insn_after_nobb (insn, after);
4202 if (!BARRIER_P (after)
4203 && !BARRIER_P (insn)
4204 && (bb = BLOCK_FOR_INSN (after)))
4205 {
4206 set_block_for_insn (insn, bb);
4207 if (INSN_P (insn))
4208 df_insn_rescan (insn);
4209 /* Should not happen as the first insn in the BB is always
4210 either a NOTE or a LABEL. */
4211 if (BB_END (bb) == after
4212 /* Avoid clobbering of structure when creating new BB. */
4213 && !BARRIER_P (insn)
4214 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4215 BB_END (bb) = insn;
4216 }
4217 }
4218
4219 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4220 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4221
4222 This and the previous function should be the only functions called
4223 to insert an insn once delay slots have been filled since only
4224 they know how to update a SEQUENCE. */
4225
4226 void
4227 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4228 {
4229 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4230 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4231 add_insn_before_nobb (insn, before);
4232
4233 if (!bb
4234 && !BARRIER_P (before)
4235 && !BARRIER_P (insn))
4236 bb = BLOCK_FOR_INSN (before);
4237
4238 if (bb)
4239 {
4240 set_block_for_insn (insn, bb);
4241 if (INSN_P (insn))
4242 df_insn_rescan (insn);
4243 /* Should not happen as the first insn in the BB is always either a
4244 NOTE or a LABEL. */
4245 gcc_assert (BB_HEAD (bb) != insn
4246 /* Avoid clobbering of structure when creating new BB. */
4247 || BARRIER_P (insn)
4248 || NOTE_INSN_BASIC_BLOCK_P (insn));
4249 }
4250 }
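
/* A small sketch of the intended calling convention, assuming AFTER is a
   hypothetical insn that already belongs to a basic block and PAT a
   hypothetical pattern:

	rtx_insn *new_insn = make_insn_raw (pat);
	add_insn_after (new_insn, after, NULL);

   Passing a NULL basic block lets add_insn_after pick up BLOCK_FOR_INSN
   from AFTER and update BB_END when AFTER used to end the block.  */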
4251
4252 /* Replace INSN with a NOTE_INSN_DELETED note. */
4253
4254 void
4255 set_insn_deleted (rtx insn)
4256 {
4257 if (INSN_P (insn))
4258 df_insn_delete (as_a <rtx_insn *> (insn));
4259 PUT_CODE (insn, NOTE);
4260 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4261 }
4262
4263
4264 /* Unlink INSN from the insn chain.
4265
4266 This function knows how to handle sequences.
4267
4268 This function does not invalidate data flow information associated with
4269 INSN (i.e. does not call df_insn_delete). That makes this function
4270 usable for only disconnecting an insn from the chain, and re-emit it
4271 elsewhere later.
4272
4273 To later insert INSN elsewhere in the insn chain via add_insn and
4274 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4275 the caller. Nullifying them here breaks many insn chain walks.
4276
4277 To really delete an insn and related DF information, use delete_insn. */
4278
4279 void
4280 remove_insn (rtx uncast_insn)
4281 {
4282 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4283 rtx_insn *next = NEXT_INSN (insn);
4284 rtx_insn *prev = PREV_INSN (insn);
4285 basic_block bb;
4286
4287 if (prev)
4288 {
4289 SET_NEXT_INSN (prev) = next;
4290 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4291 {
4292 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4293 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4294 }
4295 }
4296 else
4297 {
4298 struct sequence_stack *seq;
4299
4300 for (seq = get_current_sequence (); seq; seq = seq->next)
4301 if (insn == seq->first)
4302 {
4303 seq->first = next;
4304 break;
4305 }
4306
4307 gcc_assert (seq);
4308 }
4309
4310 if (next)
4311 {
4312 SET_PREV_INSN (next) = prev;
4313 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4314 {
4315 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4316 SET_PREV_INSN (sequence->insn (0)) = prev;
4317 }
4318 }
4319 else
4320 {
4321 struct sequence_stack *seq;
4322
4323 for (seq = get_current_sequence (); seq; seq = seq->next)
4324 if (insn == seq->last)
4325 {
4326 seq->last = prev;
4327 break;
4328 }
4329
4330 gcc_assert (seq);
4331 }
4332
4333 /* Fix up basic block boundaries, if necessary. */
4334 if (!BARRIER_P (insn)
4335 && (bb = BLOCK_FOR_INSN (insn)))
4336 {
4337 if (BB_HEAD (bb) == insn)
4338 {
4339 /* Never ever delete the basic block note without deleting the whole
4340 basic block. */
4341 gcc_assert (!NOTE_P (insn));
4342 BB_HEAD (bb) = next;
4343 }
4344 if (BB_END (bb) == insn)
4345 BB_END (bb) = prev;
4346 }
4347 }
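
/* A hedged sketch of the disconnect-and-re-emit contract described in the
   comment above; INSN and OTHER are hypothetical insns:

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, other, NULL);

   The explicit nullification is the caller's responsibility, and the data
   flow information attached to INSN is deliberately left untouched.  */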
4348
4349 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4350
4351 void
4352 add_function_usage_to (rtx call_insn, rtx call_fusage)
4353 {
4354 gcc_assert (call_insn && CALL_P (call_insn));
4355
4356 /* Put the register usage information on the CALL. If there is already
4357 some usage information, put ours at the end. */
4358 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4359 {
4360 rtx link;
4361
4362 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4363 link = XEXP (link, 1))
4364 ;
4365
4366 XEXP (link, 1) = call_fusage;
4367 }
4368 else
4369 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4370 }
4371
4372 /* Delete all insns made since FROM.
4373 FROM becomes the new last instruction. */
4374
4375 void
4376 delete_insns_since (rtx_insn *from)
4377 {
4378 if (from == 0)
4379 set_first_insn (0);
4380 else
4381 SET_NEXT_INSN (from) = 0;
4382 set_last_insn (from);
4383 }
4384
4385 /* This function is deprecated, please use sequences instead.
4386
4387 Move a consecutive bunch of insns to a different place in the chain.
4388 The insns to be moved are those between FROM and TO.
4389 They are moved to a new position after the insn AFTER.
4390 AFTER must not be FROM or TO or any insn in between.
4391
4392 This function does not know about SEQUENCEs and hence should not be
4393 called after delay-slot filling has been done. */
4394
4395 void
4396 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4397 {
4398 if (flag_checking)
4399 {
4400 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4401 gcc_assert (after != x);
4402 gcc_assert (after != to);
4403 }
4404
4405 /* Splice this bunch out of where it is now. */
4406 if (PREV_INSN (from))
4407 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4408 if (NEXT_INSN (to))
4409 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4410 if (get_last_insn () == to)
4411 set_last_insn (PREV_INSN (from));
4412 if (get_insns () == from)
4413 set_first_insn (NEXT_INSN (to));
4414
4415 /* Make the new neighbors point to it and it to them. */
4416 if (NEXT_INSN (after))
4417 SET_PREV_INSN (NEXT_INSN (after)) = to;
4418
4419 SET_NEXT_INSN (to) = NEXT_INSN (after);
4420 SET_PREV_INSN (from) = after;
4421 SET_NEXT_INSN (after) = from;
4422 if (after == get_last_insn ())
4423 set_last_insn (to);
4424 }
4425
4426 /* Same as the function above, but take care to update BB boundaries. */
4427 void
4428 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4429 {
4430 rtx_insn *prev = PREV_INSN (from);
4431 basic_block bb, bb2;
4432
4433 reorder_insns_nobb (from, to, after);
4434
4435 if (!BARRIER_P (after)
4436 && (bb = BLOCK_FOR_INSN (after)))
4437 {
4438 rtx_insn *x;
4439 df_set_bb_dirty (bb);
4440
4441 if (!BARRIER_P (from)
4442 && (bb2 = BLOCK_FOR_INSN (from)))
4443 {
4444 if (BB_END (bb2) == to)
4445 BB_END (bb2) = prev;
4446 df_set_bb_dirty (bb2);
4447 }
4448
4449 if (BB_END (bb) == after)
4450 BB_END (bb) = to;
4451
4452 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4453 if (!BARRIER_P (x))
4454 df_insn_change_bb (x, bb);
4455 }
4456 }
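
/* A usage sketch, with FIRST, LAST and PLACE standing in for hypothetical
   insns where FIRST..LAST is a contiguous run and PLACE lies outside it:

	reorder_insns (first, last, place);

   Afterwards the run sits immediately after PLACE; the BB_END markers of
   the affected blocks are adjusted and their df information marked dirty,
   which is what distinguishes this from reorder_insns_nobb.  */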
4457
4458 \f
4459 /* Emit insn(s) of given code and pattern
4460 at a specified place within the doubly-linked list.
4461
4462 All of the emit_foo global entry points accept an object
4463 X which is either an insn list or a PATTERN of a single
4464 instruction.
4465
4466 There are thus a few canonical ways to generate code and
4467 emit it at a specific place in the instruction stream. For
4468 example, consider the instruction named SPOT and the fact that
4469 we would like to emit some instructions before SPOT. We might
4470 do it like this:
4471
4472 start_sequence ();
4473 ... emit the new instructions ...
4474 insns_head = get_insns ();
4475 end_sequence ();
4476
4477 emit_insn_before (insns_head, SPOT);
4478
4479 It used to be common to generate SEQUENCE rtl instead, but that
4480 is a relic of the past which no longer occurs. The reason is that
4481 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4482 generated would almost certainly die right after it was created. */
4483
4484 static rtx_insn *
4485 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4486 rtx_insn *(*make_raw) (rtx))
4487 {
4488 rtx_insn *insn;
4489
4490 gcc_assert (before);
4491
4492 if (x == NULL_RTX)
4493 return safe_as_a <rtx_insn *> (last);
4494
4495 switch (GET_CODE (x))
4496 {
4497 case DEBUG_INSN:
4498 case INSN:
4499 case JUMP_INSN:
4500 case CALL_INSN:
4501 case CODE_LABEL:
4502 case BARRIER:
4503 case NOTE:
4504 insn = as_a <rtx_insn *> (x);
4505 while (insn)
4506 {
4507 rtx_insn *next = NEXT_INSN (insn);
4508 add_insn_before (insn, before, bb);
4509 last = insn;
4510 insn = next;
4511 }
4512 break;
4513
4514 #ifdef ENABLE_RTL_CHECKING
4515 case SEQUENCE:
4516 gcc_unreachable ();
4517 break;
4518 #endif
4519
4520 default:
4521 last = (*make_raw) (x);
4522 add_insn_before (last, before, bb);
4523 break;
4524 }
4525
4526 return safe_as_a <rtx_insn *> (last);
4527 }
4528
4529 /* Make X be output before the instruction BEFORE. */
4530
4531 rtx_insn *
4532 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4533 {
4534 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4535 }
4536
4537 /* Make an instruction with body X and code JUMP_INSN
4538 and output it before the instruction BEFORE. */
4539
4540 rtx_jump_insn *
4541 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4542 {
4543 return as_a <rtx_jump_insn *> (
4544 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4545 make_jump_insn_raw));
4546 }
4547
4548 /* Make an instruction with body X and code CALL_INSN
4549 and output it before the instruction BEFORE. */
4550
4551 rtx_insn *
4552 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4553 {
4554 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4555 make_call_insn_raw);
4556 }
4557
4558 /* Make an instruction with body X and code DEBUG_INSN
4559 and output it before the instruction BEFORE. */
4560
4561 rtx_insn *
4562 emit_debug_insn_before_noloc (rtx x, rtx before)
4563 {
4564 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4565 make_debug_insn_raw);
4566 }
4567
4568 /* Make an insn of code BARRIER
4569 and output it before the insn BEFORE. */
4570
4571 rtx_barrier *
4572 emit_barrier_before (rtx before)
4573 {
4574 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4575
4576 INSN_UID (insn) = cur_insn_uid++;
4577
4578 add_insn_before (insn, before, NULL);
4579 return insn;
4580 }
4581
4582 /* Emit the label LABEL before the insn BEFORE. */
4583
4584 rtx_code_label *
4585 emit_label_before (rtx label, rtx_insn *before)
4586 {
4587 gcc_checking_assert (INSN_UID (label) == 0);
4588 INSN_UID (label) = cur_insn_uid++;
4589 add_insn_before (label, before, NULL);
4590 return as_a <rtx_code_label *> (label);
4591 }
4592 \f
4593 /* Helper for emit_insn_after; handles lists of instructions
4594 efficiently. */
4595
4596 static rtx_insn *
4597 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4598 {
4599 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4600 rtx_insn *last;
4601 rtx_insn *after_after;
4602 if (!bb && !BARRIER_P (after))
4603 bb = BLOCK_FOR_INSN (after);
4604
4605 if (bb)
4606 {
4607 df_set_bb_dirty (bb);
4608 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4609 if (!BARRIER_P (last))
4610 {
4611 set_block_for_insn (last, bb);
4612 df_insn_rescan (last);
4613 }
4614 if (!BARRIER_P (last))
4615 {
4616 set_block_for_insn (last, bb);
4617 df_insn_rescan (last);
4618 }
4619 if (BB_END (bb) == after)
4620 BB_END (bb) = last;
4621 }
4622 else
4623 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4624 continue;
4625
4626 after_after = NEXT_INSN (after);
4627
4628 SET_NEXT_INSN (after) = first;
4629 SET_PREV_INSN (first) = after;
4630 SET_NEXT_INSN (last) = after_after;
4631 if (after_after)
4632 SET_PREV_INSN (after_after) = last;
4633
4634 if (after == get_last_insn ())
4635 set_last_insn (last);
4636
4637 return last;
4638 }
4639
4640 static rtx_insn *
4641 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4642 rtx_insn *(*make_raw)(rtx))
4643 {
4644 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4645 rtx_insn *last = after;
4646
4647 gcc_assert (after);
4648
4649 if (x == NULL_RTX)
4650 return last;
4651
4652 switch (GET_CODE (x))
4653 {
4654 case DEBUG_INSN:
4655 case INSN:
4656 case JUMP_INSN:
4657 case CALL_INSN:
4658 case CODE_LABEL:
4659 case BARRIER:
4660 case NOTE:
4661 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4662 break;
4663
4664 #ifdef ENABLE_RTL_CHECKING
4665 case SEQUENCE:
4666 gcc_unreachable ();
4667 break;
4668 #endif
4669
4670 default:
4671 last = (*make_raw) (x);
4672 add_insn_after (last, after, bb);
4673 break;
4674 }
4675
4676 return last;
4677 }
4678
4679 /* Make X be output after the insn AFTER and set the BB of the new insn. If
4680 BB is NULL, an attempt is made to infer the BB from AFTER. */
4681
4682 rtx_insn *
4683 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4684 {
4685 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4686 }
4687
4688
4689 /* Make an insn of code JUMP_INSN with body X
4690 and output it after the insn AFTER. */
4691
4692 rtx_jump_insn *
4693 emit_jump_insn_after_noloc (rtx x, rtx after)
4694 {
4695 return as_a <rtx_jump_insn *> (
4696 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4697 }
4698
4699 /* Make an instruction with body X and code CALL_INSN
4700 and output it after the instruction AFTER. */
4701
4702 rtx_insn *
4703 emit_call_insn_after_noloc (rtx x, rtx after)
4704 {
4705 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4706 }
4707
4708 /* Make an instruction with body X and code DEBUG_INSN
4709 and output it after the instruction AFTER. */
4710
4711 rtx_insn *
4712 emit_debug_insn_after_noloc (rtx x, rtx after)
4713 {
4714 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4715 }
4716
4717 /* Make an insn of code BARRIER
4718 and output it after the insn AFTER. */
4719
4720 rtx_barrier *
4721 emit_barrier_after (rtx after)
4722 {
4723 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4724
4725 INSN_UID (insn) = cur_insn_uid++;
4726
4727 add_insn_after (insn, after, NULL);
4728 return insn;
4729 }
4730
4731 /* Emit the label LABEL after the insn AFTER. */
4732
4733 rtx_insn *
4734 emit_label_after (rtx label, rtx_insn *after)
4735 {
4736 gcc_checking_assert (INSN_UID (label) == 0);
4737 INSN_UID (label) = cur_insn_uid++;
4738 add_insn_after (label, after, NULL);
4739 return as_a <rtx_insn *> (label);
4740 }
4741 \f
4742 /* Notes require a bit of special handling: Some notes need to have their
4743 BLOCK_FOR_INSN set, others should never have it set, and some should
4744 have it set or clear depending on the context. */
4745
4746 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4747 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4748 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4749
4750 static bool
4751 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4752 {
4753 switch (subtype)
4754 {
4755 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4756 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4757 return true;
4758
4759 /* Notes for var tracking and EH region markers can appear between or
4760 inside basic blocks. If the caller is emitting on the basic block
4761 boundary, do not set BLOCK_FOR_INSN on the new note. */
4762 case NOTE_INSN_VAR_LOCATION:
4763 case NOTE_INSN_CALL_ARG_LOCATION:
4764 case NOTE_INSN_EH_REGION_BEG:
4765 case NOTE_INSN_EH_REGION_END:
4766 return on_bb_boundary_p;
4767
4768 /* Otherwise, BLOCK_FOR_INSN must be set. */
4769 default:
4770 return false;
4771 }
4772 }
4773
4774 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4775
4776 rtx_note *
4777 emit_note_after (enum insn_note subtype, rtx_insn *after)
4778 {
4779 rtx_note *note = make_note_raw (subtype);
4780 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4781 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4782
4783 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4784 add_insn_after_nobb (note, after);
4785 else
4786 add_insn_after (note, after, bb);
4787 return note;
4788 }
4789
4790 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4791
4792 rtx_note *
4793 emit_note_before (enum insn_note subtype, rtx_insn *before)
4794 {
4795 rtx_note *note = make_note_raw (subtype);
4796 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4797 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4798
4799 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4800 add_insn_before_nobb (note, before);
4801 else
4802 add_insn_before (note, before, bb);
4803 return note;
4804 }
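
/* A short sketch of the boundary rule above; BB is a hypothetical basic
   block and INNER a hypothetical insn strictly inside it (the notes'
   payloads would still have to be filled in by the caller):

	emit_note_after (NOTE_INSN_VAR_LOCATION, BB_END (bb));
	emit_note_after (NOTE_INSN_VAR_LOCATION, inner);

   The first call emits on the block boundary, so the note goes through
   add_insn_after_nobb and keeps a NULL BLOCK_FOR_INSN; the second call is
   inside the block, so the note is associated with BB.  */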
4805 \f
4806 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4807 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4808
4809 static rtx_insn *
4810 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4811 rtx_insn *(*make_raw) (rtx))
4812 {
4813 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4814 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4815
4816 if (pattern == NULL_RTX || !loc)
4817 return last;
4818
4819 after = NEXT_INSN (after);
4820 while (1)
4821 {
4822 if (active_insn_p (after)
4823 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4824 && !INSN_LOCATION (after))
4825 INSN_LOCATION (after) = loc;
4826 if (after == last)
4827 break;
4828 after = NEXT_INSN (after);
4829 }
4830 return last;
4831 }
4832
4833 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4834 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4835 any DEBUG_INSNs. */
4836
4837 static rtx_insn *
4838 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4839 rtx_insn *(*make_raw) (rtx))
4840 {
4841 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4842 rtx_insn *prev = after;
4843
4844 if (skip_debug_insns)
4845 while (DEBUG_INSN_P (prev))
4846 prev = PREV_INSN (prev);
4847
4848 if (INSN_P (prev))
4849 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4850 make_raw);
4851 else
4852 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4853 }
4854
4855 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4856 rtx_insn *
4857 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4858 {
4859 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4860 }
4861
4862 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4863 rtx_insn *
4864 emit_insn_after (rtx pattern, rtx after)
4865 {
4866 return emit_pattern_after (pattern, after, true, make_insn_raw);
4867 }
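
/* A hedged sketch of the difference between the two entry points above;
   PAT, AFTER and LOC are hypothetical:

	emit_insn_after (pat, after);
	emit_insn_after_setloc (pat, after, loc);

   The first form takes its location from AFTER (skipping backwards over
   debug insns), while the second stamps the explicit location LOC onto
   every active insn it emits that does not already carry one.  */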
4868
4869 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4870 rtx_jump_insn *
4871 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4872 {
4873 return as_a <rtx_jump_insn *> (
4874 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4875 }
4876
4877 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4878 rtx_jump_insn *
4879 emit_jump_insn_after (rtx pattern, rtx after)
4880 {
4881 return as_a <rtx_jump_insn *> (
4882 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4883 }
4884
4885 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4886 rtx_insn *
4887 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4888 {
4889 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4890 }
4891
4892 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4893 rtx_insn *
4894 emit_call_insn_after (rtx pattern, rtx after)
4895 {
4896 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4897 }
4898
4899 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4900 rtx_insn *
4901 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4902 {
4903 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4904 }
4905
4906 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4907 rtx_insn *
4908 emit_debug_insn_after (rtx pattern, rtx after)
4909 {
4910 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4911 }
4912
4913 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4914 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4915 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4916 CALL_INSN, etc. */
4917
4918 static rtx_insn *
4919 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4920 rtx_insn *(*make_raw) (rtx))
4921 {
4922 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4923 rtx_insn *first = PREV_INSN (before);
4924 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4925 insnp ? before : NULL_RTX,
4926 NULL, make_raw);
4927
4928 if (pattern == NULL_RTX || !loc)
4929 return last;
4930
4931 if (!first)
4932 first = get_insns ();
4933 else
4934 first = NEXT_INSN (first);
4935 while (1)
4936 {
4937 if (active_insn_p (first)
4938 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4939 && !INSN_LOCATION (first))
4940 INSN_LOCATION (first) = loc;
4941 if (first == last)
4942 break;
4943 first = NEXT_INSN (first);
4944 }
4945 return last;
4946 }
4947
4948 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4949 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4950 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4951 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4952
4953 static rtx_insn *
4954 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4955 bool insnp, rtx_insn *(*make_raw) (rtx))
4956 {
4957 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4958 rtx_insn *next = before;
4959
4960 if (skip_debug_insns)
4961 while (DEBUG_INSN_P (next))
4962 next = PREV_INSN (next);
4963
4964 if (INSN_P (next))
4965 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4966 insnp, make_raw);
4967 else
4968 return emit_pattern_before_noloc (pattern, before,
4969 insnp ? before : NULL_RTX,
4970 NULL, make_raw);
4971 }
4972
4973 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4974 rtx_insn *
4975 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4976 {
4977 return emit_pattern_before_setloc (pattern, before, loc, true,
4978 make_insn_raw);
4979 }
4980
4981 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4982 rtx_insn *
4983 emit_insn_before (rtx pattern, rtx before)
4984 {
4985 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4986 }
4987
4988 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4989 rtx_jump_insn *
4990 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4991 {
4992 return as_a <rtx_jump_insn *> (
4993 emit_pattern_before_setloc (pattern, before, loc, false,
4994 make_jump_insn_raw));
4995 }
4996
4997 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4998 rtx_jump_insn *
4999 emit_jump_insn_before (rtx pattern, rtx before)
5000 {
5001 return as_a <rtx_jump_insn *> (
5002 emit_pattern_before (pattern, before, true, false,
5003 make_jump_insn_raw));
5004 }
5005
5006 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5007 rtx_insn *
5008 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5009 {
5010 return emit_pattern_before_setloc (pattern, before, loc, false,
5011 make_call_insn_raw);
5012 }
5013
5014 /* Like emit_call_insn_before_noloc,
5015 but set INSN_LOCATION according to BEFORE. */
5016 rtx_insn *
5017 emit_call_insn_before (rtx pattern, rtx_insn *before)
5018 {
5019 return emit_pattern_before (pattern, before, true, false,
5020 make_call_insn_raw);
5021 }
5022
5023 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5024 rtx_insn *
5025 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
5026 {
5027 return emit_pattern_before_setloc (pattern, before, loc, false,
5028 make_debug_insn_raw);
5029 }
5030
5031 /* Like emit_debug_insn_before_noloc,
5032 but set INSN_LOCATION according to BEFORE. */
5033 rtx_insn *
5034 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5035 {
5036 return emit_pattern_before (pattern, before, false, false,
5037 make_debug_insn_raw);
5038 }
5039 \f
5040 /* Take X and emit it at the end of the doubly-linked
5041 INSN list.
5042
5043 Returns the last insn emitted. */
5044
5045 rtx_insn *
5046 emit_insn (rtx x)
5047 {
5048 rtx_insn *last = get_last_insn ();
5049 rtx_insn *insn;
5050
5051 if (x == NULL_RTX)
5052 return last;
5053
5054 switch (GET_CODE (x))
5055 {
5056 case DEBUG_INSN:
5057 case INSN:
5058 case JUMP_INSN:
5059 case CALL_INSN:
5060 case CODE_LABEL:
5061 case BARRIER:
5062 case NOTE:
5063 insn = as_a <rtx_insn *> (x);
5064 while (insn)
5065 {
5066 rtx_insn *next = NEXT_INSN (insn);
5067 add_insn (insn);
5068 last = insn;
5069 insn = next;
5070 }
5071 break;
5072
5073 #ifdef ENABLE_RTL_CHECKING
5074 case JUMP_TABLE_DATA:
5075 case SEQUENCE:
5076 gcc_unreachable ();
5077 break;
5078 #endif
5079
5080 default:
5081 last = make_insn_raw (x);
5082 add_insn (last);
5083 break;
5084 }
5085
5086 return last;
5087 }
5088
5089 /* Make an insn of code DEBUG_INSN with pattern X
5090 and add it to the end of the doubly-linked list. */
5091
5092 rtx_insn *
5093 emit_debug_insn (rtx x)
5094 {
5095 rtx_insn *last = get_last_insn ();
5096 rtx_insn *insn;
5097
5098 if (x == NULL_RTX)
5099 return last;
5100
5101 switch (GET_CODE (x))
5102 {
5103 case DEBUG_INSN:
5104 case INSN:
5105 case JUMP_INSN:
5106 case CALL_INSN:
5107 case CODE_LABEL:
5108 case BARRIER:
5109 case NOTE:
5110 insn = as_a <rtx_insn *> (x);
5111 while (insn)
5112 {
5113 rtx_insn *next = NEXT_INSN (insn);
5114 add_insn (insn);
5115 last = insn;
5116 insn = next;
5117 }
5118 break;
5119
5120 #ifdef ENABLE_RTL_CHECKING
5121 case JUMP_TABLE_DATA:
5122 case SEQUENCE:
5123 gcc_unreachable ();
5124 break;
5125 #endif
5126
5127 default:
5128 last = make_debug_insn_raw (x);
5129 add_insn (last);
5130 break;
5131 }
5132
5133 return last;
5134 }
5135
5136 /* Make an insn of code JUMP_INSN with pattern X
5137 and add it to the end of the doubly-linked list. */
5138
5139 rtx_insn *
5140 emit_jump_insn (rtx x)
5141 {
5142 rtx_insn *last = NULL;
5143 rtx_insn *insn;
5144
5145 switch (GET_CODE (x))
5146 {
5147 case DEBUG_INSN:
5148 case INSN:
5149 case JUMP_INSN:
5150 case CALL_INSN:
5151 case CODE_LABEL:
5152 case BARRIER:
5153 case NOTE:
5154 insn = as_a <rtx_insn *> (x);
5155 while (insn)
5156 {
5157 rtx_insn *next = NEXT_INSN (insn);
5158 add_insn (insn);
5159 last = insn;
5160 insn = next;
5161 }
5162 break;
5163
5164 #ifdef ENABLE_RTL_CHECKING
5165 case JUMP_TABLE_DATA:
5166 case SEQUENCE:
5167 gcc_unreachable ();
5168 break;
5169 #endif
5170
5171 default:
5172 last = make_jump_insn_raw (x);
5173 add_insn (last);
5174 break;
5175 }
5176
5177 return last;
5178 }
5179
5180 /* Make an insn of code CALL_INSN with pattern X
5181 and add it to the end of the doubly-linked list. */
5182
5183 rtx_insn *
5184 emit_call_insn (rtx x)
5185 {
5186 rtx_insn *insn;
5187
5188 switch (GET_CODE (x))
5189 {
5190 case DEBUG_INSN:
5191 case INSN:
5192 case JUMP_INSN:
5193 case CALL_INSN:
5194 case CODE_LABEL:
5195 case BARRIER:
5196 case NOTE:
5197 insn = emit_insn (x);
5198 break;
5199
5200 #ifdef ENABLE_RTL_CHECKING
5201 case SEQUENCE:
5202 case JUMP_TABLE_DATA:
5203 gcc_unreachable ();
5204 break;
5205 #endif
5206
5207 default:
5208 insn = make_call_insn_raw (x);
5209 add_insn (insn);
5210 break;
5211 }
5212
5213 return insn;
5214 }
5215
5216 /* Add the label LABEL to the end of the doubly-linked list. */
5217
5218 rtx_code_label *
5219 emit_label (rtx uncast_label)
5220 {
5221 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5222
5223 gcc_checking_assert (INSN_UID (label) == 0);
5224 INSN_UID (label) = cur_insn_uid++;
5225 add_insn (label);
5226 return label;
5227 }
5228
5229 /* Make an insn of code JUMP_TABLE_DATA
5230 and add it to the end of the doubly-linked list. */
5231
5232 rtx_jump_table_data *
5233 emit_jump_table_data (rtx table)
5234 {
5235 rtx_jump_table_data *jump_table_data =
5236 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5237 INSN_UID (jump_table_data) = cur_insn_uid++;
5238 PATTERN (jump_table_data) = table;
5239 BLOCK_FOR_INSN (jump_table_data) = NULL;
5240 add_insn (jump_table_data);
5241 return jump_table_data;
5242 }
5243
5244 /* Make an insn of code BARRIER
5245 and add it to the end of the doubly-linked list. */
5246
5247 rtx_barrier *
5248 emit_barrier (void)
5249 {
5250 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5251 INSN_UID (barrier) = cur_insn_uid++;
5252 add_insn (barrier);
5253 return barrier;
5254 }
5255
5256 /* Emit a copy of note ORIG. */
5257
5258 rtx_note *
5259 emit_note_copy (rtx_note *orig)
5260 {
5261 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5262 rtx_note *note = make_note_raw (kind);
5263 NOTE_DATA (note) = NOTE_DATA (orig);
5264 add_insn (note);
5265 return note;
5266 }
5267
5268 /* Make an insn of code NOTE of kind KIND
5269 and add it to the end of the doubly-linked list. */
5270
5271 rtx_note *
5272 emit_note (enum insn_note kind)
5273 {
5274 rtx_note *note = make_note_raw (kind);
5275 add_insn (note);
5276 return note;
5277 }
5278
5279 /* Emit a clobber of lvalue X. */
5280
5281 rtx_insn *
5282 emit_clobber (rtx x)
5283 {
5284 /* CONCATs should not appear in the insn stream. */
5285 if (GET_CODE (x) == CONCAT)
5286 {
5287 emit_clobber (XEXP (x, 0));
5288 return emit_clobber (XEXP (x, 1));
5289 }
5290 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5291 }
5292
5293 /* Return a sequence of insns to clobber lvalue X. */
5294
5295 rtx_insn *
5296 gen_clobber (rtx x)
5297 {
5298 rtx_insn *seq;
5299
5300 start_sequence ();
5301 emit_clobber (x);
5302 seq = get_insns ();
5303 end_sequence ();
5304 return seq;
5305 }
5306
5307 /* Emit a use of rvalue X. */
5308
5309 rtx_insn *
5310 emit_use (rtx x)
5311 {
5312 /* CONCATs should not appear in the insn stream. */
5313 if (GET_CODE (x) == CONCAT)
5314 {
5315 emit_use (XEXP (x, 0));
5316 return emit_use (XEXP (x, 1));
5317 }
5318 return emit_insn (gen_rtx_USE (VOIDmode, x));
5319 }
5320
5321 /* Return a sequence of insns to use rvalue X. */
5322
5323 rtx_insn *
5324 gen_use (rtx x)
5325 {
5326 rtx_insn *seq;
5327
5328 start_sequence ();
5329 emit_use (x);
5330 seq = get_insns ();
5331 end_sequence ();
5332 return seq;
5333 }
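
/* A small sketch combining the helpers above; REG is a hypothetical
   register rtx and PLACE a hypothetical insn:

	rtx_insn *seq = gen_clobber (reg);
	emit_insn_before (seq, place);

   gen_clobber and gen_use wrap the emit_* variants in a temporary
   sequence, so the caller gets back a detached insn chain that can be
   inserted wherever it is needed.  */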
5334
5335 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5336 Return the set in INSN that such notes describe, or NULL if the notes
5337 have no meaning for INSN. */
5338
5339 rtx
5340 set_for_reg_notes (rtx insn)
5341 {
5342 rtx pat, reg;
5343
5344 if (!INSN_P (insn))
5345 return NULL_RTX;
5346
5347 pat = PATTERN (insn);
5348 if (GET_CODE (pat) == PARALLEL)
5349 {
5350 /* We do not use single_set because that ignores SETs of unused
5351 registers. REG_EQUAL and REG_EQUIV notes really do require the
5352 PARALLEL to have a single SET. */
5353 if (multiple_sets (insn))
5354 return NULL_RTX;
5355 pat = XVECEXP (pat, 0, 0);
5356 }
5357
5358 if (GET_CODE (pat) != SET)
5359 return NULL_RTX;
5360
5361 reg = SET_DEST (pat);
5362
5363 /* Notes apply to the contents of a STRICT_LOW_PART. */
5364 if (GET_CODE (reg) == STRICT_LOW_PART
5365 || GET_CODE (reg) == ZERO_EXTRACT)
5366 reg = XEXP (reg, 0);
5367
5368 /* Check that we have a register. */
5369 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5370 return NULL_RTX;
5371
5372 return pat;
5373 }
5374
5375 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5376 note of this type already exists, remove it first. */
5377
5378 rtx
5379 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5380 {
5381 rtx note = find_reg_note (insn, kind, NULL_RTX);
5382
5383 switch (kind)
5384 {
5385 case REG_EQUAL:
5386 case REG_EQUIV:
5387 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5388 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5389 return NULL_RTX;
5390
5391 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5392 It serves no useful purpose and breaks eliminate_regs. */
5393 if (GET_CODE (datum) == ASM_OPERANDS)
5394 return NULL_RTX;
5395
5396 /* Notes with side effects are dangerous. Even if the side-effect
5397 initially mirrors one in PATTERN (INSN), later optimizations
5398 might alter the way that the final register value is calculated
5399 and so move or alter the side-effect in some way. The note would
5400 then no longer be a valid substitution for SET_SRC. */
5401 if (side_effects_p (datum))
5402 return NULL_RTX;
5403 break;
5404
5405 default:
5406 break;
5407 }
5408
5409 if (note)
5410 XEXP (note, 0) = datum;
5411 else
5412 {
5413 add_reg_note (insn, kind, datum);
5414 note = REG_NOTES (insn);
5415 }
5416
5417 switch (kind)
5418 {
5419 case REG_EQUAL:
5420 case REG_EQUIV:
5421 df_notes_rescan (as_a <rtx_insn *> (insn));
5422 break;
5423 default:
5424 break;
5425 }
5426
5427 return note;
5428 }
5429
5430 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5431 rtx
5432 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5433 {
5434 rtx set = set_for_reg_notes (insn);
5435
5436 if (set && SET_DEST (set) == dst)
5437 return set_unique_reg_note (insn, kind, datum);
5438 return NULL_RTX;
5439 }
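
/* A usage sketch, assuming INSN is a hypothetical single-set insn and VAL
   a side-effect-free rtx describing the value it computes:

	set_unique_reg_note (insn, REG_EQUAL, val);

   Any existing REG_EQUAL note is replaced rather than duplicated, and the
   call simply returns NULL_RTX when INSN has no suitable single SET or
   when VAL is rejected by the checks above.  */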
5440 \f
5441 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5442 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5443 is true.
5444
5445 If X is a label, it is simply added into the insn chain. */
5446
5447 rtx_insn *
5448 emit (rtx x, bool allow_barrier_p)
5449 {
5450 enum rtx_code code = classify_insn (x);
5451
5452 switch (code)
5453 {
5454 case CODE_LABEL:
5455 return emit_label (x);
5456 case INSN:
5457 return emit_insn (x);
5458 case JUMP_INSN:
5459 {
5460 rtx_insn *insn = emit_jump_insn (x);
5461 if (allow_barrier_p
5462 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5463 return emit_barrier ();
5464 return insn;
5465 }
5466 case CALL_INSN:
5467 return emit_call_insn (x);
5468 case DEBUG_INSN:
5469 return emit_debug_insn (x);
5470 default:
5471 gcc_unreachable ();
5472 }
5473 }
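
/* A brief sketch, with PAT standing in for a hypothetical pattern whose
   kind is not known statically:

	rtx_insn *insn = emit (pat, true);

   classify_insn decides whether PAT becomes an INSN, JUMP_INSN, CALL_INSN
   or DEBUG_INSN; with ALLOW_BARRIER_P true, an unconditional jump is
   followed by a BARRIER, and the barrier is what gets returned.  */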
5474 \f
5475 /* Space for free sequence stack entries. */
5476 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5477
5478 /* Begin emitting insns to a sequence. If this sequence will contain
5479 something that might cause the compiler to pop arguments to function
5480 calls (because those pops have previously been deferred; see
5481 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5482 before calling this function. That will ensure that the deferred
5483 pops are not accidentally emitted in the middle of this sequence. */
5484
5485 void
5486 start_sequence (void)
5487 {
5488 struct sequence_stack *tem;
5489
5490 if (free_sequence_stack != NULL)
5491 {
5492 tem = free_sequence_stack;
5493 free_sequence_stack = tem->next;
5494 }
5495 else
5496 tem = ggc_alloc<sequence_stack> ();
5497
5498 tem->next = get_current_sequence ()->next;
5499 tem->first = get_insns ();
5500 tem->last = get_last_insn ();
5501 get_current_sequence ()->next = tem;
5502
5503 set_first_insn (0);
5504 set_last_insn (0);
5505 }
5506
5507 /* Set up the insn chain starting with FIRST as the current sequence,
5508 saving the previously current one. See the documentation for
5509 start_sequence for more information about how to use this function. */
5510
5511 void
5512 push_to_sequence (rtx_insn *first)
5513 {
5514 rtx_insn *last;
5515
5516 start_sequence ();
5517
5518 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5519 ;
5520
5521 set_first_insn (first);
5522 set_last_insn (last);
5523 }
5524
5525 /* Like push_to_sequence, but take the last insn as an argument to avoid
5526 looping through the list. */
5527
5528 void
5529 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5530 {
5531 start_sequence ();
5532
5533 set_first_insn (first);
5534 set_last_insn (last);
5535 }
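
/* A hedged sketch of appending to a previously built chain; CHAIN is a
   hypothetical insn list obtained from an earlier get_insns and
   MORE_PATTERN a hypothetical pattern:

	push_to_sequence (chain);
	emit_insn (more_pattern);
	chain = get_insns ();
	end_sequence ();

   push_to_sequence2 does the same job when the caller has kept the last
   insn around and wants to avoid rescanning the list to find it.  */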
5536
5537 /* Set up the outer-level insn chain
5538 as the current sequence, saving the previously current one. */
5539
5540 void
5541 push_topmost_sequence (void)
5542 {
5543 struct sequence_stack *top;
5544
5545 start_sequence ();
5546
5547 top = get_topmost_sequence ();
5548 set_first_insn (top->first);
5549 set_last_insn (top->last);
5550 }
5551
5552 /* After emitting to the outer-level insn chain, update the outer-level
5553 insn chain, and restore the previous saved state. */
5554
5555 void
5556 pop_topmost_sequence (void)
5557 {
5558 struct sequence_stack *top;
5559
5560 top = get_topmost_sequence ();
5561 top->first = get_insns ();
5562 top->last = get_last_insn ();
5563
5564 end_sequence ();
5565 }
5566
5567 /* After emitting to a sequence, restore previous saved state.
5568
5569 To get the contents of the sequence just made, you must call
5570 `get_insns' *before* calling here.
5571
5572 If the compiler might have deferred popping arguments while
5573 generating this sequence, and this sequence will not be immediately
5574 inserted into the instruction stream, use do_pending_stack_adjust
5575 before calling get_insns. That will ensure that the deferred
5576 pops are inserted into this sequence, and not into some random
5577 location in the instruction stream. See INHIBIT_DEFER_POP for more
5578 information about deferred popping of arguments. */
5579
5580 void
5581 end_sequence (void)
5582 {
5583 struct sequence_stack *tem = get_current_sequence ()->next;
5584
5585 set_first_insn (tem->first);
5586 set_last_insn (tem->last);
5587 get_current_sequence ()->next = tem->next;
5588
5589 memset (tem, 0, sizeof (*tem));
5590 tem->next = free_sequence_stack;
5591 free_sequence_stack = tem;
5592 }
5593
5594 /* Return 1 if currently emitting into a sequence. */
5595
5596 int
5597 in_sequence_p (void)
5598 {
5599 return get_current_sequence ()->next != 0;
5600 }
5601 \f
5602 /* Put the various virtual registers into REGNO_REG_RTX. */
5603
5604 static void
5605 init_virtual_regs (void)
5606 {
5607 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5608 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5609 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5610 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5611 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5612 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5613 = virtual_preferred_stack_boundary_rtx;
5614 }
5615
5616 \f
5617 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5618 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5619 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5620 static int copy_insn_n_scratches;
5621
5622 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5623 copied an ASM_OPERANDS.
5624 In that case, it is the original input-operand vector. */
5625 static rtvec orig_asm_operands_vector;
5626
5627 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5628 copied an ASM_OPERANDS.
5629 In that case, it is the copied input-operand vector. */
5630 static rtvec copy_asm_operands_vector;
5631
5632 /* Likewise for the constraints vector. */
5633 static rtvec orig_asm_constraints_vector;
5634 static rtvec copy_asm_constraints_vector;
5635
5636 /* Recursively create a new copy of an rtx for copy_insn.
5637 This function differs from copy_rtx in that it handles SCRATCHes and
5638 ASM_OPERANDs properly.
5639 Normally, this function is not used directly; use copy_insn as front end.
5640 However, you could first copy an insn pattern with copy_insn and then use
5641 this function afterwards to properly copy any REG_NOTEs containing
5642 SCRATCHes. */
5643
5644 rtx
5645 copy_insn_1 (rtx orig)
5646 {
5647 rtx copy;
5648 int i, j;
5649 RTX_CODE code;
5650 const char *format_ptr;
5651
5652 if (orig == NULL)
5653 return NULL;
5654
5655 code = GET_CODE (orig);
5656
5657 switch (code)
5658 {
5659 case REG:
5660 case DEBUG_EXPR:
5661 CASE_CONST_ANY:
5662 case SYMBOL_REF:
5663 case CODE_LABEL:
5664 case PC:
5665 case CC0:
5666 case RETURN:
5667 case SIMPLE_RETURN:
5668 return orig;
5669 case CLOBBER:
5670 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5671 clobbers or clobbers of hard registers that originated as pseudos.
5672 This is needed to allow safe register renaming. */
5673 if (REG_P (XEXP (orig, 0))
5674 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5675 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5676 return orig;
5677 break;
5678
5679 case SCRATCH:
5680 for (i = 0; i < copy_insn_n_scratches; i++)
5681 if (copy_insn_scratch_in[i] == orig)
5682 return copy_insn_scratch_out[i];
5683 break;
5684
5685 case CONST:
5686 if (shared_const_p (orig))
5687 return orig;
5688 break;
5689
5690 /* A MEM with a constant address is not sharable. The problem is that
5691 the constant address may need to be reloaded. If the mem is shared,
5692 then reloading one copy of this mem will cause all copies to appear
5693 to have been reloaded. */
5694
5695 default:
5696 break;
5697 }
5698
5699 /* Copy the various flags, fields, and other information. We assume
5700 that all fields need copying, and then clear the fields that should
5701 not be copied. That is the sensible default behavior, and forces
5702 us to explicitly document why we are *not* copying a flag. */
5703 copy = shallow_copy_rtx (orig);
5704
5705 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5706 if (INSN_P (orig))
5707 {
5708 RTX_FLAG (copy, jump) = 0;
5709 RTX_FLAG (copy, call) = 0;
5710 RTX_FLAG (copy, frame_related) = 0;
5711 }
5712
5713 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5714
5715 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5716 switch (*format_ptr++)
5717 {
5718 case 'e':
5719 if (XEXP (orig, i) != NULL)
5720 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5721 break;
5722
5723 case 'E':
5724 case 'V':
5725 if (XVEC (orig, i) == orig_asm_constraints_vector)
5726 XVEC (copy, i) = copy_asm_constraints_vector;
5727 else if (XVEC (orig, i) == orig_asm_operands_vector)
5728 XVEC (copy, i) = copy_asm_operands_vector;
5729 else if (XVEC (orig, i) != NULL)
5730 {
5731 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5732 for (j = 0; j < XVECLEN (copy, i); j++)
5733 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5734 }
5735 break;
5736
5737 case 't':
5738 case 'w':
5739 case 'i':
5740 case 'p':
5741 case 's':
5742 case 'S':
5743 case 'u':
5744 case '0':
5745 /* These are left unchanged. */
5746 break;
5747
5748 default:
5749 gcc_unreachable ();
5750 }
5751
5752 if (code == SCRATCH)
5753 {
5754 i = copy_insn_n_scratches++;
5755 gcc_assert (i < MAX_RECOG_OPERANDS);
5756 copy_insn_scratch_in[i] = orig;
5757 copy_insn_scratch_out[i] = copy;
5758 }
5759 else if (code == ASM_OPERANDS)
5760 {
5761 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5762 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5763 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5764 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5765 }
5766
5767 return copy;
5768 }
5769
5770 /* Create a new copy of an rtx.
5771 This function differs from copy_rtx in that it handles SCRATCHes and
5772 ASM_OPERANDs properly.
5773 INSN doesn't really have to be a full INSN; it could be just the
5774 pattern. */
5775 rtx
5776 copy_insn (rtx insn)
5777 {
5778 copy_insn_n_scratches = 0;
5779 orig_asm_operands_vector = 0;
5780 orig_asm_constraints_vector = 0;
5781 copy_asm_operands_vector = 0;
5782 copy_asm_constraints_vector = 0;
5783 return copy_insn_1 (insn);
5784 }
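
/* A sketch of the two-step idiom mentioned in the copy_insn_1 comment
   above; INSN is a hypothetical insn whose notes may mention SCRATCHes:

	rtx new_pat = copy_insn (PATTERN (insn));
	rtx new_notes = copy_insn_1 (REG_NOTES (insn));

   Because copy_insn resets the SCRATCH bookkeeping and copy_insn_1 reuses
   it, a SCRATCH shared between the pattern and its notes stays shared in
   the copies.  */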
5785
5786 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5787 on the assumption that INSN itself remains in its original place. */
5788
5789 rtx_insn *
5790 copy_delay_slot_insn (rtx_insn *insn)
5791 {
5792 /* Copy INSN with its rtx_code, all its notes, location etc. */
5793 insn = as_a <rtx_insn *> (copy_rtx (insn));
5794 INSN_UID (insn) = cur_insn_uid++;
5795 return insn;
5796 }
5797
5798 /* Initialize data structures and variables in this file
5799 before generating rtl for each function. */
5800
5801 void
5802 init_emit (void)
5803 {
5804 set_first_insn (NULL);
5805 set_last_insn (NULL);
5806 if (MIN_NONDEBUG_INSN_UID)
5807 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5808 else
5809 cur_insn_uid = 1;
5810 cur_debug_insn_uid = 1;
5811 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5812 first_label_num = label_num;
5813 get_current_sequence ()->next = NULL;
5814
5815 /* Init the tables that describe all the pseudo regs. */
5816
5817 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5818
5819 crtl->emit.regno_pointer_align
5820 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5821
5822 regno_reg_rtx
5823 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5824
5825 /* Put copies of all the hard registers into regno_reg_rtx. */
5826 memcpy (regno_reg_rtx,
5827 initial_regno_reg_rtx,
5828 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5829
5830 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5831 init_virtual_regs ();
5832
5833 /* Indicate that the virtual registers and stack locations are
5834 all pointers. */
5835 REG_POINTER (stack_pointer_rtx) = 1;
5836 REG_POINTER (frame_pointer_rtx) = 1;
5837 REG_POINTER (hard_frame_pointer_rtx) = 1;
5838 REG_POINTER (arg_pointer_rtx) = 1;
5839
5840 REG_POINTER (virtual_incoming_args_rtx) = 1;
5841 REG_POINTER (virtual_stack_vars_rtx) = 1;
5842 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5843 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5844 REG_POINTER (virtual_cfa_rtx) = 1;
5845
5846 #ifdef STACK_BOUNDARY
5847 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5848 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5849 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5850 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5851
5852 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5853 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5854 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5855 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5856
5857 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5858 #endif
5859
5860 #ifdef INIT_EXPANDERS
5861 INIT_EXPANDERS;
5862 #endif
5863 }
5864
5865 /* Return the value of element I of CONST_VECTOR X as a wide_int. */
5866
5867 wide_int
5868 const_vector_int_elt (const_rtx x, unsigned int i)
5869 {
5870 /* First handle elements that are directly encoded. */
5871 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5872 if (i < (unsigned int) XVECLEN (x, 0))
5873 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5874
5875 /* Identify the pattern that contains element I and work out the index of
5876 the last encoded element for that pattern. */
5877 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5878 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5879 unsigned int count = i / npatterns;
5880 unsigned int pattern = i % npatterns;
5881 unsigned int final_i = encoded_nelts - npatterns + pattern;
5882
5883 /* If there are no steps, the final encoded value is the right one. */
5884 if (!CONST_VECTOR_STEPPED_P (x))
5885 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5886
5887 /* Otherwise work out the value from the last two encoded elements. */
5888 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5889 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5890 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5891 rtx_mode_t (v1, elt_mode));
5892 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
5893 }
5894
5895 /* Return the value of element I of CONST_VECTOR X. */
5896
5897 rtx
5898 const_vector_elt (const_rtx x, unsigned int i)
5899 {
5900 /* First handle elements that are directly encoded. */
5901 if (i < (unsigned int) XVECLEN (x, 0))
5902 return CONST_VECTOR_ENCODED_ELT (x, i);
5903
5904 /* If there are no steps, the final encoded value is the right one. */
5905 if (!CONST_VECTOR_STEPPED_P (x))
5906 {
5907 /* Identify the pattern that contains element I and work out the index of
5908 the last encoded element for that pattern. */
5909 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5910 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5911 unsigned int pattern = i % npatterns;
5912 unsigned int final_i = encoded_nelts - npatterns + pattern;
5913 return CONST_VECTOR_ENCODED_ELT (x, final_i);
5914 }
5915
5916 /* Otherwise work out the value from the last two encoded elements. */
5917 return immed_wide_int_const (const_vector_int_elt (x, i),
5918 GET_MODE_INNER (GET_MODE (x)));
5919 }
5920
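/* A minimal sketch of the encoding handled above (not compiled; guarded by
   "#if 0", and assuming the target provides V4SImode): the series
   3, 5, 7, 9 is stored with npatterns == 1 and three encoded elements
   {3, 5, 7}; element 3 is only implicit and is reconstructed from the last
   two encoded elements as 7 + (3 - 2) * (7 - 5) == 9.  */
#if 0
static void
const_vector_elt_sketch (void)
{
  rtx series = gen_const_vec_series (V4SImode, GEN_INT (3), GEN_INT (2));
  /* Index 3 is beyond the encoded elements, so the stepped path is used.  */
  rtx elt = const_vector_elt (series, 3);
  gcc_assert (CONST_INT_P (elt) && INTVAL (elt) == 9);
}
#endif
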
5921 /* Return true if X is a valid element for a CONST_VECTOR of the given
5922 mode. */
5923
5924 bool
5925 valid_for_const_vector_p (machine_mode, rtx x)
5926 {
5927 return (CONST_SCALAR_INT_P (x)
5928 || CONST_DOUBLE_AS_FLOAT_P (x)
5929 || CONST_FIXED_P (x));
5930 }
5931
5932 /* Generate a vector constant of mode MODE in which every element has
5933 value ELT. */
5934
5935 rtx
5936 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5937 {
5938 rtx_vector_builder builder (mode, 1, 1);
5939 builder.quick_push (elt);
5940 return builder.build ();
5941 }
5942
5943 /* Return a vector rtx of mode MODE in which every element has value X.
5944 The result will be a constant if X is constant. */
5945
5946 rtx
5947 gen_vec_duplicate (machine_mode mode, rtx x)
5948 {
5949 if (valid_for_const_vector_p (mode, x))
5950 return gen_const_vec_duplicate (mode, x);
5951 return gen_rtx_VEC_DUPLICATE (mode, x);
5952 }
5953
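/* A minimal sketch (not compiled; guarded by "#if 0", assuming V4SImode and
   a function currently being expanded): duplicating a constant element
   produces a CONST_VECTOR, while duplicating a register produces a
   VEC_DUPLICATE expression that the target must match.  */
#if 0
static void
vec_duplicate_sketch (void)
{
  rtx cst = gen_vec_duplicate (V4SImode, const1_rtx);
  gcc_assert (GET_CODE (cst) == CONST_VECTOR);

  rtx reg = gen_reg_rtx (SImode);
  rtx dup = gen_vec_duplicate (V4SImode, reg);
  gcc_assert (GET_CODE (dup) == VEC_DUPLICATE);
}
#endif
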
5954 /* A subroutine of const_vec_series_p that handles the case in which:
5955
5956 (GET_CODE (X) == CONST_VECTOR
5957 && CONST_VECTOR_NPATTERNS (X) == 1
5958 && !CONST_VECTOR_DUPLICATE_P (X))
5959
5960 is known to hold. */
5961
5962 bool
5963 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5964 {
5965 /* Stepped sequences are only defined for integers, to avoid specifying
5966 rounding behavior. */
5967 if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
5968 return false;
5969
5970 /* A non-duplicated vector with two elements can always be seen as a
5971 series with a nonzero step. Longer vectors must have a stepped
5972 encoding. */
5973 if (CONST_VECTOR_NUNITS (x) != 2
5974 && !CONST_VECTOR_STEPPED_P (x))
5975 return false;
5976
5977 /* Calculate the step between the first and second elements. */
5978 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
5979 rtx base = CONST_VECTOR_ELT (x, 0);
5980 rtx step = simplify_binary_operation (MINUS, inner,
5981 CONST_VECTOR_ENCODED_ELT (x, 1), base);
5982 if (rtx_equal_p (step, CONST0_RTX (inner)))
5983 return false;
5984
5985 /* If we have a stepped encoding, check that the step between the
5986 second and third elements is the same as STEP. */
5987 if (CONST_VECTOR_STEPPED_P (x))
5988 {
5989 rtx diff = simplify_binary_operation (MINUS, inner,
5990 CONST_VECTOR_ENCODED_ELT (x, 2),
5991 CONST_VECTOR_ENCODED_ELT (x, 1));
5992 if (!rtx_equal_p (step, diff))
5993 return false;
5994 }
5995
5996 *base_out = base;
5997 *step_out = step;
5998 return true;
5999 }
6000
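/* A minimal sketch (not compiled; guarded by "#if 0", assuming V4SImode):
   the vector below satisfies the preconditions documented above (a single,
   non-duplicated pattern), and the routine recovers base 10 and step -3.
   Normal callers reach this routine through the const_vec_series_p
   wrapper.  */
#if 0
static void
vec_series_detect_sketch (void)
{
  rtx x = gen_const_vec_series (V4SImode, GEN_INT (10), GEN_INT (-3));
  rtx base, step;
  if (const_vec_series_p_1 (x, &base, &step))
    gcc_assert (INTVAL (base) == 10 && INTVAL (step) == -3);
}
#endif
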
6001 /* Generate a vector constant of mode MODE in which element I has
6002 the value BASE + I * STEP. */
6003
6004 rtx
6005 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6006 {
6007 gcc_assert (valid_for_const_vector_p (mode, base)
6008 && valid_for_const_vector_p (mode, step));
6009
6010 rtx_vector_builder builder (mode, 1, 3);
6011 builder.quick_push (base);
6012 for (int i = 1; i < 3; ++i)
6013 builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6014 builder[i - 1], step));
6015 return builder.build ();
6016 }
6017
6018 /* Generate a vector of mode MODE in which element I has the value
6019 BASE + I * STEP. The result will be a constant if BASE and STEP
6020 are both constants. */
6021
6022 rtx
6023 gen_vec_series (machine_mode mode, rtx base, rtx step)
6024 {
6025 if (step == const0_rtx)
6026 return gen_vec_duplicate (mode, base);
6027 if (valid_for_const_vector_p (mode, base)
6028 && valid_for_const_vector_p (mode, step))
6029 return gen_const_vec_series (mode, base, step);
6030 return gen_rtx_VEC_SERIES (mode, base, step);
6031 }
6032
6033 /* Generate a new vector constant for mode MODE and constant value
6034 CONSTANT. */
6035
6036 static rtx
6037 gen_const_vector (machine_mode mode, int constant)
6038 {
6039 machine_mode inner = GET_MODE_INNER (mode);
6040
6041 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6042
6043 rtx el = const_tiny_rtx[constant][(int) inner];
6044 gcc_assert (el);
6045
6046 return gen_const_vec_duplicate (mode, el);
6047 }
6048
6049 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
6050 all elements are zero, and the one vector when all elements are one. */
6051 rtx
6052 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6053 {
6054 gcc_assert (GET_MODE_NUNITS (mode) == GET_NUM_ELEM (v));
6055
6056 /* If the values are all the same, check to see if we can use one of the
6057 standard constant vectors. */
6058 if (rtvec_all_equal_p (v))
6059 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6060
6061 unsigned int nunits = GET_NUM_ELEM (v);
6062 rtx_vector_builder builder (mode, nunits, 1);
6063 for (unsigned int i = 0; i < nunits; ++i)
6064 builder.quick_push (RTVEC_ELT (v, i));
6065 return builder.build (v);
6066 }
6067
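/* A minimal sketch (not compiled; guarded by "#if 0", assuming V4SImode):
   an rtvec whose elements are all zero goes through the duplicate path
   above, so the shared zero vector for the mode is returned rather than a
   fresh CONST_VECTOR node.  */
#if 0
static void
const_vector_zero_sketch (void)
{
  rtvec v = rtvec_alloc (4);
  for (int i = 0; i < 4; i++)
    RTVEC_ELT (v, i) = const0_rtx;
  gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v) == CONST0_RTX (V4SImode));
}
#endif
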
6068 /* Initialize global register information required by all functions. */
6069
6070 void
6071 init_emit_regs (void)
6072 {
6073 int i;
6074 machine_mode mode;
6075 mem_attrs *attrs;
6076
6077 /* Reset register attributes. */
6078 reg_attrs_htab->empty ();
6079
6080 /* We need reg_raw_mode, so initialize the modes now. */
6081 init_reg_modes_target ();
6082
6083 /* Assign register numbers to the globally defined register rtx. */
6084 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6085 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6086 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6087 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6088 virtual_incoming_args_rtx =
6089 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6090 virtual_stack_vars_rtx =
6091 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6092 virtual_stack_dynamic_rtx =
6093 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6094 virtual_outgoing_args_rtx =
6095 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6096 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6097 virtual_preferred_stack_boundary_rtx =
6098 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6099
6100 /* Initialize RTL for commonly used hard registers. These are
6101 copied into regno_reg_rtx as we begin to compile each function. */
6102 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6103 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6104
6105 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6106 return_address_pointer_rtx
6107 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6108 #endif
6109
6110 pic_offset_table_rtx = NULL_RTX;
6111 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6112 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6113
6114 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6115 {
6116 mode = (machine_mode) i;
6117 attrs = ggc_cleared_alloc<mem_attrs> ();
6118 attrs->align = BITS_PER_UNIT;
6119 attrs->addrspace = ADDR_SPACE_GENERIC;
6120 if (mode != BLKmode)
6121 {
6122 attrs->size_known_p = true;
6123 attrs->size = GET_MODE_SIZE (mode);
6124 if (STRICT_ALIGNMENT)
6125 attrs->align = GET_MODE_ALIGNMENT (mode);
6126 }
6127 mode_mem_attrs[i] = attrs;
6128 }
6129
6130 split_branch_probability = profile_probability::uninitialized ();
6131 }
6132
6133 /* Initialize global machine_mode variables. */
6134
6135 void
6136 init_derived_machine_modes (void)
6137 {
6138 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6139 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6140 {
6141 scalar_int_mode mode = mode_iter.require ();
6142
6143 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6144 && !opt_byte_mode.exists ())
6145 opt_byte_mode = mode;
6146
6147 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6148 && !opt_word_mode.exists ())
6149 opt_word_mode = mode;
6150 }
6151
6152 byte_mode = opt_byte_mode.require ();
6153 word_mode = opt_word_mode.require ();
6154 ptr_mode = as_a <scalar_int_mode>
6155 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6156 }
6157
6158 /* Create some permanent unique rtl objects shared between all functions. */
6159
6160 void
6161 init_emit_once (void)
6162 {
6163 int i;
6164 machine_mode mode;
6165 scalar_float_mode double_mode;
6166 opt_scalar_mode smode_iter;
6167
6168 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6169 CONST_FIXED, and memory attribute hash tables. */
6170 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6171
6172 #if TARGET_SUPPORTS_WIDE_INT
6173 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6174 #endif
6175 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6176
6177 if (NUM_POLY_INT_COEFFS > 1)
6178 const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6179
6180 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6181
6182 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6183
6184 #ifdef INIT_EXPANDERS
6185 /* This is to initialize {init|mark|free}_machine_status before the first
6186 call to push_function_context_to. This is needed by the Chill front
6187 end which calls push_function_context_to before the first call to
6188 init_function_start. */
6189 INIT_EXPANDERS;
6190 #endif
6191
6192 /* Create the unique rtx's for certain rtx codes and operand values. */
6193
6194 /* Process stack-limiting command-line options. */
6195 if (opt_fstack_limit_symbol_arg != NULL)
6196 stack_limit_rtx
6197 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6198 if (opt_fstack_limit_register_no >= 0)
6199 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6200
6201 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
6202 tries to use these variables. */
6203 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6204 const_int_rtx[i + MAX_SAVED_CONST_INT] =
6205 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6206
6207 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6208 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6209 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6210 else
6211 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6212
6213 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6214
6215 real_from_integer (&dconst0, double_mode, 0, SIGNED);
6216 real_from_integer (&dconst1, double_mode, 1, SIGNED);
6217 real_from_integer (&dconst2, double_mode, 2, SIGNED);
6218
6219 dconstm1 = dconst1;
6220 dconstm1.sign = 1;
6221
6222 dconsthalf = dconst1;
6223 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6224
6225 for (i = 0; i < 3; i++)
6226 {
6227 const REAL_VALUE_TYPE *const r =
6228 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6229
6230 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6231 const_tiny_rtx[i][(int) mode] =
6232 const_double_from_real_value (*r, mode);
6233
6234 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6235 const_tiny_rtx[i][(int) mode] =
6236 const_double_from_real_value (*r, mode);
6237
6238 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6239
6240 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6241 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6242
6243 for (mode = MIN_MODE_PARTIAL_INT;
6244 mode <= MAX_MODE_PARTIAL_INT;
6245 mode = (machine_mode)((int)(mode) + 1))
6246 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6247 }
6248
6249 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6250
6251 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6252 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6253
6254 for (mode = MIN_MODE_PARTIAL_INT;
6255 mode <= MAX_MODE_PARTIAL_INT;
6256 mode = (machine_mode)((int)(mode) + 1))
6257 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6258
6259 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6260 {
6261 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6262 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6263 }
6264
6265 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6266 {
6267 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6268 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6269 }
6270
6271 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6272 {
6273 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6274 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6275 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6276 }
6277
6278 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6279 {
6280 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6281 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6282 }
6283
6284 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6285 {
6286 scalar_mode smode = smode_iter.require ();
6287 FCONST0 (smode).data.high = 0;
6288 FCONST0 (smode).data.low = 0;
6289 FCONST0 (smode).mode = smode;
6290 const_tiny_rtx[0][(int) smode]
6291 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6292 }
6293
6294 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6295 {
6296 scalar_mode smode = smode_iter.require ();
6297 FCONST0 (smode).data.high = 0;
6298 FCONST0 (smode).data.low = 0;
6299 FCONST0 (smode).mode = smode;
6300 const_tiny_rtx[0][(int) smode]
6301 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6302 }
6303
6304 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6305 {
6306 scalar_mode smode = smode_iter.require ();
6307 FCONST0 (smode).data.high = 0;
6308 FCONST0 (smode).data.low = 0;
6309 FCONST0 (smode).mode = smode;
6310 const_tiny_rtx[0][(int) smode]
6311 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6312
6313 /* We store the value 1. */
6314 FCONST1 (smode).data.high = 0;
6315 FCONST1 (smode).data.low = 0;
6316 FCONST1 (smode).mode = smode;
6317 FCONST1 (smode).data
6318 = double_int_one.lshift (GET_MODE_FBIT (smode),
6319 HOST_BITS_PER_DOUBLE_INT,
6320 SIGNED_FIXED_POINT_MODE_P (smode));
6321 const_tiny_rtx[1][(int) smode]
6322 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6323 }
6324
6325 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6326 {
6327 scalar_mode smode = smode_iter.require ();
6328 FCONST0 (smode).data.high = 0;
6329 FCONST0 (smode).data.low = 0;
6330 FCONST0 (smode).mode = smode;
6331 const_tiny_rtx[0][(int) smode]
6332 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6333
6334 /* We store the value 1. */
6335 FCONST1 (smode).data.high = 0;
6336 FCONST1 (smode).data.low = 0;
6337 FCONST1 (smode).mode = smode;
6338 FCONST1 (smode).data
6339 = double_int_one.lshift (GET_MODE_FBIT (smode),
6340 HOST_BITS_PER_DOUBLE_INT,
6341 SIGNED_FIXED_POINT_MODE_P (smode));
6342 const_tiny_rtx[1][(int) smode]
6343 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6344 }
6345
6346 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6347 {
6348 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6349 }
6350
6351 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6352 {
6353 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6354 }
6355
6356 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6357 {
6358 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6359 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6360 }
6361
6362 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6363 {
6364 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6365 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6366 }
6367
6368 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6369 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6370 const_tiny_rtx[0][i] = const0_rtx;
6371
6372 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6373 if (STORE_FLAG_VALUE == 1)
6374 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6375
6376 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
6377 {
6378 scalar_mode smode = smode_iter.require ();
6379 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
6380 const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
6381 }
6382
6383 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6384 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6385 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6386 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6387 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6388 /*prev_insn=*/NULL,
6389 /*next_insn=*/NULL,
6390 /*bb=*/NULL,
6391 /*pattern=*/NULL_RTX,
6392 /*location=*/-1,
6393 CODE_FOR_nothing,
6394 /*reg_notes=*/NULL_RTX);
6395 }
6396 \f
6397 /* Produce an exact duplicate of insn INSN after AFTER,
6398 taking care to update libcall regions if present. */
6399
6400 rtx_insn *
6401 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6402 {
6403 rtx_insn *new_rtx;
6404 rtx link;
6405
6406 switch (GET_CODE (insn))
6407 {
6408 case INSN:
6409 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6410 break;
6411
6412 case JUMP_INSN:
6413 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6414 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6415 break;
6416
6417 case DEBUG_INSN:
6418 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6419 break;
6420
6421 case CALL_INSN:
6422 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6423 if (CALL_INSN_FUNCTION_USAGE (insn))
6424 CALL_INSN_FUNCTION_USAGE (new_rtx)
6425 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6426 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6427 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6428 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6429 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6430 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6431 break;
6432
6433 default:
6434 gcc_unreachable ();
6435 }
6436
6437 /* Update LABEL_NUSES. */
6438 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6439
6440 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6441
6442 /* If the old insn is frame related, then so is the new one. This is
6443 primarily needed for IA-64 unwind info which marks epilogue insns,
6444 which may be duplicated by the basic block reordering code. */
6445 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6446
6447 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6448 rtx *ptail = &REG_NOTES (new_rtx);
6449 while (*ptail != NULL_RTX)
6450 ptail = &XEXP (*ptail, 1);
6451
6452 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6453 will make them. REG_LABEL_TARGETs are created there too, but are
6454 supposed to be sticky, so we copy them. */
6455 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6456 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6457 {
6458 *ptail = duplicate_reg_note (link);
6459 ptail = &XEXP (*ptail, 1);
6460 }
6461
6462 INSN_CODE (new_rtx) = INSN_CODE (insn);
6463 return new_rtx;
6464 }
6465
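/* A minimal sketch (not compiled; guarded by "#if 0"): duplicate the last
   emitted insn immediately after itself.  The copy receives a fresh
   INSN_UID, and its location, frame-related flag and REG_NOTES are copied
   from the original as described above.  */
#if 0
static void
duplicate_last_insn_sketch (void)
{
  rtx_insn *last = get_last_insn ();
  if (last && INSN_P (last))
    emit_copy_of_insn_after (last, last);
}
#endif
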
6466 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6467 rtx
6468 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6469 {
6470 if (hard_reg_clobbers[mode][regno])
6471 return hard_reg_clobbers[mode][regno];
6472 else
6473 return (hard_reg_clobbers[mode][regno] =
6474 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6475 }
6476
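/* A minimal sketch (not compiled; guarded by "#if 0"): requests for the
   same (mode, register) pair return the same cached CLOBBER, so the
   results can be compared with pointer equality.  Register number 0 is
   used purely for illustration.  */
#if 0
static void
hard_reg_clobber_sketch (void)
{
  rtx a = gen_hard_reg_clobber (word_mode, 0);
  rtx b = gen_hard_reg_clobber (word_mode, 0);
  gcc_assert (a == b && GET_CODE (a) == CLOBBER);
}
#endif
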
6477 location_t prologue_location;
6478 location_t epilogue_location;
6479
6480 /* Hold current location information and last location information, so the
6481 data structures are built lazily only when instructions at a given
6482 place are needed. */
6483 static location_t curr_location;
6484
6485 /* Allocate the insn location data structure. */
6486 void
6487 insn_locations_init (void)
6488 {
6489 prologue_location = epilogue_location = 0;
6490 curr_location = UNKNOWN_LOCATION;
6491 }
6492
6493 /* At the end of emit stage, clear current location. */
6494 void
6495 insn_locations_finalize (void)
6496 {
6497 epilogue_location = curr_location;
6498 curr_location = UNKNOWN_LOCATION;
6499 }
6500
6501 /* Set current location. */
6502 void
6503 set_curr_insn_location (location_t location)
6504 {
6505 curr_location = location;
6506 }
6507
6508 /* Get current location. */
6509 location_t
6510 curr_insn_location (void)
6511 {
6512 return curr_location;
6513 }
6514
6515 /* Return the lexical scope block INSN belongs to. */
6516 tree
6517 insn_scope (const rtx_insn *insn)
6518 {
6519 return LOCATION_BLOCK (INSN_LOCATION (insn));
6520 }
6521
6522 /* Return line number of the statement that produced this insn. */
6523 int
6524 insn_line (const rtx_insn *insn)
6525 {
6526 return LOCATION_LINE (INSN_LOCATION (insn));
6527 }
6528
6529 /* Return source file of the statement that produced this insn. */
6530 const char *
6531 insn_file (const rtx_insn *insn)
6532 {
6533 return LOCATION_FILE (INSN_LOCATION (insn));
6534 }
6535
6536 /* Return expanded location of the statement that produced this insn. */
6537 expanded_location
6538 insn_location (const rtx_insn *insn)
6539 {
6540 return expand_location (INSN_LOCATION (insn));
6541 }
6542
6543 /* Return true if memory model MODEL requires a pre-operation (release-style)
6544 barrier or a post-operation (acquire-style) barrier. While not universal,
6545 this function matches the behavior of several targets. */
6546
6547 bool
6548 need_atomic_barrier_p (enum memmodel model, bool pre)
6549 {
6550 switch (model & MEMMODEL_BASE_MASK)
6551 {
6552 case MEMMODEL_RELAXED:
6553 case MEMMODEL_CONSUME:
6554 return false;
6555 case MEMMODEL_RELEASE:
6556 return pre;
6557 case MEMMODEL_ACQUIRE:
6558 return !pre;
6559 case MEMMODEL_ACQ_REL:
6560 case MEMMODEL_SEQ_CST:
6561 return true;
6562 default:
6563 gcc_unreachable ();
6564 }
6565 }
6566
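/* A minimal sketch (not compiled; guarded by "#if 0") of the usual calling
   pattern in a backend: emit a release-style barrier before the access and
   an acquire-style barrier after it whenever the memory model requires
   one.  emit_memory_barrier_sketch stands in for whatever barrier expander
   the target actually provides.  */
#if 0
static void emit_memory_barrier_sketch (void);

static void
expand_atomic_store_sketch (rtx mem, rtx val, enum memmodel model)
{
  if (need_atomic_barrier_p (model, /*pre=*/true))
    emit_memory_barrier_sketch ();
  emit_move_insn (mem, val);
  if (need_atomic_barrier_p (model, /*pre=*/false))
    emit_memory_barrier_sketch ();
}
#endif
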
6567 /* Return a constant shift amount for shifting a value of mode MODE
6568 by VALUE bits. */
6569
6570 rtx
6571 gen_int_shift_amount (machine_mode, poly_int64 value)
6572 {
6573 /* Use a 64-bit mode, to avoid any truncation.
6574
6575 ??? Perhaps this should be automatically derived from the .md files
6576 instead, or perhaps have a target hook. */
6577 scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6578 ? DImode
6579 : int_mode_for_size (64, 0).require ());
6580 return gen_int_mode (value, shift_mode);
6581 }
6582
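/* A minimal sketch (not compiled; guarded by "#if 0"): the shift amount is
   created in a 64-bit integer mode regardless of the mode being shifted,
   so the same helper serves QImode and TImode shifts alike.  */
#if 0
static void
shift_amount_sketch (void)
{
  rtx amt = gen_int_shift_amount (QImode, 3);
  gcc_assert (CONST_INT_P (amt) && INTVAL (amt) == 3);
}
#endif
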
6583 /* Initialize fields of rtl_data related to stack alignment. */
6584
6585 void
6586 rtl_data::init_stack_alignment ()
6587 {
6588 stack_alignment_needed = STACK_BOUNDARY;
6589 max_used_stack_slot_alignment = STACK_BOUNDARY;
6590 stack_alignment_estimated = 0;
6591 preferred_stack_boundary = STACK_BOUNDARY;
6592 }
6593
6594 \f
6595 #include "gt-emit-rtl.h"