1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "params.h"
58 #include "builtins.h"
59 #include "rtl-iter.h"
60 #include "stor-layout.h"
61 #include "opts.h"
62 #include "predict.h"
63
64 struct target_rtl default_target_rtl;
65 #if SWITCHABLE_TARGET
66 struct target_rtl *this_target_rtl = &default_target_rtl;
67 #endif
68
69 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
70
71 /* Commonly used modes. */
72
73 scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
74 scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
75 scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
76
77 /* Datastructures maintained for currently processed function in RTL form. */
78
79 struct rtl_data x_rtl;
80
81 /* Indexed by pseudo register number, gives the rtx for that pseudo.
82 Allocated in parallel with regno_pointer_align.
83 FIXME: We could put it into the emit_status struct, but gengtype is not
84 able to deal with a length attribute nested in top-level structures. */
85
86 rtx * regno_reg_rtx;
87
88 /* This is *not* reset after each function. It gives each CODE_LABEL
89 in the entire compilation a unique label number. */
90
91 static GTY(()) int label_num = 1;
92
93 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
95 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
96 is set only for MODE_INT and MODE_VECTOR_INT modes. */
97
98 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
99
100 rtx const_true_rtx;
101
102 REAL_VALUE_TYPE dconst0;
103 REAL_VALUE_TYPE dconst1;
104 REAL_VALUE_TYPE dconst2;
105 REAL_VALUE_TYPE dconstm1;
106 REAL_VALUE_TYPE dconsthalf;
107
108 /* Record fixed-point constant 0 and 1. */
109 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111
112 /* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
116
117 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
118
119 /* Standard pieces of rtx, to be substituted directly into things. */
120 rtx pc_rtx;
121 rtx ret_rtx;
122 rtx simple_return_rtx;
123 rtx cc0_rtx;
124
125 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
126 this pointer should normally never be dereferenced), but is required to be
127 distinct from NULL_RTX. Currently used by peephole2 pass. */
128 rtx_insn *invalid_insn_rtx;
129
130 /* A hash table storing CONST_INTs whose absolute value is greater
131 than MAX_SAVED_CONST_INT. */
132
133 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
134 {
135 typedef HOST_WIDE_INT compare_type;
136
137 static hashval_t hash (rtx i);
138 static bool equal (rtx i, HOST_WIDE_INT h);
139 };
140
141 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
142
143 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
144 {
145 static hashval_t hash (rtx x);
146 static bool equal (rtx x, rtx y);
147 };
148
149 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
150
151 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
152 {
153 typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
154
155 static hashval_t hash (rtx x);
156 static bool equal (rtx x, const compare_type &y);
157 };
158
159 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
160
161 /* A hash table storing register attribute structures. */
162 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
163 {
164 static hashval_t hash (reg_attrs *x);
165 static bool equal (reg_attrs *a, reg_attrs *b);
166 };
167
168 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
169
170 /* A hash table storing all CONST_DOUBLEs. */
171 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
172 {
173 static hashval_t hash (rtx x);
174 static bool equal (rtx x, rtx y);
175 };
176
177 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
178
179 /* A hash table storing all CONST_FIXEDs. */
180 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
181 {
182 static hashval_t hash (rtx x);
183 static bool equal (rtx x, rtx y);
184 };
185
186 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
187
188 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
189 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
190 #define first_label_num (crtl->emit.x_first_label_num)
191
192 static void set_used_decls (tree);
193 static void mark_label_nuses (rtx);
194 #if TARGET_SUPPORTS_WIDE_INT
195 static rtx lookup_const_wide_int (rtx);
196 #endif
197 static rtx lookup_const_double (rtx);
198 static rtx lookup_const_fixed (rtx);
199 static rtx gen_const_vector (machine_mode, int);
200 static void copy_rtx_if_shared_1 (rtx *orig);
201
202 /* Probability of the conditional branch currently being processed by try_split. */
203 profile_probability split_branch_probability;
204 \f
205 /* Returns a hash code for X (which is really a CONST_INT). */
206
207 hashval_t
208 const_int_hasher::hash (rtx x)
209 {
210 return (hashval_t) INTVAL (x);
211 }
212
213 /* Returns nonzero if the value represented by X (which is really a
214 CONST_INT) is the same as that given by Y (which is really a
215 HOST_WIDE_INT *). */
216
217 bool
218 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
219 {
220 return (INTVAL (x) == y);
221 }
222
223 #if TARGET_SUPPORTS_WIDE_INT
224 /* Returns a hash code for X (which is really a CONST_WIDE_INT). */
225
226 hashval_t
227 const_wide_int_hasher::hash (rtx x)
228 {
229 int i;
230 unsigned HOST_WIDE_INT hash = 0;
231 const_rtx xr = x;
232
233 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
234 hash += CONST_WIDE_INT_ELT (xr, i);
235
236 return (hashval_t) hash;
237 }
238
239 /* Returns nonzero if the value represented by X (which is really a
240 CONST_WIDE_INT) is the same as that given by Y (which is really a
241 CONST_WIDE_INT). */
242
243 bool
244 const_wide_int_hasher::equal (rtx x, rtx y)
245 {
246 int i;
247 const_rtx xr = x;
248 const_rtx yr = y;
249 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
250 return false;
251
252 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
253 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
254 return false;
255
256 return true;
257 }
258 #endif
259
260 /* Returns a hash code for CONST_POLY_INT X. */
261
262 hashval_t
263 const_poly_int_hasher::hash (rtx x)
264 {
265 inchash::hash h;
266 h.add_int (GET_MODE (x));
267 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
268 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
269 return h.end ();
270 }
271
272 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */
273
274 bool
275 const_poly_int_hasher::equal (rtx x, const compare_type &y)
276 {
277 if (GET_MODE (x) != y.first)
278 return false;
279 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
280 if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
281 return false;
282 return true;
283 }
284
285 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
286 hashval_t
287 const_double_hasher::hash (rtx x)
288 {
289 const_rtx const value = x;
290 hashval_t h;
291
292 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
293 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
294 else
295 {
296 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
297 /* MODE is used in the comparison, so it should be in the hash. */
298 h ^= GET_MODE (value);
299 }
300 return h;
301 }
302
303 /* Returns nonzero if the value represented by X (really a ...)
304 is the same as that represented by Y (really a ...) */
305 bool
306 const_double_hasher::equal (rtx x, rtx y)
307 {
308 const_rtx const a = x, b = y;
309
310 if (GET_MODE (a) != GET_MODE (b))
311 return 0;
312 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
313 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
314 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
315 else
316 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
317 CONST_DOUBLE_REAL_VALUE (b));
318 }
319
320 /* Returns a hash code for X (which is really a CONST_FIXED). */
321
322 hashval_t
323 const_fixed_hasher::hash (rtx x)
324 {
325 const_rtx const value = x;
326 hashval_t h;
327
328 h = fixed_hash (CONST_FIXED_VALUE (value));
329 /* MODE is used in the comparison, so it should be in the hash. */
330 h ^= GET_MODE (value);
331 return h;
332 }
333
334 /* Returns nonzero if the value represented by X is the same as that
335 represented by Y. */
336
337 bool
338 const_fixed_hasher::equal (rtx x, rtx y)
339 {
340 const_rtx const a = x, b = y;
341
342 if (GET_MODE (a) != GET_MODE (b))
343 return 0;
344 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
345 }
346
347 /* Return true if the given memory attributes are equal. */
348
349 bool
350 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
351 {
352 if (p == q)
353 return true;
354 if (!p || !q)
355 return false;
356 return (p->alias == q->alias
357 && p->offset_known_p == q->offset_known_p
358 && (!p->offset_known_p || p->offset == q->offset)
359 && p->size_known_p == q->size_known_p
360 && (!p->size_known_p || p->size == q->size)
361 && p->align == q->align
362 && p->addrspace == q->addrspace
363 && (p->expr == q->expr
364 || (p->expr != NULL_TREE && q->expr != NULL_TREE
365 && operand_equal_p (p->expr, q->expr, 0))));
366 }
367
368 /* Set MEM's memory attributes so that they are the same as ATTRS. */
369
370 static void
371 set_mem_attrs (rtx mem, mem_attrs *attrs)
372 {
373 /* If everything is the default, we can just clear the attributes. */
374 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
375 {
376 MEM_ATTRS (mem) = 0;
377 return;
378 }
379
380 if (!MEM_ATTRS (mem)
381 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
382 {
383 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
384 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
385 }
386 }
387
388 /* Returns a hash code for X (which is really a reg_attrs *). */
389
390 hashval_t
391 reg_attr_hasher::hash (reg_attrs *x)
392 {
393 const reg_attrs *const p = x;
394
395 inchash::hash h;
396 h.add_ptr (p->decl);
397 h.add_poly_hwi (p->offset);
398 return h.end ();
399 }
400
401 /* Returns nonzero if the value represented by X is the same as that given by
402 Y. */
403
404 bool
405 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
406 {
407 const reg_attrs *const p = x;
408 const reg_attrs *const q = y;
409
410 return (p->decl == q->decl && known_eq (p->offset, q->offset));
411 }
412 /* Return the reg_attrs structure for DECL and OFFSET, allocating a new
413 structure and inserting it into the hash table if no identical one is
414 already there. */
415
416 static reg_attrs *
417 get_reg_attrs (tree decl, poly_int64 offset)
418 {
419 reg_attrs attrs;
420
421 /* If everything is the default, we can just return zero. */
422 if (decl == 0 && known_eq (offset, 0))
423 return 0;
424
425 attrs.decl = decl;
426 attrs.offset = offset;
427
428 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
429 if (*slot == 0)
430 {
431 *slot = ggc_alloc<reg_attrs> ();
432 memcpy (*slot, &attrs, sizeof (reg_attrs));
433 }
434
435 return *slot;
436 }
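
/* As a quick illustration (a sketch; DECL and the offset 8 below are just
   placeholders): because the table hash-conses reg_attrs, two registers
   that describe the same decl at the same byte offset share one record,
   so the attributes can be compared by pointer:

     reg_attrs *a = get_reg_attrs (decl, 8);
     reg_attrs *b = get_reg_attrs (decl, 8);
     gcc_checking_assert (a == b);  */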
437
438
439 #if !HAVE_blockage
440 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
441 and to block register equivalences from being seen across this insn. */
442
443 rtx
444 gen_blockage (void)
445 {
446 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
447 MEM_VOLATILE_P (x) = true;
448 return x;
449 }
450 #endif
451
452
453 /* Set the mode and register number of X to MODE and REGNO. */
454
455 void
456 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
457 {
458 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
459 ? hard_regno_nregs (regno, mode)
460 : 1);
461 PUT_MODE_RAW (x, mode);
462 set_regno_raw (x, regno, nregs);
463 }
464
465 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
466 don't attempt to share with the various global pieces of rtl (such as
467 frame_pointer_rtx). */
468
469 rtx
470 gen_raw_REG (machine_mode mode, unsigned int regno)
471 {
472 rtx x = rtx_alloc (REG MEM_STAT_INFO);
473 set_mode_and_regno (x, mode, regno);
474 REG_ATTRS (x) = NULL;
475 ORIGINAL_REGNO (x) = regno;
476 return x;
477 }
478
479 /* There are some RTL codes that require special attention; the generation
480 functions do the raw handling. If you add to this list, modify
481 special_rtx in gengenrtl.c as well. */
482
483 rtx_expr_list *
484 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
485 {
486 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
487 expr_list));
488 }
489
490 rtx_insn_list *
491 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
492 {
493 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
494 insn_list));
495 }
496
497 rtx_insn *
498 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
499 basic_block bb, rtx pattern, int location, int code,
500 rtx reg_notes)
501 {
502 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
503 prev_insn, next_insn,
504 bb, pattern, location, code,
505 reg_notes));
506 }
507
508 rtx
509 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
510 {
511 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
512 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
513
514 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
515 if (const_true_rtx && arg == STORE_FLAG_VALUE)
516 return const_true_rtx;
517 #endif
518
519 /* Look up the CONST_INT in the hash table. */
520 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
521 INSERT);
522 if (*slot == 0)
523 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
524
525 return *slot;
526 }
527
528 rtx
529 gen_int_mode (poly_int64 c, machine_mode mode)
530 {
531 c = trunc_int_for_mode (c, mode);
532 if (c.is_constant ())
533 return GEN_INT (c.coeffs[0]);
534 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
535 return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
536 }
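
/* A small worked example (assuming the usual 8-bit QImode): gen_int_mode
   first truncates C to MODE, so values that differ only above the mode's
   precision map to the same shared CONST_INT:

     rtx a = gen_int_mode (0xff, QImode);	-- sign-extends to -1
     rtx b = GEN_INT (-1);			-- the cached constm1_rtx
     gcc_checking_assert (a == b);

   Constants in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] come straight
   from the const_int_rtx cache above, which is why pointer equality is a
   valid way to compare CONST_INTs.  */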
537
538 /* CONST_DOUBLEs might be created from pairs of integers, or from
539 REAL_VALUE_TYPEs. Also, their length is known only at run time,
540 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
541
542 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
543 hash table. If so, return its counterpart; otherwise add it
544 to the hash table and return it. */
545 static rtx
546 lookup_const_double (rtx real)
547 {
548 rtx *slot = const_double_htab->find_slot (real, INSERT);
549 if (*slot == 0)
550 *slot = real;
551
552 return *slot;
553 }
554
555 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
556 VALUE in mode MODE. */
557 rtx
558 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
559 {
560 rtx real = rtx_alloc (CONST_DOUBLE);
561 PUT_MODE (real, mode);
562
563 real->u.rv = value;
564
565 return lookup_const_double (real);
566 }
567
568 /* Determine whether FIXED, a CONST_FIXED, already exists in the
569 hash table. If so, return its counterpart; otherwise add it
570 to the hash table and return it. */
571
572 static rtx
573 lookup_const_fixed (rtx fixed)
574 {
575 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
576 if (*slot == 0)
577 *slot = fixed;
578
579 return *slot;
580 }
581
582 /* Return a CONST_FIXED rtx for a fixed-point value specified by
583 VALUE in mode MODE. */
584
585 rtx
586 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
587 {
588 rtx fixed = rtx_alloc (CONST_FIXED);
589 PUT_MODE (fixed, mode);
590
591 fixed->u.fv = value;
592
593 return lookup_const_fixed (fixed);
594 }
595
596 #if TARGET_SUPPORTS_WIDE_INT == 0
597 /* Constructs double_int from rtx CST. */
598
599 double_int
600 rtx_to_double_int (const_rtx cst)
601 {
602 double_int r;
603
604 if (CONST_INT_P (cst))
605 r = double_int::from_shwi (INTVAL (cst));
606 else if (CONST_DOUBLE_AS_INT_P (cst))
607 {
608 r.low = CONST_DOUBLE_LOW (cst);
609 r.high = CONST_DOUBLE_HIGH (cst);
610 }
611 else
612 gcc_unreachable ();
613
614 return r;
615 }
616 #endif
617
618 #if TARGET_SUPPORTS_WIDE_INT
619 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
620 If so, return its counterpart; otherwise add it to the hash table and
621 return it. */
622
623 static rtx
624 lookup_const_wide_int (rtx wint)
625 {
626 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
627 if (*slot == 0)
628 *slot = wint;
629
630 return *slot;
631 }
632 #endif
633
634 /* Return an rtx constant for V, given that the constant has mode MODE.
635 The returned rtx will be a CONST_INT if V fits, otherwise it will be
636 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
637 (if TARGET_SUPPORTS_WIDE_INT). */
638
639 static rtx
640 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
641 {
642 unsigned int len = v.get_len ();
643 /* Not scalar_int_mode because we also allow pointer bound modes. */
644 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
645
646 /* Allow truncation but not extension since we do not know if the
647 number is signed or unsigned. */
648 gcc_assert (prec <= v.get_precision ());
649
650 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
651 return gen_int_mode (v.elt (0), mode);
652
653 #if TARGET_SUPPORTS_WIDE_INT
654 {
655 unsigned int i;
656 rtx value;
657 unsigned int blocks_needed
658 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
659
660 if (len > blocks_needed)
661 len = blocks_needed;
662
663 value = const_wide_int_alloc (len);
664
665 /* It is so tempting to just put the mode in here. Must control
666 myself ... */
667 PUT_MODE (value, VOIDmode);
668 CWI_PUT_NUM_ELEM (value, len);
669
670 for (i = 0; i < len; i++)
671 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
672
673 return lookup_const_wide_int (value);
674 }
675 #else
676 return immed_double_const (v.elt (0), v.elt (1), mode);
677 #endif
678 }
679
680 #if TARGET_SUPPORTS_WIDE_INT == 0
681 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
682 of ints: I0 is the low-order word and I1 is the high-order word.
683 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
684 implied upper bits are copies of the high bit of i1. The value
685 itself is neither signed nor unsigned. Do not use this routine for
686 non-integer modes; convert to REAL_VALUE_TYPE and use
687 const_double_from_real_value. */
688
689 rtx
690 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
691 {
692 rtx value;
693 unsigned int i;
694
695 /* There are the following cases (note that there are no modes with
696 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
697
698 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
699 gen_int_mode.
700 2) If the value of the integer fits into HOST_WIDE_INT anyway
701 (i.e., i1 consists only of copies of the sign bit, and the signs
702 of i0 and i1 agree), then we return a CONST_INT for i0.
703 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
704 scalar_mode smode;
705 if (is_a <scalar_mode> (mode, &smode)
706 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
707 return gen_int_mode (i0, mode);
708
709 /* If this integer fits in one word, return a CONST_INT. */
710 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
711 return GEN_INT (i0);
712
713 /* We use VOIDmode for integers. */
714 value = rtx_alloc (CONST_DOUBLE);
715 PUT_MODE (value, VOIDmode);
716
717 CONST_DOUBLE_LOW (value) = i0;
718 CONST_DOUBLE_HIGH (value) = i1;
719
720 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
721 XWINT (value, i) = 0;
722
723 return lookup_const_double (value);
724 }
725 #endif
726
727 /* Return an rtx representation of C in mode MODE. */
728
729 rtx
730 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
731 {
732 if (c.is_constant ())
733 return immed_wide_int_const_1 (c.coeffs[0], mode);
734
735 /* Not scalar_int_mode because we also allow pointer bound modes. */
736 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
737
738 /* Allow truncation but not extension since we do not know if the
739 number is signed or unsigned. */
740 gcc_assert (prec <= c.coeffs[0].get_precision ());
741 poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
742
743 /* See whether we already have an rtx for this constant. */
744 inchash::hash h;
745 h.add_int (mode);
746 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
747 h.add_wide_int (newc.coeffs[i]);
748 const_poly_int_hasher::compare_type typed_value (mode, newc);
749 rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
750 h.end (), INSERT);
751 rtx x = *slot;
752 if (x)
753 return x;
754
755 /* Create a new rtx. There's a choice to be made here between installing
756 the actual mode of the rtx or leaving it as VOIDmode (for consistency
757 with CONST_INT). In practice the handling of the codes is different
758 enough that we get no benefit from using VOIDmode, and various places
759 assume that VOIDmode implies CONST_INT. Using the real mode seems like
760 the right long-term direction anyway. */
761 typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
762 size_t extra_size = twi::extra_size (prec);
763 x = rtx_alloc_v (CONST_POLY_INT,
764 sizeof (struct const_poly_int_def) + extra_size);
765 PUT_MODE (x, mode);
766 CONST_POLY_INT_COEFFS (x).set_precision (prec);
767 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
768 CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
769
770 *slot = x;
771 return x;
772 }
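
/* For example (a sketch that is only meaningful on targets where
   NUM_POLY_INT_COEFFS > 1, such as AArch64 with SVE enabled): a value of
   the form 16 + 16 * N, where N is the runtime vector-length
   indeterminate, cannot be a CONST_INT, so the code above hash-conses a
   CONST_POLY_INT for it:

     poly_int64 val (16, 16);			-- 16 + 16 * N
     rtx x = gen_int_mode (val, DImode);
     gcc_checking_assert (CONST_POLY_INT_P (x));

   On single-coefficient targets the same value would simply be the
   CONST_INT 16.  */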
773
774 rtx
775 gen_rtx_REG (machine_mode mode, unsigned int regno)
776 {
777 /* In case the MD file explicitly references the frame pointer, have
778 all such references point to the same frame pointer. This is
779 used during frame pointer elimination to distinguish the explicit
780 references to these registers from pseudos that happened to be
781 assigned to them.
782
783 If we have eliminated the frame pointer or arg pointer, we will
784 be using it as a normal register, for example as a spill
785 register. In such cases, we might be accessing it in a mode that
786 is not Pmode and therefore cannot use the pre-allocated rtx.
787
788 Also don't do this when we are making new REGs in reload, since
789 we don't want to get confused with the real pointers. */
790
791 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
792 {
793 if (regno == FRAME_POINTER_REGNUM
794 && (!reload_completed || frame_pointer_needed))
795 return frame_pointer_rtx;
796
797 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
798 && regno == HARD_FRAME_POINTER_REGNUM
799 && (!reload_completed || frame_pointer_needed))
800 return hard_frame_pointer_rtx;
801 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
802 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
803 && regno == ARG_POINTER_REGNUM)
804 return arg_pointer_rtx;
805 #endif
806 #ifdef RETURN_ADDRESS_POINTER_REGNUM
807 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
808 return return_address_pointer_rtx;
809 #endif
810 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
811 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
812 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
813 return pic_offset_table_rtx;
814 if (regno == STACK_POINTER_REGNUM)
815 return stack_pointer_rtx;
816 }
817
818 #if 0
819 /* If the per-function register table has been set up, try to re-use
820 an existing entry in that table to avoid useless generation of RTL.
821
822 This code is disabled for now until we can fix the various backends
823 which depend on having non-shared hard registers in some cases. Long
824 term we want to re-enable this code as it can significantly cut down
825 on the amount of useless RTL that gets generated.
826
827 We'll also need to fix some code that runs after reload that wants to
828 set ORIGINAL_REGNO. */
829
830 if (cfun
831 && cfun->emit
832 && regno_reg_rtx
833 && regno < FIRST_PSEUDO_REGISTER
834 && reg_raw_mode[regno] == mode)
835 return regno_reg_rtx[regno];
836 #endif
837
838 return gen_raw_REG (mode, regno);
839 }
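
/* As a concrete consequence of the sharing above (a sketch): outside of
   reload/LRA, asking for the stack pointer in Pmode returns the single
   shared rtx rather than a fresh REG, so pointer comparison is enough:

     rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
     gcc_checking_assert (sp == stack_pointer_rtx);

   Asking for the same register in some other mode (e.g. when it is being
   used as an ordinary spill register after elimination) falls through to
   gen_raw_REG and yields a new, unshared rtx.  */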
840
841 rtx
842 gen_rtx_MEM (machine_mode mode, rtx addr)
843 {
844 rtx rt = gen_rtx_raw_MEM (mode, addr);
845
846 /* This field is not cleared by the mere allocation of the rtx, so
847 we clear it here. */
848 MEM_ATTRS (rt) = 0;
849
850 return rt;
851 }
852
853 /* Generate a memory referring to non-trapping constant memory. */
854
855 rtx
856 gen_const_mem (machine_mode mode, rtx addr)
857 {
858 rtx mem = gen_rtx_MEM (mode, addr);
859 MEM_READONLY_P (mem) = 1;
860 MEM_NOTRAP_P (mem) = 1;
861 return mem;
862 }
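
/* Typical use (a sketch; the symbol name and SImode are placeholders): a
   reference to read-only data such as a constant-pool entry can be built as

     rtx addr = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
     rtx mem = gen_const_mem (SImode, addr);

   The MEM_READONLY_P and MEM_NOTRAP_P flags let later passes treat the
   load as both invariant and safe to speculate.  */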
863
864 /* Generate a MEM referring to fixed portions of the frame, e.g., register
865 save areas. */
866
867 rtx
868 gen_frame_mem (machine_mode mode, rtx addr)
869 {
870 rtx mem = gen_rtx_MEM (mode, addr);
871 MEM_NOTRAP_P (mem) = 1;
872 set_mem_alias_set (mem, get_frame_alias_set ());
873 return mem;
874 }
875
876 /* Generate a MEM referring to a temporary use of the stack, not part
877 of the fixed stack frame. For example, something which is pushed
878 by a target splitter. */
879 rtx
880 gen_tmp_stack_mem (machine_mode mode, rtx addr)
881 {
882 rtx mem = gen_rtx_MEM (mode, addr);
883 MEM_NOTRAP_P (mem) = 1;
884 if (!cfun->calls_alloca)
885 set_mem_alias_set (mem, get_frame_alias_set ());
886 return mem;
887 }
888
889 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
890 this construct would be valid, and false otherwise. */
891
892 bool
893 validate_subreg (machine_mode omode, machine_mode imode,
894 const_rtx reg, unsigned int offset)
895 {
896 unsigned int isize = GET_MODE_SIZE (imode);
897 unsigned int osize = GET_MODE_SIZE (omode);
898
899 /* All subregs must be aligned. */
900 if (offset % osize != 0)
901 return false;
902
903 /* The subreg offset cannot be outside the inner object. */
904 if (offset >= isize)
905 return false;
906
907 unsigned int regsize = REGMODE_NATURAL_SIZE (imode);
908
909 /* ??? This should not be here. Temporarily continue to allow word_mode
910 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
911 Generally, backends are doing something sketchy but it'll take time to
912 fix them all. */
913 if (omode == word_mode)
914 ;
915 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
916 is the culprit here, and not the backends. */
917 else if (osize >= regsize && isize >= osize)
918 ;
919 /* Allow component subregs of complex and vector. Though given the below
920 extraction rules, it's not always clear what that means. */
921 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
922 && GET_MODE_INNER (imode) == omode)
923 ;
924 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
925 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
926 represent this. It's questionable if this ought to be represented at
927 all -- why can't this all be hidden in post-reload splitters that make
928 arbitrary mode changes to the registers themselves. */
929 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
930 ;
931 /* Subregs involving floating point modes are not allowed to
932 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
933 (subreg:SI (reg:DF) 0) isn't. */
934 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
935 {
936 if (! (isize == osize
937 /* LRA can use subreg to store a floating point value in
938 an integer mode. Although the floating point and the
939 integer modes need the same number of hard registers,
940 the size of floating point mode can be less than the
941 integer mode. LRA also uses subregs when a register must be
942 used in a different mode within a single insn. */
943 || lra_in_progress))
944 return false;
945 }
946
947 /* Paradoxical subregs must have offset zero. */
948 if (osize > isize)
949 return offset == 0;
950
951 /* This is a normal subreg. Verify that the offset is representable. */
952
953 /* For hard registers, we already have most of these rules collected in
954 subreg_offset_representable_p. */
955 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
956 {
957 unsigned int regno = REGNO (reg);
958
959 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
960 && GET_MODE_INNER (imode) == omode)
961 ;
962 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
963 return false;
964
965 return subreg_offset_representable_p (regno, imode, offset, omode);
966 }
967
968 /* For pseudo registers, we want most of the same checks. Namely:
969
970 Assume that the pseudo register will be allocated to hard registers
971 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
972 the remainder must correspond to the lowpart of the containing hard
973 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
974 otherwise it is at the lowest offset.
975
976 Given that we've already checked the mode and offset alignment,
977 we only have to check subblock subregs here. */
978 if (osize < regsize
979 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
980 {
981 unsigned int block_size = MIN (isize, regsize);
982 unsigned int offset_within_block = offset % block_size;
983 if (BYTES_BIG_ENDIAN
984 ? offset_within_block != block_size - osize
985 : offset_within_block != 0)
986 return false;
987 }
988 return true;
989 }
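
/* Two target-independent consequences of the checks above, as a quick
   sketch (REG here is any DImode register rtx; SImode and DImode are
   assumed to be 4 and 8 bytes wide):

     validate_subreg (SImode, DImode, reg, 2);	-- false: 2 % 4 != 0
     validate_subreg (SImode, DImode, reg, 8);	-- false: offset is outside
						   the inner DImode object
     validate_subreg (SImode, DImode, reg, 4);	-- true or false, depending
						   on endianness, word_mode
						   and REGMODE_NATURAL_SIZE

   gen_rtx_SUBREG below asserts validate_subreg, so invalid combinations
   are caught at creation time.  */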
990
991 rtx
992 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
993 {
994 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
995 return gen_rtx_raw_SUBREG (mode, reg, offset);
996 }
997
998 /* Generate a SUBREG representing the least-significant part of REG if MODE
999 is smaller than mode of REG, otherwise paradoxical SUBREG. */
1000
1001 rtx
1002 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1003 {
1004 machine_mode inmode;
1005
1006 inmode = GET_MODE (reg);
1007 if (inmode == VOIDmode)
1008 inmode = mode;
1009 return gen_rtx_SUBREG (mode, reg,
1010 subreg_lowpart_offset (mode, inmode));
1011 }
1012
1013 rtx
1014 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1015 enum var_init_status status)
1016 {
1017 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1018 PAT_VAR_LOCATION_STATUS (x) = status;
1019 return x;
1020 }
1021 \f
1022
1023 /* Create an rtvec and store within it the RTXen passed as arguments. */
1024
1025 rtvec
1026 gen_rtvec (int n, ...)
1027 {
1028 int i;
1029 rtvec rt_val;
1030 va_list p;
1031
1032 va_start (p, n);
1033
1034 /* Don't allocate an empty rtvec... */
1035 if (n == 0)
1036 {
1037 va_end (p);
1038 return NULL_RTVEC;
1039 }
1040
1041 rt_val = rtvec_alloc (n);
1042
1043 for (i = 0; i < n; i++)
1044 rt_val->elem[i] = va_arg (p, rtx);
1045
1046 va_end (p);
1047 return rt_val;
1048 }
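
/* For instance (a sketch; SET0 and SET1 stand for previously built SET
   rtxes): the body of a two-part parallel insn can be assembled as

     rtvec v = gen_rtvec (2, set0, set1);
     rtx body = gen_rtx_PARALLEL (VOIDmode, v);

   gen_rtvec_v below does the same from an array instead of varargs.  */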
1049
1050 rtvec
1051 gen_rtvec_v (int n, rtx *argp)
1052 {
1053 int i;
1054 rtvec rt_val;
1055
1056 /* Don't allocate an empty rtvec... */
1057 if (n == 0)
1058 return NULL_RTVEC;
1059
1060 rt_val = rtvec_alloc (n);
1061
1062 for (i = 0; i < n; i++)
1063 rt_val->elem[i] = *argp++;
1064
1065 return rt_val;
1066 }
1067
1068 rtvec
1069 gen_rtvec_v (int n, rtx_insn **argp)
1070 {
1071 int i;
1072 rtvec rt_val;
1073
1074 /* Don't allocate an empty rtvec... */
1075 if (n == 0)
1076 return NULL_RTVEC;
1077
1078 rt_val = rtvec_alloc (n);
1079
1080 for (i = 0; i < n; i++)
1081 rt_val->elem[i] = *argp++;
1082
1083 return rt_val;
1084 }
1085
1086 \f
1087 /* Return the number of bytes between the start of an OUTER_MODE
1088 in-memory value and the start of an INNER_MODE in-memory value,
1089 given that the former is a lowpart of the latter. It may be a
1090 paradoxical lowpart, in which case the offset will be negative
1091 on big-endian targets. */
1092
1093 int
1094 byte_lowpart_offset (machine_mode outer_mode,
1095 machine_mode inner_mode)
1096 {
1097 if (paradoxical_subreg_p (outer_mode, inner_mode))
1098 return -subreg_lowpart_offset (inner_mode, outer_mode);
1099 else
1100 return subreg_lowpart_offset (outer_mode, inner_mode);
1101 }
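
/* Concretely (assuming 4-byte SImode and 8-byte DImode, on a target that
   is uniformly little- or big-endian): byte_lowpart_offset (SImode, DImode)
   is 0 on little-endian and 4 on big-endian, while the paradoxical case
   byte_lowpart_offset (DImode, SImode) is 0 and -4 respectively.  */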
1102
1103 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1104 from address X. For paradoxical big-endian subregs this is a
1105 negative value, otherwise it's the same as OFFSET. */
1106
1107 int
1108 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1109 unsigned int offset)
1110 {
1111 if (paradoxical_subreg_p (outer_mode, inner_mode))
1112 {
1113 gcc_assert (offset == 0);
1114 return -subreg_lowpart_offset (inner_mode, outer_mode);
1115 }
1116 return offset;
1117 }
1118
1119 /* As above, but return the offset that existing subreg X would have
1120 if SUBREG_REG (X) were stored in memory. The only significant thing
1121 about the current SUBREG_REG is its mode. */
1122
1123 int
1124 subreg_memory_offset (const_rtx x)
1125 {
1126 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1127 SUBREG_BYTE (x));
1128 }
1129 \f
1130 /* Generate a REG rtx for a new pseudo register of mode MODE.
1131 This pseudo is assigned the next sequential register number. */
1132
1133 rtx
1134 gen_reg_rtx (machine_mode mode)
1135 {
1136 rtx val;
1137 unsigned int align = GET_MODE_ALIGNMENT (mode);
1138
1139 gcc_assert (can_create_pseudo_p ());
1140
1141 /* If a virtual register with bigger mode alignment is generated,
1142 increase the estimated stack alignment, because the register might
1143 be spilled to the stack later. */
1144 if (SUPPORTS_STACK_ALIGNMENT
1145 && crtl->stack_alignment_estimated < align
1146 && !crtl->stack_realign_processed)
1147 {
1148 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1149 if (crtl->stack_alignment_estimated < min_align)
1150 crtl->stack_alignment_estimated = min_align;
1151 }
1152
1153 if (generating_concat_p
1154 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1155 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1156 {
1157 /* For complex modes, don't make a single pseudo.
1158 Instead, make a CONCAT of two pseudos.
1159 This allows noncontiguous allocation of the real and imaginary parts,
1160 which makes much better code. Besides, allocating DCmode
1161 pseudos overstrains reload on some machines like the 386. */
1162 rtx realpart, imagpart;
1163 machine_mode partmode = GET_MODE_INNER (mode);
1164
1165 realpart = gen_reg_rtx (partmode);
1166 imagpart = gen_reg_rtx (partmode);
1167 return gen_rtx_CONCAT (mode, realpart, imagpart);
1168 }
1169
1170 /* Do not call gen_reg_rtx with uninitialized crtl. */
1171 gcc_assert (crtl->emit.regno_pointer_align_length);
1172
1173 crtl->emit.ensure_regno_capacity ();
1174 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1175
1176 val = gen_raw_REG (mode, reg_rtx_no);
1177 regno_reg_rtx[reg_rtx_no++] = val;
1178 return val;
1179 }
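
/* For example (a sketch, relying on generating_concat_p being set, as it
   normally is during expansion): a complex pseudo is really a CONCAT of
   two scalar pseudos, so its parts can be allocated independently:

     rtx c = gen_reg_rtx (DCmode);
     gcc_checking_assert (GET_CODE (c) == CONCAT
			  && GET_MODE (XEXP (c, 0)) == DFmode);  */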
1180
1181 /* Make sure regno_pointer_align and regno_reg_rtx are large
1182 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1183
1184 void
1185 emit_status::ensure_regno_capacity ()
1186 {
1187 int old_size = regno_pointer_align_length;
1188
1189 if (reg_rtx_no < old_size)
1190 return;
1191
1192 int new_size = old_size * 2;
1193 while (reg_rtx_no >= new_size)
1194 new_size *= 2;
1195
1196 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1197 memset (tmp + old_size, 0, new_size - old_size);
1198 regno_pointer_align = (unsigned char *) tmp;
1199
1200 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1201 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1202 regno_reg_rtx = new1;
1203
1204 crtl->emit.regno_pointer_align_length = new_size;
1205 }
1206
1207 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1208
1209 bool
1210 reg_is_parm_p (rtx reg)
1211 {
1212 tree decl;
1213
1214 gcc_assert (REG_P (reg));
1215 decl = REG_EXPR (reg);
1216 return (decl && TREE_CODE (decl) == PARM_DECL);
1217 }
1218
1219 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
1220 to the REG_OFFSET. */
1221
1222 static void
1223 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1224 {
1225 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1226 REG_OFFSET (reg) + offset);
1227 }
1228
1229 /* Generate a register with same attributes as REG, but with OFFSET
1230 added to the REG_OFFSET. */
1231
1232 rtx
1233 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1234 poly_int64 offset)
1235 {
1236 rtx new_rtx = gen_rtx_REG (mode, regno);
1237
1238 update_reg_offset (new_rtx, reg, offset);
1239 return new_rtx;
1240 }
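
/* For instance (a sketch; REGNO_HI is a placeholder hard register number):
   when a DImode value that lives in a variable is split into two SImode
   hard registers, the second half can record that it holds the same
   variable four bytes further on:

     rtx hi = gen_rtx_REG_offset (reg, SImode, regno_hi, 4);

   after which REG_EXPR (hi) equals REG_EXPR (reg) and REG_OFFSET (hi) is
   REG_OFFSET (reg) + 4, now carried as a poly_int64 so that
   variable-length modes can be described as well.  */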
1241
1242 /* Generate a new pseudo-register with the same attributes as REG, but
1243 with OFFSET added to the REG_OFFSET. */
1244
1245 rtx
1246 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1247 {
1248 rtx new_rtx = gen_reg_rtx (mode);
1249
1250 update_reg_offset (new_rtx, reg, offset);
1251 return new_rtx;
1252 }
1253
1254 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1255 new register is a (possibly paradoxical) lowpart of the old one. */
1256
1257 void
1258 adjust_reg_mode (rtx reg, machine_mode mode)
1259 {
1260 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1261 PUT_MODE (reg, mode);
1262 }
1263
1264 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1265 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1266
1267 void
1268 set_reg_attrs_from_value (rtx reg, rtx x)
1269 {
1270 poly_int64 offset;
1271 bool can_be_reg_pointer = true;
1272
1273 /* Don't call mark_reg_pointer for incompatible pointer sign
1274 extension. */
1275 while (GET_CODE (x) == SIGN_EXTEND
1276 || GET_CODE (x) == ZERO_EXTEND
1277 || GET_CODE (x) == TRUNCATE
1278 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1279 {
1280 #if defined(POINTERS_EXTEND_UNSIGNED)
1281 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1282 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1283 || (paradoxical_subreg_p (x)
1284 && ! (SUBREG_PROMOTED_VAR_P (x)
1285 && SUBREG_CHECK_PROMOTED_SIGN (x,
1286 POINTERS_EXTEND_UNSIGNED))))
1287 && !targetm.have_ptr_extend ())
1288 can_be_reg_pointer = false;
1289 #endif
1290 x = XEXP (x, 0);
1291 }
1292
1293 /* Hard registers can be reused for multiple purposes within the same
1294 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1295 on them is wrong. */
1296 if (HARD_REGISTER_P (reg))
1297 return;
1298
1299 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1300 if (MEM_P (x))
1301 {
1302 if (MEM_OFFSET_KNOWN_P (x))
1303 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1304 MEM_OFFSET (x) + offset);
1305 if (can_be_reg_pointer && MEM_POINTER (x))
1306 mark_reg_pointer (reg, 0);
1307 }
1308 else if (REG_P (x))
1309 {
1310 if (REG_ATTRS (x))
1311 update_reg_offset (reg, x, offset);
1312 if (can_be_reg_pointer && REG_POINTER (x))
1313 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1314 }
1315 }
1316
1317 /* Generate a REG rtx for a new pseudo register, copying the mode
1318 and attributes from X. */
1319
1320 rtx
1321 gen_reg_rtx_and_attrs (rtx x)
1322 {
1323 rtx reg = gen_reg_rtx (GET_MODE (x));
1324 set_reg_attrs_from_value (reg, x);
1325 return reg;
1326 }
1327
1328 /* Set the register attributes for registers contained in PARM_RTX.
1329 Use needed values from memory attributes of MEM. */
1330
1331 void
1332 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1333 {
1334 if (REG_P (parm_rtx))
1335 set_reg_attrs_from_value (parm_rtx, mem);
1336 else if (GET_CODE (parm_rtx) == PARALLEL)
1337 {
1338 /* Check for a NULL entry in the first slot, used to indicate that the
1339 parameter goes both on the stack and in registers. */
1340 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1341 for (; i < XVECLEN (parm_rtx, 0); i++)
1342 {
1343 rtx x = XVECEXP (parm_rtx, 0, i);
1344 if (REG_P (XEXP (x, 0)))
1345 REG_ATTRS (XEXP (x, 0))
1346 = get_reg_attrs (MEM_EXPR (mem),
1347 INTVAL (XEXP (x, 1)));
1348 }
1349 }
1350 }
1351
1352 /* Set the REG_ATTRS for registers in value X, given that X represents
1353 decl T. */
1354
1355 void
1356 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1357 {
1358 if (!t)
1359 return;
1360 tree tdecl = t;
1361 if (GET_CODE (x) == SUBREG)
1362 {
1363 gcc_assert (subreg_lowpart_p (x));
1364 x = SUBREG_REG (x);
1365 }
1366 if (REG_P (x))
1367 REG_ATTRS (x)
1368 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1369 DECL_P (tdecl)
1370 ? DECL_MODE (tdecl)
1371 : TYPE_MODE (TREE_TYPE (tdecl))));
1372 if (GET_CODE (x) == CONCAT)
1373 {
1374 if (REG_P (XEXP (x, 0)))
1375 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1376 if (REG_P (XEXP (x, 1)))
1377 REG_ATTRS (XEXP (x, 1))
1378 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1379 }
1380 if (GET_CODE (x) == PARALLEL)
1381 {
1382 int i, start;
1383
1384 /* Check for a NULL entry, used to indicate that the parameter goes
1385 both on the stack and in registers. */
1386 if (XEXP (XVECEXP (x, 0, 0), 0))
1387 start = 0;
1388 else
1389 start = 1;
1390
1391 for (i = start; i < XVECLEN (x, 0); i++)
1392 {
1393 rtx y = XVECEXP (x, 0, i);
1394 if (REG_P (XEXP (y, 0)))
1395 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1396 }
1397 }
1398 }
1399
1400 /* Assign the RTX X to declaration T. */
1401
1402 void
1403 set_decl_rtl (tree t, rtx x)
1404 {
1405 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1406 if (x)
1407 set_reg_attrs_for_decl_rtl (t, x);
1408 }
1409
1410 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1411 if the ABI requires the parameter to be passed by reference. */
1412
1413 void
1414 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1415 {
1416 DECL_INCOMING_RTL (t) = x;
1417 if (x && !by_reference_p)
1418 set_reg_attrs_for_decl_rtl (t, x);
1419 }
1420
1421 /* Identify REG (which may be a CONCAT) as a user register. */
1422
1423 void
1424 mark_user_reg (rtx reg)
1425 {
1426 if (GET_CODE (reg) == CONCAT)
1427 {
1428 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1429 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1430 }
1431 else
1432 {
1433 gcc_assert (REG_P (reg));
1434 REG_USERVAR_P (reg) = 1;
1435 }
1436 }
1437
1438 /* Identify REG as a probable pointer register and show its alignment
1439 as ALIGN, if nonzero. */
1440
1441 void
1442 mark_reg_pointer (rtx reg, int align)
1443 {
1444 if (! REG_POINTER (reg))
1445 {
1446 REG_POINTER (reg) = 1;
1447
1448 if (align)
1449 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1450 }
1451 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1452 /* We can no longer be sure just how aligned this pointer is. */
1453 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1454 }
1455
1456 /* Return 1 plus largest pseudo reg number used in the current function. */
1457
1458 int
1459 max_reg_num (void)
1460 {
1461 return reg_rtx_no;
1462 }
1463
1464 /* Return 1 + the largest label number used so far in the current function. */
1465
1466 int
1467 max_label_num (void)
1468 {
1469 return label_num;
1470 }
1471
1472 /* Return first label number used in this function (if any were used). */
1473
1474 int
1475 get_first_label_num (void)
1476 {
1477 return first_label_num;
1478 }
1479
1480 /* If the rtx for label was created during the expansion of a nested
1481 function, then first_label_num won't include this label number.
1482 Fix this now so that array indices work later. */
1483
1484 void
1485 maybe_set_first_label_num (rtx_code_label *x)
1486 {
1487 if (CODE_LABEL_NUMBER (x) < first_label_num)
1488 first_label_num = CODE_LABEL_NUMBER (x);
1489 }
1490
1491 /* For use by the RTL function loader, when mingling with normal
1492 functions.
1493 Ensure that label_num is greater than the label num of X, to avoid
1494 duplicate labels in the generated assembler. */
1495
1496 void
1497 maybe_set_max_label_num (rtx_code_label *x)
1498 {
1499 if (CODE_LABEL_NUMBER (x) >= label_num)
1500 label_num = CODE_LABEL_NUMBER (x) + 1;
1501 }
1502
1503 \f
1504 /* Return a value representing some low-order bits of X, where the number
1505 of low-order bits is given by MODE. Note that no conversion is done
1506 between floating-point and fixed-point values; rather, the bit
1507 representation is returned.
1508
1509 This function handles the cases in common between gen_lowpart, below,
1510 and two variants in cse.c and combine.c. These are the cases that can
1511 be safely handled at all points in the compilation.
1512
1513 If this is not a case we can handle, return 0. */
1514
1515 rtx
1516 gen_lowpart_common (machine_mode mode, rtx x)
1517 {
1518 int msize = GET_MODE_SIZE (mode);
1519 int xsize;
1520 machine_mode innermode;
1521
1522 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1523 so we have to make one up. Yuk. */
1524 innermode = GET_MODE (x);
1525 if (CONST_INT_P (x)
1526 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1527 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1528 else if (innermode == VOIDmode)
1529 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1530
1531 xsize = GET_MODE_SIZE (innermode);
1532
1533 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1534
1535 if (innermode == mode)
1536 return x;
1537
1538 if (SCALAR_FLOAT_MODE_P (mode))
1539 {
1540 /* Don't allow paradoxical FLOAT_MODE subregs. */
1541 if (msize > xsize)
1542 return 0;
1543 }
1544 else
1545 {
1546 /* MODE must occupy no more of the underlying registers than X. */
1547 unsigned int regsize = REGMODE_NATURAL_SIZE (innermode);
1548 unsigned int mregs = CEIL (msize, regsize);
1549 unsigned int xregs = CEIL (xsize, regsize);
1550 if (mregs > xregs)
1551 return 0;
1552 }
1553
1554 scalar_int_mode int_mode, int_innermode, from_mode;
1555 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1556 && is_a <scalar_int_mode> (mode, &int_mode)
1557 && is_a <scalar_int_mode> (innermode, &int_innermode)
1558 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1559 {
1560 /* If we are getting the low-order part of something that has been
1561 sign- or zero-extended, we can either just use the object being
1562 extended or make a narrower extension. If we want an even smaller
1563 piece than the size of the object being extended, call ourselves
1564 recursively.
1565
1566 This case is used mostly by combine and cse. */
1567
1568 if (from_mode == int_mode)
1569 return XEXP (x, 0);
1570 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1571 return gen_lowpart_common (int_mode, XEXP (x, 0));
1572 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1573 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1574 }
1575 else if (GET_CODE (x) == SUBREG || REG_P (x)
1576 || GET_CODE (x) == CONCAT || const_vec_p (x)
1577 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1578 || CONST_POLY_INT_P (x))
1579 return lowpart_subreg (mode, x, innermode);
1580
1581 /* Otherwise, we can't do this. */
1582 return 0;
1583 }
1584 \f
1585 rtx
1586 gen_highpart (machine_mode mode, rtx x)
1587 {
1588 unsigned int msize = GET_MODE_SIZE (mode);
1589 rtx result;
1590
1591 /* This case loses if X is a subreg. To catch bugs early,
1592 complain if an invalid MODE is used even in other cases. */
1593 gcc_assert (msize <= UNITS_PER_WORD
1594 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1595
1596 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1597 subreg_highpart_offset (mode, GET_MODE (x)));
1598 gcc_assert (result);
1599
1600 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1601 the target if we have a MEM. gen_highpart must return a valid operand,
1602 emitting code if necessary to do so. */
1603 if (MEM_P (result))
1604 {
1605 result = validize_mem (result);
1606 gcc_assert (result);
1607 }
1608
1609 return result;
1610 }
1611
1612 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1613 can be a VOIDmode constant. */
1614 rtx
1615 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1616 {
1617 if (GET_MODE (exp) != VOIDmode)
1618 {
1619 gcc_assert (GET_MODE (exp) == innermode);
1620 return gen_highpart (outermode, exp);
1621 }
1622 return simplify_gen_subreg (outermode, exp, innermode,
1623 subreg_highpart_offset (outermode, innermode));
1624 }
1625
1626 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1627 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1628
1629 unsigned int
1630 subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes)
1631 {
1632 if (outer_bytes > inner_bytes)
1633 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1634 return 0;
1635
1636 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1637 return inner_bytes - outer_bytes;
1638 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1639 return 0;
1640 else
1641 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1642 }
1643
1644 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1645 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1646
1647 unsigned int
1648 subreg_size_highpart_offset (unsigned int outer_bytes,
1649 unsigned int inner_bytes)
1650 {
1651 gcc_assert (inner_bytes >= outer_bytes);
1652
1653 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1654 return 0;
1655 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1656 return inner_bytes - outer_bytes;
1657 else
1658 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1659 (inner_bytes - outer_bytes)
1660 * BITS_PER_UNIT);
1661 }
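
/* A worked example (uniformly little- or big-endian targets; a 4-byte
   outer part of an 8-byte inner value): subreg_size_lowpart_offset (4, 8)
   is 0 on little-endian and 4 on big-endian, and
   subreg_size_highpart_offset (4, 8) is the mirror image, 4 and 0
   respectively.  */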
1662
1663 /* Return 1 iff X, assumed to be a SUBREG,
1664 refers to the least significant part of its containing reg.
1665 If X is not a SUBREG, always return 1 (it is its own low part!). */
1666
1667 int
1668 subreg_lowpart_p (const_rtx x)
1669 {
1670 if (GET_CODE (x) != SUBREG)
1671 return 1;
1672 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1673 return 0;
1674
1675 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1676 == SUBREG_BYTE (x));
1677 }
1678 \f
1679 /* Return subword OFFSET of operand OP.
1680 The word number, OFFSET, is interpreted as the word number starting
1681 at the low-order address. OFFSET 0 is the low-order word if not
1682 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1683
1684 If we cannot extract the required word, we return zero. Otherwise,
1685 an rtx corresponding to the requested word will be returned.
1686
1687 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1688 reload has completed, a valid address will always be returned. After
1689 reload, if a valid address cannot be returned, we return zero.
1690
1691 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1692 it is the responsibility of the caller.
1693
1694 MODE is the mode of OP in case it is a CONST_INT.
1695
1696 ??? This is still rather broken for some cases. The problem for the
1697 moment is that all callers of this thing provide no 'goal mode' to
1698 tell us to work with. This exists because all callers were written
1699 in a word-based SUBREG world.
1700 Nowadays most uses of this function can be replaced by
1701 simplify_subreg.
1702 */
1703
1704 rtx
1705 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
1706 {
1707 if (mode == VOIDmode)
1708 mode = GET_MODE (op);
1709
1710 gcc_assert (mode != VOIDmode);
1711
1712 /* If OP is narrower than a word, fail. */
1713 if (mode != BLKmode
1714 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1715 return 0;
1716
1717 /* If we want a word outside OP, return zero. */
1718 if (mode != BLKmode
1719 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1720 return const0_rtx;
1721
1722 /* Form a new MEM at the requested address. */
1723 if (MEM_P (op))
1724 {
1725 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1726
1727 if (! validate_address)
1728 return new_rtx;
1729
1730 else if (reload_completed)
1731 {
1732 if (! strict_memory_address_addr_space_p (word_mode,
1733 XEXP (new_rtx, 0),
1734 MEM_ADDR_SPACE (op)))
1735 return 0;
1736 }
1737 else
1738 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1739 }
1740
1741 /* Rest can be handled by simplify_subreg. */
1742 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1743 }
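
/* For example (a sketch, assuming a 32-bit target where word_mode is
   SImode and UNITS_PER_WORD is 4): the two words of a DImode operand OP
   can be fetched as

     rtx w0 = operand_subword (op, 0, 1, DImode);
     rtx w1 = operand_subword (op, 1, 1, DImode);

   with WORDS_BIG_ENDIAN deciding which of the two is the most significant
   half, and a request past the end (word 2 here) returning const0_rtx.  */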
1744
1745 /* Similar to `operand_subword', but never return 0. If we can't
1746 extract the required subword, put OP into a register and try again.
1747 The second attempt must succeed. We always validate the address in
1748 this case.
1749
1750 MODE is the mode of OP, in case it is CONST_INT. */
1751
1752 rtx
1753 operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
1754 {
1755 rtx result = operand_subword (op, offset, 1, mode);
1756
1757 if (result)
1758 return result;
1759
1760 if (mode != BLKmode && mode != VOIDmode)
1761 {
1762 /* If this is a register which cannot be accessed by words, copy it
1763 to a pseudo register. */
1764 if (REG_P (op))
1765 op = copy_to_reg (op);
1766 else
1767 op = force_reg (mode, op);
1768 }
1769
1770 result = operand_subword (op, offset, 1, mode);
1771 gcc_assert (result);
1772
1773 return result;
1774 }
1775 \f
1776 /* Returns 1 if the two MEM_EXPRs, EXPR1 and EXPR2, can be considered
1777 equal, and 0 otherwise. */
1778
1779 int
1780 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1781 {
1782 if (expr1 == expr2)
1783 return 1;
1784
1785 if (! expr1 || ! expr2)
1786 return 0;
1787
1788 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1789 return 0;
1790
1791 return operand_equal_p (expr1, expr2, 0);
1792 }
1793
1794 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1795 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1796 -1 if not known. */
1797
1798 int
1799 get_mem_align_offset (rtx mem, unsigned int align)
1800 {
1801 tree expr;
1802 unsigned HOST_WIDE_INT offset;
1803
1804 /* This function can't use
1805 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1806 || (MAX (MEM_ALIGN (mem),
1807 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1808 < align))
1809 return -1;
1810 else
1811 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1812 for two reasons:
1813 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1814 for <variable>. get_inner_reference doesn't handle it and
1815 even if it did, the alignment in that case needs to be determined
1816 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1817 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1818 isn't sufficiently aligned, the object it is in might be. */
1819 gcc_assert (MEM_P (mem));
1820 expr = MEM_EXPR (mem);
1821 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1822 return -1;
1823
1824 offset = MEM_OFFSET (mem);
1825 if (DECL_P (expr))
1826 {
1827 if (DECL_ALIGN (expr) < align)
1828 return -1;
1829 }
1830 else if (INDIRECT_REF_P (expr))
1831 {
1832 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1833 return -1;
1834 }
1835 else if (TREE_CODE (expr) == COMPONENT_REF)
1836 {
1837 while (1)
1838 {
1839 tree inner = TREE_OPERAND (expr, 0);
1840 tree field = TREE_OPERAND (expr, 1);
1841 tree byte_offset = component_ref_field_offset (expr);
1842 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1843
1844 if (!byte_offset
1845 || !tree_fits_uhwi_p (byte_offset)
1846 || !tree_fits_uhwi_p (bit_offset))
1847 return -1;
1848
1849 offset += tree_to_uhwi (byte_offset);
1850 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1851
1852 if (inner == NULL_TREE)
1853 {
1854 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1855 < (unsigned int) align)
1856 return -1;
1857 break;
1858 }
1859 else if (DECL_P (inner))
1860 {
1861 if (DECL_ALIGN (inner) < align)
1862 return -1;
1863 break;
1864 }
1865 else if (TREE_CODE (inner) != COMPONENT_REF)
1866 return -1;
1867 expr = inner;
1868 }
1869 }
1870 else
1871 return -1;
1872
1873 return offset & ((align / BITS_PER_UNIT) - 1);
1874 }
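/* Worked example (illustrative, not from the sources): for a MEM whose
   MEM_EXPR is a decl with DECL_ALIGN of at least 64 bits and whose
   MEM_OFFSET is 6, get_mem_align_offset (mem, 64) returns 6 & 7 == 6;
   if the decl's alignment is smaller than the 64 bits asked for, the
   result is -1 instead.  */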
1875
1876 /* Given REF (a MEM) and T, either the type of REF or the expression
1877 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1878 if we are making a new object of this type. BITPOS is nonzero if
1879 there is an offset outstanding on T that will be applied later. */
1880
1881 void
1882 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1883 HOST_WIDE_INT bitpos)
1884 {
1885 HOST_WIDE_INT apply_bitpos = 0;
1886 tree type;
1887 struct mem_attrs attrs, *defattrs, *refattrs;
1888 addr_space_t as;
1889
1890 /* It can happen that type_for_mode was given a mode for which there
1891 is no language-level type; in that case it returns NULL, which we
1892 can see here. */
1893 if (t == NULL_TREE)
1894 return;
1895
1896 type = TYPE_P (t) ? t : TREE_TYPE (t);
1897 if (type == error_mark_node)
1898 return;
1899
1900 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1901 wrong answer, as it assumes that DECL_RTL already has the right alias
1902 info. Callers should not set DECL_RTL until after the call to
1903 set_mem_attributes. */
1904 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1905
1906 memset (&attrs, 0, sizeof (attrs));
1907
1908 /* Get the alias set from the expression or type (perhaps using a
1909 front-end routine) and use it. */
1910 attrs.alias = get_alias_set (t);
1911
1912 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1913 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1914
1915 /* Default values from pre-existing memory attributes if present. */
1916 refattrs = MEM_ATTRS (ref);
1917 if (refattrs)
1918 {
1919 /* ??? Can this ever happen? Calling this routine on a MEM that
1920 already carries memory attributes should probably be invalid. */
1921 attrs.expr = refattrs->expr;
1922 attrs.offset_known_p = refattrs->offset_known_p;
1923 attrs.offset = refattrs->offset;
1924 attrs.size_known_p = refattrs->size_known_p;
1925 attrs.size = refattrs->size;
1926 attrs.align = refattrs->align;
1927 }
1928
1929 /* Otherwise, default values from the mode of the MEM reference. */
1930 else
1931 {
1932 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1933 gcc_assert (!defattrs->expr);
1934 gcc_assert (!defattrs->offset_known_p);
1935
1936 /* Respect mode size. */
1937 attrs.size_known_p = defattrs->size_known_p;
1938 attrs.size = defattrs->size;
1939 /* ??? Is this really necessary? We probably should always get
1940 the size from the type below. */
1941
1942 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1943 if T is an object, always compute the object alignment below. */
1944 if (TYPE_P (t))
1945 attrs.align = defattrs->align;
1946 else
1947 attrs.align = BITS_PER_UNIT;
1948 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1949 e.g. if the type carries an alignment attribute. Should we be
1950 able to simply always use TYPE_ALIGN? */
1951 }
1952
1953 /* We can set the alignment from the type if we are making an object or if
1954 this is an INDIRECT_REF. */
1955 if (objectp || TREE_CODE (t) == INDIRECT_REF)
1956 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1957
1958 /* If the size is known, we can set that. */
1959 tree new_size = TYPE_SIZE_UNIT (type);
1960
1961 /* The address-space is that of the type. */
1962 as = TYPE_ADDR_SPACE (type);
1963
1964 /* If T is not a type, we may be able to deduce some more information about
1965 the expression. */
1966 if (! TYPE_P (t))
1967 {
1968 tree base;
1969
1970 if (TREE_THIS_VOLATILE (t))
1971 MEM_VOLATILE_P (ref) = 1;
1972
1973 /* Now remove any conversions: they don't change what the underlying
1974 object is. Likewise for SAVE_EXPR. */
1975 while (CONVERT_EXPR_P (t)
1976 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1977 || TREE_CODE (t) == SAVE_EXPR)
1978 t = TREE_OPERAND (t, 0);
1979
1980 /* Note whether this expression can trap. */
1981 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1982
1983 base = get_base_address (t);
1984 if (base)
1985 {
1986 if (DECL_P (base)
1987 && TREE_READONLY (base)
1988 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1989 && !TREE_THIS_VOLATILE (base))
1990 MEM_READONLY_P (ref) = 1;
1991
1992 /* Mark static const strings readonly as well. */
1993 if (TREE_CODE (base) == STRING_CST
1994 && TREE_READONLY (base)
1995 && TREE_STATIC (base))
1996 MEM_READONLY_P (ref) = 1;
1997
1998 /* Address-space information is on the base object. */
1999 if (TREE_CODE (base) == MEM_REF
2000 || TREE_CODE (base) == TARGET_MEM_REF)
2001 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2002 0))));
2003 else
2004 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2005 }
2006
2007 /* If this expression uses its parent's alias set, mark it such
2008 that we won't change it. */
2009 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2010 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2011
2012 /* If this is a decl, set the attributes of the MEM from it. */
2013 if (DECL_P (t))
2014 {
2015 attrs.expr = t;
2016 attrs.offset_known_p = true;
2017 attrs.offset = 0;
2018 apply_bitpos = bitpos;
2019 new_size = DECL_SIZE_UNIT (t);
2020 }
2021
2022 /* ??? If we end up with a constant here do record a MEM_EXPR. */
2023 else if (CONSTANT_CLASS_P (t))
2024 ;
2025
2026 /* If this is a field reference, record it. */
2027 else if (TREE_CODE (t) == COMPONENT_REF)
2028 {
2029 attrs.expr = t;
2030 attrs.offset_known_p = true;
2031 attrs.offset = 0;
2032 apply_bitpos = bitpos;
2033 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2034 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2035 }
2036
2037 /* If this is an array reference, look for an outer field reference. */
2038 else if (TREE_CODE (t) == ARRAY_REF)
2039 {
2040 tree off_tree = size_zero_node;
2041 /* We can't modify t, because we use it at the end of the
2042 function. */
2043 tree t2 = t;
2044
2045 do
2046 {
2047 tree index = TREE_OPERAND (t2, 1);
2048 tree low_bound = array_ref_low_bound (t2);
2049 tree unit_size = array_ref_element_size (t2);
2050
2051 /* We assume all arrays have sizes that are a multiple of a byte.
2052 First subtract the lower bound, if any, in the type of the
2053 index, then convert to sizetype and multiply by the size of
2054 the array element. */
2055 if (! integer_zerop (low_bound))
2056 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
2057 index, low_bound);
2058
2059 off_tree = size_binop (PLUS_EXPR,
2060 size_binop (MULT_EXPR,
2061 fold_convert (sizetype,
2062 index),
2063 unit_size),
2064 off_tree);
2065 t2 = TREE_OPERAND (t2, 0);
2066 }
2067 while (TREE_CODE (t2) == ARRAY_REF);
2068
2069 if (DECL_P (t2)
2070 || (TREE_CODE (t2) == COMPONENT_REF
2071 /* For trailing arrays t2 doesn't have a size that
2072 covers all valid accesses. */
2073 && ! array_at_struct_end_p (t)))
2074 {
2075 attrs.expr = t2;
2076 attrs.offset_known_p = false;
2077 if (tree_fits_uhwi_p (off_tree))
2078 {
2079 attrs.offset_known_p = true;
2080 attrs.offset = tree_to_uhwi (off_tree);
2081 apply_bitpos = bitpos;
2082 }
2083 }
2084 /* Else do not record a MEM_EXPR. */
2085 }
2086
2087 /* If this is an indirect reference, record it. */
2088 else if (TREE_CODE (t) == MEM_REF
2089 || TREE_CODE (t) == TARGET_MEM_REF)
2090 {
2091 attrs.expr = t;
2092 attrs.offset_known_p = true;
2093 attrs.offset = 0;
2094 apply_bitpos = bitpos;
2095 }
2096
2097 /* Compute the alignment. */
2098 unsigned int obj_align;
2099 unsigned HOST_WIDE_INT obj_bitpos;
2100 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2101 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
2102 if (obj_bitpos != 0)
2103 obj_align = least_bit_hwi (obj_bitpos);
2104 attrs.align = MAX (attrs.align, obj_align);
2105 }
2106
2107 if (tree_fits_uhwi_p (new_size))
2108 {
2109 attrs.size_known_p = true;
2110 attrs.size = tree_to_uhwi (new_size);
2111 }
2112
2113 /* If we modified OFFSET based on T, then subtract the outstanding
2114 bit position offset. Similarly, increase the size of the accessed
2115 object to contain the negative offset. */
2116 if (apply_bitpos)
2117 {
2118 gcc_assert (attrs.offset_known_p);
2119 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
2120 if (attrs.size_known_p)
2121 attrs.size += apply_bitpos / BITS_PER_UNIT;
2122 }
2123
2124 /* Now set the attributes we computed above. */
2125 attrs.addrspace = as;
2126 set_mem_attrs (ref, &attrs);
2127 }
2128
2129 void
2130 set_mem_attributes (rtx ref, tree t, int objectp)
2131 {
2132 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2133 }
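/* Illustrative sketch (DECL and ADDR are hypothetical): after a variable
   has been given a memory location, its attributes are typically
   attached like

       rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
       set_mem_attributes (mem, decl, 1);

   which records the alias set, MEM_EXPR, offset, size and alignment
   derived from DECL on the new MEM.  */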
2134
2135 /* Set the alias set of MEM to SET. */
2136
2137 void
2138 set_mem_alias_set (rtx mem, alias_set_type set)
2139 {
2140 struct mem_attrs attrs;
2141
2142 /* If the new and old alias sets don't conflict, something is wrong. */
2143 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2144 attrs = *get_mem_attrs (mem);
2145 attrs.alias = set;
2146 set_mem_attrs (mem, &attrs);
2147 }
2148
2149 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2150
2151 void
2152 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2153 {
2154 struct mem_attrs attrs;
2155
2156 attrs = *get_mem_attrs (mem);
2157 attrs.addrspace = addrspace;
2158 set_mem_attrs (mem, &attrs);
2159 }
2160
2161 /* Set the alignment of MEM to ALIGN bits. */
2162
2163 void
2164 set_mem_align (rtx mem, unsigned int align)
2165 {
2166 struct mem_attrs attrs;
2167
2168 attrs = *get_mem_attrs (mem);
2169 attrs.align = align;
2170 set_mem_attrs (mem, &attrs);
2171 }
2172
2173 /* Set the expr for MEM to EXPR. */
2174
2175 void
2176 set_mem_expr (rtx mem, tree expr)
2177 {
2178 struct mem_attrs attrs;
2179
2180 attrs = *get_mem_attrs (mem);
2181 attrs.expr = expr;
2182 set_mem_attrs (mem, &attrs);
2183 }
2184
2185 /* Set the offset of MEM to OFFSET. */
2186
2187 void
2188 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2189 {
2190 struct mem_attrs attrs;
2191
2192 attrs = *get_mem_attrs (mem);
2193 attrs.offset_known_p = true;
2194 attrs.offset = offset;
2195 set_mem_attrs (mem, &attrs);
2196 }
2197
2198 /* Clear the offset of MEM. */
2199
2200 void
2201 clear_mem_offset (rtx mem)
2202 {
2203 struct mem_attrs attrs;
2204
2205 attrs = *get_mem_attrs (mem);
2206 attrs.offset_known_p = false;
2207 set_mem_attrs (mem, &attrs);
2208 }
2209
2210 /* Set the size of MEM to SIZE. */
2211
2212 void
2213 set_mem_size (rtx mem, HOST_WIDE_INT size)
2214 {
2215 struct mem_attrs attrs;
2216
2217 attrs = *get_mem_attrs (mem);
2218 attrs.size_known_p = true;
2219 attrs.size = size;
2220 set_mem_attrs (mem, &attrs);
2221 }
2222
2223 /* Clear the size of MEM. */
2224
2225 void
2226 clear_mem_size (rtx mem)
2227 {
2228 struct mem_attrs attrs;
2229
2230 attrs = *get_mem_attrs (mem);
2231 attrs.size_known_p = false;
2232 set_mem_attrs (mem, &attrs);
2233 }
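/* Illustrative sketch (MEM hypothetical): the setters above adjust one
   attribute at a time, e.g.

       set_mem_align (mem, 32);
       set_mem_offset (mem, 4);
       set_mem_size (mem, 4);
       clear_mem_offset (mem);

   where the alignment is in bits and the offset and size are in bytes.
   Each call copies the current attributes, changes one field and stores
   the result back with set_mem_attrs.  */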
2234 \f
2235 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2236 and its address changed to ADDR. (VOIDmode means don't change the mode.
2237 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2238 returned memory location is required to be valid. INPLACE is true if any
2239 changes can be made directly to MEMREF or false if MEMREF must be treated
2240 as immutable.
2241
2242 The memory attributes are not changed. */
2243
2244 static rtx
2245 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2246 bool inplace)
2247 {
2248 addr_space_t as;
2249 rtx new_rtx;
2250
2251 gcc_assert (MEM_P (memref));
2252 as = MEM_ADDR_SPACE (memref);
2253 if (mode == VOIDmode)
2254 mode = GET_MODE (memref);
2255 if (addr == 0)
2256 addr = XEXP (memref, 0);
2257 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2258 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2259 return memref;
2260
2261 /* Don't validate the address for LRA. LRA can make the address valid
2262 by itself in the most efficient way. */
2263 if (validate && !lra_in_progress)
2264 {
2265 if (reload_in_progress || reload_completed)
2266 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2267 else
2268 addr = memory_address_addr_space (mode, addr, as);
2269 }
2270
2271 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2272 return memref;
2273
2274 if (inplace)
2275 {
2276 XEXP (memref, 0) = addr;
2277 return memref;
2278 }
2279
2280 new_rtx = gen_rtx_MEM (mode, addr);
2281 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2282 return new_rtx;
2283 }
2284
2285 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2286 way we are changing MEMREF, so we only preserve the alias set. */
2287
2288 rtx
2289 change_address (rtx memref, machine_mode mode, rtx addr)
2290 {
2291 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2292 machine_mode mmode = GET_MODE (new_rtx);
2293 struct mem_attrs attrs, *defattrs;
2294
2295 attrs = *get_mem_attrs (memref);
2296 defattrs = mode_mem_attrs[(int) mmode];
2297 attrs.expr = NULL_TREE;
2298 attrs.offset_known_p = false;
2299 attrs.size_known_p = defattrs->size_known_p;
2300 attrs.size = defattrs->size;
2301 attrs.align = defattrs->align;
2302
2303 /* If there are no changes, just return the original memory reference. */
2304 if (new_rtx == memref)
2305 {
2306 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2307 return new_rtx;
2308
2309 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2310 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2311 }
2312
2313 set_mem_attrs (new_rtx, &attrs);
2314 return new_rtx;
2315 }
2316
2317 /* Return a memory reference like MEMREF, but with its mode changed
2318 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2319 nonzero, the memory address is forced to be valid.
2320 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2321 and the caller is responsible for adjusting the MEMREF base register.
2322 If ADJUST_OBJECT is zero, the underlying object associated with the
2323 memory reference is left unchanged and the caller is responsible for
2324 dealing with it. Otherwise, if the new memory reference is outside
2325 the underlying object, even partially, then the object is dropped.
2326 SIZE, if nonzero, is the size of an access in cases where MODE
2327 has no inherent size. */
2328
2329 rtx
2330 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2331 int validate, int adjust_address, int adjust_object,
2332 HOST_WIDE_INT size)
2333 {
2334 rtx addr = XEXP (memref, 0);
2335 rtx new_rtx;
2336 scalar_int_mode address_mode;
2337 int pbits;
2338 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2339 unsigned HOST_WIDE_INT max_align;
2340 #ifdef POINTERS_EXTEND_UNSIGNED
2341 scalar_int_mode pointer_mode
2342 = targetm.addr_space.pointer_mode (attrs.addrspace);
2343 #endif
2344
2345 /* VOIDmode means no mode change for change_address_1. */
2346 if (mode == VOIDmode)
2347 mode = GET_MODE (memref);
2348
2349 /* Take the size of non-BLKmode accesses from the mode. */
2350 defattrs = mode_mem_attrs[(int) mode];
2351 if (defattrs->size_known_p)
2352 size = defattrs->size;
2353
2354 /* If there are no changes, just return the original memory reference. */
2355 if (mode == GET_MODE (memref) && !offset
2356 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2357 && (!validate || memory_address_addr_space_p (mode, addr,
2358 attrs.addrspace)))
2359 return memref;
2360
2361 /* ??? Prefer to create garbage instead of creating shared rtl.
2362 This may happen even if offset is nonzero -- consider
2363 (plus (plus reg reg) const_int) -- so do this always. */
2364 addr = copy_rtx (addr);
2365
2366 /* Convert a possibly large offset to a signed value within the
2367 range of the target address space. */
2368 address_mode = get_address_mode (memref);
2369 pbits = GET_MODE_BITSIZE (address_mode);
2370 if (HOST_BITS_PER_WIDE_INT > pbits)
2371 {
2372 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2373 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2374 >> shift);
2375 }
2376
2377 if (adjust_address)
2378 {
2379 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2380 object, we can merge it into the LO_SUM. */
2381 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2382 && offset >= 0
2383 && (unsigned HOST_WIDE_INT) offset
2384 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2385 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2386 plus_constant (address_mode,
2387 XEXP (addr, 1), offset));
2388 #ifdef POINTERS_EXTEND_UNSIGNED
2389 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2390 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2391 the fact that pointers are not allowed to overflow. */
2392 else if (POINTERS_EXTEND_UNSIGNED > 0
2393 && GET_CODE (addr) == ZERO_EXTEND
2394 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2395 && trunc_int_for_mode (offset, pointer_mode) == offset)
2396 addr = gen_rtx_ZERO_EXTEND (address_mode,
2397 plus_constant (pointer_mode,
2398 XEXP (addr, 0), offset));
2399 #endif
2400 else
2401 addr = plus_constant (address_mode, addr, offset);
2402 }
2403
2404 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2405
2406 /* If the address is a REG, change_address_1 rightfully returns memref,
2407 but this would destroy memref's MEM_ATTRS. */
2408 if (new_rtx == memref && offset != 0)
2409 new_rtx = copy_rtx (new_rtx);
2410
2411 /* Conservatively drop the object if we don't know where we start from. */
2412 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2413 {
2414 attrs.expr = NULL_TREE;
2415 attrs.alias = 0;
2416 }
2417
2418 /* Compute the new values of the memory attributes due to this adjustment.
2419 We add the offsets and update the alignment. */
2420 if (attrs.offset_known_p)
2421 {
2422 attrs.offset += offset;
2423
2424 /* Drop the object if the new left end is not within its bounds. */
2425 if (adjust_object && attrs.offset < 0)
2426 {
2427 attrs.expr = NULL_TREE;
2428 attrs.alias = 0;
2429 }
2430 }
2431
2432 /* Compute the new alignment by taking the MIN of the alignment and the
2433 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2434 is zero. */
2435 if (offset != 0)
2436 {
2437 max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
2438 attrs.align = MIN (attrs.align, max_align);
2439 }
2440
2441 if (size)
2442 {
2443 /* Drop the object if the new right end is not within its bounds. */
2444 if (adjust_object && (offset + size) > attrs.size)
2445 {
2446 attrs.expr = NULL_TREE;
2447 attrs.alias = 0;
2448 }
2449 attrs.size_known_p = true;
2450 attrs.size = size;
2451 }
2452 else if (attrs.size_known_p)
2453 {
2454 gcc_assert (!adjust_object);
2455 attrs.size -= offset;
2456 /* ??? The store_by_pieces machinery generates negative sizes,
2457 so don't assert for that here. */
2458 }
2459
2460 set_mem_attrs (new_rtx, &attrs);
2461
2462 return new_rtx;
2463 }
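/* Illustrative sketch: most callers do not use adjust_address_1 directly
   but go through the adjust_address / adjust_address_nv wrapper macros,
   e.g. to access the SImode word at byte offset 4 of a DImode MEM:

       rtx word1 = adjust_address (mem, SImode, 4);

   MEM here is hypothetical; the offset is in bytes and the attributes
   (offset, size, alignment) are updated as described above.  */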
2464
2465 /* Return a memory reference like MEMREF, but with its mode changed
2466 to MODE and its address changed to ADDR, which is assumed to be
2467 MEMREF offset by OFFSET bytes. If VALIDATE is
2468 nonzero, the memory address is forced to be valid. */
2469
2470 rtx
2471 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2472 HOST_WIDE_INT offset, int validate)
2473 {
2474 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2475 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2476 }
2477
2478 /* Return a memory reference like MEMREF, but whose address is changed by
2479 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2480 known to be in OFFSET (possibly 1). */
2481
2482 rtx
2483 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2484 {
2485 rtx new_rtx, addr = XEXP (memref, 0);
2486 machine_mode address_mode;
2487 struct mem_attrs attrs, *defattrs;
2488
2489 attrs = *get_mem_attrs (memref);
2490 address_mode = get_address_mode (memref);
2491 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2492
2493 /* At this point we don't know _why_ the address is invalid. It
2494 could have secondary memory references, multiplies or anything.
2495
2496 However, if we did go and rearrange things, we can wind up not
2497 being able to recognize the magic around pic_offset_table_rtx.
2498 This stuff is fragile, and is yet another example of why it is
2499 bad to expose PIC machinery too early. */
2500 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2501 attrs.addrspace)
2502 && GET_CODE (addr) == PLUS
2503 && XEXP (addr, 0) == pic_offset_table_rtx)
2504 {
2505 addr = force_reg (GET_MODE (addr), addr);
2506 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2507 }
2508
2509 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2510 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2511
2512 /* If there are no changes, just return the original memory reference. */
2513 if (new_rtx == memref)
2514 return new_rtx;
2515
2516 /* Update the alignment to reflect the offset. Reset the offset, which
2517 we don't know. */
2518 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2519 attrs.offset_known_p = false;
2520 attrs.size_known_p = defattrs->size_known_p;
2521 attrs.size = defattrs->size;
2522 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2523 set_mem_attrs (new_rtx, &attrs);
2524 return new_rtx;
2525 }
2526
2527 /* Return a memory reference like MEMREF, but with its address changed to
2528 ADDR. The caller is asserting that the actual piece of memory pointed
2529 to is the same, just the form of the address is being changed, such as
2530 by putting something into a register. INPLACE is true if any changes
2531 can be made directly to MEMREF or false if MEMREF must be treated as
2532 immutable. */
2533
2534 rtx
2535 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2536 {
2537 /* change_address_1 copies the memory attribute structure without change
2538 and that's exactly what we want here. */
2539 update_temp_slot_address (XEXP (memref, 0), addr);
2540 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2541 }
2542
2543 /* Likewise, but the reference is not required to be valid. */
2544
2545 rtx
2546 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2547 {
2548 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2549 }
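/* Illustrative sketch (MEM hypothetical): a typical use is to load the
   address into a register while keeping all memory attributes:

       rtx reg = force_reg (Pmode, XEXP (mem, 0));
       mem = replace_equiv_address (mem, reg);

   The INPLACE argument may be omitted, as the two-argument call in
   operand_subword earlier in this file shows.  */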
2550
2551 /* Return a memory reference like MEMREF, but with its mode widened to
2552 MODE and offset by OFFSET. This would be used by targets that e.g.
2553 cannot issue QImode memory operations and have to use SImode memory
2554 operations plus masking logic. */
2555
2556 rtx
2557 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2558 {
2559 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2560 struct mem_attrs attrs;
2561 unsigned int size = GET_MODE_SIZE (mode);
2562
2563 /* If there are no changes, just return the original memory reference. */
2564 if (new_rtx == memref)
2565 return new_rtx;
2566
2567 attrs = *get_mem_attrs (new_rtx);
2568
2569 /* If we don't know what offset we were at within the expression, then
2570 we can't know if we've overstepped the bounds. */
2571 if (! attrs.offset_known_p)
2572 attrs.expr = NULL_TREE;
2573
2574 while (attrs.expr)
2575 {
2576 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2577 {
2578 tree field = TREE_OPERAND (attrs.expr, 1);
2579 tree offset = component_ref_field_offset (attrs.expr);
2580
2581 if (! DECL_SIZE_UNIT (field))
2582 {
2583 attrs.expr = NULL_TREE;
2584 break;
2585 }
2586
2587 /* Is the field at least as large as the access? If so, ok,
2588 otherwise strip back to the containing structure. */
2589 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2590 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2591 && attrs.offset >= 0)
2592 break;
2593
2594 if (! tree_fits_uhwi_p (offset))
2595 {
2596 attrs.expr = NULL_TREE;
2597 break;
2598 }
2599
2600 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2601 attrs.offset += tree_to_uhwi (offset);
2602 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2603 / BITS_PER_UNIT);
2604 }
2605 /* Similarly for the decl. */
2606 else if (DECL_P (attrs.expr)
2607 && DECL_SIZE_UNIT (attrs.expr)
2608 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2609 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2610 && (! attrs.offset_known_p || attrs.offset >= 0))
2611 break;
2612 else
2613 {
2614 /* The widened memory access overflows the expression, which means
2615 that it could alias another expression. Zap it. */
2616 attrs.expr = NULL_TREE;
2617 break;
2618 }
2619 }
2620
2621 if (! attrs.expr)
2622 attrs.offset_known_p = false;
2623
2624 /* The widened memory may alias other stuff, so zap the alias set. */
2625 /* ??? Maybe use get_alias_set on any remaining expression. */
2626 attrs.alias = 0;
2627 attrs.size_known_p = true;
2628 attrs.size = size;
2629 set_mem_attrs (new_rtx, &attrs);
2630 return new_rtx;
2631 }
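/* Illustrative sketch (MEM hypothetical): a target that cannot issue
   QImode loads directly might widen a byte access to a full word and
   mask out the rest:

       rtx wide = widen_memory_access (mem, SImode, 0);

   The result has SImode, a zero (conflicts-with-everything) alias set
   and a size of GET_MODE_SIZE (SImode), as set up above.  */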
2632 \f
2633 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2634 static GTY(()) tree spill_slot_decl;
2635
2636 tree
2637 get_spill_slot_decl (bool force_build_p)
2638 {
2639 tree d = spill_slot_decl;
2640 rtx rd;
2641 struct mem_attrs attrs;
2642
2643 if (d || !force_build_p)
2644 return d;
2645
2646 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2647 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2648 DECL_ARTIFICIAL (d) = 1;
2649 DECL_IGNORED_P (d) = 1;
2650 TREE_USED (d) = 1;
2651 spill_slot_decl = d;
2652
2653 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2654 MEM_NOTRAP_P (rd) = 1;
2655 attrs = *mode_mem_attrs[(int) BLKmode];
2656 attrs.alias = new_alias_set ();
2657 attrs.expr = d;
2658 set_mem_attrs (rd, &attrs);
2659 SET_DECL_RTL (d, rd);
2660
2661 return d;
2662 }
2663
2664 /* Given MEM, a result from assign_stack_local, fill in the memory
2665 attributes as appropriate for a register allocator spill slot.
2666 These slots are not aliasable by other memory. We arrange for
2667 them all to use a single MEM_EXPR, so that the aliasing code can
2668 work properly in the case of shared spill slots. */
2669
2670 void
2671 set_mem_attrs_for_spill (rtx mem)
2672 {
2673 struct mem_attrs attrs;
2674 rtx addr;
2675
2676 attrs = *get_mem_attrs (mem);
2677 attrs.expr = get_spill_slot_decl (true);
2678 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2679 attrs.addrspace = ADDR_SPACE_GENERIC;
2680
2681 /* We expect the incoming memory to be of the form:
2682 (mem:MODE (plus (reg sfp) (const_int offset)))
2683 with perhaps the plus missing for offset = 0. */
2684 addr = XEXP (mem, 0);
2685 attrs.offset_known_p = true;
2686 attrs.offset = 0;
2687 if (GET_CODE (addr) == PLUS
2688 && CONST_INT_P (XEXP (addr, 1)))
2689 attrs.offset = INTVAL (XEXP (addr, 1));
2690
2691 set_mem_attrs (mem, &attrs);
2692 MEM_NOTRAP_P (mem) = 1;
2693 }
2694 \f
2695 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2696
2697 rtx_code_label *
2698 gen_label_rtx (void)
2699 {
2700 return as_a <rtx_code_label *> (
2701 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2702 NULL, label_num++, NULL));
2703 }
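/* Illustrative sketch: a fresh label is typically emitted into the insn
   stream and used as a branch target, e.g.

       rtx_code_label *done = gen_label_rtx ();
       ...
       emit_label (done);

   emit_label is defined elsewhere in this file; the surrounding control
   flow is hypothetical.  */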
2704 \f
2705 /* For procedure integration. */
2706
2707 /* Install new pointers to the first and last insns in the chain.
2708 Also, set cur_insn_uid to one higher than the last in use.
2709 Used for an inline-procedure after copying the insn chain. */
2710
2711 void
2712 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2713 {
2714 rtx_insn *insn;
2715
2716 set_first_insn (first);
2717 set_last_insn (last);
2718 cur_insn_uid = 0;
2719
2720 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2721 {
2722 int debug_count = 0;
2723
2724 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2725 cur_debug_insn_uid = 0;
2726
2727 for (insn = first; insn; insn = NEXT_INSN (insn))
2728 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2729 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2730 else
2731 {
2732 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2733 if (DEBUG_INSN_P (insn))
2734 debug_count++;
2735 }
2736
2737 if (debug_count)
2738 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2739 else
2740 cur_debug_insn_uid++;
2741 }
2742 else
2743 for (insn = first; insn; insn = NEXT_INSN (insn))
2744 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2745
2746 cur_insn_uid++;
2747 }
2748 \f
2749 /* Go through all the RTL insn bodies and copy any invalid shared
2750 structure. This routine should only be called once. */
2751
2752 static void
2753 unshare_all_rtl_1 (rtx_insn *insn)
2754 {
2755 /* Unshare just about everything else. */
2756 unshare_all_rtl_in_chain (insn);
2757
2758 /* Make sure the addresses of stack slots found outside the insn chain
2759 (such as, in DECL_RTL of a variable) are not shared
2760 with the insn chain.
2761
2762 This special care is necessary when the stack slot MEM does not
2763 actually appear in the insn chain. If it does appear, its address
2764 is unshared from all else at that point. */
2765 unsigned int i;
2766 rtx temp;
2767 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2768 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2769 }
2770
2771 /* Go through all the RTL insn bodies and copy any invalid shared
2772 structure, again. This is a fairly expensive thing to do so it
2773 should be done sparingly. */
2774
2775 void
2776 unshare_all_rtl_again (rtx_insn *insn)
2777 {
2778 rtx_insn *p;
2779 tree decl;
2780
2781 for (p = insn; p; p = NEXT_INSN (p))
2782 if (INSN_P (p))
2783 {
2784 reset_used_flags (PATTERN (p));
2785 reset_used_flags (REG_NOTES (p));
2786 if (CALL_P (p))
2787 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2788 }
2789
2790 /* Make sure that virtual stack slots are not shared. */
2791 set_used_decls (DECL_INITIAL (cfun->decl));
2792
2793 /* Make sure that virtual parameters are not shared. */
2794 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2795 set_used_flags (DECL_RTL (decl));
2796
2797 rtx temp;
2798 unsigned int i;
2799 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2800 reset_used_flags (temp);
2801
2802 unshare_all_rtl_1 (insn);
2803 }
2804
2805 unsigned int
2806 unshare_all_rtl (void)
2807 {
2808 unshare_all_rtl_1 (get_insns ());
2809
2810 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2811 {
2812 if (DECL_RTL_SET_P (decl))
2813 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2814 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2815 }
2816
2817 return 0;
2818 }
2819
2820
2821 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2822 Recursively does the same for subexpressions. */
2823
2824 static void
2825 verify_rtx_sharing (rtx orig, rtx insn)
2826 {
2827 rtx x = orig;
2828 int i;
2829 enum rtx_code code;
2830 const char *format_ptr;
2831
2832 if (x == 0)
2833 return;
2834
2835 code = GET_CODE (x);
2836
2837 /* These types may be freely shared. */
2838
2839 switch (code)
2840 {
2841 case REG:
2842 case DEBUG_EXPR:
2843 case VALUE:
2844 CASE_CONST_ANY:
2845 case SYMBOL_REF:
2846 case LABEL_REF:
2847 case CODE_LABEL:
2848 case PC:
2849 case CC0:
2850 case RETURN:
2851 case SIMPLE_RETURN:
2852 case SCRATCH:
2853 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
2854 return;
2855 case CLOBBER:
2856 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2857 clobbers or clobbers of hard registers that originated as pseudos.
2858 This is needed to allow safe register renaming. */
2859 if (REG_P (XEXP (x, 0))
2860 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2861 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2862 return;
2863 break;
2864
2865 case CONST:
2866 if (shared_const_p (orig))
2867 return;
2868 break;
2869
2870 case MEM:
2871 /* A MEM is allowed to be shared if its address is constant. */
2872 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2873 || reload_completed || reload_in_progress)
2874 return;
2875
2876 break;
2877
2878 default:
2879 break;
2880 }
2881
2882 /* This rtx may not be shared. If it has already been seen,
2883 report the invalid sharing. */
2884 if (flag_checking && RTX_FLAG (x, used))
2885 {
2886 error ("invalid rtl sharing found in the insn");
2887 debug_rtx (insn);
2888 error ("shared rtx");
2889 debug_rtx (x);
2890 internal_error ("internal consistency failure");
2891 }
2892 gcc_assert (!RTX_FLAG (x, used));
2893
2894 RTX_FLAG (x, used) = 1;
2895
2896 /* Now scan the subexpressions recursively. */
2897
2898 format_ptr = GET_RTX_FORMAT (code);
2899
2900 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2901 {
2902 switch (*format_ptr++)
2903 {
2904 case 'e':
2905 verify_rtx_sharing (XEXP (x, i), insn);
2906 break;
2907
2908 case 'E':
2909 if (XVEC (x, i) != NULL)
2910 {
2911 int j;
2912 int len = XVECLEN (x, i);
2913
2914 for (j = 0; j < len; j++)
2915 {
2916 /* We allow sharing of ASM_OPERANDS inside a single
2917 instruction. */
2918 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2919 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2920 == ASM_OPERANDS))
2921 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2922 else
2923 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2924 }
2925 }
2926 break;
2927 }
2928 }
2929 return;
2930 }
2931
2932 /* Reset used-flags for INSN. */
2933
2934 static void
2935 reset_insn_used_flags (rtx insn)
2936 {
2937 gcc_assert (INSN_P (insn));
2938 reset_used_flags (PATTERN (insn));
2939 reset_used_flags (REG_NOTES (insn));
2940 if (CALL_P (insn))
2941 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2942 }
2943
2944 /* Go through all the RTL insn bodies and clear all the USED bits. */
2945
2946 static void
2947 reset_all_used_flags (void)
2948 {
2949 rtx_insn *p;
2950
2951 for (p = get_insns (); p; p = NEXT_INSN (p))
2952 if (INSN_P (p))
2953 {
2954 rtx pat = PATTERN (p);
2955 if (GET_CODE (pat) != SEQUENCE)
2956 reset_insn_used_flags (p);
2957 else
2958 {
2959 gcc_assert (REG_NOTES (p) == NULL);
2960 for (int i = 0; i < XVECLEN (pat, 0); i++)
2961 {
2962 rtx insn = XVECEXP (pat, 0, i);
2963 if (INSN_P (insn))
2964 reset_insn_used_flags (insn);
2965 }
2966 }
2967 }
2968 }
2969
2970 /* Verify sharing in INSN. */
2971
2972 static void
2973 verify_insn_sharing (rtx insn)
2974 {
2975 gcc_assert (INSN_P (insn));
2976 verify_rtx_sharing (PATTERN (insn), insn);
2977 verify_rtx_sharing (REG_NOTES (insn), insn);
2978 if (CALL_P (insn))
2979 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2980 }
2981
2982 /* Go through all the RTL insn bodies and check that there is no unexpected
2983 sharing in between the subexpressions. */
2984
2985 DEBUG_FUNCTION void
2986 verify_rtl_sharing (void)
2987 {
2988 rtx_insn *p;
2989
2990 timevar_push (TV_VERIFY_RTL_SHARING);
2991
2992 reset_all_used_flags ();
2993
2994 for (p = get_insns (); p; p = NEXT_INSN (p))
2995 if (INSN_P (p))
2996 {
2997 rtx pat = PATTERN (p);
2998 if (GET_CODE (pat) != SEQUENCE)
2999 verify_insn_sharing (p);
3000 else
3001 for (int i = 0; i < XVECLEN (pat, 0); i++)
3002 {
3003 rtx insn = XVECEXP (pat, 0, i);
3004 if (INSN_P (insn))
3005 verify_insn_sharing (insn);
3006 }
3007 }
3008
3009 reset_all_used_flags ();
3010
3011 timevar_pop (TV_VERIFY_RTL_SHARING);
3012 }
3013
3014 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3015 Assumes the mark bits are cleared at entry. */
3016
3017 void
3018 unshare_all_rtl_in_chain (rtx_insn *insn)
3019 {
3020 for (; insn; insn = NEXT_INSN (insn))
3021 if (INSN_P (insn))
3022 {
3023 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3024 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3025 if (CALL_P (insn))
3026 CALL_INSN_FUNCTION_USAGE (insn)
3027 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3028 }
3029 }
3030
3031 /* Go through all virtual stack slots of a function and mark them as
3032 shared. We never replace the DECL_RTLs themselves with a copy,
3033 but expressions mentioned in a DECL_RTL cannot be shared with
3034 expressions in the instruction stream.
3035
3036 Note that reload may convert pseudo registers into memories in-place.
3037 Pseudo registers are always shared, but MEMs never are. Thus if we
3038 reset the used flags on MEMs in the instruction stream, we must set
3039 them again on MEMs that appear in DECL_RTLs. */
3040
3041 static void
3042 set_used_decls (tree blk)
3043 {
3044 tree t;
3045
3046 /* Mark decls. */
3047 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3048 if (DECL_RTL_SET_P (t))
3049 set_used_flags (DECL_RTL (t));
3050
3051 /* Now process sub-blocks. */
3052 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3053 set_used_decls (t);
3054 }
3055
3056 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3057 Recursively does the same for subexpressions. Uses
3058 copy_rtx_if_shared_1 to reduce stack space. */
3059
3060 rtx
3061 copy_rtx_if_shared (rtx orig)
3062 {
3063 copy_rtx_if_shared_1 (&orig);
3064 return orig;
3065 }
3066
3067 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3068 use. Recursively does the same for subexpressions. */
3069
3070 static void
3071 copy_rtx_if_shared_1 (rtx *orig1)
3072 {
3073 rtx x;
3074 int i;
3075 enum rtx_code code;
3076 rtx *last_ptr;
3077 const char *format_ptr;
3078 int copied = 0;
3079 int length;
3080
3081 /* Repeat is used to turn tail-recursion into iteration. */
3082 repeat:
3083 x = *orig1;
3084
3085 if (x == 0)
3086 return;
3087
3088 code = GET_CODE (x);
3089
3090 /* These types may be freely shared. */
3091
3092 switch (code)
3093 {
3094 case REG:
3095 case DEBUG_EXPR:
3096 case VALUE:
3097 CASE_CONST_ANY:
3098 case SYMBOL_REF:
3099 case LABEL_REF:
3100 case CODE_LABEL:
3101 case PC:
3102 case CC0:
3103 case RETURN:
3104 case SIMPLE_RETURN:
3105 case SCRATCH:
3106 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
3107 return;
3108 case CLOBBER:
3109 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3110 clobbers or clobbers of hard registers that originated as pseudos.
3111 This is needed to allow safe register renaming. */
3112 if (REG_P (XEXP (x, 0))
3113 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3114 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3115 return;
3116 break;
3117
3118 case CONST:
3119 if (shared_const_p (x))
3120 return;
3121 break;
3122
3123 case DEBUG_INSN:
3124 case INSN:
3125 case JUMP_INSN:
3126 case CALL_INSN:
3127 case NOTE:
3128 case BARRIER:
3129 /* The chain of insns is not being copied. */
3130 return;
3131
3132 default:
3133 break;
3134 }
3135
3136 /* This rtx may not be shared. If it has already been seen,
3137 replace it with a copy of itself. */
3138
3139 if (RTX_FLAG (x, used))
3140 {
3141 x = shallow_copy_rtx (x);
3142 copied = 1;
3143 }
3144 RTX_FLAG (x, used) = 1;
3145
3146 /* Now scan the subexpressions recursively.
3147 We can store any replaced subexpressions directly into X
3148 since we know X is not shared! Any vectors in X
3149 must be copied if X was copied. */
3150
3151 format_ptr = GET_RTX_FORMAT (code);
3152 length = GET_RTX_LENGTH (code);
3153 last_ptr = NULL;
3154
3155 for (i = 0; i < length; i++)
3156 {
3157 switch (*format_ptr++)
3158 {
3159 case 'e':
3160 if (last_ptr)
3161 copy_rtx_if_shared_1 (last_ptr);
3162 last_ptr = &XEXP (x, i);
3163 break;
3164
3165 case 'E':
3166 if (XVEC (x, i) != NULL)
3167 {
3168 int j;
3169 int len = XVECLEN (x, i);
3170
3171 /* Copy the vector iff I copied the rtx and the length
3172 is nonzero. */
3173 if (copied && len > 0)
3174 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3175
3176 /* Call recursively on all inside the vector. */
3177 for (j = 0; j < len; j++)
3178 {
3179 if (last_ptr)
3180 copy_rtx_if_shared_1 (last_ptr);
3181 last_ptr = &XVECEXP (x, i, j);
3182 }
3183 }
3184 break;
3185 }
3186 }
3187 *orig1 = x;
3188 if (last_ptr)
3189 {
3190 orig1 = last_ptr;
3191 goto repeat;
3192 }
3193 return;
3194 }
3195
3196 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3197
3198 static void
3199 mark_used_flags (rtx x, int flag)
3200 {
3201 int i, j;
3202 enum rtx_code code;
3203 const char *format_ptr;
3204 int length;
3205
3206 /* Repeat is used to turn tail-recursion into iteration. */
3207 repeat:
3208 if (x == 0)
3209 return;
3210
3211 code = GET_CODE (x);
3212
3213 /* These types may be freely shared so we needn't do any resetting
3214 for them. */
3215
3216 switch (code)
3217 {
3218 case REG:
3219 case DEBUG_EXPR:
3220 case VALUE:
3221 CASE_CONST_ANY:
3222 case SYMBOL_REF:
3223 case CODE_LABEL:
3224 case PC:
3225 case CC0:
3226 case RETURN:
3227 case SIMPLE_RETURN:
3228 return;
3229
3230 case DEBUG_INSN:
3231 case INSN:
3232 case JUMP_INSN:
3233 case CALL_INSN:
3234 case NOTE:
3235 case LABEL_REF:
3236 case BARRIER:
3237 /* The chain of insns is not being copied. */
3238 return;
3239
3240 default:
3241 break;
3242 }
3243
3244 RTX_FLAG (x, used) = flag;
3245
3246 format_ptr = GET_RTX_FORMAT (code);
3247 length = GET_RTX_LENGTH (code);
3248
3249 for (i = 0; i < length; i++)
3250 {
3251 switch (*format_ptr++)
3252 {
3253 case 'e':
3254 if (i == length-1)
3255 {
3256 x = XEXP (x, i);
3257 goto repeat;
3258 }
3259 mark_used_flags (XEXP (x, i), flag);
3260 break;
3261
3262 case 'E':
3263 for (j = 0; j < XVECLEN (x, i); j++)
3264 mark_used_flags (XVECEXP (x, i, j), flag);
3265 break;
3266 }
3267 }
3268 }
3269
3270 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3271 to look for shared sub-parts. */
3272
3273 void
3274 reset_used_flags (rtx x)
3275 {
3276 mark_used_flags (x, 0);
3277 }
3278
3279 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3280 to look for shared sub-parts. */
3281
3282 void
3283 set_used_flags (rtx x)
3284 {
3285 mark_used_flags (x, 1);
3286 }
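/* Illustrative sketch of the protocol used by unshare_all_rtl_again
   above: clear the used bits over everything of interest and then copy
   whatever is reached a second time, e.g. for a single expression X
   outside the insn chain:

       reset_used_flags (x);
       x = copy_rtx_if_shared (x);

   X here is hypothetical; any subexpression encountered twice during
   the second walk is replaced by a copy.  */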
3287 \f
3288 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3289 Return X or the rtx for the pseudo reg the value of X was copied into.
3290 OTHER must be valid as a SET_DEST. */
3291
3292 rtx
3293 make_safe_from (rtx x, rtx other)
3294 {
3295 while (1)
3296 switch (GET_CODE (other))
3297 {
3298 case SUBREG:
3299 other = SUBREG_REG (other);
3300 break;
3301 case STRICT_LOW_PART:
3302 case SIGN_EXTEND:
3303 case ZERO_EXTEND:
3304 other = XEXP (other, 0);
3305 break;
3306 default:
3307 goto done;
3308 }
3309 done:
3310 if ((MEM_P (other)
3311 && ! CONSTANT_P (x)
3312 && !REG_P (x)
3313 && GET_CODE (x) != SUBREG)
3314 || (REG_P (other)
3315 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3316 || reg_mentioned_p (other, x))))
3317 {
3318 rtx temp = gen_reg_rtx (GET_MODE (x));
3319 emit_move_insn (temp, x);
3320 return temp;
3321 }
3322 return x;
3323 }
3324 \f
3325 /* Emission of insns (adding them to the doubly-linked list). */
3326
3327 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3328
3329 rtx_insn *
3330 get_last_insn_anywhere (void)
3331 {
3332 struct sequence_stack *seq;
3333 for (seq = get_current_sequence (); seq; seq = seq->next)
3334 if (seq->last != 0)
3335 return seq->last;
3336 return 0;
3337 }
3338
3339 /* Return the first nonnote insn emitted in the current sequence or current
3340 function. This routine looks inside SEQUENCEs. */
3341
3342 rtx_insn *
3343 get_first_nonnote_insn (void)
3344 {
3345 rtx_insn *insn = get_insns ();
3346
3347 if (insn)
3348 {
3349 if (NOTE_P (insn))
3350 for (insn = next_insn (insn);
3351 insn && NOTE_P (insn);
3352 insn = next_insn (insn))
3353 continue;
3354 else
3355 {
3356 if (NONJUMP_INSN_P (insn)
3357 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3358 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3359 }
3360 }
3361
3362 return insn;
3363 }
3364
3365 /* Return the last nonnote insn emitted in the current sequence or current
3366 function. This routine looks inside SEQUENCEs. */
3367
3368 rtx_insn *
3369 get_last_nonnote_insn (void)
3370 {
3371 rtx_insn *insn = get_last_insn ();
3372
3373 if (insn)
3374 {
3375 if (NOTE_P (insn))
3376 for (insn = previous_insn (insn);
3377 insn && NOTE_P (insn);
3378 insn = previous_insn (insn))
3379 continue;
3380 else
3381 {
3382 if (NONJUMP_INSN_P (insn))
3383 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3384 insn = seq->insn (seq->len () - 1);
3385 }
3386 }
3387
3388 return insn;
3389 }
3390
3391 /* Return the number of actual (non-debug) insns emitted in this
3392 function. */
3393
3394 int
3395 get_max_insn_count (void)
3396 {
3397 int n = cur_insn_uid;
3398
3399 /* The table size must be stable across -g, to avoid codegen
3400 differences due to debug insns, and not be affected by
3401 -fmin-insn-uid, to avoid excessive table size and to simplify
3402 debugging of -fcompare-debug failures. */
3403 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3404 n -= cur_debug_insn_uid;
3405 else
3406 n -= MIN_NONDEBUG_INSN_UID;
3407
3408 return n;
3409 }
3410
3411 \f
3412 /* Return the next insn. If it is a SEQUENCE, return the first insn
3413 of the sequence. */
3414
3415 rtx_insn *
3416 next_insn (rtx_insn *insn)
3417 {
3418 if (insn)
3419 {
3420 insn = NEXT_INSN (insn);
3421 if (insn && NONJUMP_INSN_P (insn)
3422 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3423 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3424 }
3425
3426 return insn;
3427 }
3428
3429 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3430 of the sequence. */
3431
3432 rtx_insn *
3433 previous_insn (rtx_insn *insn)
3434 {
3435 if (insn)
3436 {
3437 insn = PREV_INSN (insn);
3438 if (insn && NONJUMP_INSN_P (insn))
3439 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3440 insn = seq->insn (seq->len () - 1);
3441 }
3442
3443 return insn;
3444 }
3445
3446 /* Return the next insn after INSN that is not a NOTE. This routine does not
3447 look inside SEQUENCEs. */
3448
3449 rtx_insn *
3450 next_nonnote_insn (rtx_insn *insn)
3451 {
3452 while (insn)
3453 {
3454 insn = NEXT_INSN (insn);
3455 if (insn == 0 || !NOTE_P (insn))
3456 break;
3457 }
3458
3459 return insn;
3460 }
3461
3462 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3463 routine does not look inside SEQUENCEs. */
3464
3465 rtx_insn *
3466 next_nondebug_insn (rtx_insn *insn)
3467 {
3468 while (insn)
3469 {
3470 insn = NEXT_INSN (insn);
3471 if (insn == 0 || !DEBUG_INSN_P (insn))
3472 break;
3473 }
3474
3475 return insn;
3476 }
3477
3478 /* Return the previous insn before INSN that is not a NOTE. This routine does
3479 not look inside SEQUENCEs. */
3480
3481 rtx_insn *
3482 prev_nonnote_insn (rtx_insn *insn)
3483 {
3484 while (insn)
3485 {
3486 insn = PREV_INSN (insn);
3487 if (insn == 0 || !NOTE_P (insn))
3488 break;
3489 }
3490
3491 return insn;
3492 }
3493
3494 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3495 This routine does not look inside SEQUENCEs. */
3496
3497 rtx_insn *
3498 prev_nondebug_insn (rtx_insn *insn)
3499 {
3500 while (insn)
3501 {
3502 insn = PREV_INSN (insn);
3503 if (insn == 0 || !DEBUG_INSN_P (insn))
3504 break;
3505 }
3506
3507 return insn;
3508 }
3509
3510 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3511 This routine does not look inside SEQUENCEs. */
3512
3513 rtx_insn *
3514 next_nonnote_nondebug_insn (rtx_insn *insn)
3515 {
3516 while (insn)
3517 {
3518 insn = NEXT_INSN (insn);
3519 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3520 break;
3521 }
3522
3523 return insn;
3524 }
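/* Illustrative sketch: the walkers above are typically used to scan a
   whole function while ignoring notes and debug insns, e.g.

       for (rtx_insn *insn = get_insns (); insn;
            insn = next_nonnote_nondebug_insn (insn))
         if (INSN_P (insn))
           ...

   get_insns () returns the head of the current insn chain.  */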
3525
3526 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3527 but stop the search before we enter another basic block. This
3528 routine does not look inside SEQUENCEs. */
3529
3530 rtx_insn *
3531 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3532 {
3533 while (insn)
3534 {
3535 insn = NEXT_INSN (insn);
3536 if (insn == 0)
3537 break;
3538 if (DEBUG_INSN_P (insn))
3539 continue;
3540 if (!NOTE_P (insn))
3541 break;
3542 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3543 return NULL;
3544 }
3545
3546 return insn;
3547 }
3548
3549 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3550 This routine does not look inside SEQUENCEs. */
3551
3552 rtx_insn *
3553 prev_nonnote_nondebug_insn (rtx_insn *insn)
3554 {
3555 while (insn)
3556 {
3557 insn = PREV_INSN (insn);
3558 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3559 break;
3560 }
3561
3562 return insn;
3563 }
3564
3565 /* Return the previous insn before INSN that is not a NOTE nor
3566 DEBUG_INSN, but stop the search before we enter another basic
3567 block. This routine does not look inside SEQUENCEs. */
3568
3569 rtx_insn *
3570 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3571 {
3572 while (insn)
3573 {
3574 insn = PREV_INSN (insn);
3575 if (insn == 0)
3576 break;
3577 if (DEBUG_INSN_P (insn))
3578 continue;
3579 if (!NOTE_P (insn))
3580 break;
3581 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3582 return NULL;
3583 }
3584
3585 return insn;
3586 }
3587
3588 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3589 or 0, if there is none. This routine does not look inside
3590 SEQUENCEs. */
3591
3592 rtx_insn *
3593 next_real_insn (rtx uncast_insn)
3594 {
3595 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3596
3597 while (insn)
3598 {
3599 insn = NEXT_INSN (insn);
3600 if (insn == 0 || INSN_P (insn))
3601 break;
3602 }
3603
3604 return insn;
3605 }
3606
3607 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3608 or 0, if there is none. This routine does not look inside
3609 SEQUENCEs. */
3610
3611 rtx_insn *
3612 prev_real_insn (rtx_insn *insn)
3613 {
3614 while (insn)
3615 {
3616 insn = PREV_INSN (insn);
3617 if (insn == 0 || INSN_P (insn))
3618 break;
3619 }
3620
3621 return insn;
3622 }
3623
3624 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3625 This routine does not look inside SEQUENCEs. */
3626
3627 rtx_call_insn *
3628 last_call_insn (void)
3629 {
3630 rtx_insn *insn;
3631
3632 for (insn = get_last_insn ();
3633 insn && !CALL_P (insn);
3634 insn = PREV_INSN (insn))
3635 ;
3636
3637 return safe_as_a <rtx_call_insn *> (insn);
3638 }
3639
3640 /* Return nonzero if INSN is an insn that really does something: a CALL_INSN,
3641 a JUMP_INSN, JUMP_TABLE_DATA or a nonjump INSN. After reload,
3642 standalone USE and CLOBBER insns are not considered active. */
3643
3644 int
3645 active_insn_p (const rtx_insn *insn)
3646 {
3647 return (CALL_P (insn) || JUMP_P (insn)
3648 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3649 || (NONJUMP_INSN_P (insn)
3650 && (! reload_completed
3651 || (GET_CODE (PATTERN (insn)) != USE
3652 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3653 }
3654
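/* Find the next insn after INSN that really does something (see
   active_insn_p above), or 0 if there is none.  This routine does not
   look inside SEQUENCEs.  */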
3655 rtx_insn *
3656 next_active_insn (rtx_insn *insn)
3657 {
3658 while (insn)
3659 {
3660 insn = NEXT_INSN (insn);
3661 if (insn == 0 || active_insn_p (insn))
3662 break;
3663 }
3664
3665 return insn;
3666 }
3667
3668 /* Find the last insn before INSN that really does something. This routine
3669 does not look inside SEQUENCEs. After reload this also skips over
3670 standalone USE and CLOBBER insns. */
3671
3672 rtx_insn *
3673 prev_active_insn (rtx_insn *insn)
3674 {
3675 while (insn)
3676 {
3677 insn = PREV_INSN (insn);
3678 if (insn == 0 || active_insn_p (insn))
3679 break;
3680 }
3681
3682 return insn;
3683 }
3684 \f
3685 /* Return the next insn that uses CC0 after INSN, which is assumed to
3686 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3687 applied to the result of this function should yield INSN).
3688
3689 Normally, this is simply the next insn. However, if a REG_CC_USER note
3690 is present, it contains the insn that uses CC0.
3691
3692 Return 0 if we can't find the insn. */
3693
3694 rtx_insn *
3695 next_cc0_user (rtx_insn *insn)
3696 {
3697 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3698
3699 if (note)
3700 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3701
3702 insn = next_nonnote_insn (insn);
3703 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3704 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3705
3706 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3707 return insn;
3708
3709 return 0;
3710 }
3711
3712 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3713 note, it is the previous insn. */
3714
3715 rtx_insn *
3716 prev_cc0_setter (rtx_insn *insn)
3717 {
3718 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3719
3720 if (note)
3721 return safe_as_a <rtx_insn *> (XEXP (note, 0));
3722
3723 insn = prev_nonnote_insn (insn);
3724 gcc_assert (sets_cc0_p (PATTERN (insn)));
3725
3726 return insn;
3727 }
3728
3729 /* Return true if X contains a RTX_AUTOINC class rtx that applies to REG. */
3730
3731 static int
3732 find_auto_inc (const_rtx x, const_rtx reg)
3733 {
3734 subrtx_iterator::array_type array;
3735 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3736 {
3737 const_rtx x = *iter;
3738 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3739 && rtx_equal_p (reg, XEXP (x, 0)))
3740 return true;
3741 }
3742 return false;
3743 }
3744
3745 /* Increment the label uses for all labels present in rtx X. */
3746
3747 static void
3748 mark_label_nuses (rtx x)
3749 {
3750 enum rtx_code code;
3751 int i, j;
3752 const char *fmt;
3753
3754 code = GET_CODE (x);
3755 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3756 LABEL_NUSES (label_ref_label (x))++;
3757
3758 fmt = GET_RTX_FORMAT (code);
3759 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3760 {
3761 if (fmt[i] == 'e')
3762 mark_label_nuses (XEXP (x, i));
3763 else if (fmt[i] == 'E')
3764 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3765 mark_label_nuses (XVECEXP (x, i, j));
3766 }
3767 }
3768
3769 \f
3770 /* Try splitting insns that can be split for better scheduling.
3771 PAT is the pattern which might split.
3772 TRIAL is the insn providing PAT.
3773 LAST is nonzero if we should return the last insn of the sequence produced.
3774
3775 If this routine succeeds in splitting, it returns the first or last
3776 replacement insn depending on the value of LAST. Otherwise, it
3777 returns TRIAL. If the insn to be returned can be split, it will be. */
3778
3779 rtx_insn *
3780 try_split (rtx pat, rtx_insn *trial, int last)
3781 {
3782 rtx_insn *before, *after;
3783 rtx note;
3784 rtx_insn *seq, *tem;
3785 profile_probability probability;
3786 rtx_insn *insn_last, *insn;
3787 int njumps = 0;
3788 rtx_insn *call_insn = NULL;
3789
3790 /* We're not good at redistributing frame information. */
3791 if (RTX_FRAME_RELATED_P (trial))
3792 return trial;
3793
3794 if (any_condjump_p (trial)
3795 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3796 split_branch_probability
3797 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3798 else
3799 split_branch_probability = profile_probability::uninitialized ();
3800
3801 probability = split_branch_probability;
3802
3803 seq = split_insns (pat, trial);
3804
3805 split_branch_probability = profile_probability::uninitialized ();
3806
3807 if (!seq)
3808 return trial;
3809
3810 /* Avoid infinite loop if any insn of the result matches
3811 the original pattern. */
3812 insn_last = seq;
3813 while (1)
3814 {
3815 if (INSN_P (insn_last)
3816 && rtx_equal_p (PATTERN (insn_last), pat))
3817 return trial;
3818 if (!NEXT_INSN (insn_last))
3819 break;
3820 insn_last = NEXT_INSN (insn_last);
3821 }
3822
3823 /* We will be adding the new sequence to the function. The splitters
3824 may have introduced invalid RTL sharing, so unshare the sequence now. */
3825 unshare_all_rtl_in_chain (seq);
3826
3827 /* Mark labels and copy flags. */
3828 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3829 {
3830 if (JUMP_P (insn))
3831 {
3832 if (JUMP_P (trial))
3833 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3834 mark_jump_label (PATTERN (insn), insn, 0);
3835 njumps++;
3836 if (probability.initialized_p ()
3837 && any_condjump_p (insn)
3838 && !find_reg_note (insn, REG_BR_PROB, 0))
3839 {
3840 /* We can preserve the REG_BR_PROB notes only if exactly
3841 one jump is created, otherwise the machine description
3842 is responsible for this step using
3843 the split_branch_probability variable. */
3844 gcc_assert (njumps == 1);
3845 add_reg_br_prob_note (insn, probability);
3846 }
3847 }
3848 }
3849
3850 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3851 in SEQ and copy any additional information across. */
3852 if (CALL_P (trial))
3853 {
3854 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3855 if (CALL_P (insn))
3856 {
3857 rtx_insn *next;
3858 rtx *p;
3859
3860 gcc_assert (call_insn == NULL_RTX);
3861 call_insn = insn;
3862
3863 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3864 target may have explicitly specified. */
3865 p = &CALL_INSN_FUNCTION_USAGE (insn);
3866 while (*p)
3867 p = &XEXP (*p, 1);
3868 *p = CALL_INSN_FUNCTION_USAGE (trial);
3869
3870 /* If the old call was a sibling call, the new one must
3871 be too. */
3872 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3873
3874 /* If the new call is the last instruction in the sequence,
3875 it will effectively replace the old call in-situ. Otherwise
3876 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3877 so that it comes immediately after the new call. */
3878 if (NEXT_INSN (insn))
3879 for (next = NEXT_INSN (trial);
3880 next && NOTE_P (next);
3881 next = NEXT_INSN (next))
3882 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3883 {
3884 remove_insn (next);
3885 add_insn_after (next, insn, NULL);
3886 break;
3887 }
3888 }
3889 }
3890
3891 /* Copy notes, particularly those related to the CFG. */
3892 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3893 {
3894 switch (REG_NOTE_KIND (note))
3895 {
3896 case REG_EH_REGION:
3897 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3898 break;
3899
3900 case REG_NORETURN:
3901 case REG_SETJMP:
3902 case REG_TM:
3903 case REG_CALL_NOCF_CHECK:
3904 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3905 {
3906 if (CALL_P (insn))
3907 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3908 }
3909 break;
3910
3911 case REG_NON_LOCAL_GOTO:
3912 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3913 {
3914 if (JUMP_P (insn))
3915 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3916 }
3917 break;
3918
3919 case REG_INC:
3920 if (!AUTO_INC_DEC)
3921 break;
3922
3923 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3924 {
3925 rtx reg = XEXP (note, 0);
3926 if (!FIND_REG_INC_NOTE (insn, reg)
3927 && find_auto_inc (PATTERN (insn), reg))
3928 add_reg_note (insn, REG_INC, reg);
3929 }
3930 break;
3931
3932 case REG_ARGS_SIZE:
3933 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3934 break;
3935
3936 case REG_CALL_DECL:
3937 gcc_assert (call_insn != NULL_RTX);
3938 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3939 break;
3940
3941 default:
3942 break;
3943 }
3944 }
3945
3946 /* If there are LABELs inside the split insns, increment their
3947 usage counts so we don't delete the labels. */
3948 if (INSN_P (trial))
3949 {
3950 insn = insn_last;
3951 while (insn != NULL_RTX)
3952 {
3953 /* JUMP_P insns have already been "marked" above. */
3954 if (NONJUMP_INSN_P (insn))
3955 mark_label_nuses (PATTERN (insn));
3956
3957 insn = PREV_INSN (insn);
3958 }
3959 }
3960
3961 before = PREV_INSN (trial);
3962 after = NEXT_INSN (trial);
3963
3964 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3965
3966 delete_insn (trial);
3967
3968 /* Recursively call try_split for each new insn created; by the
3969 time control returns here that insn will be fully split, so
3970 set LAST and continue from the insn after the one returned.
3971 We can't use next_active_insn here since AFTER may be a note.
3972 Ignore deleted insns, which can occur if not optimizing. */
3973 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3974 if (! tem->deleted () && INSN_P (tem))
3975 tem = try_split (PATTERN (tem), tem, 1);
3976
3977 /* Return either the first or the last insn, depending on which was
3978 requested. */
3979 return last
3980 ? (after ? PREV_INSN (after) : get_last_insn ())
3981 : NEXT_INSN (before);
3982 }
3983 \f
3984 /* Make and return an INSN rtx, initializing all its slots.
3985 Store PATTERN in the pattern slots. */
3986
3987 rtx_insn *
3988 make_insn_raw (rtx pattern)
3989 {
3990 rtx_insn *insn;
3991
3992 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3993
3994 INSN_UID (insn) = cur_insn_uid++;
3995 PATTERN (insn) = pattern;
3996 INSN_CODE (insn) = -1;
3997 REG_NOTES (insn) = NULL;
3998 INSN_LOCATION (insn) = curr_insn_location ();
3999 BLOCK_FOR_INSN (insn) = NULL;
4000
4001 #ifdef ENABLE_RTL_CHECKING
4002 if (insn
4003 && INSN_P (insn)
4004 && (returnjump_p (insn)
4005 || (GET_CODE (insn) == SET
4006 && SET_DEST (insn) == pc_rtx)))
4007 {
4008 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
4009 debug_rtx (insn);
4010 }
4011 #endif
4012
4013 return insn;
4014 }
4015
4016 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4017
4018 static rtx_insn *
4019 make_debug_insn_raw (rtx pattern)
4020 {
4021 rtx_debug_insn *insn;
4022
4023 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4024 INSN_UID (insn) = cur_debug_insn_uid++;
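  /* Debug insns normally draw UIDs from the range reserved below
     MIN_NONDEBUG_INSN_UID; once that range is exhausted, fall back
     to the regular UID counter.  */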
4025 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
4026 INSN_UID (insn) = cur_insn_uid++;
4027
4028 PATTERN (insn) = pattern;
4029 INSN_CODE (insn) = -1;
4030 REG_NOTES (insn) = NULL;
4031 INSN_LOCATION (insn) = curr_insn_location ();
4032 BLOCK_FOR_INSN (insn) = NULL;
4033
4034 return insn;
4035 }
4036
4037 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4038
4039 static rtx_insn *
4040 make_jump_insn_raw (rtx pattern)
4041 {
4042 rtx_jump_insn *insn;
4043
4044 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4045 INSN_UID (insn) = cur_insn_uid++;
4046
4047 PATTERN (insn) = pattern;
4048 INSN_CODE (insn) = -1;
4049 REG_NOTES (insn) = NULL;
4050 JUMP_LABEL (insn) = NULL;
4051 INSN_LOCATION (insn) = curr_insn_location ();
4052 BLOCK_FOR_INSN (insn) = NULL;
4053
4054 return insn;
4055 }
4056
4057 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4058
4059 static rtx_insn *
4060 make_call_insn_raw (rtx pattern)
4061 {
4062 rtx_call_insn *insn;
4063
4064 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4065 INSN_UID (insn) = cur_insn_uid++;
4066
4067 PATTERN (insn) = pattern;
4068 INSN_CODE (insn) = -1;
4069 REG_NOTES (insn) = NULL;
4070 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4071 INSN_LOCATION (insn) = curr_insn_location ();
4072 BLOCK_FOR_INSN (insn) = NULL;
4073
4074 return insn;
4075 }
4076
4077 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4078
4079 static rtx_note *
4080 make_note_raw (enum insn_note subtype)
4081 {
4082 /* Some notes are never created this way at all. These notes are
4083 only created by patching out insns. */
4084 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4085 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4086
4087 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4088 INSN_UID (note) = cur_insn_uid++;
4089 NOTE_KIND (note) = subtype;
4090 BLOCK_FOR_INSN (note) = NULL;
4091 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4092 return note;
4093 }
4094 \f
4095 /* Link INSN into the doubly-linked list between PREV and NEXT.
4096 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4097 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4098
4099 static inline void
4100 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4101 {
4102 SET_PREV_INSN (insn) = prev;
4103 SET_NEXT_INSN (insn) = next;
4104 if (prev != NULL)
4105 {
4106 SET_NEXT_INSN (prev) = insn;
4107 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4108 {
4109 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4110 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4111 }
4112 }
4113 if (next != NULL)
4114 {
4115 SET_PREV_INSN (next) = insn;
4116 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4117 {
4118 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4119 SET_PREV_INSN (sequence->insn (0)) = insn;
4120 }
4121 }
4122
4123 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4124 {
4125 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4126 SET_PREV_INSN (sequence->insn (0)) = prev;
4127 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4128 }
4129 }
4130
4131 /* Add INSN to the end of the doubly-linked list.
4132 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4133
4134 void
4135 add_insn (rtx_insn *insn)
4136 {
4137 rtx_insn *prev = get_last_insn ();
4138 link_insn_into_chain (insn, prev, NULL);
4139 if (get_insns () == NULL)
4140 set_first_insn (insn);
4141 set_last_insn (insn);
4142 }
4143
4144 /* Add INSN into the doubly-linked list after insn AFTER. */
4145
4146 static void
4147 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4148 {
4149 rtx_insn *next = NEXT_INSN (after);
4150
4151 gcc_assert (!optimize || !after->deleted ());
4152
4153 link_insn_into_chain (insn, after, next);
4154
4155 if (next == NULL)
4156 {
4157 struct sequence_stack *seq;
4158
4159 for (seq = get_current_sequence (); seq; seq = seq->next)
4160 if (after == seq->last)
4161 {
4162 seq->last = insn;
4163 break;
4164 }
4165 }
4166 }
4167
4168 /* Add INSN into the doubly-linked list before insn BEFORE. */
4169
4170 static void
4171 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4172 {
4173 rtx_insn *prev = PREV_INSN (before);
4174
4175 gcc_assert (!optimize || !before->deleted ());
4176
4177 link_insn_into_chain (insn, prev, before);
4178
4179 if (prev == NULL)
4180 {
4181 struct sequence_stack *seq;
4182
4183 for (seq = get_current_sequence (); seq; seq = seq->next)
4184 if (before == seq->first)
4185 {
4186 seq->first = insn;
4187 break;
4188 }
4189
4190 gcc_assert (seq);
4191 }
4192 }
4193
4194 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4195 If BB is NULL, an attempt is made to infer the BB from AFTER.
4196
4197 This and the next function should be the only functions called
4198 to insert an insn once delay slots have been filled since only
4199 they know how to update a SEQUENCE. */
4200
4201 void
4202 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4203 {
4204 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4205 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4206 add_insn_after_nobb (insn, after);
4207 if (!BARRIER_P (after)
4208 && !BARRIER_P (insn)
4209 && (bb = BLOCK_FOR_INSN (after)))
4210 {
4211 set_block_for_insn (insn, bb);
4212 if (INSN_P (insn))
4213 df_insn_rescan (insn);
4214 /* Should not happen as first in the BB is always
4215 either NOTE or LABEL. */
4216 if (BB_END (bb) == after
4217 /* Avoid clobbering of structure when creating new BB. */
4218 && !BARRIER_P (insn)
4219 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4220 BB_END (bb) = insn;
4221 }
4222 }
4223
4224 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4225 If BB is NULL, an attempt is made to infer the BB from BEFORE.
4226
4227 This and the previous function should be the only functions called
4228 to insert an insn once delay slots have been filled since only
4229 they know how to update a SEQUENCE. */
4230
4231 void
4232 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4233 {
4234 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4235 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4236 add_insn_before_nobb (insn, before);
4237
4238 if (!bb
4239 && !BARRIER_P (before)
4240 && !BARRIER_P (insn))
4241 bb = BLOCK_FOR_INSN (before);
4242
4243 if (bb)
4244 {
4245 set_block_for_insn (insn, bb);
4246 if (INSN_P (insn))
4247 df_insn_rescan (insn);
4248 /* Should not happen as first in the BB is always either NOTE or
4249 LABEL. */
4250 gcc_assert (BB_HEAD (bb) != insn
4251 /* Avoid clobbering of structure when creating new BB. */
4252 || BARRIER_P (insn)
4253 || NOTE_INSN_BASIC_BLOCK_P (insn));
4254 }
4255 }
4256
4257 /* Replace INSN with a NOTE_INSN_DELETED note. */
4258
4259 void
4260 set_insn_deleted (rtx insn)
4261 {
4262 if (INSN_P (insn))
4263 df_insn_delete (as_a <rtx_insn *> (insn));
4264 PUT_CODE (insn, NOTE);
4265 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4266 }
4267
4268
4269 /* Unlink INSN from the insn chain.
4270
4271 This function knows how to handle sequences.
4272
4273 This function does not invalidate data flow information associated with
4274 INSN (i.e. does not call df_insn_delete). That makes this function
4275 usable for merely disconnecting an insn from the chain so that it
4276 can be re-emitted elsewhere later.
4277
4278 To later insert INSN elsewhere in the insn chain via add_insn and
4279 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4280 the caller. Nullifying them here breaks many insn chain walks.
4281
4282 To really delete an insn and related DF information, use delete_insn. */
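
/* Illustrative sketch (INSN and AFTER are placeholder names): to move an
   insn elsewhere without discarding its DF information, a caller might do

       remove_insn (insn);
       SET_PREV_INSN (insn) = NULL;
       SET_NEXT_INSN (insn) = NULL;
       add_insn_after (insn, after, NULL);  */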
4283
4284 void
4285 remove_insn (rtx uncast_insn)
4286 {
4287 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4288 rtx_insn *next = NEXT_INSN (insn);
4289 rtx_insn *prev = PREV_INSN (insn);
4290 basic_block bb;
4291
4292 if (prev)
4293 {
4294 SET_NEXT_INSN (prev) = next;
4295 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4296 {
4297 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4298 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4299 }
4300 }
4301 else
4302 {
4303 struct sequence_stack *seq;
4304
4305 for (seq = get_current_sequence (); seq; seq = seq->next)
4306 if (insn == seq->first)
4307 {
4308 seq->first = next;
4309 break;
4310 }
4311
4312 gcc_assert (seq);
4313 }
4314
4315 if (next)
4316 {
4317 SET_PREV_INSN (next) = prev;
4318 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4319 {
4320 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4321 SET_PREV_INSN (sequence->insn (0)) = prev;
4322 }
4323 }
4324 else
4325 {
4326 struct sequence_stack *seq;
4327
4328 for (seq = get_current_sequence (); seq; seq = seq->next)
4329 if (insn == seq->last)
4330 {
4331 seq->last = prev;
4332 break;
4333 }
4334
4335 gcc_assert (seq);
4336 }
4337
4338 /* Fix up basic block boundaries, if necessary. */
4339 if (!BARRIER_P (insn)
4340 && (bb = BLOCK_FOR_INSN (insn)))
4341 {
4342 if (BB_HEAD (bb) == insn)
4343 {
4344 /* Never ever delete the basic block note without deleting the whole
4345 basic block. */
4346 gcc_assert (!NOTE_P (insn));
4347 BB_HEAD (bb) = next;
4348 }
4349 if (BB_END (bb) == insn)
4350 BB_END (bb) = prev;
4351 }
4352 }
4353
4354 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4355
4356 void
4357 add_function_usage_to (rtx call_insn, rtx call_fusage)
4358 {
4359 gcc_assert (call_insn && CALL_P (call_insn));
4360
4361 /* Put the register usage information on the CALL. If there is already
4362 some usage information, put ours at the end. */
4363 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4364 {
4365 rtx link;
4366
4367 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4368 link = XEXP (link, 1))
4369 ;
4370
4371 XEXP (link, 1) = call_fusage;
4372 }
4373 else
4374 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4375 }
4376
4377 /* Delete all insns made since FROM.
4378 FROM becomes the new last instruction. */
4379
4380 void
4381 delete_insns_since (rtx_insn *from)
4382 {
4383 if (from == 0)
4384 set_first_insn (0);
4385 else
4386 SET_NEXT_INSN (from) = 0;
4387 set_last_insn (from);
4388 }
4389
4390 /* This function is deprecated, please use sequences instead.
4391
4392 Move a consecutive bunch of insns to a different place in the chain.
4393 The insns to be moved are those between FROM and TO.
4394 They are moved to a new position after the insn AFTER.
4395 AFTER must not be FROM or TO or any insn in between.
4396
4397 This function does not know about SEQUENCEs and hence should not be
4398 called after delay-slot filling has been done. */
4399
4400 void
4401 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4402 {
4403 if (flag_checking)
4404 {
4405 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4406 gcc_assert (after != x);
4407 gcc_assert (after != to);
4408 }
4409
4410 /* Splice this bunch out of where it is now. */
4411 if (PREV_INSN (from))
4412 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4413 if (NEXT_INSN (to))
4414 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4415 if (get_last_insn () == to)
4416 set_last_insn (PREV_INSN (from));
4417 if (get_insns () == from)
4418 set_first_insn (NEXT_INSN (to));
4419
4420 /* Make the new neighbors point to it and it to them. */
4421 if (NEXT_INSN (after))
4422 SET_PREV_INSN (NEXT_INSN (after)) = to;
4423
4424 SET_NEXT_INSN (to) = NEXT_INSN (after);
4425 SET_PREV_INSN (from) = after;
4426 SET_NEXT_INSN (after) = from;
4427 if (after == get_last_insn ())
4428 set_last_insn (to);
4429 }
4430
4431 /* Same as function above, but take care to update BB boundaries. */
4432 void
4433 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4434 {
4435 rtx_insn *prev = PREV_INSN (from);
4436 basic_block bb, bb2;
4437
4438 reorder_insns_nobb (from, to, after);
4439
4440 if (!BARRIER_P (after)
4441 && (bb = BLOCK_FOR_INSN (after)))
4442 {
4443 rtx_insn *x;
4444 df_set_bb_dirty (bb);
4445
4446 if (!BARRIER_P (from)
4447 && (bb2 = BLOCK_FOR_INSN (from)))
4448 {
4449 if (BB_END (bb2) == to)
4450 BB_END (bb2) = prev;
4451 df_set_bb_dirty (bb2);
4452 }
4453
4454 if (BB_END (bb) == after)
4455 BB_END (bb) = to;
4456
4457 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4458 if (!BARRIER_P (x))
4459 df_insn_change_bb (x, bb);
4460 }
4461 }
4462
4463 \f
4464 /* Emit insn(s) of given code and pattern
4465 at a specified place within the doubly-linked list.
4466
4467 All of the emit_foo global entry points accept an object
4468 X which is either an insn list or a PATTERN of a single
4469 instruction.
4470
4471 There are thus a few canonical ways to generate code and
4472 emit it at a specific place in the instruction stream. For
4473 example, consider the instruction named SPOT and the fact that
4474 we would like to emit some instructions before SPOT. We might
4475 do it like this:
4476
4477 start_sequence ();
4478 ... emit the new instructions ...
4479 insns_head = get_insns ();
4480 end_sequence ();
4481
4482 emit_insn_before (insns_head, SPOT);
4483
4484 It used to be common to generate SEQUENCE rtl instead, but that
4485 is a relic of the past which no longer occurs. The reason is that
4486 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4487 generated would almost certainly die right after it was created. */
4488
4489 static rtx_insn *
4490 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4491 rtx_insn *(*make_raw) (rtx))
4492 {
4493 rtx_insn *insn;
4494
4495 gcc_assert (before);
4496
4497 if (x == NULL_RTX)
4498 return safe_as_a <rtx_insn *> (last);
4499
4500 switch (GET_CODE (x))
4501 {
4502 case DEBUG_INSN:
4503 case INSN:
4504 case JUMP_INSN:
4505 case CALL_INSN:
4506 case CODE_LABEL:
4507 case BARRIER:
4508 case NOTE:
4509 insn = as_a <rtx_insn *> (x);
4510 while (insn)
4511 {
4512 rtx_insn *next = NEXT_INSN (insn);
4513 add_insn_before (insn, before, bb);
4514 last = insn;
4515 insn = next;
4516 }
4517 break;
4518
4519 #ifdef ENABLE_RTL_CHECKING
4520 case SEQUENCE:
4521 gcc_unreachable ();
4522 break;
4523 #endif
4524
4525 default:
4526 last = (*make_raw) (x);
4527 add_insn_before (last, before, bb);
4528 break;
4529 }
4530
4531 return safe_as_a <rtx_insn *> (last);
4532 }
4533
4534 /* Make X be output before the instruction BEFORE. */
4535
4536 rtx_insn *
4537 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4538 {
4539 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4540 }
4541
4542 /* Make an instruction with body X and code JUMP_INSN
4543 and output it before the instruction BEFORE. */
4544
4545 rtx_jump_insn *
4546 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4547 {
4548 return as_a <rtx_jump_insn *> (
4549 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4550 make_jump_insn_raw));
4551 }
4552
4553 /* Make an instruction with body X and code CALL_INSN
4554 and output it before the instruction BEFORE. */
4555
4556 rtx_insn *
4557 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4558 {
4559 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4560 make_call_insn_raw);
4561 }
4562
4563 /* Make an instruction with body X and code DEBUG_INSN
4564 and output it before the instruction BEFORE. */
4565
4566 rtx_insn *
4567 emit_debug_insn_before_noloc (rtx x, rtx before)
4568 {
4569 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4570 make_debug_insn_raw);
4571 }
4572
4573 /* Make an insn of code BARRIER
4574 and output it before the insn BEFORE. */
4575
4576 rtx_barrier *
4577 emit_barrier_before (rtx before)
4578 {
4579 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4580
4581 INSN_UID (insn) = cur_insn_uid++;
4582
4583 add_insn_before (insn, before, NULL);
4584 return insn;
4585 }
4586
4587 /* Emit the label LABEL before the insn BEFORE. */
4588
4589 rtx_code_label *
4590 emit_label_before (rtx label, rtx_insn *before)
4591 {
4592 gcc_checking_assert (INSN_UID (label) == 0);
4593 INSN_UID (label) = cur_insn_uid++;
4594 add_insn_before (label, before, NULL);
4595 return as_a <rtx_code_label *> (label);
4596 }
4597 \f
4598 /* Helper for emit_insn_after, handles lists of instructions
4599 efficiently. */
4600
4601 static rtx_insn *
4602 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4603 {
4604 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4605 rtx_insn *last;
4606 rtx_insn *after_after;
4607 if (!bb && !BARRIER_P (after))
4608 bb = BLOCK_FOR_INSN (after);
4609
4610 if (bb)
4611 {
4612 df_set_bb_dirty (bb);
4613 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4614 if (!BARRIER_P (last))
4615 {
4616 set_block_for_insn (last, bb);
4617 df_insn_rescan (last);
4618 }
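	  /* The loop above stops before the final insn in the list;
	     give that last insn the same treatment here.  */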
4619 if (!BARRIER_P (last))
4620 {
4621 set_block_for_insn (last, bb);
4622 df_insn_rescan (last);
4623 }
4624 if (BB_END (bb) == after)
4625 BB_END (bb) = last;
4626 }
4627 else
4628 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4629 continue;
4630
4631 after_after = NEXT_INSN (after);
4632
4633 SET_NEXT_INSN (after) = first;
4634 SET_PREV_INSN (first) = after;
4635 SET_NEXT_INSN (last) = after_after;
4636 if (after_after)
4637 SET_PREV_INSN (after_after) = last;
4638
4639 if (after == get_last_insn ())
4640 set_last_insn (last);
4641
4642 return last;
4643 }
4644
4645 static rtx_insn *
4646 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4647 rtx_insn *(*make_raw)(rtx))
4648 {
4649 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4650 rtx_insn *last = after;
4651
4652 gcc_assert (after);
4653
4654 if (x == NULL_RTX)
4655 return last;
4656
4657 switch (GET_CODE (x))
4658 {
4659 case DEBUG_INSN:
4660 case INSN:
4661 case JUMP_INSN:
4662 case CALL_INSN:
4663 case CODE_LABEL:
4664 case BARRIER:
4665 case NOTE:
4666 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4667 break;
4668
4669 #ifdef ENABLE_RTL_CHECKING
4670 case SEQUENCE:
4671 gcc_unreachable ();
4672 break;
4673 #endif
4674
4675 default:
4676 last = (*make_raw) (x);
4677 add_insn_after (last, after, bb);
4678 break;
4679 }
4680
4681 return last;
4682 }
4683
4684 /* Make X be output after the insn AFTER and set the BB of insn. If
4685 BB is NULL, an attempt is made to infer the BB from AFTER. */
4686
4687 rtx_insn *
4688 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4689 {
4690 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4691 }
4692
4693
4694 /* Make an insn of code JUMP_INSN with body X
4695 and output it after the insn AFTER. */
4696
4697 rtx_jump_insn *
4698 emit_jump_insn_after_noloc (rtx x, rtx after)
4699 {
4700 return as_a <rtx_jump_insn *> (
4701 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4702 }
4703
4704 /* Make an instruction with body X and code CALL_INSN
4705 and output it after the instruction AFTER. */
4706
4707 rtx_insn *
4708 emit_call_insn_after_noloc (rtx x, rtx after)
4709 {
4710 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4711 }
4712
4713 /* Make an instruction with body X and code DEBUG_INSN
4714 and output it after the instruction AFTER. */
4715
4716 rtx_insn *
4717 emit_debug_insn_after_noloc (rtx x, rtx after)
4718 {
4719 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4720 }
4721
4722 /* Make an insn of code BARRIER
4723 and output it after the insn AFTER. */
4724
4725 rtx_barrier *
4726 emit_barrier_after (rtx after)
4727 {
4728 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4729
4730 INSN_UID (insn) = cur_insn_uid++;
4731
4732 add_insn_after (insn, after, NULL);
4733 return insn;
4734 }
4735
4736 /* Emit the label LABEL after the insn AFTER. */
4737
4738 rtx_insn *
4739 emit_label_after (rtx label, rtx_insn *after)
4740 {
4741 gcc_checking_assert (INSN_UID (label) == 0);
4742 INSN_UID (label) = cur_insn_uid++;
4743 add_insn_after (label, after, NULL);
4744 return as_a <rtx_insn *> (label);
4745 }
4746 \f
4747 /* Notes require a bit of special handling: Some notes need to have their
4748 BLOCK_FOR_INSN set, others should never have it set, and some should
4749 have it set or clear depending on the context. */
4750
4751 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4752 that never set BLOCK_FOR_INSN on the new note. ON_BB_BOUNDARY_P is true
4753 if the caller is asked to emit a note before BB_HEAD or after BB_END. */
4754
4755 static bool
4756 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4757 {
4758 switch (subtype)
4759 {
4760 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4761 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4762 return true;
4763
4764 /* Notes for var tracking and EH region markers can appear between or
4765 inside basic blocks. If the caller is emitting on the basic block
4766 boundary, do not set BLOCK_FOR_INSN on the new note. */
4767 case NOTE_INSN_VAR_LOCATION:
4768 case NOTE_INSN_CALL_ARG_LOCATION:
4769 case NOTE_INSN_EH_REGION_BEG:
4770 case NOTE_INSN_EH_REGION_END:
4771 return on_bb_boundary_p;
4772
4773 /* Otherwise, BLOCK_FOR_INSN must be set. */
4774 default:
4775 return false;
4776 }
4777 }
4778
4779 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4780
4781 rtx_note *
4782 emit_note_after (enum insn_note subtype, rtx_insn *after)
4783 {
4784 rtx_note *note = make_note_raw (subtype);
4785 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4786 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4787
4788 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4789 add_insn_after_nobb (note, after);
4790 else
4791 add_insn_after (note, after, bb);
4792 return note;
4793 }
4794
4795 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4796
4797 rtx_note *
4798 emit_note_before (enum insn_note subtype, rtx_insn *before)
4799 {
4800 rtx_note *note = make_note_raw (subtype);
4801 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4802 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4803
4804 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4805 add_insn_before_nobb (note, before);
4806 else
4807 add_insn_before (note, before, bb);
4808 return note;
4809 }
4810 \f
4811 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4812 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4813
4814 static rtx_insn *
4815 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4816 rtx_insn *(*make_raw) (rtx))
4817 {
4818 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4819 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4820
4821 if (pattern == NULL_RTX || !loc)
4822 return last;
4823
4824 after = NEXT_INSN (after);
4825 while (1)
4826 {
4827 if (active_insn_p (after)
4828 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4829 && !INSN_LOCATION (after))
4830 INSN_LOCATION (after) = loc;
4831 if (after == last)
4832 break;
4833 after = NEXT_INSN (after);
4834 }
4835 return last;
4836 }
4837
4838 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4839 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4840 any DEBUG_INSNs. */
4841
4842 static rtx_insn *
4843 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4844 rtx_insn *(*make_raw) (rtx))
4845 {
4846 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4847 rtx_insn *prev = after;
4848
4849 if (skip_debug_insns)
4850 while (DEBUG_INSN_P (prev))
4851 prev = PREV_INSN (prev);
4852
4853 if (INSN_P (prev))
4854 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4855 make_raw);
4856 else
4857 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4858 }
4859
4860 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4861 rtx_insn *
4862 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4863 {
4864 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4865 }
4866
4867 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4868 rtx_insn *
4869 emit_insn_after (rtx pattern, rtx after)
4870 {
4871 return emit_pattern_after (pattern, after, true, make_insn_raw);
4872 }
4873
4874 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4875 rtx_jump_insn *
4876 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4877 {
4878 return as_a <rtx_jump_insn *> (
4879 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4880 }
4881
4882 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4883 rtx_jump_insn *
4884 emit_jump_insn_after (rtx pattern, rtx after)
4885 {
4886 return as_a <rtx_jump_insn *> (
4887 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4888 }
4889
4890 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4891 rtx_insn *
4892 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4893 {
4894 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4895 }
4896
4897 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4898 rtx_insn *
4899 emit_call_insn_after (rtx pattern, rtx after)
4900 {
4901 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4902 }
4903
4904 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4905 rtx_insn *
4906 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4907 {
4908 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4909 }
4910
4911 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4912 rtx_insn *
4913 emit_debug_insn_after (rtx pattern, rtx after)
4914 {
4915 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4916 }
4917
4918 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4919 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4920 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4921 CALL_INSN, etc. */
4922
4923 static rtx_insn *
4924 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4925 rtx_insn *(*make_raw) (rtx))
4926 {
4927 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4928 rtx_insn *first = PREV_INSN (before);
4929 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4930 insnp ? before : NULL_RTX,
4931 NULL, make_raw);
4932
4933 if (pattern == NULL_RTX || !loc)
4934 return last;
4935
4936 if (!first)
4937 first = get_insns ();
4938 else
4939 first = NEXT_INSN (first);
4940 while (1)
4941 {
4942 if (active_insn_p (first)
4943 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4944 && !INSN_LOCATION (first))
4945 INSN_LOCATION (first) = loc;
4946 if (first == last)
4947 break;
4948 first = NEXT_INSN (first);
4949 }
4950 return last;
4951 }
4952
4953 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4954 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4955 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4956 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4957
4958 static rtx_insn *
4959 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4960 bool insnp, rtx_insn *(*make_raw) (rtx))
4961 {
4962 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4963 rtx_insn *next = before;
4964
4965 if (skip_debug_insns)
4966 while (DEBUG_INSN_P (next))
4967 next = PREV_INSN (next);
4968
4969 if (INSN_P (next))
4970 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4971 insnp, make_raw);
4972 else
4973 return emit_pattern_before_noloc (pattern, before,
4974 insnp ? before : NULL_RTX,
4975 NULL, make_raw);
4976 }
4977
4978 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4979 rtx_insn *
4980 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4981 {
4982 return emit_pattern_before_setloc (pattern, before, loc, true,
4983 make_insn_raw);
4984 }
4985
4986 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4987 rtx_insn *
4988 emit_insn_before (rtx pattern, rtx before)
4989 {
4990 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4991 }
4992
4993 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4994 rtx_jump_insn *
4995 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4996 {
4997 return as_a <rtx_jump_insn *> (
4998 emit_pattern_before_setloc (pattern, before, loc, false,
4999 make_jump_insn_raw));
5000 }
5001
5002 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
5003 rtx_jump_insn *
5004 emit_jump_insn_before (rtx pattern, rtx before)
5005 {
5006 return as_a <rtx_jump_insn *> (
5007 emit_pattern_before (pattern, before, true, false,
5008 make_jump_insn_raw));
5009 }
5010
5011 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5012 rtx_insn *
5013 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5014 {
5015 return emit_pattern_before_setloc (pattern, before, loc, false,
5016 make_call_insn_raw);
5017 }
5018
5019 /* Like emit_call_insn_before_noloc,
5020 but set INSN_LOCATION according to BEFORE. */
5021 rtx_insn *
5022 emit_call_insn_before (rtx pattern, rtx_insn *before)
5023 {
5024 return emit_pattern_before (pattern, before, true, false,
5025 make_call_insn_raw);
5026 }
5027
5028 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5029 rtx_insn *
5030 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
5031 {
5032 return emit_pattern_before_setloc (pattern, before, loc, false,
5033 make_debug_insn_raw);
5034 }
5035
5036 /* Like emit_debug_insn_before_noloc,
5037 but set INSN_LOCATION according to BEFORE. */
5038 rtx_insn *
5039 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5040 {
5041 return emit_pattern_before (pattern, before, false, false,
5042 make_debug_insn_raw);
5043 }
5044 \f
5045 /* Take X and emit it at the end of the doubly-linked
5046 INSN list.
5047
5048 Returns the last insn emitted. */
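
/* Illustrative sketch (DEST and SRC are placeholder register rtxes):

       rtx_insn *insn = emit_insn (gen_rtx_SET (dest, src));

   emits a single SET at the end of the current chain and returns it.  */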
5049
5050 rtx_insn *
5051 emit_insn (rtx x)
5052 {
5053 rtx_insn *last = get_last_insn ();
5054 rtx_insn *insn;
5055
5056 if (x == NULL_RTX)
5057 return last;
5058
5059 switch (GET_CODE (x))
5060 {
5061 case DEBUG_INSN:
5062 case INSN:
5063 case JUMP_INSN:
5064 case CALL_INSN:
5065 case CODE_LABEL:
5066 case BARRIER:
5067 case NOTE:
5068 insn = as_a <rtx_insn *> (x);
5069 while (insn)
5070 {
5071 rtx_insn *next = NEXT_INSN (insn);
5072 add_insn (insn);
5073 last = insn;
5074 insn = next;
5075 }
5076 break;
5077
5078 #ifdef ENABLE_RTL_CHECKING
5079 case JUMP_TABLE_DATA:
5080 case SEQUENCE:
5081 gcc_unreachable ();
5082 break;
5083 #endif
5084
5085 default:
5086 last = make_insn_raw (x);
5087 add_insn (last);
5088 break;
5089 }
5090
5091 return last;
5092 }
5093
5094 /* Make an insn of code DEBUG_INSN with pattern X
5095 and add it to the end of the doubly-linked list. */
5096
5097 rtx_insn *
5098 emit_debug_insn (rtx x)
5099 {
5100 rtx_insn *last = get_last_insn ();
5101 rtx_insn *insn;
5102
5103 if (x == NULL_RTX)
5104 return last;
5105
5106 switch (GET_CODE (x))
5107 {
5108 case DEBUG_INSN:
5109 case INSN:
5110 case JUMP_INSN:
5111 case CALL_INSN:
5112 case CODE_LABEL:
5113 case BARRIER:
5114 case NOTE:
5115 insn = as_a <rtx_insn *> (x);
5116 while (insn)
5117 {
5118 rtx_insn *next = NEXT_INSN (insn);
5119 add_insn (insn);
5120 last = insn;
5121 insn = next;
5122 }
5123 break;
5124
5125 #ifdef ENABLE_RTL_CHECKING
5126 case JUMP_TABLE_DATA:
5127 case SEQUENCE:
5128 gcc_unreachable ();
5129 break;
5130 #endif
5131
5132 default:
5133 last = make_debug_insn_raw (x);
5134 add_insn (last);
5135 break;
5136 }
5137
5138 return last;
5139 }
5140
5141 /* Make an insn of code JUMP_INSN with pattern X
5142 and add it to the end of the doubly-linked list. */
5143
5144 rtx_insn *
5145 emit_jump_insn (rtx x)
5146 {
5147 rtx_insn *last = NULL;
5148 rtx_insn *insn;
5149
5150 switch (GET_CODE (x))
5151 {
5152 case DEBUG_INSN:
5153 case INSN:
5154 case JUMP_INSN:
5155 case CALL_INSN:
5156 case CODE_LABEL:
5157 case BARRIER:
5158 case NOTE:
5159 insn = as_a <rtx_insn *> (x);
5160 while (insn)
5161 {
5162 rtx_insn *next = NEXT_INSN (insn);
5163 add_insn (insn);
5164 last = insn;
5165 insn = next;
5166 }
5167 break;
5168
5169 #ifdef ENABLE_RTL_CHECKING
5170 case JUMP_TABLE_DATA:
5171 case SEQUENCE:
5172 gcc_unreachable ();
5173 break;
5174 #endif
5175
5176 default:
5177 last = make_jump_insn_raw (x);
5178 add_insn (last);
5179 break;
5180 }
5181
5182 return last;
5183 }
5184
5185 /* Make an insn of code CALL_INSN with pattern X
5186 and add it to the end of the doubly-linked list. */
5187
5188 rtx_insn *
5189 emit_call_insn (rtx x)
5190 {
5191 rtx_insn *insn;
5192
5193 switch (GET_CODE (x))
5194 {
5195 case DEBUG_INSN:
5196 case INSN:
5197 case JUMP_INSN:
5198 case CALL_INSN:
5199 case CODE_LABEL:
5200 case BARRIER:
5201 case NOTE:
5202 insn = emit_insn (x);
5203 break;
5204
5205 #ifdef ENABLE_RTL_CHECKING
5206 case SEQUENCE:
5207 case JUMP_TABLE_DATA:
5208 gcc_unreachable ();
5209 break;
5210 #endif
5211
5212 default:
5213 insn = make_call_insn_raw (x);
5214 add_insn (insn);
5215 break;
5216 }
5217
5218 return insn;
5219 }
5220
5221 /* Add the label LABEL to the end of the doubly-linked list. */
5222
5223 rtx_code_label *
5224 emit_label (rtx uncast_label)
5225 {
5226 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5227
5228 gcc_checking_assert (INSN_UID (label) == 0);
5229 INSN_UID (label) = cur_insn_uid++;
5230 add_insn (label);
5231 return label;
5232 }
5233
5234 /* Make an insn of code JUMP_TABLE_DATA
5235 and add it to the end of the doubly-linked list. */
5236
5237 rtx_jump_table_data *
5238 emit_jump_table_data (rtx table)
5239 {
5240 rtx_jump_table_data *jump_table_data =
5241 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5242 INSN_UID (jump_table_data) = cur_insn_uid++;
5243 PATTERN (jump_table_data) = table;
5244 BLOCK_FOR_INSN (jump_table_data) = NULL;
5245 add_insn (jump_table_data);
5246 return jump_table_data;
5247 }
5248
5249 /* Make an insn of code BARRIER
5250 and add it to the end of the doubly-linked list. */
5251
5252 rtx_barrier *
5253 emit_barrier (void)
5254 {
5255 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5256 INSN_UID (barrier) = cur_insn_uid++;
5257 add_insn (barrier);
5258 return barrier;
5259 }
5260
5261 /* Emit a copy of note ORIG. */
5262
5263 rtx_note *
5264 emit_note_copy (rtx_note *orig)
5265 {
5266 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5267 rtx_note *note = make_note_raw (kind);
5268 NOTE_DATA (note) = NOTE_DATA (orig);
5269 add_insn (note);
5270 return note;
5271 }
5272
5273 /* Make an insn of code NOTE with kind KIND
5274 and add it to the end of the doubly-linked list. */
5275
5276 rtx_note *
5277 emit_note (enum insn_note kind)
5278 {
5279 rtx_note *note = make_note_raw (kind);
5280 add_insn (note);
5281 return note;
5282 }
5283
5284 /* Emit a clobber of lvalue X. */
5285
5286 rtx_insn *
5287 emit_clobber (rtx x)
5288 {
5289 /* CONCATs should not appear in the insn stream. */
5290 if (GET_CODE (x) == CONCAT)
5291 {
5292 emit_clobber (XEXP (x, 0));
5293 return emit_clobber (XEXP (x, 1));
5294 }
5295 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5296 }
5297
5298 /* Return a sequence of insns to clobber lvalue X. */
5299
5300 rtx_insn *
5301 gen_clobber (rtx x)
5302 {
5303 rtx_insn *seq;
5304
5305 start_sequence ();
5306 emit_clobber (x);
5307 seq = get_insns ();
5308 end_sequence ();
5309 return seq;
5310 }
5311
5312 /* Emit a use of rvalue X. */
5313
5314 rtx_insn *
5315 emit_use (rtx x)
5316 {
5317 /* CONCATs should not appear in the insn stream. */
5318 if (GET_CODE (x) == CONCAT)
5319 {
5320 emit_use (XEXP (x, 0));
5321 return emit_use (XEXP (x, 1));
5322 }
5323 return emit_insn (gen_rtx_USE (VOIDmode, x));
5324 }
5325
5326 /* Return a sequence of insns to use rvalue X. */
5327
5328 rtx_insn *
5329 gen_use (rtx x)
5330 {
5331 rtx_insn *seq;
5332
5333 start_sequence ();
5334 emit_use (x);
5335 seq = get_insns ();
5336 end_sequence ();
5337 return seq;
5338 }
5339
5340 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5341 Return the set in INSN that such notes describe, or NULL if the notes
5342 have no meaning for INSN. */
5343
5344 rtx
5345 set_for_reg_notes (rtx insn)
5346 {
5347 rtx pat, reg;
5348
5349 if (!INSN_P (insn))
5350 return NULL_RTX;
5351
5352 pat = PATTERN (insn);
5353 if (GET_CODE (pat) == PARALLEL)
5354 {
5355 /* We do not use single_set because that ignores SETs of unused
5356 registers. REG_EQUAL and REG_EQUIV notes really do require the
5357 PARALLEL to have a single SET. */
5358 if (multiple_sets (insn))
5359 return NULL_RTX;
5360 pat = XVECEXP (pat, 0, 0);
5361 }
5362
5363 if (GET_CODE (pat) != SET)
5364 return NULL_RTX;
5365
5366 reg = SET_DEST (pat);
5367
5368 /* Notes apply to the contents of a STRICT_LOW_PART. */
5369 if (GET_CODE (reg) == STRICT_LOW_PART
5370 || GET_CODE (reg) == ZERO_EXTRACT)
5371 reg = XEXP (reg, 0);
5372
5373 /* Check that we have a register. */
5374 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5375 return NULL_RTX;
5376
5377 return pat;
5378 }
5379
5380 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5381 note of this type already exists, remove it first. */
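
/* For example (illustrative only), to record that INSN's destination is
   known to equal the constant 42:

       set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */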
5382
5383 rtx
5384 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5385 {
5386 rtx note = find_reg_note (insn, kind, NULL_RTX);
5387
5388 switch (kind)
5389 {
5390 case REG_EQUAL:
5391 case REG_EQUIV:
5392 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5393 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5394 return NULL_RTX;
5395
5396 /* Don't add REG_EQUAL/REG_EQUIV notes whose datum is an ASM_OPERANDS.
5397 Such a note serves no useful purpose and breaks eliminate_regs. */
5398 if (GET_CODE (datum) == ASM_OPERANDS)
5399 return NULL_RTX;
5400
5401 /* Notes with side effects are dangerous. Even if the side-effect
5402 initially mirrors one in PATTERN (INSN), later optimizations
5403 might alter the way that the final register value is calculated
5404 and so move or alter the side-effect in some way. The note would
5405 then no longer be a valid substitution for SET_SRC. */
5406 if (side_effects_p (datum))
5407 return NULL_RTX;
5408 break;
5409
5410 default:
5411 break;
5412 }
5413
5414 if (note)
5415 XEXP (note, 0) = datum;
5416 else
5417 {
5418 add_reg_note (insn, kind, datum);
5419 note = REG_NOTES (insn);
5420 }
5421
5422 switch (kind)
5423 {
5424 case REG_EQUAL:
5425 case REG_EQUIV:
5426 df_notes_rescan (as_a <rtx_insn *> (insn));
5427 break;
5428 default:
5429 break;
5430 }
5431
5432 return note;
5433 }
5434
5435 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5436 rtx
5437 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5438 {
5439 rtx set = set_for_reg_notes (insn);
5440
5441 if (set && SET_DEST (set) == dst)
5442 return set_unique_reg_note (insn, kind, datum);
5443 return NULL_RTX;
5444 }
5445 \f
5446 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5447 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5448 is true.
5449
5450 If X is a label, it is simply added into the insn chain. */
5451
5452 rtx_insn *
5453 emit (rtx x, bool allow_barrier_p)
5454 {
5455 enum rtx_code code = classify_insn (x);
5456
5457 switch (code)
5458 {
5459 case CODE_LABEL:
5460 return emit_label (x);
5461 case INSN:
5462 return emit_insn (x);
5463 case JUMP_INSN:
5464 {
5465 rtx_insn *insn = emit_jump_insn (x);
5466 if (allow_barrier_p
5467 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5468 return emit_barrier ();
5469 return insn;
5470 }
5471 case CALL_INSN:
5472 return emit_call_insn (x);
5473 case DEBUG_INSN:
5474 return emit_debug_insn (x);
5475 default:
5476 gcc_unreachable ();
5477 }
5478 }
5479 \f
5480 /* Space for free sequence stack entries. */
5481 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5482
5483 /* Begin emitting insns to a sequence. If this sequence will contain
5484 something that might cause the compiler to pop arguments to function
5485 calls (because those pops have previously been deferred; see
5486 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5487 before calling this function. That will ensure that the deferred
5488 pops are not accidentally emitted in the middle of this sequence. */
5489
5490 void
5491 start_sequence (void)
5492 {
5493 struct sequence_stack *tem;
5494
5495 if (free_sequence_stack != NULL)
5496 {
5497 tem = free_sequence_stack;
5498 free_sequence_stack = tem->next;
5499 }
5500 else
5501 tem = ggc_alloc<sequence_stack> ();
5502
5503 tem->next = get_current_sequence ()->next;
5504 tem->first = get_insns ();
5505 tem->last = get_last_insn ();
5506 get_current_sequence ()->next = tem;
5507
5508 set_first_insn (0);
5509 set_last_insn (0);
5510 }
5511
5512 /* Set up the insn chain starting with FIRST as the current sequence,
5513 saving the previously current one. See the documentation for
5514 start_sequence for more information about how to use this function. */
5515
5516 void
5517 push_to_sequence (rtx_insn *first)
5518 {
5519 rtx_insn *last;
5520
5521 start_sequence ();
5522
5523 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5524 ;
5525
5526 set_first_insn (first);
5527 set_last_insn (last);
5528 }
5529
5530 /* Like push_to_sequence, but take the last insn as an argument to avoid
5531 looping through the list. */
5532
5533 void
5534 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5535 {
5536 start_sequence ();
5537
5538 set_first_insn (first);
5539 set_last_insn (last);
5540 }
5541
5542 /* Set up the outer-level insn chain
5543 as the current sequence, saving the previously current one. */
5544
5545 void
5546 push_topmost_sequence (void)
5547 {
5548 struct sequence_stack *top;
5549
5550 start_sequence ();
5551
5552 top = get_topmost_sequence ();
5553 set_first_insn (top->first);
5554 set_last_insn (top->last);
5555 }
5556
5557 /* After emitting to the outer-level insn chain, update the outer-level
5558 insn chain, and restore the previous saved state. */
5559
5560 void
5561 pop_topmost_sequence (void)
5562 {
5563 struct sequence_stack *top;
5564
5565 top = get_topmost_sequence ();
5566 top->first = get_insns ();
5567 top->last = get_last_insn ();
5568
5569 end_sequence ();
5570 }
5571
5572 /* After emitting to a sequence, restore previous saved state.
5573
5574 To get the contents of the sequence just made, you must call
5575 `get_insns' *before* calling here.
5576
5577 If the compiler might have deferred popping arguments while
5578 generating this sequence, and this sequence will not be immediately
5579 inserted into the instruction stream, use do_pending_stack_adjust
5580 before calling get_insns. That will ensure that the deferred
5581 pops are inserted into this sequence, and not into some random
5582 location in the instruction stream. See INHIBIT_DEFER_POP for more
5583 information about deferred popping of arguments. */
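
/* Illustrative sketch of the required ordering (placeholder insns);
   get_insns must be called before end_sequence:

       start_sequence ();
       ... emit the new insns ...
       rtx_insn *seq = get_insns ();
       end_sequence ();
       emit_insn (seq);  */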
5584
5585 void
5586 end_sequence (void)
5587 {
5588 struct sequence_stack *tem = get_current_sequence ()->next;
5589
5590 set_first_insn (tem->first);
5591 set_last_insn (tem->last);
5592 get_current_sequence ()->next = tem->next;
5593
5594 memset (tem, 0, sizeof (*tem));
5595 tem->next = free_sequence_stack;
5596 free_sequence_stack = tem;
5597 }
5598
5599 /* Return 1 if currently emitting into a sequence. */
5600
5601 int
5602 in_sequence_p (void)
5603 {
5604 return get_current_sequence ()->next != 0;
5605 }
5606 \f
5607 /* Put the various virtual registers into REGNO_REG_RTX. */
5608
5609 static void
5610 init_virtual_regs (void)
5611 {
5612 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5613 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5614 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5615 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5616 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5617 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5618 = virtual_preferred_stack_boundary_rtx;
5619 }
5620
5621 \f
5622 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5623 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5624 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5625 static int copy_insn_n_scratches;
5626
5627 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5628 copied an ASM_OPERANDS.
5629 In that case, it is the original input-operand vector. */
5630 static rtvec orig_asm_operands_vector;
5631
5632 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5633 copied an ASM_OPERANDS.
5634 In that case, it is the copied input-operand vector. */
5635 static rtvec copy_asm_operands_vector;
5636
5637 /* Likewise for the constraints vector. */
5638 static rtvec orig_asm_constraints_vector;
5639 static rtvec copy_asm_constraints_vector;
5640
5641 /* Recursively create a new copy of an rtx for copy_insn.
5642 This function differs from copy_rtx in that it handles SCRATCHes and
5643 ASM_OPERANDs properly.
5644 Normally, this function is not used directly; use copy_insn as front end.
5645 However, you could first copy an insn pattern with copy_insn and then use
5646 this function afterwards to properly copy any REG_NOTEs containing
5647 SCRATCHes. */
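
/* For instance (sketch; INSN is a placeholder), after

       rtx pat = copy_insn (PATTERN (insn));

   the notes can be copied with the same SCRATCH mapping still active via

       rtx notes = copy_insn_1 (REG_NOTES (insn));  */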
5648
5649 rtx
5650 copy_insn_1 (rtx orig)
5651 {
5652 rtx copy;
5653 int i, j;
5654 RTX_CODE code;
5655 const char *format_ptr;
5656
5657 if (orig == NULL)
5658 return NULL;
5659
5660 code = GET_CODE (orig);
5661
5662 switch (code)
5663 {
5664 case REG:
5665 case DEBUG_EXPR:
5666 CASE_CONST_ANY:
5667 case SYMBOL_REF:
5668 case CODE_LABEL:
5669 case PC:
5670 case CC0:
5671 case RETURN:
5672 case SIMPLE_RETURN:
5673 return orig;
5674 case CLOBBER:
5675 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5676 clobbers or clobbers of hard registers that originated as pseudos.
5677 This is needed to allow safe register renaming. */
5678 if (REG_P (XEXP (orig, 0))
5679 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5680 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5681 return orig;
5682 break;
5683
5684 case SCRATCH:
5685 for (i = 0; i < copy_insn_n_scratches; i++)
5686 if (copy_insn_scratch_in[i] == orig)
5687 return copy_insn_scratch_out[i];
5688 break;
5689
5690 case CONST:
5691 if (shared_const_p (orig))
5692 return orig;
5693 break;
5694
5695 /* A MEM with a constant address is not sharable. The problem is that
5696 the constant address may need to be reloaded. If the mem is shared,
5697 then reloading one copy of this mem will cause all copies to appear
5698 to have been reloaded. */
5699
5700 default:
5701 break;
5702 }
5703
5704 /* Copy the various flags, fields, and other information. We assume
5705 that all fields need copying, and then clear the fields that should
5706 not be copied. That is the sensible default behavior, and forces
5707 us to explicitly document why we are *not* copying a flag. */
5708 copy = shallow_copy_rtx (orig);
5709
5710 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5711 if (INSN_P (orig))
5712 {
5713 RTX_FLAG (copy, jump) = 0;
5714 RTX_FLAG (copy, call) = 0;
5715 RTX_FLAG (copy, frame_related) = 0;
5716 }
5717
5718 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5719
5720 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5721 switch (*format_ptr++)
5722 {
5723 case 'e':
5724 if (XEXP (orig, i) != NULL)
5725 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5726 break;
5727
5728 case 'E':
5729 case 'V':
5730 if (XVEC (orig, i) == orig_asm_constraints_vector)
5731 XVEC (copy, i) = copy_asm_constraints_vector;
5732 else if (XVEC (orig, i) == orig_asm_operands_vector)
5733 XVEC (copy, i) = copy_asm_operands_vector;
5734 else if (XVEC (orig, i) != NULL)
5735 {
5736 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5737 for (j = 0; j < XVECLEN (copy, i); j++)
5738 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5739 }
5740 break;
5741
5742 case 't':
5743 case 'w':
5744 case 'i':
5745 case 's':
5746 case 'S':
5747 case 'u':
5748 case '0':
5749 /* These are left unchanged. */
5750 break;
5751
5752 default:
5753 gcc_unreachable ();
5754 }
5755
5756 if (code == SCRATCH)
5757 {
5758 i = copy_insn_n_scratches++;
5759 gcc_assert (i < MAX_RECOG_OPERANDS);
5760 copy_insn_scratch_in[i] = orig;
5761 copy_insn_scratch_out[i] = copy;
5762 }
5763 else if (code == ASM_OPERANDS)
5764 {
5765 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5766 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5767 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5768 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5769 }
5770
5771 return copy;
5772 }
5773
5774 /* Create a new copy of an rtx.
5775 This function differs from copy_rtx in that it handles SCRATCHes and
5776 ASM_OPERANDs properly.
5777 INSN doesn't really have to be a full INSN; it could be just the
5778 pattern. */
5779 rtx
5780 copy_insn (rtx insn)
5781 {
5782 copy_insn_n_scratches = 0;
5783 orig_asm_operands_vector = 0;
5784 orig_asm_constraints_vector = 0;
5785 copy_asm_operands_vector = 0;
5786 copy_asm_constraints_vector = 0;
5787 return copy_insn_1 (insn);
5788 }
5789
5790 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5791 on the assumption that INSN itself remains in its original place. */
5792
5793 rtx_insn *
5794 copy_delay_slot_insn (rtx_insn *insn)
5795 {
5796 /* Copy INSN with its rtx_code, all its notes, location etc. */
5797 insn = as_a <rtx_insn *> (copy_rtx (insn));
5798 INSN_UID (insn) = cur_insn_uid++;
5799 return insn;
5800 }
5801
5802 /* Initialize data structures and variables in this file
5803 before generating rtl for each function. */
5804
5805 void
5806 init_emit (void)
5807 {
5808 set_first_insn (NULL);
5809 set_last_insn (NULL);
5810 if (MIN_NONDEBUG_INSN_UID)
5811 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5812 else
5813 cur_insn_uid = 1;
5814 cur_debug_insn_uid = 1;
5815 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5816 first_label_num = label_num;
5817 get_current_sequence ()->next = NULL;
5818
5819 /* Init the tables that describe all the pseudo regs. */
5820
5821 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5822
5823 crtl->emit.regno_pointer_align
5824 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5825
5826 regno_reg_rtx
5827 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5828
5829 /* Put copies of all the hard registers into regno_reg_rtx. */
5830 memcpy (regno_reg_rtx,
5831 initial_regno_reg_rtx,
5832 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5833
5834 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5835 init_virtual_regs ();
5836
5837 /* Indicate that the virtual registers and stack locations are
5838 all pointers. */
5839 REG_POINTER (stack_pointer_rtx) = 1;
5840 REG_POINTER (frame_pointer_rtx) = 1;
5841 REG_POINTER (hard_frame_pointer_rtx) = 1;
5842 REG_POINTER (arg_pointer_rtx) = 1;
5843
5844 REG_POINTER (virtual_incoming_args_rtx) = 1;
5845 REG_POINTER (virtual_stack_vars_rtx) = 1;
5846 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5847 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5848 REG_POINTER (virtual_cfa_rtx) = 1;
5849
5850 #ifdef STACK_BOUNDARY
5851 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5852 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5853 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5854 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5855
5856 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5857 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5858 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5859 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5860
5861 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5862 #endif
5863
5864 #ifdef INIT_EXPANDERS
5865 INIT_EXPANDERS;
5866 #endif
5867 }
5868
5869 /* Return true if X is a valid element for a duplicated vector constant
5870 of the given mode. */
5871
5872 bool
5873 valid_for_const_vec_duplicate_p (machine_mode, rtx x)
5874 {
5875 return (CONST_SCALAR_INT_P (x)
5876 || CONST_DOUBLE_AS_FLOAT_P (x)
5877 || CONST_FIXED_P (x));
5878 }
5879
5880 /* Like gen_const_vec_duplicate, but ignore const_tiny_rtx. */
5881
5882 static rtx
5883 gen_const_vec_duplicate_1 (machine_mode mode, rtx el)
5884 {
5885 int nunits = GET_MODE_NUNITS (mode);
5886 rtvec v = rtvec_alloc (nunits);
5887 for (int i = 0; i < nunits; ++i)
5888 RTVEC_ELT (v, i) = el;
5889 return gen_rtx_raw_CONST_VECTOR (mode, v);
5890 }
5891
5892 /* Generate a vector constant of mode MODE in which every element has
5893 value ELT. */
5894
5895 rtx
5896 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5897 {
5898 scalar_mode inner_mode = GET_MODE_INNER (mode);
5899 if (elt == CONST0_RTX (inner_mode))
5900 return CONST0_RTX (mode);
5901 else if (elt == CONST1_RTX (inner_mode))
5902 return CONST1_RTX (mode);
5903 else if (elt == CONSTM1_RTX (inner_mode))
5904 return CONSTM1_RTX (mode);
5905
5906 return gen_const_vec_duplicate_1 (mode, elt);
5907 }
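
/* Illustrative sketch, assuming a target that provides V4SImode with four
   SImode elements:

       rtx ones = gen_const_vec_duplicate (V4SImode, const1_rtx);

   returns the shared CONST1_RTX (V4SImode) rather than building a new
   CONST_VECTOR, whereas an element such as GEN_INT (5) falls through to
   gen_const_vec_duplicate_1 and yields a fresh CONST_VECTOR whose four
   elements are all (const_int 5).  */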
5908
5909 /* Return a vector rtx of mode MODE in which every element has value X.
5910 The result will be a constant if X is constant. */
5911
5912 rtx
5913 gen_vec_duplicate (machine_mode mode, rtx x)
5914 {
5915 if (valid_for_const_vec_duplicate_p (mode, x))
5916 return gen_const_vec_duplicate (mode, x);
5917 return gen_rtx_VEC_DUPLICATE (mode, x);
5918 }
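
/* Illustrative sketch, again assuming a V4SImode target mode: with a
   constant element the result folds to a CONST_VECTOR as above, while a
   variable element such as an SImode pseudo register produces the
   non-constant form

       (vec_duplicate:V4SI (reg:SI N))

   via gen_rtx_VEC_DUPLICATE.  */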
5919
5920 /* A subroutine of const_vec_series_p that handles the case in which
5921 X is known to be an integer CONST_VECTOR. */
5922
5923 bool
5924 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5925 {
5926 unsigned int nelts = CONST_VECTOR_NUNITS (x);
5927 if (nelts < 2)
5928 return false;
5929
5930 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
5931 rtx base = CONST_VECTOR_ELT (x, 0);
5932 rtx step = simplify_binary_operation (MINUS, inner,
5933 CONST_VECTOR_ELT (x, 1), base);
5934 if (rtx_equal_p (step, CONST0_RTX (inner)))
5935 return false;
5936
5937 for (unsigned int i = 2; i < nelts; ++i)
5938 {
5939 rtx diff = simplify_binary_operation (MINUS, inner,
5940 CONST_VECTOR_ELT (x, i),
5941 CONST_VECTOR_ELT (x, i - 1));
5942 if (!rtx_equal_p (step, diff))
5943 return false;
5944 }
5945
5946 *base_out = base;
5947 *step_out = step;
5948 return true;
5949 }
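
/* Illustrative sketch of the routine above: for an integer CONST_VECTOR
   whose elements are (const_int 1), (const_int 3), (const_int 5) and
   (const_int 7), it stores (const_int 1) in *BASE_OUT and (const_int 2)
   in *STEP_OUT and returns true.  A constant-duplicate vector is rejected
   because its step is zero, as is any vector with fewer than two
   elements.  */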
5950
5951 /* Generate a vector constant of mode MODE in which element I has
5952 the value BASE + I * STEP. */
5953
5954 rtx
5955 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
5956 {
5957 gcc_assert (CONSTANT_P (base) && CONSTANT_P (step));
5958
5959 int nunits = GET_MODE_NUNITS (mode);
5960 rtvec v = rtvec_alloc (nunits);
5961 scalar_mode inner_mode = GET_MODE_INNER (mode);
5962 RTVEC_ELT (v, 0) = base;
5963 for (int i = 1; i < nunits; ++i)
5964 RTVEC_ELT (v, i) = simplify_gen_binary (PLUS, inner_mode,
5965 RTVEC_ELT (v, i - 1), step);
5966 return gen_rtx_raw_CONST_VECTOR (mode, v);
5967 }
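
/* Illustrative sketch, assuming a V4SImode target mode:

       rtx iota = gen_const_vec_series (V4SImode, const0_rtx, const1_rtx);

   builds the CONST_VECTOR {0, 1, 2, 3}; each element is derived from the
   previous one with simplify_gen_binary (PLUS, ...).  */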
5968
5969 /* Generate a vector of mode MODE in which element I has the value
5970 BASE + I * STEP. The result will be a constant if BASE and STEP
5971 are both constants. */
5972
5973 rtx
5974 gen_vec_series (machine_mode mode, rtx base, rtx step)
5975 {
5976 if (step == const0_rtx)
5977 return gen_vec_duplicate (mode, base);
5978 if (CONSTANT_P (base) && CONSTANT_P (step))
5979 return gen_const_vec_series (mode, base, step);
5980 return gen_rtx_VEC_SERIES (mode, base, step);
5981 }
5982
5983 /* Generate a new vector constant for mode MODE and constant value
5984 CONSTANT. */
5985
5986 static rtx
5987 gen_const_vector (machine_mode mode, int constant)
5988 {
5989 machine_mode inner = GET_MODE_INNER (mode);
5990
5991 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5992
5993 rtx el = const_tiny_rtx[constant][(int) inner];
5994 gcc_assert (el);
5995
5996 return gen_const_vec_duplicate_1 (mode, el);
5997 }
5998
5999 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
6000 all elements are zero, and the one vector when all elements are one. */
6001 rtx
6002 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6003 {
6004 gcc_assert (GET_MODE_NUNITS (mode) == GET_NUM_ELEM (v));
6005
6006 /* If the values are all the same, check to see if we can use one of the
6007 standard constant vectors. */
6008 if (rtvec_all_equal_p (v))
6009 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6010
6011 return gen_rtx_raw_CONST_VECTOR (mode, v);
6012 }
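
/* Illustrative sketch, assuming a target that provides V2SImode with two
   SImode elements:

       gen_rtx_CONST_VECTOR (V2SImode, gen_rtvec (2, const0_rtx, const0_rtx));

   does not allocate a new vector; rtvec_all_equal_p detects that both
   elements are equal and the call collapses to CONST0_RTX (V2SImode).  */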
6013
6014 /* Initialize global register information required by all functions. */
6015
6016 void
6017 init_emit_regs (void)
6018 {
6019 int i;
6020 machine_mode mode;
6021 mem_attrs *attrs;
6022
6023 /* Reset register attributes.  */
6024 reg_attrs_htab->empty ();
6025
6026 /* We need reg_raw_mode, so initialize the modes now. */
6027 init_reg_modes_target ();
6028
6029 /* Assign register numbers to the globally defined register rtx. */
6030 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6031 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6032 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6033 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6034 virtual_incoming_args_rtx =
6035 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6036 virtual_stack_vars_rtx =
6037 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6038 virtual_stack_dynamic_rtx =
6039 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6040 virtual_outgoing_args_rtx =
6041 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6042 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6043 virtual_preferred_stack_boundary_rtx =
6044 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6045
6046 /* Initialize RTL for commonly used hard registers. These are
6047 copied into regno_reg_rtx as we begin to compile each function. */
6048 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6049 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6050
6051 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6052 return_address_pointer_rtx
6053 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6054 #endif
6055
6056 pic_offset_table_rtx = NULL_RTX;
6057 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6058 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6059
6060 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6061 {
6062 mode = (machine_mode) i;
6063 attrs = ggc_cleared_alloc<mem_attrs> ();
6064 attrs->align = BITS_PER_UNIT;
6065 attrs->addrspace = ADDR_SPACE_GENERIC;
6066 if (mode != BLKmode)
6067 {
6068 attrs->size_known_p = true;
6069 attrs->size = GET_MODE_SIZE (mode);
6070 if (STRICT_ALIGNMENT)
6071 attrs->align = GET_MODE_ALIGNMENT (mode);
6072 }
6073 mode_mem_attrs[i] = attrs;
6074 }
6075
6076 split_branch_probability = profile_probability::uninitialized ();
6077 }
6078
6079 /* Initialize global machine_mode variables. */
6080
6081 void
6082 init_derived_machine_modes (void)
6083 {
6084 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6085 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6086 {
6087 scalar_int_mode mode = mode_iter.require ();
6088
6089 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6090 && !opt_byte_mode.exists ())
6091 opt_byte_mode = mode;
6092
6093 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6094 && !opt_word_mode.exists ())
6095 opt_word_mode = mode;
6096 }
6097
6098 byte_mode = opt_byte_mode.require ();
6099 word_mode = opt_word_mode.require ();
6100 ptr_mode = as_a <scalar_int_mode>
6101 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6102 }
6103
6104 /* Create some permanent unique rtl objects shared between all functions. */
6105
6106 void
6107 init_emit_once (void)
6108 {
6109 int i;
6110 machine_mode mode;
6111 scalar_float_mode double_mode;
6112 opt_scalar_mode smode_iter;
6113
6114 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6115 CONST_FIXED, and memory attribute hash tables. */
6116 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6117
6118 #if TARGET_SUPPORTS_WIDE_INT
6119 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6120 #endif
6121 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6122
6123 if (NUM_POLY_INT_COEFFS > 1)
6124 const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6125
6126 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6127
6128 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6129
6130 #ifdef INIT_EXPANDERS
6131 /* This is to initialize {init|mark|free}_machine_status before the first
6132 call to push_function_context_to. This is needed by the Chill front
6133 end, which calls push_function_context_to before the first call to
6134 init_function_start. */
6135 INIT_EXPANDERS;
6136 #endif
6137
6138 /* Create the unique rtx's for certain rtx codes and operand values. */
6139
6140 /* Process stack-limiting command-line options. */
6141 if (opt_fstack_limit_symbol_arg != NULL)
6142 stack_limit_rtx
6143 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6144 if (opt_fstack_limit_register_no >= 0)
6145 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6146
6147 /* Don't use gen_rtx_CONST_INT here, since in this case it would try to
6148 use the very const_int_rtx entries we are initializing. */
6149 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6150 const_int_rtx[i + MAX_SAVED_CONST_INT] =
6151 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6152
6153 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6154 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6155 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6156 else
6157 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6158
6159 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6160
6161 real_from_integer (&dconst0, double_mode, 0, SIGNED);
6162 real_from_integer (&dconst1, double_mode, 1, SIGNED);
6163 real_from_integer (&dconst2, double_mode, 2, SIGNED);
6164
6165 dconstm1 = dconst1;
6166 dconstm1.sign = 1;
6167
6168 dconsthalf = dconst1;
6169 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6170
6171 for (i = 0; i < 3; i++)
6172 {
6173 const REAL_VALUE_TYPE *const r =
6174 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6175
6176 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6177 const_tiny_rtx[i][(int) mode] =
6178 const_double_from_real_value (*r, mode);
6179
6180 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6181 const_tiny_rtx[i][(int) mode] =
6182 const_double_from_real_value (*r, mode);
6183
6184 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6185
6186 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6187 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6188
6189 for (mode = MIN_MODE_PARTIAL_INT;
6190 mode <= MAX_MODE_PARTIAL_INT;
6191 mode = (machine_mode)((int)(mode) + 1))
6192 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6193 }
6194
6195 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6196
6197 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6198 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6199
6200 for (mode = MIN_MODE_PARTIAL_INT;
6201 mode <= MAX_MODE_PARTIAL_INT;
6202 mode = (machine_mode)((int)(mode) + 1))
6203 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6204
6205 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6206 {
6207 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6208 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6209 }
6210
6211 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6212 {
6213 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6214 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6215 }
6216
6217 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6218 {
6219 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6220 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6221 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6222 }
6223
6224 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6225 {
6226 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6227 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6228 }
6229
6230 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6231 {
6232 scalar_mode smode = smode_iter.require ();
6233 FCONST0 (smode).data.high = 0;
6234 FCONST0 (smode).data.low = 0;
6235 FCONST0 (smode).mode = smode;
6236 const_tiny_rtx[0][(int) smode]
6237 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6238 }
6239
6240 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6241 {
6242 scalar_mode smode = smode_iter.require ();
6243 FCONST0 (smode).data.high = 0;
6244 FCONST0 (smode).data.low = 0;
6245 FCONST0 (smode).mode = smode;
6246 const_tiny_rtx[0][(int) smode]
6247 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6248 }
6249
6250 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6251 {
6252 scalar_mode smode = smode_iter.require ();
6253 FCONST0 (smode).data.high = 0;
6254 FCONST0 (smode).data.low = 0;
6255 FCONST0 (smode).mode = smode;
6256 const_tiny_rtx[0][(int) smode]
6257 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6258
6259 /* We store the value 1. */
6260 FCONST1 (smode).data.high = 0;
6261 FCONST1 (smode).data.low = 0;
6262 FCONST1 (smode).mode = smode;
6263 FCONST1 (smode).data
6264 = double_int_one.lshift (GET_MODE_FBIT (smode),
6265 HOST_BITS_PER_DOUBLE_INT,
6266 SIGNED_FIXED_POINT_MODE_P (smode));
6267 const_tiny_rtx[1][(int) smode]
6268 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6269 }
6270
6271 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6272 {
6273 scalar_mode smode = smode_iter.require ();
6274 FCONST0 (smode).data.high = 0;
6275 FCONST0 (smode).data.low = 0;
6276 FCONST0 (smode).mode = smode;
6277 const_tiny_rtx[0][(int) smode]
6278 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6279
6280 /* We store the value 1. */
6281 FCONST1 (smode).data.high = 0;
6282 FCONST1 (smode).data.low = 0;
6283 FCONST1 (smode).mode = smode;
6284 FCONST1 (smode).data
6285 = double_int_one.lshift (GET_MODE_FBIT (smode),
6286 HOST_BITS_PER_DOUBLE_INT,
6287 SIGNED_FIXED_POINT_MODE_P (smode));
6288 const_tiny_rtx[1][(int) smode]
6289 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6290 }
6291
6292 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6293 {
6294 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6295 }
6296
6297 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6298 {
6299 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6300 }
6301
6302 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6303 {
6304 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6305 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6306 }
6307
6308 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6309 {
6310 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6311 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6312 }
6313
6314 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6315 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6316 const_tiny_rtx[0][i] = const0_rtx;
6317
6318 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6319 if (STORE_FLAG_VALUE == 1)
6320 const_tiny_rtx[1][(int) BImode] = const1_rtx;
6321
6322 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
6323 {
6324 scalar_mode smode = smode_iter.require ();
6325 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
6326 const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
6327 }
6328
6329 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6330 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6331 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6332 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6333 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6334 /*prev_insn=*/NULL,
6335 /*next_insn=*/NULL,
6336 /*bb=*/NULL,
6337 /*pattern=*/NULL_RTX,
6338 /*location=*/-1,
6339 CODE_FOR_nothing,
6340 /*reg_notes=*/NULL_RTX);
6341 }
6342 \f
6343 /* Produce an exact duplicate of insn INSN after AFTER.
6344 Take care to update libcall regions if present. */
6345
6346 rtx_insn *
6347 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6348 {
6349 rtx_insn *new_rtx;
6350 rtx link;
6351
6352 switch (GET_CODE (insn))
6353 {
6354 case INSN:
6355 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6356 break;
6357
6358 case JUMP_INSN:
6359 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6360 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6361 break;
6362
6363 case DEBUG_INSN:
6364 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6365 break;
6366
6367 case CALL_INSN:
6368 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6369 if (CALL_INSN_FUNCTION_USAGE (insn))
6370 CALL_INSN_FUNCTION_USAGE (new_rtx)
6371 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6372 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6373 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6374 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6375 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6376 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6377 break;
6378
6379 default:
6380 gcc_unreachable ();
6381 }
6382
6383 /* Update LABEL_NUSES. */
6384 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6385
6386 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6387
6388 /* If the old insn is frame related, then so is the new one. This is
6389 primarily needed for IA-64 unwind info which marks epilogue insns,
6390 which may be duplicated by the basic block reordering code. */
6391 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6392
6393 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6394 rtx *ptail = &REG_NOTES (new_rtx);
6395 while (*ptail != NULL_RTX)
6396 ptail = &XEXP (*ptail, 1);
6397
6398 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6399 will make them. REG_LABEL_TARGETs are created there too, but are
6400 supposed to be sticky, so we copy them. */
6401 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6402 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6403 {
6404 *ptail = duplicate_reg_note (link);
6405 ptail = &XEXP (*ptail, 1);
6406 }
6407
6408 INSN_CODE (new_rtx) = INSN_CODE (insn);
6409 return new_rtx;
6410 }
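
/* Illustrative usage sketch, an addition for exposition only: a pass that
   duplicates an instruction might write

       rtx_insn *dup = emit_copy_of_insn_after (insn, after);

   where AFTER is the insn the copy should follow.  The copy shares INSN's
   location and frame-related flag, and its REG_NOTES are duplicated except
   for REG_LABEL_OPERAND notes, which mark_jump_label recreates.  */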
6411
6412 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6413 rtx
6414 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6415 {
6416 if (hard_reg_clobbers[mode][regno])
6417 return hard_reg_clobbers[mode][regno];
6418 else
6419 return (hard_reg_clobbers[mode][regno] =
6420 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6421 }
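
/* Illustrative sketch, using hard register number 0 purely for exposition:
   repeated calls such as

       rtx c1 = gen_hard_reg_clobber (word_mode, 0);
       rtx c2 = gen_hard_reg_clobber (word_mode, 0);

   return the same cached (clobber (reg ...)) rtx, since the table above is
   indexed by mode and hard register number.  */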
6422
6423 location_t prologue_location;
6424 location_t epilogue_location;
6425
6426 /* Hold the current and last location information, so that the data
6427 structures are built lazily, only when instructions at a given location
6428 are actually needed. */
6429 static location_t curr_location;
6430
6431 /* Allocate the insn location data structure. */
6432 void
6433 insn_locations_init (void)
6434 {
6435 prologue_location = epilogue_location = 0;
6436 curr_location = UNKNOWN_LOCATION;
6437 }
6438
6439 /* At the end of emit stage, clear current location. */
6440 void
6441 insn_locations_finalize (void)
6442 {
6443 epilogue_location = curr_location;
6444 curr_location = UNKNOWN_LOCATION;
6445 }
6446
6447 /* Set current location. */
6448 void
6449 set_curr_insn_location (location_t location)
6450 {
6451 curr_location = location;
6452 }
6453
6454 /* Get current location. */
6455 location_t
6456 curr_insn_location (void)
6457 {
6458 return curr_location;
6459 }
6460
6461 /* Return the lexical scope block that INSN belongs to. */
6462 tree
6463 insn_scope (const rtx_insn *insn)
6464 {
6465 return LOCATION_BLOCK (INSN_LOCATION (insn));
6466 }
6467
6468 /* Return line number of the statement that produced this insn. */
6469 int
6470 insn_line (const rtx_insn *insn)
6471 {
6472 return LOCATION_LINE (INSN_LOCATION (insn));
6473 }
6474
6475 /* Return source file of the statement that produced this insn. */
6476 const char *
6477 insn_file (const rtx_insn *insn)
6478 {
6479 return LOCATION_FILE (INSN_LOCATION (insn));
6480 }
6481
6482 /* Return expanded location of the statement that produced this insn. */
6483 expanded_location
6484 insn_location (const rtx_insn *insn)
6485 {
6486 return expand_location (INSN_LOCATION (insn));
6487 }
6488
6489 /* Return true if memory model MODEL requires a pre-operation (release-style)
6490 barrier or a post-operation (acquire-style) barrier. While not universal,
6491 this function matches the behavior of several targets. */
6492
6493 bool
6494 need_atomic_barrier_p (enum memmodel model, bool pre)
6495 {
6496 switch (model & MEMMODEL_BASE_MASK)
6497 {
6498 case MEMMODEL_RELAXED:
6499 case MEMMODEL_CONSUME:
6500 return false;
6501 case MEMMODEL_RELEASE:
6502 return pre;
6503 case MEMMODEL_ACQUIRE:
6504 return !pre;
6505 case MEMMODEL_ACQ_REL:
6506 case MEMMODEL_SEQ_CST:
6507 return true;
6508 default:
6509 gcc_unreachable ();
6510 }
6511 }
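
/* Illustrative sketch of the routine above: for a release-style operation,
   need_atomic_barrier_p (MEMMODEL_RELEASE, true) returns true (a barrier is
   needed before the operation) while
   need_atomic_barrier_p (MEMMODEL_RELEASE, false) returns false; with
   MEMMODEL_SEQ_CST both calls return true, and with MEMMODEL_RELAXED both
   return false.  */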
6512
6513 /* Return a constant shift amount for shifting a value of mode MODE
6514 by VALUE bits. */
6515
6516 rtx
6517 gen_int_shift_amount (machine_mode, poly_int64 value)
6518 {
6519 /* Use a 64-bit mode, to avoid any truncation.
6520
6521 ??? Perhaps this should be automatically derived from the .md files
6522 instead, or perhaps have a target hook. */
6523 scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6524 ? DImode
6525 : int_mode_for_size (64, 0).require ());
6526 return gen_int_mode (value, shift_mode);
6527 }
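
/* Illustrative sketch, assuming a typical target where BITS_PER_UNIT is 8:

       rtx amt = gen_int_shift_amount (SImode, 3);

   is simply gen_int_mode (3, DImode), i.e. (const_int 3); in this
   implementation the first argument only documents the mode of the value
   being shifted and is otherwise unused.  */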
6528
6529 /* Initialize fields of rtl_data related to stack alignment. */
6530
6531 void
6532 rtl_data::init_stack_alignment ()
6533 {
6534 stack_alignment_needed = STACK_BOUNDARY;
6535 max_used_stack_slot_alignment = STACK_BOUNDARY;
6536 stack_alignment_estimated = 0;
6537 preferred_stack_boundary = STACK_BOUNDARY;
6538 }
6539
6540 \f
6541 #include "gt-emit-rtl.h"