/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "stmt.h"
#include "expr.h"
#include "debug.h"
#include "langhooks.h"
#include "params.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

machine_mode byte_mode;		/* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;		/* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;		/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in RTL
   form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able
   to deal with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  The structure records
   DECL and OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to prevent register equivalences from being seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

423
424void
425set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
426{
1c0849e5 427 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
428 ? hard_regno_nregs[regno][mode]
429 : 1);
937ca48e 430 PUT_MODE_RAW (x, mode);
1c0849e5 431 set_regno_raw (x, regno, nregs);
937ca48e 432}
433
22cf44bc 434/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
435 don't attempt to share with the various global pieces of rtl (such as
436 frame_pointer_rtx). */
437
438rtx
937ca48e 439gen_raw_REG (machine_mode mode, unsigned int regno)
22cf44bc 440{
0ff42de5 441 rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
937ca48e 442 set_mode_and_regno (x, mode, regno);
15183fd2 443 REG_ATTRS (x) = NULL;
22cf44bc 444 ORIGINAL_REGNO (x) = regno;
445 return x;
446}
447
7014838c 448/* There are some RTL codes that require special attention; the generation
449 functions do the raw handling. If you add to this list, modify
450 special_rtx in gengenrtl.c as well. */
451
rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
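
/* Editorial sketch, not part of the original file: small constants come
   from the pre-allocated const_int_rtx table above, so pointer equality
   holds for them:

     gcc_assert (gen_rtx_CONST_INT (VOIDmode, 0) == const0_rtx);
     gcc_assert (GEN_INT (1) == const1_rtx);

   Values outside [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are interned
   in const_int_htab, so repeated requests for the same value still return
   one shared rtx.  */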

rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
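
/* Editorial example, assuming QImode is 8 bits wide on the target:
   trunc_int_for_mode sign-extends from the precision of MODE, so

     gen_int_mode (0xff, QImode) == constm1_rtx     (0xff wraps to -1)
     gen_int_mode (128, QImode)  == GEN_INT (-128)

   which keeps CONST_INTs in their canonical sign-extended form.  */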

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
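
/* Editorial sketch of a typical call, under the assumption that the
   wide_int's precision matches MODE: build a mode-wide all-ones mask.

     rtx mask = immed_wide_int_const
       (wi::minus_one (GET_MODE_PRECISION (mode)), mode);

   For precisions of at most HOST_BITS_PER_WIDE_INT this reduces to
   gen_int_mode and returns a plain CONST_INT.  */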

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
		  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
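
/* Editorial note, not in the original file: because of the sharing above,
   a Pmode reference to a well-known hard register outside of reload/LRA
   comes back as the pre-allocated global, e.g. (assuming
   !reload_completed):

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
     gcc_assert (fp == frame_pointer_rtx);

   so such references can be compared with pointer equality.  */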

rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a MEM referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register that
		needs to be used in different modes in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
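
/* Editorial examples of validate_subreg's rules, assuming a target with
   4-byte words and LRA not in progress:

     (subreg:SI (reg:DI d) 0)  -- OK: aligned lowpart of a multiword reg
     (subreg:SI (reg:DI d) 4)  -- OK: the other word
     (subreg:HI (reg:DF f) 0)  -- rejected: float-mode subregs must not
				  change size
     (subreg:DI (reg:DF f) 0)  -- OK: same-size view of the bits.  */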

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}


/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
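
/* Editorial usage sketch: building a two-element PARALLEL, where set0
   and set1 are hypothetical SET rtxes built elsewhere:

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));  */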

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
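
/* Editorial worked example, for a target with 4-byte words: the SImode
   lowpart of a DImode value starts at byte 0 if !WORDS_BIG_ENDIAN and
   at byte 4 if WORDS_BIG_ENDIAN, so

     byte_lowpart_offset (SImode, DImode) == 0 or 4

   while the paradoxical case negates the offset:

     byte_lowpart_offset (DImode, SImode) == 0 or -4.  */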

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
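
/* Editorial example: with generating_concat_p set, complex modes yield
   a CONCAT of two fresh pseudos rather than one register, e.g.
   (register numbers purely illustrative):

     gen_reg_rtx (DCmode)
       => (concat:DC (reg:DF 100) (reg:DF 101))  */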

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
1334
1335/* Return 1 plus largest pseudo reg number used in the current function. */
1336
1337int
35cb5232 1338max_reg_num (void)
15bbde2b 1339{
1340 return reg_rtx_no;
1341}
1342
1343/* Return 1 + the largest label number used so far in the current function. */
1344
1345int
35cb5232 1346max_label_num (void)
15bbde2b 1347{
15bbde2b 1348 return label_num;
1349}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}

rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
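
/* Editorial examples, assuming a little-endian target:

     subreg_lowpart_p ((subreg:HI (reg:SI r) 0))      -- 1
     subreg_lowpart_p ((subreg:HI (reg:SI r) 2))      -- 0
     paradoxical_subreg_p ((subreg:DI (reg:SI r) 0))  -- true  */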

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
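
/* Editorial worked example, for a 32-bit-word target: the two halves of
   a DImode pseudo R can be taken apart as

     rtx lo = operand_subword (r, 0, 1, DImode);
     rtx hi = operand_subword (r, 1, 1, DImode);

   giving (subreg:SI (reg:DI R) 0) and (subreg:SI (reg:DI R) 4); word 0
   is the low-order word unless WORDS_BIG_ENDIAN.  */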

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Returns 1 if the two MEM_EXPRs, EXPR1 and EXPR2, can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

ad0a178f 1663/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1664 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1665 -1 if not known. */
1666
1667int
7cfdc2f0 1668get_mem_align_offset (rtx mem, unsigned int align)
ad0a178f 1669{
1670 tree expr;
1671 unsigned HOST_WIDE_INT offset;
1672
1673 /* This function can't use
da443c27 1674 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
98ab9e8f 1675 || (MAX (MEM_ALIGN (mem),
957d0361 1676 MAX (align, get_object_alignment (MEM_EXPR (mem))))
ad0a178f 1677 < align))
1678 return -1;
1679 else
da443c27 1680 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
ad0a178f 1681 for two reasons:
1682 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1683 for <variable>. get_inner_reference doesn't handle it and
1684 even if it did, the alignment in that case needs to be determined
1685 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1686 - it would do a suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1687 isn't sufficiently aligned, the object it is in might be. */
1688 gcc_assert (MEM_P (mem));
1689 expr = MEM_EXPR (mem);
da443c27 1690 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
ad0a178f 1691 return -1;
1692
da443c27 1693 offset = MEM_OFFSET (mem);
ad0a178f 1694 if (DECL_P (expr))
1695 {
1696 if (DECL_ALIGN (expr) < align)
1697 return -1;
1698 }
1699 else if (INDIRECT_REF_P (expr))
1700 {
1701 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1702 return -1;
1703 }
1704 else if (TREE_CODE (expr) == COMPONENT_REF)
1705 {
1706 while (1)
1707 {
1708 tree inner = TREE_OPERAND (expr, 0);
1709 tree field = TREE_OPERAND (expr, 1);
1710 tree byte_offset = component_ref_field_offset (expr);
1711 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1712
1713 if (!byte_offset
e913b5cd 1714 || !tree_fits_uhwi_p (byte_offset)
1715 || !tree_fits_uhwi_p (bit_offset))
ad0a178f 1716 return -1;
1717
e913b5cd 1718 offset += tree_to_uhwi (byte_offset);
1719 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
ad0a178f 1720
1721 if (inner == NULL_TREE)
1722 {
1723 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1724 < (unsigned int) align)
1725 return -1;
1726 break;
1727 }
1728 else if (DECL_P (inner))
1729 {
1730 if (DECL_ALIGN (inner) < align)
1731 return -1;
1732 break;
1733 }
1734 else if (TREE_CODE (inner) != COMPONENT_REF)
1735 return -1;
1736 expr = inner;
1737 }
1738 }
1739 else
1740 return -1;
1741
1742 return offset & ((align / BITS_PER_UNIT) - 1);
1743}
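
/* An illustrative sketch, not part of the original source: for a MEM
   whose MEM_EXPR and MEM_OFFSET show it starting 4 bytes into a decl
   aligned to 16 bytes, a query such as

	int ofs = get_mem_align_offset (mem, 128);

   would return 4 (the offset within the 128-bit-aligned block), while
   any case the walk above cannot prove yields -1.  */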
1744
310b57a1 1745/* Given REF (a MEM) and T, either the type of REF or the expression
c6259b83 1746 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f717f77 1747 if we are making a new object of this type. BITPOS is nonzero if
1748 there is an offset outstanding on T that will be applied later. */
c6259b83 1749
1750void
35cb5232 1751set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1752 HOST_WIDE_INT bitpos)
c6259b83 1753{
6f717f77 1754 HOST_WIDE_INT apply_bitpos = 0;
c6259b83 1755 tree type;
d72886b5 1756 struct mem_attrs attrs, *defattrs, *refattrs;
3f06bd1b 1757 addr_space_t as;
c6259b83 1758
1759 /* It can happen that type_for_mode was given a mode for which there
1760 is no language-level type. In which case it returns NULL, which
1761 we can see here. */
1762 if (t == NULL_TREE)
1763 return;
1764
1765 type = TYPE_P (t) ? t : TREE_TYPE (t);
4ccffa39 1766 if (type == error_mark_node)
1767 return;
c6259b83 1768
c6259b83 1769 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1770 wrong answer, as it assumes that DECL_RTL already has the right alias
1771 info. Callers should not set DECL_RTL until after the call to
1772 set_mem_attributes. */
611234b4 1773 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
c6259b83 1774
d72886b5 1775 memset (&attrs, 0, sizeof (attrs));
1776
96216d37 1777 /* Get the alias set from the expression or type (perhaps using a
2a631e19 1778 front-end routine) and use it. */
d72886b5 1779 attrs.alias = get_alias_set (t);
c6259b83 1780
fbc6244b 1781 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
8d350e69 1782 MEM_POINTER (ref) = POINTER_TYPE_P (type);
c6259b83 1783
d8dccfe9 1784 /* Default values from pre-existing memory attributes if present. */
d72886b5 1785 refattrs = MEM_ATTRS (ref);
1786 if (refattrs)
d8dccfe9 1787 {
1788 /* ??? Can this ever happen? Calling this routine on a MEM that
1789 already carries memory attributes should probably be invalid. */
d72886b5 1790 attrs.expr = refattrs->expr;
6d58bcba 1791 attrs.offset_known_p = refattrs->offset_known_p;
d72886b5 1792 attrs.offset = refattrs->offset;
6d58bcba 1793 attrs.size_known_p = refattrs->size_known_p;
d72886b5 1794 attrs.size = refattrs->size;
1795 attrs.align = refattrs->align;
d8dccfe9 1796 }
1797
1798 /* Otherwise, default values from the mode of the MEM reference. */
d72886b5 1799 else
d8dccfe9 1800 {
d72886b5 1801 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1802 gcc_assert (!defattrs->expr);
6d58bcba 1803 gcc_assert (!defattrs->offset_known_p);
d72886b5 1804
d8dccfe9 1805 /* Respect mode size. */
6d58bcba 1806 attrs.size_known_p = defattrs->size_known_p;
d72886b5 1807 attrs.size = defattrs->size;
d8dccfe9 1808 /* ??? Is this really necessary? We probably should always get
1809 the size from the type below. */
1810
1811 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1812 if T is an object, always compute the object alignment below. */
d72886b5 1813 if (TYPE_P (t))
1814 attrs.align = defattrs->align;
1815 else
1816 attrs.align = BITS_PER_UNIT;
d8dccfe9 1817 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1818 e.g. if the type carries an alignment attribute. Should we be
1819 able to simply always use TYPE_ALIGN? */
1820 }
1821
a9d9ab08 1822 /* We can set the alignment from the type if we are making an object,
1823 if this is an INDIRECT_REF, or if TYPE_ALIGN_OK is set. */
679e0056 1824 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
d72886b5 1825 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
679e0056 1826
96216d37 1827 /* If the size is known, we can set that. */
50ba3acc 1828 tree new_size = TYPE_SIZE_UNIT (type);
96216d37 1829
9eec20bf 1830 /* The address-space is that of the type. */
1831 as = TYPE_ADDR_SPACE (type);
1832
579bccf9 1833 /* If T is not a type, we may be able to deduce some more information about
1834 the expression. */
1835 if (! TYPE_P (t))
2a631e19 1836 {
ae2dd339 1837 tree base;
b04fab2a 1838
2a631e19 1839 if (TREE_THIS_VOLATILE (t))
1840 MEM_VOLATILE_P (ref) = 1;
c6259b83 1841
3c00f11c 1842 /* Now remove any conversions: they don't change what the underlying
1843 object is. Likewise for SAVE_EXPR. */
72dd6141 1844 while (CONVERT_EXPR_P (t)
3c00f11c 1845 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1846 || TREE_CODE (t) == SAVE_EXPR)
2a631e19 1847 t = TREE_OPERAND (t, 0);
1848
73eb0a09 1849 /* Note whether this expression can trap. */
1850 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1851
1852 base = get_base_address (t);
3f06bd1b 1853 if (base)
1854 {
1855 if (DECL_P (base)
1856 && TREE_READONLY (base)
1857 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1858 && !TREE_THIS_VOLATILE (base))
1859 MEM_READONLY_P (ref) = 1;
1860
1861 /* Mark static const strings readonly as well. */
1862 if (TREE_CODE (base) == STRING_CST
1863 && TREE_READONLY (base)
1864 && TREE_STATIC (base))
1865 MEM_READONLY_P (ref) = 1;
1866
9eec20bf 1867 /* Address-space information is on the base object. */
3f06bd1b 1868 if (TREE_CODE (base) == MEM_REF
1869 || TREE_CODE (base) == TARGET_MEM_REF)
1870 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1871 0))));
1872 else
1873 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1874 }
cab98a0d 1875
2b02580f 1876 /* If this expression uses its parent's alias set, mark it such
1877 that we won't change it. */
d400f5e1 1878 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
5cc193e7 1879 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1880
2a631e19 1881 /* If this is a decl, set the attributes of the MEM from it. */
1882 if (DECL_P (t))
1883 {
d72886b5 1884 attrs.expr = t;
6d58bcba 1885 attrs.offset_known_p = true;
1886 attrs.offset = 0;
6f717f77 1887 apply_bitpos = bitpos;
50ba3acc 1888 new_size = DECL_SIZE_UNIT (t);
2a631e19 1889 }
1890
9eec20bf 1891 /* ??? If we end up with a constant here do record a MEM_EXPR. */
ce45a448 1892 else if (CONSTANT_CLASS_P (t))
9eec20bf 1893 ;
b10dbbca 1894
50ba3acc 1895 /* If this is a field reference, record it. */
1896 else if (TREE_CODE (t) == COMPONENT_REF)
b10dbbca 1897 {
d72886b5 1898 attrs.expr = t;
6d58bcba 1899 attrs.offset_known_p = true;
1900 attrs.offset = 0;
6f717f77 1901 apply_bitpos = bitpos;
50ba3acc 1902 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1903 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
b10dbbca 1904 }
1905
1906 /* If this is an array reference, look for an outer field reference. */
1907 else if (TREE_CODE (t) == ARRAY_REF)
1908 {
1909 tree off_tree = size_zero_node;
6b039979 1910 /* We can't modify t, because we use it at the end of the
1911 function. */
1912 tree t2 = t;
b10dbbca 1913
1914 do
1915 {
6b039979 1916 tree index = TREE_OPERAND (t2, 1);
6374121b 1917 tree low_bound = array_ref_low_bound (t2);
1918 tree unit_size = array_ref_element_size (t2);
97f8ce30 1919
1920 /* We assume all arrays have sizes that are a multiple of a byte.
1921 First subtract the lower bound, if any, in the type of the
6374121b 1922 index, then convert to sizetype and multiply by the size of
1923 the array element. */
1924 if (! integer_zerop (low_bound))
faa43f85 1925 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1926 index, low_bound);
97f8ce30 1927
6374121b 1928 off_tree = size_binop (PLUS_EXPR,
535664e3 1929 size_binop (MULT_EXPR,
1930 fold_convert (sizetype,
1931 index),
6374121b 1932 unit_size),
1933 off_tree);
6b039979 1934 t2 = TREE_OPERAND (t2, 0);
b10dbbca 1935 }
6b039979 1936 while (TREE_CODE (t2) == ARRAY_REF);
b10dbbca 1937
9eec20bf 1938 if (DECL_P (t2)
1939 || TREE_CODE (t2) == COMPONENT_REF)
b10dbbca 1940 {
d72886b5 1941 attrs.expr = t2;
6d58bcba 1942 attrs.offset_known_p = false;
e913b5cd 1943 if (tree_fits_uhwi_p (off_tree))
6f717f77 1944 {
6d58bcba 1945 attrs.offset_known_p = true;
e913b5cd 1946 attrs.offset = tree_to_uhwi (off_tree);
6f717f77 1947 apply_bitpos = bitpos;
1948 }
b10dbbca 1949 }
9eec20bf 1950 /* Else do not record a MEM_EXPR. */
2d8fe5d0 1951 }
1952
6d72287b 1953 /* If this is an indirect reference, record it. */
182cf5a9 1954 else if (TREE_CODE (t) == MEM_REF
5d9de213 1955 || TREE_CODE (t) == TARGET_MEM_REF)
6d72287b 1956 {
d72886b5 1957 attrs.expr = t;
6d58bcba 1958 attrs.offset_known_p = true;
1959 attrs.offset = 0;
6d72287b 1960 apply_bitpos = bitpos;
1961 }
1962
9eec20bf 1963 /* Compute the alignment. */
1964 unsigned int obj_align;
1965 unsigned HOST_WIDE_INT obj_bitpos;
1966 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1967 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1968 if (obj_bitpos != 0)
1969 obj_align = (obj_bitpos & -obj_bitpos);
1970 attrs.align = MAX (attrs.align, obj_align);
2a631e19 1971 }
1972
e913b5cd 1973 if (tree_fits_uhwi_p (new_size))
50ba3acc 1974 {
1975 attrs.size_known_p = true;
e913b5cd 1976 attrs.size = tree_to_uhwi (new_size);
50ba3acc 1977 }
1978
e2e205b3 1979 /* If we modified OFFSET based on T, then subtract the outstanding
595f1461 1980 bit position offset. Similarly, increase the size of the accessed
1981 object to contain the negative offset. */
6f717f77 1982 if (apply_bitpos)
595f1461 1983 {
6d58bcba 1984 gcc_assert (attrs.offset_known_p);
1985 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1986 if (attrs.size_known_p)
1987 attrs.size += apply_bitpos / BITS_PER_UNIT;
595f1461 1988 }
6f717f77 1989
2a631e19 1990 /* Now set the attributes we computed above. */
3f06bd1b 1991 attrs.addrspace = as;
d72886b5 1992 set_mem_attrs (ref, &attrs);
c6259b83 1993}
1994
6f717f77 1995void
35cb5232 1996set_mem_attributes (rtx ref, tree t, int objectp)
6f717f77 1997{
1998 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1999}
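
/* An illustrative sketch, not part of the original source, of the
   usual calling pattern when a tree reference EXP has just been
   expanded to a fresh MEM:

	rtx mem = gen_rtx_MEM (mode, addr);
	set_mem_attributes (mem, exp, 0);

   which derives the alias set, alignment, size and MEM_EXPR of MEM
   from EXP in one step.  */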
2000
c6259b83 2001/* Set the alias set of MEM to SET. */
2002
2003void
32c2fdea 2004set_mem_alias_set (rtx mem, alias_set_type set)
c6259b83 2005{
d72886b5 2006 struct mem_attrs attrs;
2007
c6259b83 2008 /* If the new and old alias sets don't conflict, something is wrong. */
1b4345f7 2009 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
d72886b5 2010 attrs = *get_mem_attrs (mem);
2011 attrs.alias = set;
2012 set_mem_attrs (mem, &attrs);
bd1a81f7 2013}
2014
2015/* Set the address space of MEM to ADDRSPACE (target-defined). */
2016
2017void
2018set_mem_addr_space (rtx mem, addr_space_t addrspace)
2019{
d72886b5 2020 struct mem_attrs attrs;
2021
2022 attrs = *get_mem_attrs (mem);
2023 attrs.addrspace = addrspace;
2024 set_mem_attrs (mem, &attrs);
c6259b83 2025}
96216d37 2026
1c4512da 2027/* Set the alignment of MEM to ALIGN bits. */
96216d37 2028
2029void
35cb5232 2030set_mem_align (rtx mem, unsigned int align)
96216d37 2031{
d72886b5 2032 struct mem_attrs attrs;
2033
2034 attrs = *get_mem_attrs (mem);
2035 attrs.align = align;
2036 set_mem_attrs (mem, &attrs);
96216d37 2037}
278fe152 2038
b10dbbca 2039/* Set the expr for MEM to EXPR. */
278fe152 2040
2041void
35cb5232 2042set_mem_expr (rtx mem, tree expr)
278fe152 2043{
d72886b5 2044 struct mem_attrs attrs;
2045
2046 attrs = *get_mem_attrs (mem);
2047 attrs.expr = expr;
2048 set_mem_attrs (mem, &attrs);
278fe152 2049}
b10dbbca 2050
2051/* Set the offset of MEM to OFFSET. */
2052
2053void
da443c27 2054set_mem_offset (rtx mem, HOST_WIDE_INT offset)
b10dbbca 2055{
d72886b5 2056 struct mem_attrs attrs;
2057
2058 attrs = *get_mem_attrs (mem);
6d58bcba 2059 attrs.offset_known_p = true;
2060 attrs.offset = offset;
da443c27 2061 set_mem_attrs (mem, &attrs);
2062}
2063
2064/* Clear the offset of MEM. */
2065
2066void
2067clear_mem_offset (rtx mem)
2068{
2069 struct mem_attrs attrs;
2070
2071 attrs = *get_mem_attrs (mem);
6d58bcba 2072 attrs.offset_known_p = false;
d72886b5 2073 set_mem_attrs (mem, &attrs);
f0500469 2074}
2075
2076/* Set the size of MEM to SIZE. */
2077
2078void
5b2a69fa 2079set_mem_size (rtx mem, HOST_WIDE_INT size)
f0500469 2080{
d72886b5 2081 struct mem_attrs attrs;
2082
2083 attrs = *get_mem_attrs (mem);
6d58bcba 2084 attrs.size_known_p = true;
2085 attrs.size = size;
5b2a69fa 2086 set_mem_attrs (mem, &attrs);
2087}
2088
2089/* Clear the size of MEM. */
2090
2091void
2092clear_mem_size (rtx mem)
2093{
2094 struct mem_attrs attrs;
2095
2096 attrs = *get_mem_attrs (mem);
6d58bcba 2097 attrs.size_known_p = false;
d72886b5 2098 set_mem_attrs (mem, &attrs);
b10dbbca 2099}
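
/* All of the setters above follow the same copy-modify-install idiom:
   mem_attrs structures are shared, so each setter copies the current
   attributes, changes one field and installs the copy. A hypothetical
   combined setter (a sketch, not part of the API) would do the same:

	static void
	set_mem_size_and_align (rtx mem, HOST_WIDE_INT size, unsigned int align)
	{
	  struct mem_attrs attrs = *get_mem_attrs (mem);
	  attrs.size_known_p = true;
	  attrs.size = size;
	  attrs.align = align;
	  set_mem_attrs (mem, &attrs);
	}  */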
c6259b83 2100\f
96216d37 2101/* Return a memory reference like MEMREF, but with its mode changed to MODE
2102 and its address changed to ADDR. (VOIDmode means don't change the mode.
2103 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
5cc04e45 2104 returned memory location is required to be valid. INPLACE is true if any
2105 changes can be made directly to MEMREF or false if MEMREF must be treated
2106 as immutable.
2107
2108 The memory attributes are not changed. */
15bbde2b 2109
96216d37 2110static rtx
3754d046 2111change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
5cc04e45 2112 bool inplace)
15bbde2b 2113{
bd1a81f7 2114 addr_space_t as;
9ce37fa7 2115 rtx new_rtx;
15bbde2b 2116
611234b4 2117 gcc_assert (MEM_P (memref));
bd1a81f7 2118 as = MEM_ADDR_SPACE (memref);
15bbde2b 2119 if (mode == VOIDmode)
2120 mode = GET_MODE (memref);
2121 if (addr == 0)
2122 addr = XEXP (memref, 0);
3988ef8b 2123 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
bd1a81f7 2124 && (!validate || memory_address_addr_space_p (mode, addr, as)))
3988ef8b 2125 return memref;
15bbde2b 2126
73a18f44 2127 /* Don't validate the address for LRA. LRA can make the address valid
2128 by itself in the most efficient way. */
2129 if (validate && !lra_in_progress)
15bbde2b 2130 {
e4e86ec5 2131 if (reload_in_progress || reload_completed)
bd1a81f7 2132 gcc_assert (memory_address_addr_space_p (mode, addr, as));
e4e86ec5 2133 else
bd1a81f7 2134 addr = memory_address_addr_space (mode, addr, as);
15bbde2b 2135 }
d823ba47 2136
e8976cd7 2137 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2138 return memref;
2139
5cc04e45 2140 if (inplace)
2141 {
2142 XEXP (memref, 0) = addr;
2143 return memref;
2144 }
2145
9ce37fa7 2146 new_rtx = gen_rtx_MEM (mode, addr);
2147 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2148 return new_rtx;
15bbde2b 2149}
537ffcfc 2150
96216d37 2151/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2152 way we are changing MEMREF, so we only preserve the alias set. */
e513d163 2153
2154rtx
3754d046 2155change_address (rtx memref, machine_mode mode, rtx addr)
e513d163 2156{
5cc04e45 2157 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
3754d046 2158 machine_mode mmode = GET_MODE (new_rtx);
d72886b5 2159 struct mem_attrs attrs, *defattrs;
0ab96142 2160
d72886b5 2161 attrs = *get_mem_attrs (memref);
2162 defattrs = mode_mem_attrs[(int) mmode];
6d58bcba 2163 attrs.expr = NULL_TREE;
2164 attrs.offset_known_p = false;
2165 attrs.size_known_p = defattrs->size_known_p;
d72886b5 2166 attrs.size = defattrs->size;
2167 attrs.align = defattrs->align;
6cc60c4d 2168
d28edf0d 2169 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2170 if (new_rtx == memref)
0ab96142 2171 {
d72886b5 2172 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
9ce37fa7 2173 return new_rtx;
0ab96142 2174
9ce37fa7 2175 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2176 MEM_COPY_ATTRIBUTES (new_rtx, memref);
0ab96142 2177 }
d28edf0d 2178
d72886b5 2179 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2180 return new_rtx;
e513d163 2181}
537ffcfc 2182
96216d37 2183/* Return a memory reference like MEMREF, but with its mode changed
2184 to MODE and its address offset by OFFSET bytes. If VALIDATE is
bf42c62d 2185 nonzero, the memory address is forced to be valid.
2d0fd66d 2186 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2187 and the caller is responsible for adjusting MEMREF base register.
2188 If ADJUST_OBJECT is zero, the underlying object associated with the
2189 memory reference is left unchanged and the caller is responsible for
2190 dealing with it. Otherwise, if the new memory reference is outside
226c6baf 2191 the underlying object, even partially, then the object is dropped.
2192 SIZE, if nonzero, is the size of an access in cases where MODE
2193 has no inherent size. */
e4e86ec5 2194
2195rtx
3754d046 2196adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
226c6baf 2197 int validate, int adjust_address, int adjust_object,
2198 HOST_WIDE_INT size)
e4e86ec5 2199{
fb257ae6 2200 rtx addr = XEXP (memref, 0);
9ce37fa7 2201 rtx new_rtx;
3754d046 2202 machine_mode address_mode;
cfb75cdf 2203 int pbits;
21b8bc7e 2204 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
d72886b5 2205 unsigned HOST_WIDE_INT max_align;
21b8bc7e 2206#ifdef POINTERS_EXTEND_UNSIGNED
3754d046 2207 machine_mode pointer_mode
21b8bc7e 2208 = targetm.addr_space.pointer_mode (attrs.addrspace);
2209#endif
fb257ae6 2210
4733f549 2211 /* VOIDmode means no mode change for change_address_1. */
2212 if (mode == VOIDmode)
2213 mode = GET_MODE (memref);
2214
226c6baf 2215 /* Take the size of non-BLKmode accesses from the mode. */
2216 defattrs = mode_mem_attrs[(int) mode];
2217 if (defattrs->size_known_p)
2218 size = defattrs->size;
2219
d28edf0d 2220 /* If there are no changes, just return the original memory reference. */
2221 if (mode == GET_MODE (memref) && !offset
226c6baf 2222 && (size == 0 || (attrs.size_known_p && attrs.size == size))
d72886b5 2223 && (!validate || memory_address_addr_space_p (mode, addr,
2224 attrs.addrspace)))
d28edf0d 2225 return memref;
2226
e36c3d58 2227 /* ??? Prefer to create garbage instead of creating shared rtl.
6ef828f9 2228 This may happen even if offset is nonzero -- consider
e36c3d58 2229 (plus (plus reg reg) const_int) -- so do this always. */
2230 addr = copy_rtx (addr);
2231
cfb75cdf 2232 /* Convert a possibly large offset to a signed value within the
2233 range of the target address space. */
87cf5753 2234 address_mode = get_address_mode (memref);
98155838 2235 pbits = GET_MODE_BITSIZE (address_mode);
cfb75cdf 2236 if (HOST_BITS_PER_WIDE_INT > pbits)
2237 {
2238 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2239 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2240 >> shift);
2241 }
2242
2d0fd66d 2243 if (adjust_address)
cd358719 2244 {
2245 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2246 object, we can merge it into the LO_SUM. */
2247 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2248 && offset >= 0
2249 && (unsigned HOST_WIDE_INT) offset
2250 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
98155838 2251 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
29c05e22 2252 plus_constant (address_mode,
2253 XEXP (addr, 1), offset));
21b8bc7e 2254#ifdef POINTERS_EXTEND_UNSIGNED
2255 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2256 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2257 the fact that pointers are not allowed to overflow. */
2258 else if (POINTERS_EXTEND_UNSIGNED > 0
2259 && GET_CODE (addr) == ZERO_EXTEND
2260 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2261 && trunc_int_for_mode (offset, pointer_mode) == offset)
2262 addr = gen_rtx_ZERO_EXTEND (address_mode,
2263 plus_constant (pointer_mode,
2264 XEXP (addr, 0), offset));
2265#endif
cd358719 2266 else
29c05e22 2267 addr = plus_constant (address_mode, addr, offset);
cd358719 2268 }
fb257ae6 2269
5cc04e45 2270 new_rtx = change_address_1 (memref, mode, addr, validate, false);
96216d37 2271
e077413c 2272 /* If the address is a REG, change_address_1 rightfully returns memref,
2273 but this would destroy memref's MEM_ATTRS. */
2274 if (new_rtx == memref && offset != 0)
2275 new_rtx = copy_rtx (new_rtx);
2276
2d0fd66d 2277 /* Conservatively drop the object if we don't know where we start from. */
2278 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2279 {
2280 attrs.expr = NULL_TREE;
2281 attrs.alias = 0;
2282 }
2283
96216d37 2284 /* Compute the new values of the memory attributes due to this adjustment.
2285 We add the offsets and update the alignment. */
6d58bcba 2286 if (attrs.offset_known_p)
2d0fd66d 2287 {
2288 attrs.offset += offset;
2289
2290 /* Drop the object if the new left end is not within its bounds. */
2291 if (adjust_object && attrs.offset < 0)
2292 {
2293 attrs.expr = NULL_TREE;
2294 attrs.alias = 0;
2295 }
2296 }
96216d37 2297
b8098e5b 2298 /* Compute the new alignment by taking the MIN of the alignment and the
2299 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2300 is zero. */
2301 if (offset != 0)
d72886b5 2302 {
2303 max_align = (offset & -offset) * BITS_PER_UNIT;
2304 attrs.align = MIN (attrs.align, max_align);
2305 }
96216d37 2306
226c6baf 2307 if (size)
6d58bcba 2308 {
2d0fd66d 2309 /* Drop the object if the new right end is not within its bounds. */
226c6baf 2310 if (adjust_object && (offset + size) > attrs.size)
2d0fd66d 2311 {
2312 attrs.expr = NULL_TREE;
2313 attrs.alias = 0;
2314 }
6d58bcba 2315 attrs.size_known_p = true;
226c6baf 2316 attrs.size = size;
6d58bcba 2317 }
2318 else if (attrs.size_known_p)
2d0fd66d 2319 {
226c6baf 2320 gcc_assert (!adjust_object);
2d0fd66d 2321 attrs.size -= offset;
226c6baf 2322 /* ??? The store_by_pieces machinery generates negative sizes,
2323 so don't assert for that here. */
2d0fd66d 2324 }
5cc193e7 2325
d72886b5 2326 set_mem_attrs (new_rtx, &attrs);
96216d37 2327
9ce37fa7 2328 return new_rtx;
e4e86ec5 2329}
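
/* An illustrative sketch, not part of the original source: callers
   normally reach adjust_address_1 through the adjust_address and
   adjust_address_nv macros. E.g. to access the second SImode word of
   a DImode MEM on a 32-bit word target:

	rtx word1 = adjust_address (mem, SImode, 4);

   with the offset, size and alignment in the MEM_ATTRS updated as
   described above.  */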
2330
bf42c62d 2331/* Return a memory reference like MEMREF, but with its mode changed
2332 to MODE and its address changed to ADDR, which is assumed to be
f0b5f617 2333 MEMREF offset by OFFSET bytes. If VALIDATE is
bf42c62d 2334 nonzero, the memory address is forced to be valid. */
2335
2336rtx
3754d046 2337adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
35cb5232 2338 HOST_WIDE_INT offset, int validate)
bf42c62d 2339{
5cc04e45 2340 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
226c6baf 2341 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
bf42c62d 2342}
2343
2a631e19 2344/* Return a memory reference like MEMREF, but whose address is changed by
2345 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2346 known to be in OFFSET (possibly 1). */
fcdc122e 2347
2348rtx
35cb5232 2349offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
fcdc122e 2350{
9ce37fa7 2351 rtx new_rtx, addr = XEXP (memref, 0);
3754d046 2352 machine_mode address_mode;
6d58bcba 2353 struct mem_attrs attrs, *defattrs;
fac6aae6 2354
d72886b5 2355 attrs = *get_mem_attrs (memref);
87cf5753 2356 address_mode = get_address_mode (memref);
98155838 2357 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2358
d4c5e26d 2359 /* At this point we don't know _why_ the address is invalid. It
917bbcab 2360 could have secondary memory references, multiplies or anything.
fac6aae6 2361
2362 However, if we did go and rearrange things, we can wind up not
2363 being able to recognize the magic around pic_offset_table_rtx.
2364 This stuff is fragile, and is yet another example of why it is
2365 bad to expose PIC machinery too early. */
d72886b5 2366 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2367 attrs.addrspace)
fac6aae6 2368 && GET_CODE (addr) == PLUS
2369 && XEXP (addr, 0) == pic_offset_table_rtx)
2370 {
2371 addr = force_reg (GET_MODE (addr), addr);
98155838 2372 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2373 }
2374
9ce37fa7 2375 update_temp_slot_address (XEXP (memref, 0), new_rtx);
5cc04e45 2376 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
fcdc122e 2377
d28edf0d 2378 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2379 if (new_rtx == memref)
2380 return new_rtx;
d28edf0d 2381
fcdc122e 2382 /* Update the alignment to reflect the offset. Reset the offset, which
2383 we don't know. */
6d58bcba 2384 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2385 attrs.offset_known_p = false;
2386 attrs.size_known_p = defattrs->size_known_p;
2387 attrs.size = defattrs->size;
d72886b5 2388 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2389 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2390 return new_rtx;
fcdc122e 2391}
d4c5e26d 2392
537ffcfc 2393/* Return a memory reference like MEMREF, but with its address changed to
2394 ADDR. The caller is asserting that the actual piece of memory pointed
2395 to is the same, just the form of the address is being changed, such as
5cc04e45 2396 by putting something into a register. INPLACE is true if any changes
2397 can be made directly to MEMREF or false if MEMREF must be treated as
2398 immutable. */
537ffcfc 2399
2400rtx
5cc04e45 2401replace_equiv_address (rtx memref, rtx addr, bool inplace)
537ffcfc 2402{
96216d37 2403 /* change_address_1 copies the memory attribute structure without change
2404 and that's exactly what we want here. */
ecfe4ca9 2405 update_temp_slot_address (XEXP (memref, 0), addr);
5cc04e45 2406 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
537ffcfc 2407}
96216d37 2408
e4e86ec5 2409/* Likewise, but the reference is not required to be valid. */
2410
2411rtx
5cc04e45 2412replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
e4e86ec5 2413{
5cc04e45 2414 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
e4e86ec5 2415}
8259ab07 2416
2417/* Return a memory reference like MEMREF, but with its mode widened to
2418 MODE and offset by OFFSET. This would be used by targets that e.g.
2419 cannot issue QImode memory operations and have to use SImode memory
2420 operations plus masking logic. */
2421
2422rtx
3754d046 2423widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
8259ab07 2424{
226c6baf 2425 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
d72886b5 2426 struct mem_attrs attrs;
8259ab07 2427 unsigned int size = GET_MODE_SIZE (mode);
2428
d28edf0d 2429 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2430 if (new_rtx == memref)
2431 return new_rtx;
d28edf0d 2432
d72886b5 2433 attrs = *get_mem_attrs (new_rtx);
2434
8259ab07 2435 /* If we don't know what offset we were at within the expression, then
2436 we can't know if we've overstepped the bounds. */
6d58bcba 2437 if (! attrs.offset_known_p)
d72886b5 2438 attrs.expr = NULL_TREE;
8259ab07 2439
d72886b5 2440 while (attrs.expr)
8259ab07 2441 {
d72886b5 2442 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
8259ab07 2443 {
d72886b5 2444 tree field = TREE_OPERAND (attrs.expr, 1);
2445 tree offset = component_ref_field_offset (attrs.expr);
8259ab07 2446
2447 if (! DECL_SIZE_UNIT (field))
2448 {
d72886b5 2449 attrs.expr = NULL_TREE;
8259ab07 2450 break;
2451 }
2452
2453 /* Is the field at least as large as the access? If so, ok,
2454 otherwise strip back to the containing structure. */
8359cfb4 2455 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2456 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
6d58bcba 2457 && attrs.offset >= 0)
8259ab07 2458 break;
2459
e913b5cd 2460 if (! tree_fits_uhwi_p (offset))
8259ab07 2461 {
d72886b5 2462 attrs.expr = NULL_TREE;
8259ab07 2463 break;
2464 }
2465
d72886b5 2466 attrs.expr = TREE_OPERAND (attrs.expr, 0);
e913b5cd 2467 attrs.offset += tree_to_uhwi (offset);
2468 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
6d58bcba 2469 / BITS_PER_UNIT);
8259ab07 2470 }
2471 /* Similarly for the decl. */
d72886b5 2472 else if (DECL_P (attrs.expr)
2473 && DECL_SIZE_UNIT (attrs.expr)
2474 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2475 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
6d58bcba 2476 && (! attrs.offset_known_p || attrs.offset >= 0))
8259ab07 2477 break;
2478 else
2479 {
2480 /* The widened memory access overflows the expression, which means
2481 that it could alias another expression. Zap it. */
d72886b5 2482 attrs.expr = NULL_TREE;
8259ab07 2483 break;
2484 }
2485 }
2486
d72886b5 2487 if (! attrs.expr)
6d58bcba 2488 attrs.offset_known_p = false;
8259ab07 2489
2490 /* The widened memory may alias other stuff, so zap the alias set. */
2491 /* ??? Maybe use get_alias_set on any remaining expression. */
d72886b5 2492 attrs.alias = 0;
6d58bcba 2493 attrs.size_known_p = true;
2494 attrs.size = size;
d72886b5 2495 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2496 return new_rtx;
8259ab07 2497}
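
/* An illustrative sketch, not part of the original source: a target
   without QImode loads could widen a byte reference known to lie N
   bytes into an aligned word and then mask out the byte:

	rtx wide = widen_memory_access (memref, SImode, -n);

   where the negative offset re-anchors the access on the containing
   word; the alias set is zapped because the wider access may touch
   neighboring objects.  */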
15bbde2b 2498\f
ac681e84 2499/* A fake decl that is used as the MEM_EXPR of spill slots. */
2500static GTY(()) tree spill_slot_decl;
2501
58029e61 2502tree
2503get_spill_slot_decl (bool force_build_p)
ac681e84 2504{
2505 tree d = spill_slot_decl;
2506 rtx rd;
d72886b5 2507 struct mem_attrs attrs;
ac681e84 2508
58029e61 2509 if (d || !force_build_p)
ac681e84 2510 return d;
2511
e60a6f7b 2512 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2513 VAR_DECL, get_identifier ("%sfp"), void_type_node);
ac681e84 2514 DECL_ARTIFICIAL (d) = 1;
2515 DECL_IGNORED_P (d) = 1;
2516 TREE_USED (d) = 1;
ac681e84 2517 spill_slot_decl = d;
2518
2519 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2520 MEM_NOTRAP_P (rd) = 1;
d72886b5 2521 attrs = *mode_mem_attrs[(int) BLKmode];
2522 attrs.alias = new_alias_set ();
2523 attrs.expr = d;
2524 set_mem_attrs (rd, &attrs);
ac681e84 2525 SET_DECL_RTL (d, rd);
2526
2527 return d;
2528}
2529
2530/* Given MEM, a result from assign_stack_local, fill in the memory
2531 attributes as appropriate for a register allocator spill slot.
2532 These slots are not aliasable by other memory. We arrange for
2533 them all to use a single MEM_EXPR, so that the aliasing code can
2534 work properly in the case of shared spill slots. */
2535
2536void
2537set_mem_attrs_for_spill (rtx mem)
2538{
d72886b5 2539 struct mem_attrs attrs;
2540 rtx addr;
ac681e84 2541
d72886b5 2542 attrs = *get_mem_attrs (mem);
2543 attrs.expr = get_spill_slot_decl (true);
2544 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2545 attrs.addrspace = ADDR_SPACE_GENERIC;
ac681e84 2546
2547 /* We expect the incoming memory to be of the form:
2548 (mem:MODE (plus (reg sfp) (const_int offset)))
2549 with perhaps the plus missing for offset = 0. */
2550 addr = XEXP (mem, 0);
6d58bcba 2551 attrs.offset_known_p = true;
2552 attrs.offset = 0;
ac681e84 2553 if (GET_CODE (addr) == PLUS
971ba038 2554 && CONST_INT_P (XEXP (addr, 1)))
6d58bcba 2555 attrs.offset = INTVAL (XEXP (addr, 1));
ac681e84 2556
d72886b5 2557 set_mem_attrs (mem, &attrs);
ac681e84 2558 MEM_NOTRAP_P (mem) = 1;
2559}
2560\f
15bbde2b 2561/* Return a newly created CODE_LABEL rtx with a unique label number. */
2562
be95c7c7 2563rtx_code_label *
35cb5232 2564gen_label_rtx (void)
15bbde2b 2565{
be95c7c7 2566 return as_a <rtx_code_label *> (
2567 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2568 NULL, label_num++, NULL));
15bbde2b 2569}
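
/* An illustrative sketch, not part of the original source, of the
   usual pairing during expansion:

	rtx_code_label *label = gen_label_rtx ();
	...
	emit_label (label);

   gen_label_rtx only creates the CODE_LABEL; it does not appear in
   the insn chain until emit_label places it there.  */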
2570\f
2571/* For procedure integration. */
2572
15bbde2b 2573/* Install new pointers to the first and last insns in the chain.
d4c332ff 2574 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 2575 Used for an inline-procedure after copying the insn chain. */
2576
2577void
57c26b3a 2578set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
15bbde2b 2579{
57c26b3a 2580 rtx_insn *insn;
d4c332ff 2581
06f9d6ef 2582 set_first_insn (first);
2583 set_last_insn (last);
d4c332ff 2584 cur_insn_uid = 0;
2585
9845d120 2586 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2587 {
2588 int debug_count = 0;
2589
2590 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2591 cur_debug_insn_uid = 0;
2592
2593 for (insn = first; insn; insn = NEXT_INSN (insn))
2594 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2595 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2596 else
2597 {
2598 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2599 if (DEBUG_INSN_P (insn))
2600 debug_count++;
2601 }
2602
2603 if (debug_count)
2604 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2605 else
2606 cur_debug_insn_uid++;
2607 }
2608 else
2609 for (insn = first; insn; insn = NEXT_INSN (insn))
2610 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
d4c332ff 2611
2612 cur_insn_uid++;
15bbde2b 2613}
15bbde2b 2614\f
d823ba47 2615/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2616 structure. This routine should only be called once. */
15bbde2b 2617
a40c0eeb 2618static void
58945f46 2619unshare_all_rtl_1 (rtx_insn *insn)
15bbde2b 2620{
2d96a59a 2621 /* Unshare just about everything else. */
1cd4cfea 2622 unshare_all_rtl_in_chain (insn);
d823ba47 2623
15bbde2b 2624 /* Make sure the addresses of stack slots found outside the insn chain
2625 (such as, in DECL_RTL of a variable) are not shared
2626 with the insn chain.
2627
2628 This special care is necessary when the stack slot MEM does not
2629 actually appear in the insn chain. If it does appear, its address
2630 is unshared from all else at that point. */
ee165bb1 2631 stack_slot_list = safe_as_a <rtx_expr_list *> (
2632 copy_rtx_if_shared (stack_slot_list));
15bbde2b 2633}
2634
d823ba47 2635/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2636 structure, again. This is a fairly expensive thing to do so it
2637 should be done sparingly. */
2638
2639void
58945f46 2640unshare_all_rtl_again (rtx_insn *insn)
2d96a59a 2641{
58945f46 2642 rtx_insn *p;
5244079b 2643 tree decl;
2644
2d96a59a 2645 for (p = insn; p; p = NEXT_INSN (p))
9204e736 2646 if (INSN_P (p))
2d96a59a 2647 {
2648 reset_used_flags (PATTERN (p));
2649 reset_used_flags (REG_NOTES (p));
6d2a4bac 2650 if (CALL_P (p))
2651 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2d96a59a 2652 }
5244079b 2653
01dc9f0c 2654 /* Make sure that virtual stack slots are not shared. */
265be050 2655 set_used_decls (DECL_INITIAL (cfun->decl));
01dc9f0c 2656
5244079b 2657 /* Make sure that virtual parameters are not shared. */
1767a056 2658 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
265be050 2659 set_used_flags (DECL_RTL (decl));
5244079b 2660
2661 reset_used_flags (stack_slot_list);
2662
df329266 2663 unshare_all_rtl_1 (insn);
a40c0eeb 2664}
2665
2a1990e9 2666unsigned int
a40c0eeb 2667unshare_all_rtl (void)
2668{
df329266 2669 unshare_all_rtl_1 (get_insns ());
2a1990e9 2670 return 0;
2d96a59a 2671}
2672
77fce4cd 2673
1cd4cfea 2674/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2675 Recursively does the same for subexpressions. */
2676
2677static void
2678verify_rtx_sharing (rtx orig, rtx insn)
2679{
2680 rtx x = orig;
2681 int i;
2682 enum rtx_code code;
2683 const char *format_ptr;
2684
2685 if (x == 0)
2686 return;
2687
2688 code = GET_CODE (x);
2689
2690 /* These types may be freely shared. */
2691
2692 switch (code)
2693 {
2694 case REG:
688ff29b 2695 case DEBUG_EXPR:
2696 case VALUE:
0349edce 2697 CASE_CONST_ANY:
1cd4cfea 2698 case SYMBOL_REF:
2699 case LABEL_REF:
2700 case CODE_LABEL:
2701 case PC:
2702 case CC0:
1a860023 2703 case RETURN:
9cb2517e 2704 case SIMPLE_RETURN:
1cd4cfea 2705 case SCRATCH:
c09425a0 2706 /* SCRATCHes must be shared because they represent distinct values. */
b291008a 2707 return;
c09425a0 2708 case CLOBBER:
b291008a 2709 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2710 clobbers or clobbers of hard registers that originated as pseudos.
2711 This is needed to allow safe register renaming. */
2712 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2713 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
c09425a0 2714 return;
2715 break;
1cd4cfea 2716
2717 case CONST:
3072d30e 2718 if (shared_const_p (orig))
1cd4cfea 2719 return;
2720 break;
2721
2722 case MEM:
2723 /* A MEM is allowed to be shared if its address is constant. */
2724 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2725 || reload_completed || reload_in_progress)
2726 return;
2727
2728 break;
2729
2730 default:
2731 break;
2732 }
2733
2734 /* This rtx may not be shared. If it has already been seen,
2735 report invalid rtl sharing. */
382ecba7 2736 if (flag_checking && RTX_FLAG (x, used))
1cd4cfea 2737 {
0a81f5a0 2738 error ("invalid rtl sharing found in the insn");
1cd4cfea 2739 debug_rtx (insn);
0a81f5a0 2740 error ("shared rtx");
1cd4cfea 2741 debug_rtx (x);
0a81f5a0 2742 internal_error ("internal consistency failure");
1cd4cfea 2743 }
9cee7c3f 2744 gcc_assert (!RTX_FLAG (x, used));
48e1416a 2745
1cd4cfea 2746 RTX_FLAG (x, used) = 1;
2747
8b332087 2748 /* Now scan the subexpressions recursively. */
1cd4cfea 2749
2750 format_ptr = GET_RTX_FORMAT (code);
2751
2752 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2753 {
2754 switch (*format_ptr++)
2755 {
2756 case 'e':
2757 verify_rtx_sharing (XEXP (x, i), insn);
2758 break;
2759
2760 case 'E':
2761 if (XVEC (x, i) != NULL)
2762 {
2763 int j;
2764 int len = XVECLEN (x, i);
2765
2766 for (j = 0; j < len; j++)
2767 {
9cee7c3f 2768 /* We allow sharing of ASM_OPERANDS inside a single
2769 instruction. */
1cd4cfea 2770 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
9cee7c3f 2771 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2772 == ASM_OPERANDS))
1cd4cfea 2773 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2774 else
2775 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2776 }
2777 }
2778 break;
2779 }
2780 }
2781 return;
2782}
2783
1e9af25c 2784/* Reset used-flags for INSN. */
2785
2786static void
2787reset_insn_used_flags (rtx insn)
2788{
2789 gcc_assert (INSN_P (insn));
2790 reset_used_flags (PATTERN (insn));
2791 reset_used_flags (REG_NOTES (insn));
2792 if (CALL_P (insn))
2793 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2794}
2795
7cdd84a2 2796/* Go through all the RTL insn bodies and clear all the USED bits. */
1cd4cfea 2797
7cdd84a2 2798static void
2799reset_all_used_flags (void)
1cd4cfea 2800{
4cd001d5 2801 rtx_insn *p;
1cd4cfea 2802
2803 for (p = get_insns (); p; p = NEXT_INSN (p))
2804 if (INSN_P (p))
2805 {
1e9af25c 2806 rtx pat = PATTERN (p);
2807 if (GET_CODE (pat) != SEQUENCE)
2808 reset_insn_used_flags (p);
2809 else
764f640f 2810 {
1e9af25c 2811 gcc_assert (REG_NOTES (p) == NULL);
2812 for (int i = 0; i < XVECLEN (pat, 0); i++)
11c8949c 2813 {
2814 rtx insn = XVECEXP (pat, 0, i);
2815 if (INSN_P (insn))
2816 reset_insn_used_flags (insn);
2817 }
764f640f 2818 }
1cd4cfea 2819 }
7cdd84a2 2820}
2821
1e9af25c 2822/* Verify sharing in INSN. */
2823
2824static void
2825verify_insn_sharing (rtx insn)
2826{
2827 gcc_assert (INSN_P (insn));
2828 reset_used_flags (PATTERN (insn));
2829 reset_used_flags (REG_NOTES (insn));
2830 if (CALL_P (insn))
2831 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2832}
2833
7cdd84a2 2834/* Go through all the RTL insn bodies and check that there is no unexpected
2835 sharing in between the subexpressions. */
2836
2837DEBUG_FUNCTION void
2838verify_rtl_sharing (void)
2839{
4cd001d5 2840 rtx_insn *p;
7cdd84a2 2841
2842 timevar_push (TV_VERIFY_RTL_SHARING);
2843
2844 reset_all_used_flags ();
1cd4cfea 2845
2846 for (p = get_insns (); p; p = NEXT_INSN (p))
2847 if (INSN_P (p))
2848 {
1e9af25c 2849 rtx pat = PATTERN (p);
2850 if (GET_CODE (pat) != SEQUENCE)
2851 verify_insn_sharing (p);
2852 else
2853 for (int i = 0; i < XVECLEN (pat, 0); i++)
11c8949c 2854 {
2855 rtx insn = XVECEXP (pat, 0, i);
2856 if (INSN_P (insn))
2857 verify_insn_sharing (insn);
2858 }
1cd4cfea 2859 }
4b366dd3 2860
7cdd84a2 2861 reset_all_used_flags ();
2862
4b366dd3 2863 timevar_pop (TV_VERIFY_RTL_SHARING);
1cd4cfea 2864}
2865
2d96a59a 2866/* Go through all the RTL insn bodies and copy any invalid shared structure.
2867 Assumes the mark bits are cleared at entry. */
2868
1cd4cfea 2869void
4cd001d5 2870unshare_all_rtl_in_chain (rtx_insn *insn)
2d96a59a 2871{
2872 for (; insn; insn = NEXT_INSN (insn))
9204e736 2873 if (INSN_P (insn))
2d96a59a 2874 {
2875 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2876 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
6d2a4bac 2877 if (CALL_P (insn))
2878 CALL_INSN_FUNCTION_USAGE (insn)
2879 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2d96a59a 2880 }
2881}
2882
01dc9f0c 2883/* Go through all virtual stack slots of a function and mark them as
265be050 2884 shared. We never replace the DECL_RTLs themselves with a copy,
2885 but expressions mentioned into a DECL_RTL cannot be shared with
2886 expressions in the instruction stream.
2887
2888 Note that reload may convert pseudo registers into memories in-place.
2889 Pseudo registers are always shared, but MEMs never are. Thus if we
2890 reset the used flags on MEMs in the instruction stream, we must set
2891 them again on MEMs that appear in DECL_RTLs. */
2892
01dc9f0c 2893static void
265be050 2894set_used_decls (tree blk)
01dc9f0c 2895{
2896 tree t;
2897
2898 /* Mark decls. */
1767a056 2899 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
0e8e37b2 2900 if (DECL_RTL_SET_P (t))
265be050 2901 set_used_flags (DECL_RTL (t));
01dc9f0c 2902
2903 /* Now process sub-blocks. */
93110716 2904 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
265be050 2905 set_used_decls (t);
01dc9f0c 2906}
2907
15bbde2b 2908/* Mark ORIG as in use, and return a copy of it if it was already in use.
7ba6ce7a 2909 Recursively does the same for subexpressions. Uses
2910 copy_rtx_if_shared_1 to reduce stack space. */
15bbde2b 2911
2912rtx
35cb5232 2913copy_rtx_if_shared (rtx orig)
15bbde2b 2914{
0e0727c4 2915 copy_rtx_if_shared_1 (&orig);
2916 return orig;
2917}
2918
7ba6ce7a 2919/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2920 use. Recursively does the same for subexpressions. */
2921
0e0727c4 2922static void
2923copy_rtx_if_shared_1 (rtx *orig1)
2924{
2925 rtx x;
19cb6b50 2926 int i;
2927 enum rtx_code code;
0e0727c4 2928 rtx *last_ptr;
19cb6b50 2929 const char *format_ptr;
15bbde2b 2930 int copied = 0;
0e0727c4 2931 int length;
2932
2933 /* Repeat is used to turn tail-recursion into iteration. */
2934repeat:
2935 x = *orig1;
15bbde2b 2936
2937 if (x == 0)
0e0727c4 2938 return;
15bbde2b 2939
2940 code = GET_CODE (x);
2941
2942 /* These types may be freely shared. */
2943
2944 switch (code)
2945 {
2946 case REG:
688ff29b 2947 case DEBUG_EXPR:
2948 case VALUE:
0349edce 2949 CASE_CONST_ANY:
15bbde2b 2950 case SYMBOL_REF:
1cd4cfea 2951 case LABEL_REF:
15bbde2b 2952 case CODE_LABEL:
2953 case PC:
2954 case CC0:
e0691b9a 2955 case RETURN:
9cb2517e 2956 case SIMPLE_RETURN:
15bbde2b 2957 case SCRATCH:
a92771b8 2958 /* SCRATCHes must be shared because they represent distinct values. */
0e0727c4 2959 return;
c09425a0 2960 case CLOBBER:
b291008a 2961 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2962 clobbers or clobbers of hard registers that originated as pseudos.
2963 This is needed to allow safe register renaming. */
2964 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2965 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
c09425a0 2966 return;
2967 break;
15bbde2b 2968
f63d12e3 2969 case CONST:
3072d30e 2970 if (shared_const_p (x))
0e0727c4 2971 return;
f63d12e3 2972 break;
2973
9845d120 2974 case DEBUG_INSN:
15bbde2b 2975 case INSN:
2976 case JUMP_INSN:
2977 case CALL_INSN:
2978 case NOTE:
15bbde2b 2979 case BARRIER:
2980 /* The chain of insns is not being copied. */
0e0727c4 2981 return;
15bbde2b 2982
0dbd1c74 2983 default:
2984 break;
15bbde2b 2985 }
2986
2987 /* This rtx may not be shared. If it has already been seen,
2988 replace it with a copy of itself. */
2989
7c25cb91 2990 if (RTX_FLAG (x, used))
15bbde2b 2991 {
f2d0e9f1 2992 x = shallow_copy_rtx (x);
15bbde2b 2993 copied = 1;
2994 }
7c25cb91 2995 RTX_FLAG (x, used) = 1;
15bbde2b 2996
2997 /* Now scan the subexpressions recursively.
2998 We can store any replaced subexpressions directly into X
2999 since we know X is not shared! Any vectors in X
3000 must be copied if X was copied. */
3001
3002 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 3003 length = GET_RTX_LENGTH (code);
3004 last_ptr = NULL;
48e1416a 3005
0e0727c4 3006 for (i = 0; i < length; i++)
15bbde2b 3007 {
3008 switch (*format_ptr++)
3009 {
3010 case 'e':
0e0727c4 3011 if (last_ptr)
3012 copy_rtx_if_shared_1 (last_ptr);
3013 last_ptr = &XEXP (x, i);
15bbde2b 3014 break;
3015
3016 case 'E':
3017 if (XVEC (x, i) != NULL)
3018 {
19cb6b50 3019 int j;
ffe0869b 3020 int len = XVECLEN (x, i);
48e1416a 3021
8b332087 3022 /* Copy the vector iff we copied the rtx and the length
3023 is nonzero. */
ffe0869b 3024 if (copied && len > 0)
a4070a91 3025 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
48e1416a 3026
d632b59a 3027 /* Call recursively on everything inside the vector. */
ffe0869b 3028 for (j = 0; j < len; j++)
0e0727c4 3029 {
3030 if (last_ptr)
3031 copy_rtx_if_shared_1 (last_ptr);
3032 last_ptr = &XVECEXP (x, i, j);
3033 }
15bbde2b 3034 }
3035 break;
3036 }
3037 }
0e0727c4 3038 *orig1 = x;
3039 if (last_ptr)
3040 {
3041 orig1 = last_ptr;
3042 goto repeat;
3043 }
3044 return;
15bbde2b 3045}
3046
709947e6 3047/* Set the USED bit in X and its non-shareable subparts to FLAG. */
15bbde2b 3048
709947e6 3049static void
3050mark_used_flags (rtx x, int flag)
15bbde2b 3051{
19cb6b50 3052 int i, j;
3053 enum rtx_code code;
3054 const char *format_ptr;
0e0727c4 3055 int length;
15bbde2b 3056
0e0727c4 3057 /* Repeat is used to turn tail-recursion into iteration. */
3058repeat:
15bbde2b 3059 if (x == 0)
3060 return;
3061
3062 code = GET_CODE (x);
3063
c3418f42 3064 /* These types may be freely shared so we needn't do any resetting
15bbde2b 3065 for them. */
3066
3067 switch (code)
3068 {
3069 case REG:
688ff29b 3070 case DEBUG_EXPR:
3071 case VALUE:
0349edce 3072 CASE_CONST_ANY:
15bbde2b 3073 case SYMBOL_REF:
3074 case CODE_LABEL:
3075 case PC:
3076 case CC0:
e0691b9a 3077 case RETURN:
9cb2517e 3078 case SIMPLE_RETURN:
15bbde2b 3079 return;
3080
9845d120 3081 case DEBUG_INSN:
15bbde2b 3082 case INSN:
3083 case JUMP_INSN:
3084 case CALL_INSN:
3085 case NOTE:
3086 case LABEL_REF:
3087 case BARRIER:
3088 /* The chain of insns is not being copied. */
3089 return;
d823ba47 3090
0dbd1c74 3091 default:
3092 break;
15bbde2b 3093 }
3094
709947e6 3095 RTX_FLAG (x, used) = flag;
15bbde2b 3096
3097 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 3098 length = GET_RTX_LENGTH (code);
48e1416a 3099
0e0727c4 3100 for (i = 0; i < length; i++)
15bbde2b 3101 {
3102 switch (*format_ptr++)
3103 {
3104 case 'e':
0e0727c4 3105 if (i == length-1)
3106 {
3107 x = XEXP (x, i);
3108 goto repeat;
3109 }
709947e6 3110 mark_used_flags (XEXP (x, i), flag);
15bbde2b 3111 break;
3112
3113 case 'E':
3114 for (j = 0; j < XVECLEN (x, i); j++)
709947e6 3115 mark_used_flags (XVECEXP (x, i, j), flag);
15bbde2b 3116 break;
3117 }
3118 }
3119}
1cd4cfea 3120
709947e6 3121/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1cd4cfea 3122 to look for shared sub-parts. */
3123
3124void
709947e6 3125reset_used_flags (rtx x)
1cd4cfea 3126{
709947e6 3127 mark_used_flags (x, 0);
3128}
1cd4cfea 3129
709947e6 3130/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3131 to look for shared sub-parts. */
1cd4cfea 3132
709947e6 3133void
3134set_used_flags (rtx x)
3135{
3136 mark_used_flags (x, 1);
1cd4cfea 3137}
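
/* An illustrative sketch, not part of the original source, of how the
   used-bit machinery is driven by the unsharing passes above: first
   every insn's bits are cleared, then copy_rtx_if_shared marks and
   unshares in a single walk:

	reset_used_flags (PATTERN (insn));
	...
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   which is exactly the sequence unshare_all_rtl_again performs.  */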
15bbde2b 3138\f
3139/* Copy X if necessary so that it won't be altered by changes in OTHER.
3140 Return X or the rtx for the pseudo reg the value of X was copied into.
3141 OTHER must be valid as a SET_DEST. */
3142
3143rtx
35cb5232 3144make_safe_from (rtx x, rtx other)
15bbde2b 3145{
3146 while (1)
3147 switch (GET_CODE (other))
3148 {
3149 case SUBREG:
3150 other = SUBREG_REG (other);
3151 break;
3152 case STRICT_LOW_PART:
3153 case SIGN_EXTEND:
3154 case ZERO_EXTEND:
3155 other = XEXP (other, 0);
3156 break;
3157 default:
3158 goto done;
3159 }
3160 done:
e16ceb8e 3161 if ((MEM_P (other)
15bbde2b 3162 && ! CONSTANT_P (x)
8ad4c111 3163 && !REG_P (x)
15bbde2b 3164 && GET_CODE (x) != SUBREG)
8ad4c111 3165 || (REG_P (other)
15bbde2b 3166 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3167 || reg_mentioned_p (other, x))))
3168 {
3169 rtx temp = gen_reg_rtx (GET_MODE (x));
3170 emit_move_insn (temp, x);
3171 return temp;
3172 }
3173 return x;
3174}
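
/* An illustrative sketch, not part of the original source: before
   emitting a multi-insn sequence that stores into TARGET early, an
   expander can protect a source operand that might overlap it:

	op0 = make_safe_from (op0, target);

   after which clobbering TARGET partway through the sequence cannot
   change the value read from OP0.  */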
3175\f
3176/* Emission of insns (adding them to the doubly-linked list). */
3177
15bbde2b 3178/* Return the last insn emitted, even if it is in a sequence now pushed. */
3179
447ab0fc 3180rtx_insn *
35cb5232 3181get_last_insn_anywhere (void)
15bbde2b 3182{
c36aa54b 3183 struct sequence_stack *seq;
3184 for (seq = get_current_sequence (); seq; seq = seq->next)
3185 if (seq->last != 0)
3186 return seq->last;
15bbde2b 3187 return 0;
3188}
3189
70545de4 3190/* Return the first nonnote insn emitted in the current sequence or the
3191 current function. This routine looks inside SEQUENCEs. */
3192
2eb8c261 3193rtx_insn *
35cb5232 3194get_first_nonnote_insn (void)
70545de4 3195{
4cd001d5 3196 rtx_insn *insn = get_insns ();
f86e856e 3197
3198 if (insn)
3199 {
3200 if (NOTE_P (insn))
3201 for (insn = next_insn (insn);
3202 insn && NOTE_P (insn);
3203 insn = next_insn (insn))
3204 continue;
3205 else
3206 {
1c14a50e 3207 if (NONJUMP_INSN_P (insn)
f86e856e 3208 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4cd001d5 3209 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
f86e856e 3210 }
3211 }
70545de4 3212
3213 return insn;
3214}
3215
3216/* Return the last nonnote insn emitted in the current sequence or the
3217 current function. This routine looks inside SEQUENCEs. */
3218
2eb8c261 3219rtx_insn *
35cb5232 3220get_last_nonnote_insn (void)
70545de4 3221{
4cd001d5 3222 rtx_insn *insn = get_last_insn ();
f86e856e 3223
3224 if (insn)
3225 {
3226 if (NOTE_P (insn))
3227 for (insn = previous_insn (insn);
3228 insn && NOTE_P (insn);
3229 insn = previous_insn (insn))
3230 continue;
3231 else
3232 {
4cd001d5 3233 if (NONJUMP_INSN_P (insn))
3234 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3235 insn = seq->insn (seq->len () - 1);
f86e856e 3236 }
3237 }
70545de4 3238
3239 return insn;
3240}
3241
9845d120 3242/* Return the number of actual (non-debug) insns emitted in this
3243 function. */
3244
3245int
3246get_max_insn_count (void)
3247{
3248 int n = cur_insn_uid;
3249
3250 /* The table size must be stable across -g, to avoid codegen
3251 differences due to debug insns, and not be affected by
3252 -fmin-insn-uid, to avoid excessive table size and to simplify
3253 debugging of -fcompare-debug failures. */
3254 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3255 n -= cur_debug_insn_uid;
3256 else
3257 n -= MIN_NONDEBUG_INSN_UID;
3258
3259 return n;
3260}
3261
15bbde2b 3262\f
3263/* Return the next insn. If it is a SEQUENCE, return the first insn
3264 of the sequence. */
3265
7bac25b3 3266rtx_insn *
50895eab 3267next_insn (rtx_insn *insn)
15bbde2b 3268{
ce4469fa 3269 if (insn)
3270 {
3271 insn = NEXT_INSN (insn);
3272 if (insn && NONJUMP_INSN_P (insn)
3273 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4cd001d5 3274 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
ce4469fa 3275 }
15bbde2b 3276
4cd001d5 3277 return insn;
15bbde2b 3278}
3279
3280/* Return the previous insn. If it is a SEQUENCE, return the last insn
3281 of the sequence. */
3282
7bac25b3 3283rtx_insn *
50895eab 3284previous_insn (rtx_insn *insn)
15bbde2b 3285{
ce4469fa 3286 if (insn)
3287 {
3288 insn = PREV_INSN (insn);
4cd001d5 3289 if (insn && NONJUMP_INSN_P (insn))
3290 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3291 insn = seq->insn (seq->len () - 1);
ce4469fa 3292 }
15bbde2b 3293
4cd001d5 3294 return insn;
15bbde2b 3295}
3296
3297/* Return the next insn after INSN that is not a NOTE. This routine does not
3298 look inside SEQUENCEs. */
3299
7bac25b3 3300rtx_insn *
4cd001d5 3301next_nonnote_insn (rtx uncast_insn)
15bbde2b 3302{
4cd001d5 3303 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
ce4469fa 3304 while (insn)
3305 {
3306 insn = NEXT_INSN (insn);
3307 if (insn == 0 || !NOTE_P (insn))
3308 break;
3309 }
15bbde2b 3310
4cd001d5 3311 return insn;
15bbde2b 3312}
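
/* An illustrative sketch, not part of the original source, of the
   usual way these walkers are combined to visit every real insn of
   the current function:

	for (rtx_insn *insn = get_insns (); insn;
	     insn = next_nonnote_insn (insn))
	  if (INSN_P (insn))
	    ...

   with next_active_insn or next_nondebug_insn substituted when USEs,
   CLOBBERs or debug insns should be skipped as well.  */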
3313
c4d13c5c 3314/* Return the next insn after INSN that is not a NOTE, but stop the
3315 search before we enter another basic block. This routine does not
3316 look inside SEQUENCEs. */
3317
7bac25b3 3318rtx_insn *
2eb8c261 3319next_nonnote_insn_bb (rtx_insn *insn)
c4d13c5c 3320{
3321 while (insn)
3322 {
3323 insn = NEXT_INSN (insn);
3324 if (insn == 0 || !NOTE_P (insn))
3325 break;
3326 if (NOTE_INSN_BASIC_BLOCK_P (insn))
7bac25b3 3327 return NULL;
c4d13c5c 3328 }
3329
4cd001d5 3330 return insn;
c4d13c5c 3331}
3332
15bbde2b 3333/* Return the previous insn before INSN that is not a NOTE. This routine does
3334 not look inside SEQUENCEs. */
3335
7bac25b3 3336rtx_insn *
4cd001d5 3337prev_nonnote_insn (rtx uncast_insn)
15bbde2b 3338{
4cd001d5 3339 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3340
ce4469fa 3341 while (insn)
3342 {
3343 insn = PREV_INSN (insn);
3344 if (insn == 0 || !NOTE_P (insn))
3345 break;
3346 }
15bbde2b 3347
4cd001d5 3348 return insn;
15bbde2b 3349}
3350
bcc66782 3351/* Return the previous insn before INSN that is not a NOTE, but stop
3352 the search before we enter another basic block. This routine does
3353 not look inside SEQUENCEs. */
3354
7bac25b3 3355rtx_insn *
4cd001d5 3356prev_nonnote_insn_bb (rtx uncast_insn)
bcc66782 3357{
4cd001d5 3358 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3359
bcc66782 3360 while (insn)
3361 {
3362 insn = PREV_INSN (insn);
3363 if (insn == 0 || !NOTE_P (insn))
3364 break;
3365 if (NOTE_INSN_BASIC_BLOCK_P (insn))
7bac25b3 3366 return NULL;
bcc66782 3367 }
3368
4cd001d5 3369 return insn;
bcc66782 3370}
3371
9845d120 3372/* Return the next insn after INSN that is not a DEBUG_INSN. This
3373 routine does not look inside SEQUENCEs. */
3374
7bac25b3 3375rtx_insn *
4cd001d5 3376next_nondebug_insn (rtx uncast_insn)
9845d120 3377{
4cd001d5 3378 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3379
9845d120 3380 while (insn)
3381 {
3382 insn = NEXT_INSN (insn);
3383 if (insn == 0 || !DEBUG_INSN_P (insn))
3384 break;
3385 }
3386
4cd001d5 3387 return insn;
9845d120 3388}
3389
3390/* Return the previous insn before INSN that is not a DEBUG_INSN.
3391 This routine does not look inside SEQUENCEs. */
3392
7bac25b3 3393rtx_insn *
4cd001d5 3394prev_nondebug_insn (rtx uncast_insn)
9845d120 3395{
4cd001d5 3396 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3397
9845d120 3398 while (insn)
3399 {
3400 insn = PREV_INSN (insn);
3401 if (insn == 0 || !DEBUG_INSN_P (insn))
3402 break;
3403 }
3404
4cd001d5 3405 return insn;
9845d120 3406}
3407
5b8537a8 3408/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3409 This routine does not look inside SEQUENCEs. */
3410
7bac25b3 3411rtx_insn *
4cd001d5 3412next_nonnote_nondebug_insn (rtx uncast_insn)
5b8537a8 3413{
4cd001d5 3414 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3415
5b8537a8 3416 while (insn)
3417 {
3418 insn = NEXT_INSN (insn);
3419 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3420 break;
3421 }
3422
4cd001d5 3423 return insn;
5b8537a8 3424}
3425
3426/* Return the previous insn before INSN that is neither a NOTE nor a DEBUG_INSN.
3427 This routine does not look inside SEQUENCEs. */
3428
7bac25b3 3429rtx_insn *
4cd001d5 3430prev_nonnote_nondebug_insn (rtx uncast_insn)
5b8537a8 3431{
4cd001d5 3432 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3433
5b8537a8 3434 while (insn)
3435 {
3436 insn = PREV_INSN (insn);
3437 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3438 break;
3439 }
3440
4cd001d5 3441 return insn;
5b8537a8 3442}
3443
15bbde2b 3444/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3445 or 0, if there is none. This routine does not look inside
a92771b8 3446 SEQUENCEs. */
15bbde2b 3447
7bac25b3 3448rtx_insn *
4cd001d5 3449next_real_insn (rtx uncast_insn)
15bbde2b 3450{
4cd001d5 3451 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3452
ce4469fa 3453 while (insn)
3454 {
3455 insn = NEXT_INSN (insn);
3456 if (insn == 0 || INSN_P (insn))
3457 break;
3458 }
15bbde2b 3459
4cd001d5 3460 return insn;
15bbde2b 3461}
3462
3463/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3464 or 0, if there is none. This routine does not look inside
3465 SEQUENCEs. */
3466
7bac25b3 3467rtx_insn *
4cd001d5 3468prev_real_insn (rtx uncast_insn)
15bbde2b 3469{
4cd001d5 3470 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3471
ce4469fa 3472 while (insn)
3473 {
3474 insn = PREV_INSN (insn);
3475 if (insn == 0 || INSN_P (insn))
3476 break;
3477 }
15bbde2b 3478
4cd001d5 3479 return insn;
15bbde2b 3480}
3481
d5f9786f 3482/* Return the last CALL_INSN in the current list, or 0 if there is none.
3483 This routine does not look inside SEQUENCEs. */
3484
ec22da62 3485rtx_call_insn *
35cb5232 3486last_call_insn (void)
d5f9786f 3487{
ec22da62 3488 rtx_insn *insn;
d5f9786f 3489
3490 for (insn = get_last_insn ();
6d7dc5b9 3491 insn && !CALL_P (insn);
d5f9786f 3492 insn = PREV_INSN (insn))
3493 ;
3494
ec22da62 3495 return safe_as_a <rtx_call_insn *> (insn);
d5f9786f 3496}
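
/* A minimal usage sketch (hypothetical helper, not part of the original
   source; assumes a CALL_INSN has already been emitted): decorate the
   most recently emitted call, pairing last_call_insn with
   add_function_usage_to, defined further below.  */

static void
record_call_fusage (rtx call_fusage)
{
  rtx_call_insn *call = last_call_insn ();
  add_function_usage_to (call, call_fusage);
}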
3497
15bbde2b 3498/* Find the next insn after INSN that really does something. This routine
084950ee 3499 does not look inside SEQUENCEs. After reload this also skips over
3500 standalone USE and CLOBBER insns. */
15bbde2b 3501
2215ca0d 3502int
52d07779 3503active_insn_p (const_rtx insn)
2215ca0d 3504{
6d7dc5b9 3505 return (CALL_P (insn) || JUMP_P (insn)
91f71fa3 3506 || JUMP_TABLE_DATA_P (insn) /* FIXME */
6d7dc5b9 3507 || (NONJUMP_INSN_P (insn)
3a66feab 3508 && (! reload_completed
3509 || (GET_CODE (PATTERN (insn)) != USE
3510 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2215ca0d 3511}
3512
7bac25b3 3513rtx_insn *
4cd001d5 3514next_active_insn (rtx uncast_insn)
15bbde2b 3515{
4cd001d5 3516 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3517
ce4469fa 3518 while (insn)
3519 {
3520 insn = NEXT_INSN (insn);
3521 if (insn == 0 || active_insn_p (insn))
3522 break;
3523 }
15bbde2b 3524
4cd001d5 3525 return insn;
15bbde2b 3526}
3527
3528/* Find the last insn before INSN that really does something. This routine
084950ee 3529 does not look inside SEQUENCEs. After reload this also skips over
3530 standalone USE and CLOBBER insns. */
15bbde2b 3531
7bac25b3 3532rtx_insn *
4cd001d5 3533prev_active_insn (rtx uncast_insn)
15bbde2b 3534{
4cd001d5 3535 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3536
ce4469fa 3537 while (insn)
3538 {
3539 insn = PREV_INSN (insn);
3540 if (insn == 0 || active_insn_p (insn))
3541 break;
3542 }
15bbde2b 3543
4cd001d5 3544 return insn;
15bbde2b 3545}
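
/* A minimal usage sketch (hypothetical helper, not part of the original
   source): test whether the nearest preceding active insn is a call,
   skipping notes and, after reload, lone USE/CLOBBER markers.  */

static bool
follows_call_p (rtx_insn *insn)
{
  rtx_insn *prev = prev_active_insn (insn);
  return prev != NULL && CALL_P (prev);
}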
15bbde2b 3546\f
15bbde2b 3547/* Return the next insn that uses CC0 after INSN, which is assumed to
3548 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3549 applied to the result of this function should yield INSN).
3550
3551 Normally, this is simply the next insn. However, if a REG_CC_USER note
3552 is present, it contains the insn that uses CC0.
3553
3554 Return 0 if we can't find the insn. */
3555
0be88abd 3556rtx_insn *
4cd001d5 3557next_cc0_user (rtx uncast_insn)
15bbde2b 3558{
4cd001d5 3559 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3560
b572011e 3561 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 3562
3563 if (note)
0be88abd 3564 return safe_as_a <rtx_insn *> (XEXP (note, 0));
15bbde2b 3565
3566 insn = next_nonnote_insn (insn);
6d7dc5b9 3567 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4cd001d5 3568 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
15bbde2b 3569
9204e736 3570 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
4cd001d5 3571 return insn;
15bbde2b 3572
3573 return 0;
3574}
3575
3576/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3577 note, it is the previous insn. */
3578
0be88abd 3579rtx_insn *
fd8b0a1a 3580prev_cc0_setter (rtx_insn *insn)
15bbde2b 3581{
b572011e 3582 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 3583
3584 if (note)
0be88abd 3585 return safe_as_a <rtx_insn *> (XEXP (note, 0));
15bbde2b 3586
3587 insn = prev_nonnote_insn (insn);
611234b4 3588 gcc_assert (sets_cc0_p (PATTERN (insn)));
15bbde2b 3589
4cd001d5 3590 return insn;
15bbde2b 3591}
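
/* An illustrative consistency check (hypothetical, not part of the
   original source; assumes SETTER really sets cc0, as required above):
   next_cc0_user and prev_cc0_setter are inverses whenever the user
   insn exists.  */

static void
check_cc0_pairing (rtx_insn *setter)
{
  rtx_insn *user = next_cc0_user (setter);
  gcc_assert (user == NULL || prev_cc0_setter (user) == setter);
}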
344dc2fa 3592
698ff1f0 3593/* Return true if X contains a RTX_AUTOINC class rtx whose operand is REG. */
3594
3595static bool
4073adaa 3596find_auto_inc (const_rtx x, const_rtx reg)
698ff1f0 3597{
4073adaa 3598 subrtx_iterator::array_type array;
3599 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
698ff1f0 3600 {
4073adaa 3601 const_rtx x = *iter;
3602 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3603 && rtx_equal_p (reg, XEXP (x, 0)))
3604 return true;
698ff1f0 3605 }
4073adaa 3606 return false;
698ff1f0 3607}
698ff1f0 3608
344dc2fa 3609/* Increment the label uses for all labels present in rtx. */
3610
3611static void
35cb5232 3612mark_label_nuses (rtx x)
344dc2fa 3613{
19cb6b50 3614 enum rtx_code code;
3615 int i, j;
3616 const char *fmt;
344dc2fa 3617
3618 code = GET_CODE (x);
b49f2e4b 3619 if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3620 LABEL_NUSES (LABEL_REF_LABEL (x))++;
344dc2fa 3621
3622 fmt = GET_RTX_FORMAT (code);
3623 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3624 {
3625 if (fmt[i] == 'e')
ff385626 3626 mark_label_nuses (XEXP (x, i));
344dc2fa 3627 else if (fmt[i] == 'E')
ff385626 3628 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
344dc2fa 3629 mark_label_nuses (XVECEXP (x, i, j));
3630 }
3631}
3632
15bbde2b 3633\f
3634/* Try splitting insns that can be split for better scheduling.
3635 PAT is the pattern which might be split.
3636 TRIAL is the insn providing PAT.
6ef828f9 3637 LAST is nonzero if we should return the last insn of the sequence produced.
15bbde2b 3638
3639 If this routine succeeds in splitting, it returns the first or last
0e69a50a 3640 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 3641 returns TRIAL. If the insn to be returned can be split, it will be. */
3642
bffa1357 3643rtx_insn *
58a87a29 3644try_split (rtx pat, rtx_insn *trial, int last)
15bbde2b 3645{
bffa1357 3646 rtx_insn *before = PREV_INSN (trial);
3647 rtx_insn *after = NEXT_INSN (trial);
4cd001d5 3648 rtx note;
3649 rtx_insn *seq, *tem;
3cd757b1 3650 int probability;
4cd001d5 3651 rtx_insn *insn_last, *insn;
e13693ec 3652 int njumps = 0;
9ed997be 3653 rtx_insn *call_insn = NULL;
3cd757b1 3654
25e880b1 3655 /* We're not good at redistributing frame information. */
3656 if (RTX_FRAME_RELATED_P (trial))
4cd001d5 3657 return trial;
25e880b1 3658
3cd757b1 3659 if (any_condjump_p (trial)
3660 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
9eb946de 3661 split_branch_probability = XINT (note, 0);
3cd757b1 3662 probability = split_branch_probability;
3663
58a87a29 3664 seq = split_insns (pat, trial);
3cd757b1 3665
3666 split_branch_probability = -1;
15bbde2b 3667
e13693ec 3668 if (!seq)
4cd001d5 3669 return trial;
e13693ec 3670
3671 /* Avoid infinite loop if any insn of the result matches
3672 the original pattern. */
3673 insn_last = seq;
3674 while (1)
15bbde2b 3675 {
e13693ec 3676 if (INSN_P (insn_last)
3677 && rtx_equal_p (PATTERN (insn_last), pat))
4cd001d5 3678 return trial;
e13693ec 3679 if (!NEXT_INSN (insn_last))
3680 break;
3681 insn_last = NEXT_INSN (insn_last);
3682 }
d823ba47 3683
3072d30e 3684 /* We will be adding the new sequence to the function. The splitters
3685 may have introduced invalid RTL sharing, so unshare the sequence now. */
3686 unshare_all_rtl_in_chain (seq);
3687
8f869004 3688 /* Mark labels and copy flags. */
e13693ec 3689 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3690 {
6d7dc5b9 3691 if (JUMP_P (insn))
e13693ec 3692 {
8f869004 3693 if (JUMP_P (trial))
3694 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
e13693ec 3695 mark_jump_label (PATTERN (insn), insn, 0);
3696 njumps++;
3697 if (probability != -1
3698 && any_condjump_p (insn)
3699 && !find_reg_note (insn, REG_BR_PROB, 0))
31d3e01c 3700 {
e13693ec 3701 /* We can preserve the REG_BR_PROB notes only if exactly
3702 one jump is created, otherwise the machine description
3703 is responsible for this step using the
3704 split_branch_probability variable. */
611234b4 3705 gcc_assert (njumps == 1);
9eb946de 3706 add_int_reg_note (insn, REG_BR_PROB, probability);
31d3e01c 3707 }
e13693ec 3708 }
3709 }
3710
3711 /* If we are splitting a CALL_INSN, look for the CALL_INSN
b0bd0491 3712 in SEQ and copy any additional information across. */
6d7dc5b9 3713 if (CALL_P (trial))
e13693ec 3714 {
3715 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
6d7dc5b9 3716 if (CALL_P (insn))
e13693ec 3717 {
4cd001d5 3718 rtx_insn *next;
3719 rtx *p;
b0bd0491 3720
2e3b0d0f 3721 gcc_assert (call_insn == NULL_RTX);
3722 call_insn = insn;
3723
b0bd0491 3724 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3725 target may have explicitly specified. */
3726 p = &CALL_INSN_FUNCTION_USAGE (insn);
0bb5a6cd 3727 while (*p)
3728 p = &XEXP (*p, 1);
3729 *p = CALL_INSN_FUNCTION_USAGE (trial);
b0bd0491 3730
3731 /* If the old call was a sibling call, the new one must
3732 be too. */
e13693ec 3733 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
b0bd0491 3734
3735 /* If the new call is the last instruction in the sequence,
3736 it will effectively replace the old call in-situ. Otherwise
3737 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3738 so that it comes immediately after the new call. */
3739 if (NEXT_INSN (insn))
47e1410d 3740 for (next = NEXT_INSN (trial);
3741 next && NOTE_P (next);
3742 next = NEXT_INSN (next))
3743 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
b0bd0491 3744 {
3745 remove_insn (next);
3746 add_insn_after (next, insn, NULL);
47e1410d 3747 break;
b0bd0491 3748 }
e13693ec 3749 }
3750 }
5262c253 3751
e13693ec 3752 /* Copy notes, particularly those related to the CFG. */
3753 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3754 {
3755 switch (REG_NOTE_KIND (note))
3756 {
3757 case REG_EH_REGION:
e38def9c 3758 copy_reg_eh_region_note_backward (note, insn_last, NULL);
e13693ec 3759 break;
381eb1e7 3760
e13693ec 3761 case REG_NORETURN:
3762 case REG_SETJMP:
4c0315d0 3763 case REG_TM:
698ff1f0 3764 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
381eb1e7 3765 {
6d7dc5b9 3766 if (CALL_P (insn))
a1ddb869 3767 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
381eb1e7 3768 }
e13693ec 3769 break;
5bb27a4b 3770
e13693ec 3771 case REG_NON_LOCAL_GOTO:
698ff1f0 3772 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
31d3e01c 3773 {
6d7dc5b9 3774 if (JUMP_P (insn))
a1ddb869 3775 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
31d3e01c 3776 }
e13693ec 3777 break;
344dc2fa 3778
698ff1f0 3779 case REG_INC:
32aa77d9 3780 if (!AUTO_INC_DEC)
3781 break;
3782
698ff1f0 3783 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3784 {
3785 rtx reg = XEXP (note, 0);
3786 if (!FIND_REG_INC_NOTE (insn, reg)
4073adaa 3787 && find_auto_inc (PATTERN (insn), reg))
a1ddb869 3788 add_reg_note (insn, REG_INC, reg);
698ff1f0 3789 }
3790 break;
698ff1f0 3791
dfe00a8f 3792 case REG_ARGS_SIZE:
32f1a0c8 3793 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
dfe00a8f 3794 break;
3795
2e3b0d0f 3796 case REG_CALL_DECL:
3797 gcc_assert (call_insn != NULL_RTX);
3798 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3799 break;
3800
e13693ec 3801 default:
3802 break;
15bbde2b 3803 }
e13693ec 3804 }
3805
3806 /* If there are LABELS inside the split insns, increment the
3807 usage count so we don't delete the label. */
19d2fe05 3808 if (INSN_P (trial))
e13693ec 3809 {
3810 insn = insn_last;
3811 while (insn != NULL_RTX)
15bbde2b 3812 {
19d2fe05 3813 /* JUMP_P insns have already been "marked" above. */
6d7dc5b9 3814 if (NONJUMP_INSN_P (insn))
e13693ec 3815 mark_label_nuses (PATTERN (insn));
15bbde2b 3816
e13693ec 3817 insn = PREV_INSN (insn);
3818 }
15bbde2b 3819 }
3820
5169661d 3821 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
e13693ec 3822
3823 delete_insn (trial);
e13693ec 3824
3825 /* Recursively call try_split for each new insn created; by the
3826 time control returns here that insn will be fully split, so
3827 set LAST and continue from the insn after the one returned.
3828 We can't use next_active_insn here since AFTER may be a note.
3829 Ignore deleted insns, which can occur if not optimizing. */
3830 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
dd1286fb 3831 if (! tem->deleted () && INSN_P (tem))
e13693ec 3832 tem = try_split (PATTERN (tem), tem, 1);
3833
3834 /* Return either the first or the last insn, depending on which was
3835 requested. */
3836 return last
06f9d6ef 3837 ? (after ? PREV_INSN (after) : get_last_insn ())
e13693ec 3838 : NEXT_INSN (before);
15bbde2b 3839}
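
/* A minimal usage sketch (hypothetical helper, not part of the original
   source): attempt to split TRIAL, falling back to the original insn;
   try_split returns TRIAL itself when the target provides no split.  */

static rtx_insn *
split_if_possible (rtx_insn *trial)
{
  return try_split (PATTERN (trial), trial, /*last=*/1);
}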
3840\f
3841/* Make and return an INSN rtx, initializing all its slots.
6a84e367 3842 Store PATTERN in the pattern slot. */
15bbde2b 3843
2c57d586 3844rtx_insn *
35cb5232 3845make_insn_raw (rtx pattern)
15bbde2b 3846{
2c57d586 3847 rtx_insn *insn;
15bbde2b 3848
2c57d586 3849 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
15bbde2b 3850
575333f9 3851 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3852 PATTERN (insn) = pattern;
3853 INSN_CODE (insn) = -1;
fc92fa61 3854 REG_NOTES (insn) = NULL;
5169661d 3855 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3856 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3857
fe7f701d 3858#ifdef ENABLE_RTL_CHECKING
3859 if (insn
9204e736 3860 && INSN_P (insn)
fe7f701d 3861 && (returnjump_p (insn)
3862 || (GET_CODE (insn) == SET
3863 && SET_DEST (insn) == pc_rtx)))
3864 {
c3ceba8e 3865 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
fe7f701d 3866 debug_rtx (insn);
3867 }
3868#endif
d823ba47 3869
15bbde2b 3870 return insn;
3871}
3872
9845d120 3873/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3874
2c57d586 3875static rtx_insn *
9845d120 3876make_debug_insn_raw (rtx pattern)
3877{
2c57d586 3878 rtx_debug_insn *insn;
9845d120 3879
2c57d586 3880 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
9845d120 3881 INSN_UID (insn) = cur_debug_insn_uid++;
3882 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3883 INSN_UID (insn) = cur_insn_uid++;
3884
3885 PATTERN (insn) = pattern;
3886 INSN_CODE (insn) = -1;
3887 REG_NOTES (insn) = NULL;
5169661d 3888 INSN_LOCATION (insn) = curr_insn_location ();
9845d120 3889 BLOCK_FOR_INSN (insn) = NULL;
3890
3891 return insn;
3892}
3893
31d3e01c 3894/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
15bbde2b 3895
2c57d586 3896static rtx_insn *
35cb5232 3897make_jump_insn_raw (rtx pattern)
15bbde2b 3898{
2c57d586 3899 rtx_jump_insn *insn;
15bbde2b 3900
2c57d586 3901 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
fc92fa61 3902 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3903
3904 PATTERN (insn) = pattern;
3905 INSN_CODE (insn) = -1;
fc92fa61 3906 REG_NOTES (insn) = NULL;
3907 JUMP_LABEL (insn) = NULL;
5169661d 3908 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3909 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3910
3911 return insn;
3912}
6e911104 3913
31d3e01c 3914/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
6e911104 3915
2c57d586 3916static rtx_insn *
35cb5232 3917make_call_insn_raw (rtx pattern)
6e911104 3918{
2c57d586 3919 rtx_call_insn *insn;
6e911104 3920
2c57d586 3921 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
6e911104 3922 INSN_UID (insn) = cur_insn_uid++;
3923
3924 PATTERN (insn) = pattern;
3925 INSN_CODE (insn) = -1;
6e911104 3926 REG_NOTES (insn) = NULL;
3927 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
5169661d 3928 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3929 BLOCK_FOR_INSN (insn) = NULL;
6e911104 3930
3931 return insn;
3932}
35f3420b 3933
3934/* Like `make_insn_raw' but make a NOTE instead of an insn. */
3935
cef3d8ad 3936static rtx_note *
35f3420b 3937make_note_raw (enum insn_note subtype)
3938{
3939 /* Some notes are never created this way at all. These notes are
3940 only created by patching out insns. */
3941 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3942 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3943
cef3d8ad 3944 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
35f3420b 3945 INSN_UID (note) = cur_insn_uid++;
3946 NOTE_KIND (note) = subtype;
3947 BLOCK_FOR_INSN (note) = NULL;
3948 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3949 return note;
3950}
15bbde2b 3951\f
35f3420b 3952/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3953 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3954 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3955
3956static inline void
3e75e92b 3957link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
35f3420b 3958{
4a57a2e8 3959 SET_PREV_INSN (insn) = prev;
3960 SET_NEXT_INSN (insn) = next;
35f3420b 3961 if (prev != NULL)
3962 {
4a57a2e8 3963 SET_NEXT_INSN (prev) = insn;
35f3420b 3964 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3965 {
f17e3fff 3966 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3967 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
35f3420b 3968 }
3969 }
3970 if (next != NULL)
3971 {
4a57a2e8 3972 SET_PREV_INSN (next) = insn;
35f3420b 3973 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
f17e3fff 3974 {
3975 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3976 SET_PREV_INSN (sequence->insn (0)) = insn;
3977 }
35f3420b 3978 }
34f5b9ac 3979
3980 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3981 {
f17e3fff 3982 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3983 SET_PREV_INSN (sequence->insn (0)) = prev;
3984 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
34f5b9ac 3985 }
35f3420b 3986}
3987
15bbde2b 3988/* Add INSN to the end of the doubly-linked list.
3989 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3990
3991void
3e75e92b 3992add_insn (rtx_insn *insn)
15bbde2b 3993{
3e75e92b 3994 rtx_insn *prev = get_last_insn ();
35f3420b 3995 link_insn_into_chain (insn, prev, NULL);
06f9d6ef 3996 if (get_insns () == NULL)
3997 set_first_insn (insn);
06f9d6ef 3998 set_last_insn (insn);
15bbde2b 3999}
4000
35f3420b 4001/* Add INSN into the doubly-linked list after insn AFTER. */
15bbde2b 4002
35f3420b 4003static void
3e75e92b 4004add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
15bbde2b 4005{
3e75e92b 4006 rtx_insn *next = NEXT_INSN (after);
15bbde2b 4007
dd1286fb 4008 gcc_assert (!optimize || !after->deleted ());
f65c10c0 4009
35f3420b 4010 link_insn_into_chain (insn, after, next);
15bbde2b 4011
35f3420b 4012 if (next == NULL)
15bbde2b 4013 {
c36aa54b 4014 struct sequence_stack *seq;
4015
4016 for (seq = get_current_sequence (); seq; seq = seq->next)
4017 if (after == seq->last)
4018 {
4019 seq->last = insn;
4020 break;
4021 }
15bbde2b 4022 }
35f3420b 4023}
4024
4025/* Add INSN into the doubly-linked list before insn BEFORE. */
4026
4027static void
3e75e92b 4028add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
35f3420b 4029{
3e75e92b 4030 rtx_insn *prev = PREV_INSN (before);
35f3420b 4031
dd1286fb 4032 gcc_assert (!optimize || !before->deleted ());
35f3420b 4033
4034 link_insn_into_chain (insn, prev, before);
4035
4036 if (prev == NULL)
15bbde2b 4037 {
c36aa54b 4038 struct sequence_stack *seq;
312de84d 4039
c36aa54b 4040 for (seq = get_current_sequence (); seq; seq = seq->next)
4041 if (before == seq->first)
4042 {
4043 seq->first = insn;
4044 break;
4045 }
4046
4047 gcc_assert (seq);
15bbde2b 4048 }
35f3420b 4049}
4050
4051/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4052 If BB is NULL, an attempt is made to infer the bb from AFTER.
4053
4054 This and the next function should be the only functions called
4055 to insert an insn once delay slots have been filled since only
4056 they know how to update a SEQUENCE. */
15bbde2b 4057
35f3420b 4058void
3e75e92b 4059add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
35f3420b 4060{
26bb3cb2 4061 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
3e75e92b 4062 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
35f3420b 4063 add_insn_after_nobb (insn, after);
6d7dc5b9 4064 if (!BARRIER_P (after)
4065 && !BARRIER_P (insn)
9dda7915 4066 && (bb = BLOCK_FOR_INSN (after)))
4067 {
4068 set_block_for_insn (insn, bb);
308f9b79 4069 if (INSN_P (insn))
3072d30e 4070 df_insn_rescan (insn);
9dda7915 4071 /* Should not happen, as the first insn in the BB is always
3fb1e43b 4072 either a NOTE or a LABEL. */
5496dbfc 4073 if (BB_END (bb) == after
9dda7915 4074 /* Avoid clobbering of structure when creating new BB. */
6d7dc5b9 4075 && !BARRIER_P (insn)
ad4583d9 4076 && !NOTE_INSN_BASIC_BLOCK_P (insn))
26bb3cb2 4077 BB_END (bb) = insn;
9dda7915 4078 }
15bbde2b 4079}
4080
35f3420b 4081/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4082 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4083
4084 This and the previous function should be the only functions called
4085 to insert an insn once delay slots have been filled since only
4086 they know how to update a SEQUENCE. */
312de84d 4087
4088void
3e75e92b 4089add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
312de84d 4090{
3e75e92b 4091 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4092 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
35f3420b 4093 add_insn_before_nobb (insn, before);
312de84d 4094
48e1416a 4095 if (!bb
3072d30e 4096 && !BARRIER_P (before)
4097 && !BARRIER_P (insn))
4098 bb = BLOCK_FOR_INSN (before);
4099
4100 if (bb)
9dda7915 4101 {
4102 set_block_for_insn (insn, bb);
308f9b79 4103 if (INSN_P (insn))
3072d30e 4104 df_insn_rescan (insn);
611234b4 4105 /* Should not happen, as the first insn in the BB is always
ba821eb1 4106 either a NOTE or a LABEL. */
611234b4 4107 gcc_assert (BB_HEAD (bb) != insn
4108 /* Avoid clobbering of structure when creating new BB. */
4109 || BARRIER_P (insn)
ad4583d9 4110 || NOTE_INSN_BASIC_BLOCK_P (insn));
9dda7915 4111 }
312de84d 4112}
4113
3072d30e 4114/* Replace INSN with a deleted instruction note. */
4115
fc3d1695 4116void
4117set_insn_deleted (rtx insn)
3072d30e 4118{
91f71fa3 4119 if (INSN_P (insn))
e149ca56 4120 df_insn_delete (as_a <rtx_insn *> (insn));
3072d30e 4121 PUT_CODE (insn, NOTE);
4122 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4123}
4124
4125
93ff53d3 4126/* Unlink INSN from the insn chain.
4127
4128 This function knows how to handle sequences.
4129
4130 This function does not invalidate data flow information associated with
4131 INSN (i.e. does not call df_insn_delete). That makes this function
4132 usable for merely disconnecting an insn from the chain, so that it
4133 can be re-emitted elsewhere later.
4134
4135 To later insert INSN elsewhere in the insn chain via add_insn and
4136 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4137 the caller. Nullifying them here breaks many insn chain walks.
4138
4139 To really delete an insn and related DF information, use delete_insn. */
4140
7ddcf2bf 4141void
4cd001d5 4142remove_insn (rtx uncast_insn)
7ddcf2bf 4143{
4cd001d5 4144 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
26bb3cb2 4145 rtx_insn *next = NEXT_INSN (insn);
4146 rtx_insn *prev = PREV_INSN (insn);
e4bf866d 4147 basic_block bb;
4148
7ddcf2bf 4149 if (prev)
4150 {
4a57a2e8 4151 SET_NEXT_INSN (prev) = next;
6d7dc5b9 4152 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
7ddcf2bf 4153 {
f17e3fff 4154 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4155 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
7ddcf2bf 4156 }
4157 }
7ddcf2bf 4158 else
4159 {
c36aa54b 4160 struct sequence_stack *seq;
4161
4162 for (seq = get_current_sequence (); seq; seq = seq->next)
4163 if (insn == seq->first)
7ddcf2bf 4164 {
c36aa54b 4165 seq->first = next;
7ddcf2bf 4166 break;
4167 }
4168
c36aa54b 4169 gcc_assert (seq);
7ddcf2bf 4170 }
4171
4172 if (next)
4173 {
4a57a2e8 4174 SET_PREV_INSN (next) = prev;
6d7dc5b9 4175 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
f17e3fff 4176 {
4177 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4178 SET_PREV_INSN (sequence->insn (0)) = prev;
4179 }
7ddcf2bf 4180 }
7ddcf2bf 4181 else
4182 {
c36aa54b 4183 struct sequence_stack *seq;
4184
4185 for (seq = get_current_sequence (); seq; seq = seq->next)
4186 if (insn == seq->last)
7ddcf2bf 4187 {
c36aa54b 4188 seq->last = prev;
7ddcf2bf 4189 break;
4190 }
4191
c36aa54b 4192 gcc_assert (seq);
7ddcf2bf 4193 }
b983ea33 4194
b983ea33 4195 /* Fix up basic block boundaries, if necessary. */
6d7dc5b9 4196 if (!BARRIER_P (insn)
e4bf866d 4197 && (bb = BLOCK_FOR_INSN (insn)))
4198 {
5496dbfc 4199 if (BB_HEAD (bb) == insn)
e4bf866d 4200 {
f4aee538 4201 /* Never ever delete the basic block note without deleting whole
4202 basic block. */
611234b4 4203 gcc_assert (!NOTE_P (insn));
26bb3cb2 4204 BB_HEAD (bb) = next;
e4bf866d 4205 }
5496dbfc 4206 if (BB_END (bb) == insn)
26bb3cb2 4207 BB_END (bb) = prev;
e4bf866d 4208 }
7ddcf2bf 4209}
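
/* A minimal sketch of the re-emission protocol described above
   (hypothetical helper, not part of the original source): detach INSN,
   nullify its links as remove_insn requires, and re-emit it after AFTER.  */

static void
move_insn_after_1 (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, after, NULL);
}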
4210
d5f9786f 4211/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4212
4213void
35cb5232 4214add_function_usage_to (rtx call_insn, rtx call_fusage)
d5f9786f 4215{
611234b4 4216 gcc_assert (call_insn && CALL_P (call_insn));
d5f9786f 4217
4218 /* Put the register usage information on the CALL. If there is already
4219 some usage information, put ours at the end. */
4220 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4221 {
4222 rtx link;
4223
4224 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4225 link = XEXP (link, 1))
4226 ;
4227
4228 XEXP (link, 1) = call_fusage;
4229 }
4230 else
4231 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4232}
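
/* A minimal usage sketch (hypothetical helper, not part of the original
   source; assumes the conventional EXPR_LIST-of-USE layout of
   CALL_INSN_FUNCTION_USAGE): record that CALL_INSN also uses REG.  */

static void
add_reg_use_to_call (rtx call_insn, rtx reg)
{
  add_function_usage_to (call_insn,
			 gen_rtx_EXPR_LIST (VOIDmode,
					    gen_rtx_USE (VOIDmode, reg),
					    NULL_RTX));
}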
4233
15bbde2b 4234/* Delete all insns made since FROM.
4235 FROM becomes the new last instruction. */
4236
4237void
57c26b3a 4238delete_insns_since (rtx_insn *from)
15bbde2b 4239{
4240 if (from == 0)
06f9d6ef 4241 set_first_insn (0);
15bbde2b 4242 else
4a57a2e8 4243 SET_NEXT_INSN (from) = 0;
06f9d6ef 4244 set_last_insn (from);
15bbde2b 4245}
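
/* A minimal usage sketch (hypothetical helper, not part of the original
   source): the usual rollback idiom: remember the last insn, emit
   tentatively, and discard everything emitted since if the result does
   not recognize.  */

static bool
emit_insn_if_recognized (rtx pat)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn = emit_insn (pat);
  if (recog_memoized (insn) < 0)
    {
      delete_insns_since (last);
      return false;
    }
  return true;
}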
4246
34e2ddcd 4247/* This function is deprecated, please use sequences instead.
4248
4249 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 4250 The insns to be moved are those between FROM and TO.
4251 They are moved to a new position after the insn AFTER.
4252 AFTER must not be FROM or TO or any insn in between.
4253
4254 This function does not know about SEQUENCEs and hence should not be
4255 called after delay-slot filling has been done. */
4256
4257void
57c26b3a 4258reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
15bbde2b 4259{
382ecba7 4260 if (flag_checking)
4261 {
4262 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4263 gcc_assert (after != x);
4264 gcc_assert (after != to);
4265 }
7f6ca11f 4266
15bbde2b 4267 /* Splice this bunch out of where it is now. */
4268 if (PREV_INSN (from))
4a57a2e8 4269 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
15bbde2b 4270 if (NEXT_INSN (to))
4a57a2e8 4271 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
06f9d6ef 4272 if (get_last_insn () == to)
4273 set_last_insn (PREV_INSN (from));
4274 if (get_insns () == from)
4275 set_first_insn (NEXT_INSN (to));
15bbde2b 4276
4277 /* Make the new neighbors point to it and it to them. */
4278 if (NEXT_INSN (after))
4a57a2e8 4279 SET_PREV_INSN (NEXT_INSN (after)) = to;
15bbde2b 4280
4a57a2e8 4281 SET_NEXT_INSN (to) = NEXT_INSN (after);
4282 SET_PREV_INSN (from) = after;
4283 SET_NEXT_INSN (after) = from;
9af5ce0c 4284 if (after == get_last_insn ())
06f9d6ef 4285 set_last_insn (to);
15bbde2b 4286}
4287
9dda7915 4288/* Same as function above, but take care to update BB boundaries. */
4289void
4a3fb716 4290reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
9dda7915 4291{
4a3fb716 4292 rtx_insn *prev = PREV_INSN (from);
9dda7915 4293 basic_block bb, bb2;
4294
4295 reorder_insns_nobb (from, to, after);
4296
6d7dc5b9 4297 if (!BARRIER_P (after)
9dda7915 4298 && (bb = BLOCK_FOR_INSN (after)))
4299 {
e149ca56 4300 rtx_insn *x;
3072d30e 4301 df_set_bb_dirty (bb);
d4c5e26d 4302
6d7dc5b9 4303 if (!BARRIER_P (from)
9dda7915 4304 && (bb2 = BLOCK_FOR_INSN (from)))
4305 {
5496dbfc 4306 if (BB_END (bb2) == to)
26bb3cb2 4307 BB_END (bb2) = prev;
3072d30e 4308 df_set_bb_dirty (bb2);
9dda7915 4309 }
4310
5496dbfc 4311 if (BB_END (bb) == after)
26bb3cb2 4312 BB_END (bb) = to;
9dda7915 4313
4314 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7097dd0c 4315 if (!BARRIER_P (x))
a2bdd643 4316 df_insn_change_bb (x, bb);
9dda7915 4317 }
4318}
4319
15bbde2b 4320\f
31d3e01c 4321/* Emit insn(s) of given code and pattern
4322 at a specified place within the doubly-linked list.
15bbde2b 4323
31d3e01c 4324 All of the emit_foo global entry points accept an object
4325 X which is either an insn list or a PATTERN of a single
4326 instruction.
15bbde2b 4327
31d3e01c 4328 There are thus a few canonical ways to generate code and
4329 emit it at a specific place in the instruction stream. For
4330 example, consider the instruction named SPOT and the fact that
4331 we would like to emit some instructions before SPOT. We might
4332 do it like this:
15bbde2b 4333
31d3e01c 4334 start_sequence ();
4335 ... emit the new instructions ...
4336 insns_head = get_insns ();
4337 end_sequence ();
15bbde2b 4338
31d3e01c 4339 emit_insn_before (insns_head, SPOT);
15bbde2b 4340
31d3e01c 4341 It used to be common to generate SEQUENCE rtl instead, but that
4342 is a relic of the past which no longer occurs. The reason is that
4343 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4344 generated would almost certainly die right after it was created. */
15bbde2b 4345
722334ea 4346static rtx_insn *
5f7c5ddd 4347emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
2c57d586 4348 rtx_insn *(*make_raw) (rtx))
15bbde2b 4349{
2c57d586 4350 rtx_insn *insn;
15bbde2b 4351
611234b4 4352 gcc_assert (before);
31d3e01c 4353
4354 if (x == NULL_RTX)
722334ea 4355 return safe_as_a <rtx_insn *> (last);
31d3e01c 4356
4357 switch (GET_CODE (x))
15bbde2b 4358 {
9845d120 4359 case DEBUG_INSN:
31d3e01c 4360 case INSN:
4361 case JUMP_INSN:
4362 case CALL_INSN:
4363 case CODE_LABEL:
4364 case BARRIER:
4365 case NOTE:
2c57d586 4366 insn = as_a <rtx_insn *> (x);
31d3e01c 4367 while (insn)
4368 {
2c57d586 4369 rtx_insn *next = NEXT_INSN (insn);
3072d30e 4370 add_insn_before (insn, before, bb);
31d3e01c 4371 last = insn;
4372 insn = next;
4373 }
4374 break;
4375
4376#ifdef ENABLE_RTL_CHECKING
4377 case SEQUENCE:
611234b4 4378 gcc_unreachable ();
31d3e01c 4379 break;
4380#endif
4381
4382 default:
5f7c5ddd 4383 last = (*make_raw) (x);
3072d30e 4384 add_insn_before (last, before, bb);
31d3e01c 4385 break;
15bbde2b 4386 }
4387
722334ea 4388 return safe_as_a <rtx_insn *> (last);
15bbde2b 4389}
4390
5f7c5ddd 4391/* Make X be output before the instruction BEFORE. */
4392
722334ea 4393rtx_insn *
c9a09955 4394emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
5f7c5ddd 4395{
4396 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4397}
4398
31d3e01c 4399/* Make an instruction with body X and code JUMP_INSN
15bbde2b 4400 and output it before the instruction BEFORE. */
4401
f9a00e9e 4402rtx_jump_insn *
c9a09955 4403emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
15bbde2b 4404{
f9a00e9e 4405 return as_a <rtx_jump_insn *> (
4406 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4407 make_jump_insn_raw));
15bbde2b 4408}
4409
31d3e01c 4410/* Make an instruction with body X and code CALL_INSN
cd0fe062 4411 and output it before the instruction BEFORE. */
4412
722334ea 4413rtx_insn *
c9a09955 4414emit_call_insn_before_noloc (rtx x, rtx_insn *before)
cd0fe062 4415{
5f7c5ddd 4416 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4417 make_call_insn_raw);
cd0fe062 4418}
4419
9845d120 4420/* Make an instruction with body X and code DEBUG_INSN
4421 and output it before the instruction BEFORE. */
4422
722334ea 4423rtx_insn *
9845d120 4424emit_debug_insn_before_noloc (rtx x, rtx before)
4425{
5f7c5ddd 4426 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4427 make_debug_insn_raw);
9845d120 4428}
4429
15bbde2b 4430/* Make an insn of code BARRIER
71caadc0 4431 and output it before the insn BEFORE. */
15bbde2b 4432
722334ea 4433rtx_barrier *
35cb5232 4434emit_barrier_before (rtx before)
15bbde2b 4435{
722334ea 4436 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
15bbde2b 4437
4438 INSN_UID (insn) = cur_insn_uid++;
4439
3072d30e 4440 add_insn_before (insn, before, NULL);
15bbde2b 4441 return insn;
4442}
4443
71caadc0 4444/* Emit the label LABEL before the insn BEFORE. */
4445
f9a00e9e 4446rtx_code_label *
c9a09955 4447emit_label_before (rtx label, rtx_insn *before)
71caadc0 4448{
596ef494 4449 gcc_checking_assert (INSN_UID (label) == 0);
4450 INSN_UID (label) = cur_insn_uid++;
4451 add_insn_before (label, before, NULL);
f9a00e9e 4452 return as_a <rtx_code_label *> (label);
71caadc0 4453}
15bbde2b 4454\f
31d3e01c 4455/* Helper for emit_insn_after, handles lists of instructions
4456 efficiently. */
15bbde2b 4457
f17e3fff 4458static rtx_insn *
4459emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
15bbde2b 4460{
f17e3fff 4461 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
26bb3cb2 4462 rtx_insn *last;
4463 rtx_insn *after_after;
3072d30e 4464 if (!bb && !BARRIER_P (after))
4465 bb = BLOCK_FOR_INSN (after);
15bbde2b 4466
3072d30e 4467 if (bb)
15bbde2b 4468 {
3072d30e 4469 df_set_bb_dirty (bb);
31d3e01c 4470 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
6d7dc5b9 4471 if (!BARRIER_P (last))
3072d30e 4472 {
4473 set_block_for_insn (last, bb);
4474 df_insn_rescan (last);
4475 }
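      /* The loop above stops before the final insn in the chain;
	 give that last insn the same treatment here.  */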
6d7dc5b9 4476 if (!BARRIER_P (last))
3072d30e 4477 {
4478 set_block_for_insn (last, bb);
4479 df_insn_rescan (last);
4480 }
5496dbfc 4481 if (BB_END (bb) == after)
26bb3cb2 4482 BB_END (bb) = last;
15bbde2b 4483 }
4484 else
31d3e01c 4485 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4486 continue;
4487
4488 after_after = NEXT_INSN (after);
4489
4a57a2e8 4490 SET_NEXT_INSN (after) = first;
4491 SET_PREV_INSN (first) = after;
4492 SET_NEXT_INSN (last) = after_after;
31d3e01c 4493 if (after_after)
4a57a2e8 4494 SET_PREV_INSN (after_after) = last;
31d3e01c 4495
9af5ce0c 4496 if (after == get_last_insn ())
06f9d6ef 4497 set_last_insn (last);
e1ab7874 4498
31d3e01c 4499 return last;
4500}
4501
722334ea 4502static rtx_insn *
f17e3fff 4503emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
2c57d586 4504 rtx_insn *(*make_raw)(rtx))
31d3e01c 4505{
f17e3fff 4506 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4507 rtx_insn *last = after;
31d3e01c 4508
611234b4 4509 gcc_assert (after);
31d3e01c 4510
4511 if (x == NULL_RTX)
f17e3fff 4512 return last;
31d3e01c 4513
4514 switch (GET_CODE (x))
15bbde2b 4515 {
9845d120 4516 case DEBUG_INSN:
31d3e01c 4517 case INSN:
4518 case JUMP_INSN:
4519 case CALL_INSN:
4520 case CODE_LABEL:
4521 case BARRIER:
4522 case NOTE:
26bb3cb2 4523 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
31d3e01c 4524 break;
4525
4526#ifdef ENABLE_RTL_CHECKING
4527 case SEQUENCE:
611234b4 4528 gcc_unreachable ();
31d3e01c 4529 break;
4530#endif
4531
4532 default:
5f7c5ddd 4533 last = (*make_raw) (x);
3072d30e 4534 add_insn_after (last, after, bb);
31d3e01c 4535 break;
15bbde2b 4536 }
4537
f17e3fff 4538 return last;
15bbde2b 4539}
4540
5f7c5ddd 4541/* Make X be output after the insn AFTER and set the BB of insn. If
4542 BB is NULL, an attempt is made to infer the BB from AFTER. */
4543
722334ea 4544rtx_insn *
5f7c5ddd 4545emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4546{
4547 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4548}
4549
1bea98fb 4550
31d3e01c 4551/* Make an insn of code JUMP_INSN with body X
15bbde2b 4552 and output it after the insn AFTER. */
4553
f9a00e9e 4554rtx_jump_insn *
0891f67c 4555emit_jump_insn_after_noloc (rtx x, rtx after)
15bbde2b 4556{
f9a00e9e 4557 return as_a <rtx_jump_insn *> (
4558 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
31d3e01c 4559}
4560
4561/* Make an instruction with body X and code CALL_INSN
4562 and output it after the instruction AFTER. */
4563
722334ea 4564rtx_insn *
0891f67c 4565emit_call_insn_after_noloc (rtx x, rtx after)
31d3e01c 4566{
5f7c5ddd 4567 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
15bbde2b 4568}
4569
9845d120 4570/* Make an instruction with body X and code DEBUG_INSN
4571 and output it after the instruction AFTER. */
4572
722334ea 4573rtx_insn *
9845d120 4574emit_debug_insn_after_noloc (rtx x, rtx after)
4575{
5f7c5ddd 4576 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
9845d120 4577}
4578
15bbde2b 4579/* Make an insn of code BARRIER
4580 and output it after the insn AFTER. */
4581
722334ea 4582rtx_barrier *
35cb5232 4583emit_barrier_after (rtx after)
15bbde2b 4584{
722334ea 4585 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
15bbde2b 4586
4587 INSN_UID (insn) = cur_insn_uid++;
4588
3072d30e 4589 add_insn_after (insn, after, NULL);
15bbde2b 4590 return insn;
4591}
4592
4593/* Emit the label LABEL after the insn AFTER. */
4594
722334ea 4595rtx_insn *
c9a09955 4596emit_label_after (rtx label, rtx_insn *after)
15bbde2b 4597{
596ef494 4598 gcc_checking_assert (INSN_UID (label) == 0);
4599 INSN_UID (label) = cur_insn_uid++;
4600 add_insn_after (label, after, NULL);
722334ea 4601 return as_a <rtx_insn *> (label);
15bbde2b 4602}
35f3420b 4603\f
4604/* Notes require a bit of special handling: Some notes need to have their
4605 BLOCK_FOR_INSN set, others should never have it set, and some should
4606 have it set or clear depending on the context. */
4607
4608/* Return true iff a note of kind SUBTYPE should be emitted with routines
4609 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if
4610 the caller is asked to emit a note before BB_HEAD, or after BB_END. */
4611
4612static bool
4613note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4614{
4615 switch (subtype)
4616 {
4617 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4618 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4619 return true;
4620
4621 /* Notes for var tracking and EH region markers can appear between or
4622 inside basic blocks. If the caller is emitting on the basic block
4623 boundary, do not set BLOCK_FOR_INSN on the new note. */
4624 case NOTE_INSN_VAR_LOCATION:
4625 case NOTE_INSN_CALL_ARG_LOCATION:
4626 case NOTE_INSN_EH_REGION_BEG:
4627 case NOTE_INSN_EH_REGION_END:
4628 return on_bb_boundary_p;
4629
4630 /* Otherwise, BLOCK_FOR_INSN must be set. */
4631 default:
4632 return false;
4633 }
4634}
15bbde2b 4635
4636/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4637
cef3d8ad 4638rtx_note *
4d86329d 4639emit_note_after (enum insn_note subtype, rtx_insn *after)
15bbde2b 4640{
cef3d8ad 4641 rtx_note *note = make_note_raw (subtype);
35f3420b 4642 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4643 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4644
4645 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4646 add_insn_after_nobb (note, after);
4647 else
4648 add_insn_after (note, after, bb);
4649 return note;
4650}
4651
4652/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4653
cef3d8ad 4654rtx_note *
1dc26636 4655emit_note_before (enum insn_note subtype, rtx_insn *before)
35f3420b 4656{
cef3d8ad 4657 rtx_note *note = make_note_raw (subtype);
35f3420b 4658 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4659 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4660
4661 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4662 add_insn_before_nobb (note, before);
4663 else
4664 add_insn_before (note, before, bb);
15bbde2b 4665 return note;
4666}
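
/* A minimal usage sketch (hypothetical helper, not part of the original
   source; real EH notes also carry region data): bracket INSN with EH
   region notes.  Whether each note joins INSN's block follows
   note_outside_basic_block_p above.  */

static void
bracket_with_eh_notes (rtx_insn *insn)
{
  emit_note_before (NOTE_INSN_EH_REGION_BEG, insn);
  emit_note_after (NOTE_INSN_EH_REGION_END, insn);
}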
15bbde2b 4667\f
ede4ebcb 4668/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4669 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4670
722334ea 4671static rtx_insn *
4cd001d5 4672emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
2c57d586 4673 rtx_insn *(*make_raw) (rtx))
d321a68b 4674{
4cd001d5 4675 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
9ed997be 4676 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
d321a68b 4677
0891f67c 4678 if (pattern == NULL_RTX || !loc)
9ed997be 4679 return last;
ca154f3f 4680
31d3e01c 4681 after = NEXT_INSN (after);
4682 while (1)
4683 {
57e999d9 4684 if (active_insn_p (after)
4685 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4686 && !INSN_LOCATION (after))
5169661d 4687 INSN_LOCATION (after) = loc;
31d3e01c 4688 if (after == last)
4689 break;
4690 after = NEXT_INSN (after);
4691 }
9ed997be 4692 return last;
d321a68b 4693}
4694
ede4ebcb 4695/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4696 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4697 any DEBUG_INSNs. */
4698
722334ea 4699static rtx_insn *
4cd001d5 4700emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
2c57d586 4701 rtx_insn *(*make_raw) (rtx))
0891f67c 4702{
4cd001d5 4703 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4704 rtx_insn *prev = after;
9845d120 4705
ede4ebcb 4706 if (skip_debug_insns)
4707 while (DEBUG_INSN_P (prev))
4708 prev = PREV_INSN (prev);
9845d120 4709
4710 if (INSN_P (prev))
5169661d 4711 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
ede4ebcb 4712 make_raw);
0891f67c 4713 else
ede4ebcb 4714 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0891f67c 4715}
4716
5169661d 4717/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4718rtx_insn *
ede4ebcb 4719emit_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4720{
ede4ebcb 4721 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4722}
31d3e01c 4723
5169661d 4724/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
722334ea 4725rtx_insn *
ede4ebcb 4726emit_insn_after (rtx pattern, rtx after)
4727{
4728 return emit_pattern_after (pattern, after, true, make_insn_raw);
4729}
ca154f3f 4730
5169661d 4731/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
f9a00e9e 4732rtx_jump_insn *
ede4ebcb 4733emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4734{
f9a00e9e 4735 return as_a <rtx_jump_insn *> (
4736 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
d321a68b 4737}
4738
5169661d 4739/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
f9a00e9e 4740rtx_jump_insn *
0891f67c 4741emit_jump_insn_after (rtx pattern, rtx after)
4742{
f9a00e9e 4743 return as_a <rtx_jump_insn *> (
4744 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
0891f67c 4745}
4746
5169661d 4747/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4748rtx_insn *
35cb5232 4749emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4750{
ede4ebcb 4751 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
d321a68b 4752}
4753
5169661d 4754/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
722334ea 4755rtx_insn *
0891f67c 4756emit_call_insn_after (rtx pattern, rtx after)
4757{
ede4ebcb 4758 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
0891f67c 4759}
4760
5169661d 4761/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4762rtx_insn *
9845d120 4763emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4764{
ede4ebcb 4765 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
9845d120 4766}
4767
5169661d 4768/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
722334ea 4769rtx_insn *
9845d120 4770emit_debug_insn_after (rtx pattern, rtx after)
4771{
ede4ebcb 4772 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
9845d120 4773}
4774
ede4ebcb 4775/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4776 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4777 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4778 CALL_INSN, etc. */
4779
722334ea 4780static rtx_insn *
4cd001d5 4781emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
2c57d586 4782 rtx_insn *(*make_raw) (rtx))
d321a68b 4783{
4cd001d5 4784 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4785 rtx_insn *first = PREV_INSN (before);
4786 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4787 insnp ? before : NULL_RTX,
4788 NULL, make_raw);
0891f67c 4789
4790 if (pattern == NULL_RTX || !loc)
4cd001d5 4791 return last;
0891f67c 4792
4486418e 4793 if (!first)
4794 first = get_insns ();
4795 else
4796 first = NEXT_INSN (first);
0891f67c 4797 while (1)
4798 {
57e999d9 4799 if (active_insn_p (first)
4800 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4801 && !INSN_LOCATION (first))
5169661d 4802 INSN_LOCATION (first) = loc;
0891f67c 4803 if (first == last)
4804 break;
4805 first = NEXT_INSN (first);
4806 }
4cd001d5 4807 return last;
0891f67c 4808}
4809
ede4ebcb 4810/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4811 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4812 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4813 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4814
722334ea 4815static rtx_insn *
4cd001d5 4816emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
2c57d586 4817 bool insnp, rtx_insn *(*make_raw) (rtx))
0891f67c 4818{
4cd001d5 4819 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4820 rtx_insn *next = before;
9845d120 4821
ede4ebcb 4822 if (skip_debug_insns)
4823 while (DEBUG_INSN_P (next))
4824 next = PREV_INSN (next);
9845d120 4825
4826 if (INSN_P (next))
5169661d 4827 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
ede4ebcb 4828 insnp, make_raw);
0891f67c 4829 else
ede4ebcb 4830 return emit_pattern_before_noloc (pattern, before,
4831 insnp ? before : NULL_RTX,
4832 NULL, make_raw);
0891f67c 4833}
4834
5169661d 4835/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4836rtx_insn *
c9a09955 4837emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
0891f67c 4838{
ede4ebcb 4839 return emit_pattern_before_setloc (pattern, before, loc, true,
4840 make_insn_raw);
4841}
0891f67c 4842
5169661d 4843/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
722334ea 4844rtx_insn *
ede4ebcb 4845emit_insn_before (rtx pattern, rtx before)
4846{
4847 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4848}
0891f67c 4849
5169661d 4850/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
f9a00e9e 4851rtx_jump_insn *
c9a09955 4852emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
ede4ebcb 4853{
f9a00e9e 4854 return as_a <rtx_jump_insn *> (
4855 emit_pattern_before_setloc (pattern, before, loc, false,
4856 make_jump_insn_raw));
0891f67c 4857}
4858
5169661d 4859/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
f9a00e9e 4860rtx_jump_insn *
0891f67c 4861emit_jump_insn_before (rtx pattern, rtx before)
4862{
f9a00e9e 4863 return as_a <rtx_jump_insn *> (
4864 emit_pattern_before (pattern, before, true, false,
4865 make_jump_insn_raw));
0891f67c 4866}
4867
5169661d 4868/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4869rtx_insn *
c9a09955 4870emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
0891f67c 4871{
ede4ebcb 4872 return emit_pattern_before_setloc (pattern, before, loc, false,
4873 make_call_insn_raw);
d321a68b 4874}
0891f67c 4875
ede4ebcb 4876/* Like emit_call_insn_before_noloc,
5169661d 4877 but set INSN_LOCATION according to BEFORE. */
722334ea 4878rtx_insn *
c9a09955 4879emit_call_insn_before (rtx pattern, rtx_insn *before)
0891f67c 4880{
ede4ebcb 4881 return emit_pattern_before (pattern, before, true, false,
4882 make_call_insn_raw);
0891f67c 4883}
9845d120 4884
5169661d 4885/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4886rtx_insn *
9845d120 4887emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4888{
ede4ebcb 4889 return emit_pattern_before_setloc (pattern, before, loc, false,
4890 make_debug_insn_raw);
9845d120 4891}
4892
ede4ebcb 4893/* Like emit_debug_insn_before_noloc,
5169661d 4894 but set INSN_LOCATION according to BEFORE. */
722334ea 4895rtx_insn *
5518cf83 4896emit_debug_insn_before (rtx pattern, rtx_insn *before)
9845d120 4897{
ede4ebcb 4898 return emit_pattern_before (pattern, before, false, false,
4899 make_debug_insn_raw);
9845d120 4900}
d321a68b 4901\f
31d3e01c 4902/* Take X and emit it at the end of the doubly-linked
4903 INSN list.
15bbde2b 4904
4905 Returns the last insn emitted. */
4906
722334ea 4907rtx_insn *
35cb5232 4908emit_insn (rtx x)
15bbde2b 4909{
722334ea 4910 rtx_insn *last = get_last_insn ();
4911 rtx_insn *insn;
15bbde2b 4912
31d3e01c 4913 if (x == NULL_RTX)
4914 return last;
15bbde2b 4915
31d3e01c 4916 switch (GET_CODE (x))
4917 {
9845d120 4918 case DEBUG_INSN:
31d3e01c 4919 case INSN:
4920 case JUMP_INSN:
4921 case CALL_INSN:
4922 case CODE_LABEL:
4923 case BARRIER:
4924 case NOTE:
722334ea 4925 insn = as_a <rtx_insn *> (x);
31d3e01c 4926 while (insn)
15bbde2b 4927 {
722334ea 4928 rtx_insn *next = NEXT_INSN (insn);
15bbde2b 4929 add_insn (insn);
31d3e01c 4930 last = insn;
4931 insn = next;
15bbde2b 4932 }
31d3e01c 4933 break;
15bbde2b 4934
31d3e01c 4935#ifdef ENABLE_RTL_CHECKING
91f71fa3 4936 case JUMP_TABLE_DATA:
31d3e01c 4937 case SEQUENCE:
611234b4 4938 gcc_unreachable ();
31d3e01c 4939 break;
4940#endif
15bbde2b 4941
31d3e01c 4942 default:
4943 last = make_insn_raw (x);
4944 add_insn (last);
4945 break;
15bbde2b 4946 }
4947
4948 return last;
4949}
4950
9845d120 4951/* Make an insn of code DEBUG_INSN with pattern X
4952 and add it to the end of the doubly-linked list. */
4953
722334ea 4954rtx_insn *
9845d120 4955emit_debug_insn (rtx x)
4956{
722334ea 4957 rtx_insn *last = get_last_insn ();
4958 rtx_insn *insn;
9845d120 4959
4960 if (x == NULL_RTX)
4961 return last;
4962
4963 switch (GET_CODE (x))
4964 {
4965 case DEBUG_INSN:
4966 case INSN:
4967 case JUMP_INSN:
4968 case CALL_INSN:
4969 case CODE_LABEL:
4970 case BARRIER:
4971 case NOTE:
722334ea 4972 insn = as_a <rtx_insn *> (x);
9845d120 4973 while (insn)
4974 {
722334ea 4975 rtx_insn *next = NEXT_INSN (insn);
9845d120 4976 add_insn (insn);
4977 last = insn;
4978 insn = next;
4979 }
4980 break;
4981
4982#ifdef ENABLE_RTL_CHECKING
91f71fa3 4983 case JUMP_TABLE_DATA:
9845d120 4984 case SEQUENCE:
4985 gcc_unreachable ();
4986 break;
4987#endif
4988
4989 default:
4990 last = make_debug_insn_raw (x);
4991 add_insn (last);
4992 break;
4993 }
4994
4995 return last;
4996}
4997
31d3e01c 4998/* Make an insn of code JUMP_INSN with pattern X
4999 and add it to the end of the doubly-linked list. */
15bbde2b 5000
722334ea 5001rtx_insn *
35cb5232 5002emit_jump_insn (rtx x)
15bbde2b 5003{
722334ea 5004 rtx_insn *last = NULL;
5005 rtx_insn *insn;
15bbde2b 5006
31d3e01c 5007 switch (GET_CODE (x))
15bbde2b 5008 {
9845d120 5009 case DEBUG_INSN:
31d3e01c 5010 case INSN:
5011 case JUMP_INSN:
5012 case CALL_INSN:
5013 case CODE_LABEL:
5014 case BARRIER:
5015 case NOTE:
722334ea 5016 insn = as_a <rtx_insn *> (x);
31d3e01c 5017 while (insn)
5018 {
722334ea 5019 rtx_insn *next = NEXT_INSN (insn);
31d3e01c 5020 add_insn (insn);
5021 last = insn;
5022 insn = next;
5023 }
5024 break;
b36b07d8 5025
31d3e01c 5026#ifdef ENABLE_RTL_CHECKING
91f71fa3 5027 case JUMP_TABLE_DATA:
31d3e01c 5028 case SEQUENCE:
611234b4 5029 gcc_unreachable ();
31d3e01c 5030 break;
5031#endif
b36b07d8 5032
31d3e01c 5033 default:
5034 last = make_jump_insn_raw (x);
5035 add_insn (last);
5036 break;
9dda7915 5037 }
b36b07d8 5038
5039 return last;
5040}
5041
31d3e01c 5042/* Make an insn of code CALL_INSN with pattern X
15bbde2b 5043 and add it to the end of the doubly-linked list. */
5044
722334ea 5045rtx_insn *
35cb5232 5046emit_call_insn (rtx x)
15bbde2b 5047{
722334ea 5048 rtx_insn *insn;
31d3e01c 5049
5050 switch (GET_CODE (x))
15bbde2b 5051 {
9845d120 5052 case DEBUG_INSN:
31d3e01c 5053 case INSN:
5054 case JUMP_INSN:
5055 case CALL_INSN:
5056 case CODE_LABEL:
5057 case BARRIER:
5058 case NOTE:
5059 insn = emit_insn (x);
5060 break;
15bbde2b 5061
31d3e01c 5062#ifdef ENABLE_RTL_CHECKING
5063 case SEQUENCE:
91f71fa3 5064 case JUMP_TABLE_DATA:
611234b4 5065 gcc_unreachable ();
31d3e01c 5066 break;
5067#endif
15bbde2b 5068
31d3e01c 5069 default:
5070 insn = make_call_insn_raw (x);
15bbde2b 5071 add_insn (insn);
31d3e01c 5072 break;
15bbde2b 5073 }
31d3e01c 5074
5075 return insn;
15bbde2b 5076}
5077
5078/* Add the label LABEL to the end of the doubly-linked list. */
5079
f9a00e9e 5080rtx_code_label *
5081emit_label (rtx uncast_label)
15bbde2b 5082{
f9a00e9e 5083 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5084
596ef494 5085 gcc_checking_assert (INSN_UID (label) == 0);
5086 INSN_UID (label) = cur_insn_uid++;
f9a00e9e 5087 add_insn (label);
5088 return label;
15bbde2b 5089}
5090
91f71fa3 5091/* Make an insn of code JUMP_TABLE_DATA
5092 and add it to the end of the doubly-linked list. */
5093
e41badc0 5094rtx_jump_table_data *
91f71fa3 5095emit_jump_table_data (rtx table)
5096{
e41badc0 5097 rtx_jump_table_data *jump_table_data =
5098 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
91f71fa3 5099 INSN_UID (jump_table_data) = cur_insn_uid++;
5100 PATTERN (jump_table_data) = table;
5101 BLOCK_FOR_INSN (jump_table_data) = NULL;
5102 add_insn (jump_table_data);
5103 return jump_table_data;
5104}
5105
15bbde2b 5106/* Make an insn of code BARRIER
5107 and add it to the end of the doubly-linked list. */
5108
722334ea 5109rtx_barrier *
35cb5232 5110emit_barrier (void)
15bbde2b 5111{
722334ea 5112 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
15bbde2b 5113 INSN_UID (barrier) = cur_insn_uid++;
5114 add_insn (barrier);
5115 return barrier;
5116}
5117
2f57e3d9 5118/* Emit a copy of note ORIG. */
35cb5232 5119
cef3d8ad 5120rtx_note *
5121emit_note_copy (rtx_note *orig)
2f57e3d9 5122{
35f3420b 5123 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
cef3d8ad 5124 rtx_note *note = make_note_raw (kind);
2f57e3d9 5125 NOTE_DATA (note) = NOTE_DATA (orig);
2f57e3d9 5126 add_insn (note);
31b97e8f 5127 return note;
15bbde2b 5128}
5129
31b97e8f 5130/* Make an insn of code NOTE or type NOTE_NO
5131 and add it to the end of the doubly-linked list. */
15bbde2b 5132
cef3d8ad 5133rtx_note *
ad4583d9 5134emit_note (enum insn_note kind)
15bbde2b 5135{
cef3d8ad 5136 rtx_note *note = make_note_raw (kind);
15bbde2b 5137 add_insn (note);
5138 return note;
5139}
5140
18b42941 5141/* Emit a clobber of lvalue X. */
5142
722334ea 5143rtx_insn *
18b42941 5144emit_clobber (rtx x)
5145{
5146 /* CONCATs should not appear in the insn stream. */
5147 if (GET_CODE (x) == CONCAT)
5148 {
5149 emit_clobber (XEXP (x, 0));
5150 return emit_clobber (XEXP (x, 1));
5151 }
5152 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5153}
5154
5155/* Return a sequence of insns to clobber lvalue X. */
5156
722334ea 5157rtx_insn *
18b42941 5158gen_clobber (rtx x)
5159{
722334ea 5160 rtx_insn *seq;
18b42941 5161
5162 start_sequence ();
5163 emit_clobber (x);
5164 seq = get_insns ();
5165 end_sequence ();
5166 return seq;
5167}
5168
5169/* Emit a use of rvalue X. */
5170
722334ea 5171rtx_insn *
18b42941 5172emit_use (rtx x)
5173{
5174 /* CONCATs should not appear in the insn stream. */
5175 if (GET_CODE (x) == CONCAT)
5176 {
5177 emit_use (XEXP (x, 0));
5178 return emit_use (XEXP (x, 1));
5179 }
5180 return emit_insn (gen_rtx_USE (VOIDmode, x));
5181}
5182
5183/* Return a sequence of insns to use rvalue X. */
5184
722334ea 5185rtx_insn *
18b42941 5186gen_use (rtx x)
5187{
722334ea 5188 rtx_insn *seq;
18b42941 5189
5190 start_sequence ();
5191 emit_use (x);
5192 seq = get_insns ();
5193 end_sequence ();
5194 return seq;
5195}
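
/* A usage sketch (hypothetical helper): emit a USE so dataflow keeps
   REG live up to this point, then a CLOBBER so following code treats
   the old value as dead.  Both expanders split CONCATs as shown
   above.  */

static void ATTRIBUTE_UNUSED
example_use_then_clobber (rtx reg)
{
  emit_use (reg);	/* (use REG): REG's value is needed here.  */
  emit_clobber (reg);	/* (clobber REG): REG is overwritten here.  */
}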
5196
3a286419 5197/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5198 Return the set in INSN that such notes describe, or NULL if the notes
5199 have no meaning for INSN. */
5200
5201rtx
5202set_for_reg_notes (rtx insn)
5203{
5204 rtx pat, reg;
5205
5206 if (!INSN_P (insn))
5207 return NULL_RTX;
5208
5209 pat = PATTERN (insn);
5210 if (GET_CODE (pat) == PARALLEL)
5211 {
5212 /* We do not use single_set because that ignores SETs of unused
5213 registers. REG_EQUAL and REG_EQUIV notes really do require the
5214 PARALLEL to have a single SET. */
5215 if (multiple_sets (insn))
5216 return NULL_RTX;
5217 pat = XVECEXP (pat, 0, 0);
5218 }
5219
5220 if (GET_CODE (pat) != SET)
5221 return NULL_RTX;
5222
5223 reg = SET_DEST (pat);
5224
5225 /* Notes apply to the contents of a STRICT_LOW_PART. */
f2c7e335 5226 if (GET_CODE (reg) == STRICT_LOW_PART
5227 || GET_CODE (reg) == ZERO_EXTRACT)
3a286419 5228 reg = XEXP (reg, 0);
5229
5230 /* Check that we have a register. */
5231 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5232 return NULL_RTX;
5233
5234 return pat;
5235}
5236
f1934a33 5237/* Place a note of KIND on insn INSN with DATUM as the datum. If a
6312a35e 5238 note of this type already exists, remove it first. */
f1934a33 5239
c080d8f0 5240rtx
35cb5232 5241set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
f1934a33 5242{
5243 rtx note = find_reg_note (insn, kind, NULL_RTX);
5244
7e6224ab 5245 switch (kind)
5246 {
5247 case REG_EQUAL:
5248 case REG_EQUIV:
3a286419 5249 if (!set_for_reg_notes (insn))
5250 return NULL_RTX;
7e6224ab 5251
5252 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5253 It serves no useful purpose and breaks eliminate_regs. */
5254 if (GET_CODE (datum) == ASM_OPERANDS)
5255 return NULL_RTX;
2f8cf22c 5256
5257 /* Notes with side effects are dangerous. Even if the side-effect
5258 initially mirrors one in PATTERN (INSN), later optimizations
5259 might alter the way that the final register value is calculated
5260 and so move or alter the side-effect in some way. The note would
5261 then no longer be a valid substitution for SET_SRC. */
5262 if (side_effects_p (datum))
5263 return NULL_RTX;
7e6224ab 5264 break;
5265
5266 default:
5267 break;
5268 }
c080d8f0 5269
3a286419 5270 if (note)
5271 XEXP (note, 0) = datum;
5272 else
5273 {
5274 add_reg_note (insn, kind, datum);
5275 note = REG_NOTES (insn);
5276 }
3072d30e 5277
5278 switch (kind)
c080d8f0 5279 {
3072d30e 5280 case REG_EQUAL:
5281 case REG_EQUIV:
e149ca56 5282 df_notes_rescan (as_a <rtx_insn *> (insn));
3072d30e 5283 break;
5284 default:
5285 break;
c080d8f0 5286 }
f1934a33 5287
3a286419 5288 return note;
f1934a33 5289}
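
/* A usage sketch (hypothetical caller): record that INSN's destination
   is known to equal the constant 42.  The checks above silently drop
   the note for multiple-set insns, ASM_OPERANDS sources, and datums
   with side effects.  */

static void ATTRIBUTE_UNUSED
example_note_constant (rtx_insn *insn)
{
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
}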
41cf444a 5290
5291/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5292rtx
5293set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5294{
3a286419 5295 rtx set = set_for_reg_notes (insn);
41cf444a 5296
5297 if (set && SET_DEST (set) == dst)
5298 return set_unique_reg_note (insn, kind, datum);
5299 return NULL_RTX;
5300}
15bbde2b 5301\f
16d83c02 5302/* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5303 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5304 is true.
5305
15bbde2b 5306 If X is a label, it is simply added into the insn chain. */
5307
722334ea 5308rtx_insn *
16d83c02 5309emit (rtx x, bool allow_barrier_p)
15bbde2b 5310{
5311 enum rtx_code code = classify_insn (x);
5312
611234b4 5313 switch (code)
15bbde2b 5314 {
611234b4 5315 case CODE_LABEL:
5316 return emit_label (x);
5317 case INSN:
5318 return emit_insn (x);
5319 case JUMP_INSN:
5320 {
722334ea 5321 rtx_insn *insn = emit_jump_insn (x);
16d83c02 5322 if (allow_barrier_p
5323 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
611234b4 5324 return emit_barrier ();
5325 return insn;
5326 }
5327 case CALL_INSN:
5328 return emit_call_insn (x);
9845d120 5329 case DEBUG_INSN:
5330 return emit_debug_insn (x);
611234b4 5331 default:
5332 gcc_unreachable ();
15bbde2b 5333 }
15bbde2b 5334}
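
/* A usage sketch: ret_rtx classifies as a jump, so emit () creates a
   jump insn for it and, since a RETURN is unconditional, also emits
   the following barrier when ALLOW_BARRIER_P is true.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_emit_return (void)
{
  return emit (ret_rtx, true);
}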
5335\f
1f3233d1 5336/* Space for free sequence stack entries. */
7035b2ab 5337static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
1f3233d1 5338
735f4358 5339/* Begin emitting insns to a sequence. If this sequence will contain
5340 something that might cause the compiler to pop arguments to function
5341 calls (because those pops have previously been deferred; see
5342 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5343 before calling this function. That will ensure that the deferred
5344 pops are not accidentally emitted in the middle of this sequence. */
15bbde2b 5345
5346void
35cb5232 5347start_sequence (void)
15bbde2b 5348{
5349 struct sequence_stack *tem;
5350
1f3233d1 5351 if (free_sequence_stack != NULL)
5352 {
5353 tem = free_sequence_stack;
5354 free_sequence_stack = tem->next;
5355 }
5356 else
25a27413 5357 tem = ggc_alloc<sequence_stack> ();
15bbde2b 5358
c36aa54b 5359 tem->next = get_current_sequence ()->next;
06f9d6ef 5360 tem->first = get_insns ();
5361 tem->last = get_last_insn ();
c36aa54b 5362 get_current_sequence ()->next = tem;
15bbde2b 5363
06f9d6ef 5364 set_first_insn (0);
5365 set_last_insn (0);
15bbde2b 5366}
5367
b49854c6 5368/* Set up the insn chain starting with FIRST as the current sequence,
5369 saving the previously current one. See the documentation for
5370 start_sequence for more information about how to use this function. */
15bbde2b 5371
5372void
57c26b3a 5373push_to_sequence (rtx_insn *first)
15bbde2b 5374{
57c26b3a 5375 rtx_insn *last;
15bbde2b 5376
5377 start_sequence ();
5378
3c802a1e 5379 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5380 ;
15bbde2b 5381
06f9d6ef 5382 set_first_insn (first);
5383 set_last_insn (last);
15bbde2b 5384}
5385
28bf151d 5386/* Like push_to_sequence, but take the last insn as an argument to avoid
5387 looping through the list. */
5388
5389void
57c26b3a 5390push_to_sequence2 (rtx_insn *first, rtx_insn *last)
28bf151d 5391{
5392 start_sequence ();
5393
06f9d6ef 5394 set_first_insn (first);
5395 set_last_insn (last);
28bf151d 5396}
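
/* A usage sketch (hypothetical helper): append PAT to the detached
   chain FIRST..LAST without rescanning for the chain's end, and return
   the new last insn.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_append_insn (rtx_insn *first, rtx_insn *last, rtx pat)
{
  push_to_sequence2 (first, last);
  emit_insn (pat);
  rtx_insn *new_last = get_last_insn ();
  end_sequence ();
  return new_last;
}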
5397
ab74c92f 5398/* Set up the outer-level insn chain
5399 as the current sequence, saving the previously current one. */
5400
5401void
35cb5232 5402push_topmost_sequence (void)
ab74c92f 5403{
c36aa54b 5404 struct sequence_stack *top;
ab74c92f 5405
5406 start_sequence ();
5407
c36aa54b 5408 top = get_topmost_sequence ();
06f9d6ef 5409 set_first_insn (top->first);
5410 set_last_insn (top->last);
ab74c92f 5411}
5412
5413/* After emitting to the outer-level insn chain, update that chain's
 5414 saved bounds and restore the previously saved state. */
5415
5416void
35cb5232 5417pop_topmost_sequence (void)
ab74c92f 5418{
c36aa54b 5419 struct sequence_stack *top;
ab74c92f 5420
c36aa54b 5421 top = get_topmost_sequence ();
06f9d6ef 5422 top->first = get_insns ();
5423 top->last = get_last_insn ();
ab74c92f 5424
5425 end_sequence ();
5426}
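
/* A usage sketch (hypothetical helper): emit PAT at the very end of
   the function's outermost insn chain, even while nested sequences
   are in progress.  */

static void ATTRIBUTE_UNUSED
example_emit_at_function_end (rtx pat)
{
  push_topmost_sequence ();
  emit_insn (pat);
  pop_topmost_sequence ();
}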
5427
15bbde2b 5428/* After emitting to a sequence, restore previous saved state.
5429
b49854c6 5430 To get the contents of the sequence just made, you must call
31d3e01c 5431 `get_insns' *before* calling here.
b49854c6 5432
5433 If the compiler might have deferred popping arguments while
5434 generating this sequence, and this sequence will not be immediately
5435 inserted into the instruction stream, use do_pending_stack_adjust
31d3e01c 5436 before calling get_insns. That will ensure that the deferred
b49854c6 5437 pops are inserted into this sequence, and not into some random
5438 location in the instruction stream. See INHIBIT_DEFER_POP for more
5439 information about deferred popping of arguments. */
15bbde2b 5440
5441void
35cb5232 5442end_sequence (void)
15bbde2b 5443{
c36aa54b 5444 struct sequence_stack *tem = get_current_sequence ()->next;
15bbde2b 5445
06f9d6ef 5446 set_first_insn (tem->first);
5447 set_last_insn (tem->last);
c36aa54b 5448 get_current_sequence ()->next = tem->next;
15bbde2b 5449
1f3233d1 5450 memset (tem, 0, sizeof (*tem));
5451 tem->next = free_sequence_stack;
5452 free_sequence_stack = tem;
15bbde2b 5453}
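
/* A usage sketch (hypothetical helper): build a detached sequence that
   copies SRC into DST.  Note that get_insns () is called before
   end_sequence (), as the comment above requires.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_gen_move_seq (rtx dst, rtx src)
{
  start_sequence ();
  emit_move_insn (dst, src);
  rtx_insn *seq = get_insns ();
  end_sequence ();
  return seq;
}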
5454
5455/* Return 1 if currently emitting into a sequence. */
5456
5457int
35cb5232 5458in_sequence_p (void)
15bbde2b 5459{
c36aa54b 5460 return get_current_sequence ()->next != 0;
15bbde2b 5461}
15bbde2b 5462\f
02ebfa52 5463/* Put the various virtual registers into REGNO_REG_RTX. */
5464
2f3874ce 5465static void
b079a207 5466init_virtual_regs (void)
02ebfa52 5467{
b079a207 5468 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5469 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5470 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5471 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5472 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
60778e62 5473 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5474 = virtual_preferred_stack_boundary_rtx;
0a893c29 5475}
5476
928d57e3 5477\f
5478/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5479static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5480static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5481static int copy_insn_n_scratches;
5482
5483/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5484 copied an ASM_OPERANDS.
5485 In that case, it is the original input-operand vector. */
5486static rtvec orig_asm_operands_vector;
5487
5488/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5489 copied an ASM_OPERANDS.
5490 In that case, it is the copied input-operand vector. */
5491static rtvec copy_asm_operands_vector;
5492
5493/* Likewise for the constraints vector. */
5494static rtvec orig_asm_constraints_vector;
5495static rtvec copy_asm_constraints_vector;
5496
5497/* Recursively create a new copy of an rtx for copy_insn.
5498 This function differs from copy_rtx in that it handles SCRATCHes and
5499 ASM_OPERANDs properly.
5500 Normally, this function is not used directly; use copy_insn as a front end.
5501 However, you could first copy an insn pattern with copy_insn and then use
5502 this function afterwards to properly copy any REG_NOTEs containing
5503 SCRATCHes. */
5504
5505rtx
35cb5232 5506copy_insn_1 (rtx orig)
928d57e3 5507{
19cb6b50 5508 rtx copy;
5509 int i, j;
5510 RTX_CODE code;
5511 const char *format_ptr;
928d57e3 5512
25e880b1 5513 if (orig == NULL)
5514 return NULL;
5515
928d57e3 5516 code = GET_CODE (orig);
5517
5518 switch (code)
5519 {
5520 case REG:
d7fce3c8 5521 case DEBUG_EXPR:
0349edce 5522 CASE_CONST_ANY:
928d57e3 5523 case SYMBOL_REF:
5524 case CODE_LABEL:
5525 case PC:
5526 case CC0:
e0691b9a 5527 case RETURN:
9cb2517e 5528 case SIMPLE_RETURN:
928d57e3 5529 return orig;
c09425a0 5530 case CLOBBER:
b291008a 5531 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5532 clobbers or clobbers of hard registers that originated as pseudos.
5533 This is needed to allow safe register renaming. */
5534 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5535 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
c09425a0 5536 return orig;
5537 break;
928d57e3 5538
5539 case SCRATCH:
5540 for (i = 0; i < copy_insn_n_scratches; i++)
5541 if (copy_insn_scratch_in[i] == orig)
5542 return copy_insn_scratch_out[i];
5543 break;
5544
5545 case CONST:
3072d30e 5546 if (shared_const_p (orig))
928d57e3 5547 return orig;
5548 break;
d823ba47 5549
928d57e3 5550 /* A MEM with a constant address is not sharable. The problem is that
5551 the constant address may need to be reloaded. If the mem is shared,
5552 then reloading one copy of this mem will cause all copies to appear
5553 to have been reloaded. */
5554
5555 default:
5556 break;
5557 }
5558
f2d0e9f1 5559 /* Copy the various flags, fields, and other information. We assume
5560 that all fields need copying, and then clear the fields that should
928d57e3 5561 not be copied. That is the sensible default behavior, and forces
5562 us to explicitly document why we are *not* copying a flag. */
f2d0e9f1 5563 copy = shallow_copy_rtx (orig);
928d57e3 5564
5565 /* We do not copy the USED flag, which is used as a mark bit during
5566 walks over the RTL. */
7c25cb91 5567 RTX_FLAG (copy, used) = 0;
928d57e3 5568
5569 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
6720e96c 5570 if (INSN_P (orig))
928d57e3 5571 {
7c25cb91 5572 RTX_FLAG (copy, jump) = 0;
5573 RTX_FLAG (copy, call) = 0;
5574 RTX_FLAG (copy, frame_related) = 0;
928d57e3 5575 }
d823ba47 5576
928d57e3 5577 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5578
5579 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
f2d0e9f1 5580 switch (*format_ptr++)
5581 {
5582 case 'e':
5583 if (XEXP (orig, i) != NULL)
5584 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5585 break;
928d57e3 5586
f2d0e9f1 5587 case 'E':
5588 case 'V':
5589 if (XVEC (orig, i) == orig_asm_constraints_vector)
5590 XVEC (copy, i) = copy_asm_constraints_vector;
5591 else if (XVEC (orig, i) == orig_asm_operands_vector)
5592 XVEC (copy, i) = copy_asm_operands_vector;
5593 else if (XVEC (orig, i) != NULL)
5594 {
5595 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5596 for (j = 0; j < XVECLEN (copy, i); j++)
5597 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5598 }
5599 break;
928d57e3 5600
f2d0e9f1 5601 case 't':
5602 case 'w':
5603 case 'i':
5604 case 's':
5605 case 'S':
5606 case 'u':
5607 case '0':
5608 /* These are left unchanged. */
5609 break;
928d57e3 5610
f2d0e9f1 5611 default:
5612 gcc_unreachable ();
5613 }
928d57e3 5614
5615 if (code == SCRATCH)
5616 {
5617 i = copy_insn_n_scratches++;
611234b4 5618 gcc_assert (i < MAX_RECOG_OPERANDS);
928d57e3 5619 copy_insn_scratch_in[i] = orig;
5620 copy_insn_scratch_out[i] = copy;
5621 }
5622 else if (code == ASM_OPERANDS)
5623 {
d91f2122 5624 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5625 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5626 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5627 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
928d57e3 5628 }
5629
5630 return copy;
5631}
5632
5633/* Create a new copy of an rtx.
5634 This function differs from copy_rtx in that it handles SCRATCHes and
5635 ASM_OPERANDs properly.
5636 INSN doesn't really have to be a full INSN; it could be just the
5637 pattern. */
5638rtx
35cb5232 5639copy_insn (rtx insn)
928d57e3 5640{
5641 copy_insn_n_scratches = 0;
5642 orig_asm_operands_vector = 0;
5643 orig_asm_constraints_vector = 0;
5644 copy_asm_operands_vector = 0;
5645 copy_asm_constraints_vector = 0;
5646 return copy_insn_1 (insn);
5647}
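
/* A usage sketch of the protocol described above copy_insn_1: copy the
   pattern with copy_insn first, then copy the REG_NOTES with
   copy_insn_1 so that SCRATCHes in the notes map to the same fresh
   SCRATCHes as in the pattern.  The out-parameters are hypothetical.  */

static void ATTRIBUTE_UNUSED
example_copy_pattern_and_notes (rtx_insn *insn, rtx *pat, rtx *notes)
{
  *pat = copy_insn (PATTERN (insn));
  *notes = copy_insn_1 (REG_NOTES (insn));
}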
02ebfa52 5648
a9abe1f1 5649/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5650 on the assumption that INSN itself remains in its original place. */
5651
575a12f2 5652rtx_insn *
5653copy_delay_slot_insn (rtx_insn *insn)
a9abe1f1 5654{
5655 /* Copy INSN with its rtx_code, all its notes, location etc. */
575a12f2 5656 insn = as_a <rtx_insn *> (copy_rtx (insn));
a9abe1f1 5657 INSN_UID (insn) = cur_insn_uid++;
5658 return insn;
5659}
5660
15bbde2b 5661/* Initialize data structures and variables in this file
5662 before generating rtl for each function. */
5663
5664void
35cb5232 5665init_emit (void)
15bbde2b 5666{
06f9d6ef 5667 set_first_insn (NULL);
5668 set_last_insn (NULL);
9845d120 5669 if (MIN_NONDEBUG_INSN_UID)
5670 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5671 else
5672 cur_insn_uid = 1;
5673 cur_debug_insn_uid = 1;
15bbde2b 5674 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
15bbde2b 5675 first_label_num = label_num;
c36aa54b 5676 get_current_sequence ()->next = NULL;
15bbde2b 5677
15bbde2b 5678 /* Init the tables that describe all the pseudo regs. */
5679
fd6ffb7c 5680 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
15bbde2b 5681
fd6ffb7c 5682 crtl->emit.regno_pointer_align
2457c754 5683 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
d4c332ff 5684
25a27413 5685 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
fcdc122e 5686
936082bb 5687 /* Put copies of all the hard registers into regno_reg_rtx. */
90295bd2 5688 memcpy (regno_reg_rtx,
679bcc8d 5689 initial_regno_reg_rtx,
90295bd2 5690 FIRST_PSEUDO_REGISTER * sizeof (rtx));
936082bb 5691
15bbde2b 5692 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
b079a207 5693 init_virtual_regs ();
888e0d33 5694
5695 /* Indicate that the virtual registers and stack locations are
5696 all pointers. */
e61a0a7f 5697 REG_POINTER (stack_pointer_rtx) = 1;
5698 REG_POINTER (frame_pointer_rtx) = 1;
5699 REG_POINTER (hard_frame_pointer_rtx) = 1;
5700 REG_POINTER (arg_pointer_rtx) = 1;
888e0d33 5701
e61a0a7f 5702 REG_POINTER (virtual_incoming_args_rtx) = 1;
5703 REG_POINTER (virtual_stack_vars_rtx) = 1;
5704 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5705 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5706 REG_POINTER (virtual_cfa_rtx) = 1;
89525da0 5707
d4c332ff 5708#ifdef STACK_BOUNDARY
80909c64 5709 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5710 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5711 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5712 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5713
5714 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5715 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5716 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5717 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5718 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
d4c332ff 5719#endif
5720
89525da0 5721#ifdef INIT_EXPANDERS
5722 INIT_EXPANDERS;
5723#endif
15bbde2b 5724}
5725
6e68dcb2 5726/* Generate a vector constant for mode MODE and constant value CONSTANT. */
886cfd4f 5727
5728static rtx
3754d046 5729gen_const_vector (machine_mode mode, int constant)
886cfd4f 5730{
5731 rtx tem;
5732 rtvec v;
5733 int units, i;
3754d046 5734 machine_mode inner;
886cfd4f 5735
5736 units = GET_MODE_NUNITS (mode);
5737 inner = GET_MODE_INNER (mode);
5738
069b07bf 5739 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5740
886cfd4f 5741 v = rtvec_alloc (units);
5742
6e68dcb2 5743 /* We need to call this function after we set the scalar const_tiny_rtx
5744 entries. */
5745 gcc_assert (const_tiny_rtx[constant][(int) inner]);
886cfd4f 5746
5747 for (i = 0; i < units; ++i)
6e68dcb2 5748 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
886cfd4f 5749
9426b612 5750 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
886cfd4f 5751 return tem;
5752}
5753
9426b612 5754/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
6e68dcb2 5755 all elements are zero, and the one vector when all elements are one. */
9426b612 5756rtx
3754d046 5757gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
9426b612 5758{
3754d046 5759 machine_mode inner = GET_MODE_INNER (mode);
6e68dcb2 5760 int nunits = GET_MODE_NUNITS (mode);
5761 rtx x;
9426b612 5762 int i;
5763
6e68dcb2 5764 /* Check to see if all of the elements have the same value. */
5765 x = RTVEC_ELT (v, nunits - 1);
5766 for (i = nunits - 2; i >= 0; i--)
5767 if (RTVEC_ELT (v, i) != x)
5768 break;
5769
5770 /* If the values are all the same, check to see if we can use one of the
5771 standard constant vectors. */
5772 if (i == -1)
5773 {
5774 if (x == CONST0_RTX (inner))
5775 return CONST0_RTX (mode);
5776 else if (x == CONST1_RTX (inner))
5777 return CONST1_RTX (mode);
ba8dfb08 5778 else if (x == CONSTM1_RTX (inner))
5779 return CONSTM1_RTX (mode);
6e68dcb2 5780 }
5781
5782 return gen_rtx_raw_CONST_VECTOR (mode, v);
9426b612 5783}
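
/* A usage sketch (hypothetical helper): build an all-zero constant
   vector for MODE.  Because every element is CONST0_RTX of the inner
   mode, gen_rtx_CONST_VECTOR returns the shared CONST0_RTX (MODE)
   instead of allocating a new rtx.  */

static rtx ATTRIBUTE_UNUSED
example_zero_vector (machine_mode mode)
{
  int n = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (n);
  for (int i = 0; i < n; i++)
    RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));
  return gen_rtx_CONST_VECTOR (mode, v);
}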
5784
6d8b68a3 5785/* Initialize global register information required by all functions. */
5786
5787void
5788init_emit_regs (void)
5789{
5790 int i;
3754d046 5791 machine_mode mode;
d83fcaa1 5792 mem_attrs *attrs;
6d8b68a3 5793
5794 /* Reset register attributes */
f863a586 5795 reg_attrs_htab->empty ();
6d8b68a3 5796
5797 /* We need reg_raw_mode, so initialize the modes now. */
5798 init_reg_modes_target ();
5799
5800 /* Assign register numbers to the globally defined register rtx. */
6d8b68a3 5801 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5802 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5803 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5804 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5805 virtual_incoming_args_rtx =
5806 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5807 virtual_stack_vars_rtx =
5808 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5809 virtual_stack_dynamic_rtx =
5810 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5811 virtual_outgoing_args_rtx =
5812 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5813 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
60778e62 5814 virtual_preferred_stack_boundary_rtx =
5815 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6d8b68a3 5816
5817 /* Initialize RTL for commonly used hard registers. These are
5818 copied into regno_reg_rtx as we begin to compile each function. */
5819 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
679bcc8d 5820 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6d8b68a3 5821
5822#ifdef RETURN_ADDRESS_POINTER_REGNUM
5823 return_address_pointer_rtx
5824 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5825#endif
5826
639f32a2 5827 pic_offset_table_rtx = NULL_RTX;
6d8b68a3 5828 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5829 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
d83fcaa1 5830
5831 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5832 {
3754d046 5833 mode = (machine_mode) i;
25a27413 5834 attrs = ggc_cleared_alloc<mem_attrs> ();
d83fcaa1 5835 attrs->align = BITS_PER_UNIT;
5836 attrs->addrspace = ADDR_SPACE_GENERIC;
5837 if (mode != BLKmode)
5838 {
6d58bcba 5839 attrs->size_known_p = true;
5840 attrs->size = GET_MODE_SIZE (mode);
d83fcaa1 5841 if (STRICT_ALIGNMENT)
5842 attrs->align = GET_MODE_ALIGNMENT (mode);
5843 }
5844 mode_mem_attrs[i] = attrs;
5845 }
6d8b68a3 5846}
5847
8059b95a 5848/* Initialize global machine_mode variables. */
5849
5850void
5851init_derived_machine_modes (void)
5852{
5853 byte_mode = VOIDmode;
5854 word_mode = VOIDmode;
5855
3754d046 5856 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8059b95a 5857 mode != VOIDmode;
5858 mode = GET_MODE_WIDER_MODE (mode))
5859 {
5860 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5861 && byte_mode == VOIDmode)
5862 byte_mode = mode;
5863
5864 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5865 && word_mode == VOIDmode)
5866 word_mode = mode;
5867 }
5868
5869 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5870}
5871
01703575 5872/* Create some permanent unique rtl objects shared between all functions. */
15bbde2b 5873
5874void
01703575 5875init_emit_once (void)
15bbde2b 5876{
5877 int i;
3754d046 5878 machine_mode mode;
5879 machine_mode double_mode;
15bbde2b 5880
e913b5cd 5881 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5882 CONST_FIXED, and register attribute hash tables. */
f863a586 5883 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
c6259b83 5884
e913b5cd 5885#if TARGET_SUPPORTS_WIDE_INT
f863a586 5886 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
e913b5cd 5887#endif
f863a586 5888 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
2ff23ed0 5889
f863a586 5890 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
e397ad8e 5891
f863a586 5892 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
77695070 5893
57c097d5 5894#ifdef INIT_EXPANDERS
ab5beff9 5895 /* This is to initialize {init|mark|free}_machine_status before the first
5896 call to push_function_context_to. This is needed by the Chill front
3fb1e43b 5897 end which calls push_function_context_to before the first call to
57c097d5 5898 init_function_start. */
5899 INIT_EXPANDERS;
5900#endif
5901
15bbde2b 5902 /* Create the unique rtx's for certain rtx codes and operand values. */
5903
8fd5918e 5904 /* Don't use gen_rtx_CONST_INT here, since in this case it would
7014838c 5905 try to look up the value in the very table being initialized. */
15bbde2b 5906 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
d823ba47 5907 const_int_rtx[i + MAX_SAVED_CONST_INT] =
a717d5b4 5908 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
15bbde2b 5909
1a60f06a 5910 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5911 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
57c097d5 5912 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
1a60f06a 5913 else
3ad7bb1c 5914 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
15bbde2b 5915
8059b95a 5916 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5917
cc69d08a 5918 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5919 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5920 real_from_integer (&dconst2, double_mode, 2, SIGNED);
3fa759a9 5921
5922 dconstm1 = dconst1;
5923 dconstm1.sign = 1;
77e89269 5924
5925 dconsthalf = dconst1;
9d96125b 5926 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
15bbde2b 5927
ba8dfb08 5928 for (i = 0; i < 3; i++)
15bbde2b 5929 {
3fa759a9 5930 const REAL_VALUE_TYPE *const r =
badfe841 5931 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5932
069b07bf 5933 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5934 mode != VOIDmode;
5935 mode = GET_MODE_WIDER_MODE (mode))
5936 const_tiny_rtx[i][(int) mode] =
d5f9611d 5937 const_double_from_real_value (*r, mode);
069b07bf 5938
5939 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5940 mode != VOIDmode;
15bbde2b 5941 mode = GET_MODE_WIDER_MODE (mode))
2ff23ed0 5942 const_tiny_rtx[i][(int) mode] =
d5f9611d 5943 const_double_from_real_value (*r, mode);
15bbde2b 5944
b572011e 5945 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
15bbde2b 5946
069b07bf 5947 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5948 mode != VOIDmode;
15bbde2b 5949 mode = GET_MODE_WIDER_MODE (mode))
b572011e 5950 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
7540dcc4 5951
8c20007a 5952 for (mode = MIN_MODE_PARTIAL_INT;
5953 mode <= MAX_MODE_PARTIAL_INT;
3754d046 5954 mode = (machine_mode)((int)(mode) + 1))
7540dcc4 5955 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
15bbde2b 5956 }
5957
ba8dfb08 5958 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5959
5960 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5961 mode != VOIDmode;
5962 mode = GET_MODE_WIDER_MODE (mode))
5963 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5964
8c20007a 5965 for (mode = MIN_MODE_PARTIAL_INT;
5966 mode <= MAX_MODE_PARTIAL_INT;
3754d046 5967 mode = (machine_mode)((int)(mode) + 1))
dd276d20 5968 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5969
4248fc32 5970 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5971 mode != VOIDmode;
5972 mode = GET_MODE_WIDER_MODE (mode))
5973 {
5974 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5975 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5976 }
5977
5978 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5979 mode != VOIDmode;
5980 mode = GET_MODE_WIDER_MODE (mode))
5981 {
5982 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5983 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5984 }
5985
886cfd4f 5986 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5987 mode != VOIDmode;
5988 mode = GET_MODE_WIDER_MODE (mode))
6e68dcb2 5989 {
5990 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5991 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
ba8dfb08 5992 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6e68dcb2 5993 }
886cfd4f 5994
5995 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5996 mode != VOIDmode;
5997 mode = GET_MODE_WIDER_MODE (mode))
6e68dcb2 5998 {
5999 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6000 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6001 }
886cfd4f 6002
06f0b99c 6003 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6004 mode != VOIDmode;
6005 mode = GET_MODE_WIDER_MODE (mode))
6006 {
9af5ce0c 6007 FCONST0 (mode).data.high = 0;
6008 FCONST0 (mode).data.low = 0;
6009 FCONST0 (mode).mode = mode;
e397ad8e 6010 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6011 FCONST0 (mode), mode);
06f0b99c 6012 }
6013
6014 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6015 mode != VOIDmode;
6016 mode = GET_MODE_WIDER_MODE (mode))
6017 {
9af5ce0c 6018 FCONST0 (mode).data.high = 0;
6019 FCONST0 (mode).data.low = 0;
6020 FCONST0 (mode).mode = mode;
e397ad8e 6021 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6022 FCONST0 (mode), mode);
06f0b99c 6023 }
6024
6025 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6026 mode != VOIDmode;
6027 mode = GET_MODE_WIDER_MODE (mode))
6028 {
9af5ce0c 6029 FCONST0 (mode).data.high = 0;
6030 FCONST0 (mode).data.low = 0;
6031 FCONST0 (mode).mode = mode;
e397ad8e 6032 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6033 FCONST0 (mode), mode);
06f0b99c 6034
6035 /* We store the value 1. */
9af5ce0c 6036 FCONST1 (mode).data.high = 0;
6037 FCONST1 (mode).data.low = 0;
6038 FCONST1 (mode).mode = mode;
6039 FCONST1 (mode).data
d67b7119 6040 = double_int_one.lshift (GET_MODE_FBIT (mode),
6041 HOST_BITS_PER_DOUBLE_INT,
6042 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 6043 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6044 FCONST1 (mode), mode);
06f0b99c 6045 }
6046
6047 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6048 mode != VOIDmode;
6049 mode = GET_MODE_WIDER_MODE (mode))
6050 {
9af5ce0c 6051 FCONST0 (mode).data.high = 0;
6052 FCONST0 (mode).data.low = 0;
6053 FCONST0 (mode).mode = mode;
e397ad8e 6054 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6055 FCONST0 (mode), mode);
06f0b99c 6056
6057 /* We store the value 1. */
9af5ce0c 6058 FCONST1 (mode).data.high = 0;
6059 FCONST1 (mode).data.low = 0;
6060 FCONST1 (mode).mode = mode;
6061 FCONST1 (mode).data
d67b7119 6062 = double_int_one.lshift (GET_MODE_FBIT (mode),
6063 HOST_BITS_PER_DOUBLE_INT,
6064 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 6065 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6066 FCONST1 (mode), mode);
6067 }
6068
6069 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6070 mode != VOIDmode;
6071 mode = GET_MODE_WIDER_MODE (mode))
6072 {
6073 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6074 }
6075
6076 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6077 mode != VOIDmode;
6078 mode = GET_MODE_WIDER_MODE (mode))
6079 {
6080 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6081 }
6082
6083 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6084 mode != VOIDmode;
6085 mode = GET_MODE_WIDER_MODE (mode))
6086 {
6087 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6088 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6089 }
6090
6091 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6092 mode != VOIDmode;
6093 mode = GET_MODE_WIDER_MODE (mode))
6094 {
6095 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6096 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
06f0b99c 6097 }
6098
0fd4500a 6099 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
3754d046 6100 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
0fd4500a 6101 const_tiny_rtx[0][i] = const0_rtx;
15bbde2b 6102
065336b4 6103 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6104 if (STORE_FLAG_VALUE == 1)
6105 const_tiny_rtx[1][(int) BImode] = const1_rtx;
7d7b0bac 6106
058a1b7a 6107 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6108 mode != VOIDmode;
6109 mode = GET_MODE_WIDER_MODE (mode))
6110 {
6111 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6112 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6113 }
6114
7d7b0bac 6115 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6116 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6117 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6118 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
f9a00e9e 6119 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6120 /*prev_insn=*/NULL,
6121 /*next_insn=*/NULL,
6122 /*bb=*/NULL,
6123 /*pattern=*/NULL_RTX,
6124 /*location=*/-1,
6125 CODE_FOR_nothing,
6126 /*reg_notes=*/NULL_RTX);
15bbde2b 6127}
ac6c481d 6128\f
cd0fe062 6129/* Produce exact duplicate of insn INSN after AFTER.
6130 Take care to update libcall regions if present. */
6131
722334ea 6132rtx_insn *
5e9c670f 6133emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
cd0fe062 6134{
722334ea 6135 rtx_insn *new_rtx;
6136 rtx link;
cd0fe062 6137
6138 switch (GET_CODE (insn))
6139 {
6140 case INSN:
9ce37fa7 6141 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 6142 break;
6143
6144 case JUMP_INSN:
9ce37fa7 6145 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
01762951 6146 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
cd0fe062 6147 break;
6148
9845d120 6149 case DEBUG_INSN:
6150 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6151 break;
6152
cd0fe062 6153 case CALL_INSN:
9ce37fa7 6154 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 6155 if (CALL_INSN_FUNCTION_USAGE (insn))
9ce37fa7 6156 CALL_INSN_FUNCTION_USAGE (new_rtx)
cd0fe062 6157 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
9ce37fa7 6158 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6159 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6160 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
48e1416a 6161 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
9c2a0c05 6162 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
cd0fe062 6163 break;
6164
6165 default:
611234b4 6166 gcc_unreachable ();
cd0fe062 6167 }
6168
6169 /* Update LABEL_NUSES. */
9ce37fa7 6170 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
cd0fe062 6171
5169661d 6172 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
ab87d1bc 6173
98116afd 6174 /* If the old insn is frame related, then so is the new one. This is
6175 primarily needed for IA-64 unwind info which marks epilogue insns,
6176 which may be duplicated by the basic block reordering code. */
9ce37fa7 6177 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
98116afd 6178
19d2fe05 6179 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6180 will make them. REG_LABEL_TARGETs are created there too, but are
6181 supposed to be sticky, so we copy them. */
cd0fe062 6182 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
19d2fe05 6183 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
cd0fe062 6184 {
6185 if (GET_CODE (link) == EXPR_LIST)
9ce37fa7 6186 add_reg_note (new_rtx, REG_NOTE_KIND (link),
a1ddb869 6187 copy_insn_1 (XEXP (link, 0)));
cd0fe062 6188 else
9eb946de 6189 add_shallow_copy_of_reg_note (new_rtx, link);
cd0fe062 6190 }
6191
9ce37fa7 6192 INSN_CODE (new_rtx) = INSN_CODE (insn);
6193 return new_rtx;
cd0fe062 6194}
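
/* A usage sketch (hypothetical): duplicate INSN directly after itself,
   copying its notes, flags and location as described above.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_duplicate_insn (rtx_insn *insn)
{
  return emit_copy_of_insn_after (insn, insn);
}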
1f3233d1 6195
7035b2ab 6196static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
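/* Return a CLOBBER of hard register REGNO in MODE, caching the rtx in
   hard_reg_clobbers so that repeated requests share one object.  */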
c09425a0 6197rtx
3754d046 6198gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
c09425a0 6199{
6200 if (hard_reg_clobbers[mode][regno])
6201 return hard_reg_clobbers[mode][regno];
6202 else
6203 return (hard_reg_clobbers[mode][regno] =
6204 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6205}
6206
5169661d 6207location_t prologue_location;
6208location_t epilogue_location;
23a070f3 6209
6210/* Hold current location information and last location information, so the
6211 datastructures are built lazily only when some instructions in given
6212 place are needed. */
c7abeac5 6213static location_t curr_location;
23a070f3 6214
5169661d 6215/* Allocate insn location datastructure. */
23a070f3 6216void
5169661d 6217insn_locations_init (void)
23a070f3 6218{
5169661d 6219 prologue_location = epilogue_location = 0;
23a070f3 6220 curr_location = UNKNOWN_LOCATION;
23a070f3 6221}
6222
6223/* At the end of emit stage, clear current location. */
6224void
5169661d 6225insn_locations_finalize (void)
23a070f3 6226{
5169661d 6227 epilogue_location = curr_location;
6228 curr_location = UNKNOWN_LOCATION;
23a070f3 6229}
6230
6231/* Set current location. */
6232void
5169661d 6233set_curr_insn_location (location_t location)
23a070f3 6234{
23a070f3 6235 curr_location = location;
6236}
6237
6238/* Get current location. */
6239location_t
5169661d 6240curr_insn_location (void)
23a070f3 6241{
6242 return curr_location;
6243}
6244
23a070f3 6245/* Return the lexical scope block that INSN belongs to. */
6246tree
5e9c670f 6247insn_scope (const rtx_insn *insn)
23a070f3 6248{
5169661d 6249 return LOCATION_BLOCK (INSN_LOCATION (insn));
23a070f3 6250}
6251
6252/* Return line number of the statement that produced this insn. */
6253int
5e9c670f 6254insn_line (const rtx_insn *insn)
23a070f3 6255{
5169661d 6256 return LOCATION_LINE (INSN_LOCATION (insn));
23a070f3 6257}
6258
6259/* Return source file of the statement that produced this insn. */
6260const char *
5e9c670f 6261insn_file (const rtx_insn *insn)
23a070f3 6262{
5169661d 6263 return LOCATION_FILE (INSN_LOCATION (insn));
23a070f3 6264}
30c3c442 6265
0e7ae557 6266/* Return expanded location of the statement that produced this insn. */
6267expanded_location
5e9c670f 6268insn_location (const rtx_insn *insn)
0e7ae557 6269{
6270 return expand_location (INSN_LOCATION (insn));
6271}
6272
30c3c442 6273/* Return true if memory model MODEL requires a pre-operation (release-style)
6274 barrier or a post-operation (acquire-style) barrier. While not universal,
6275 this function matches the behavior of several targets. */
6276
6277bool
6278need_atomic_barrier_p (enum memmodel model, bool pre)
6279{
e205c62d 6280 switch (model & MEMMODEL_BASE_MASK)
30c3c442 6281 {
6282 case MEMMODEL_RELAXED:
6283 case MEMMODEL_CONSUME:
6284 return false;
6285 case MEMMODEL_RELEASE:
6286 return pre;
6287 case MEMMODEL_ACQUIRE:
6288 return !pre;
6289 case MEMMODEL_ACQ_REL:
6290 case MEMMODEL_SEQ_CST:
6291 return true;
6292 default:
6293 gcc_unreachable ();
6294 }
6295}
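
/* A usage sketch with a hypothetical target expander: emit a fence
   before an atomic operation only for models that need a release-style
   barrier.  gen_example_memory_barrier stands in for a target's real
   barrier pattern.  */

extern rtx gen_example_memory_barrier (void);	/* hypothetical */

static void ATTRIBUTE_UNUSED
example_pre_fence (enum memmodel model)
{
  if (need_atomic_barrier_p (model, /*pre=*/true))
    emit_insn (gen_example_memory_barrier ());
}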
6296\f
1f3233d1 6297#include "gt-emit-rtl.h"