/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "params.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"
#include "predict.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

scalar_int_mode byte_mode;  /* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;  /* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;   /* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able
   to deal with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  */
profile_probability split_branch_probability;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure for decl DECL and offset OFFSET, and
   insert it into the hash table if one identical to it is not already in
   the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences from being seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
                        ? hard_regno_nregs[regno][mode]
                        : 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
                                                 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
                                                 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
              basic_block bb, rtx pattern, int location, int code,
              rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
                                                 prev_insn, next_insn,
                                                 bb, pattern, location, code,
                                                 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
                                                   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
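
/* Illustrative sketch (editor's addition, not part of the original file):
   because small CONST_INTs live in the const_int_rtx cache and larger ones
   are hash-consed above, equal values are pointer-identical, and
   gen_int_mode truncates to the target mode first.  Inside any function in
   this translation unit one could write:

     rtx a = gen_int_mode (-1, QImode);
     rtx b = GEN_INT (-1);
     gcc_assert (a == b);     - both are the one cached constm1 rtx

     rtx c = gen_int_mode (0x1ff, QImode);
     gcc_assert (c == b);     - 0x1ff truncated to QImode is -1

   The concrete values are hypothetical; the pointer-equality guarantee is
   exactly what the cache above provides.  */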

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
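
/* Illustrative sketch (editor's addition): the hash-consing above means two
   independently built CONST_DOUBLEs with the same value and mode collapse
   into one GC-managed node, so constant equality can be tested by pointer
   comparison.  A hypothetical use:

     REAL_VALUE_TYPE half;
     real_from_string (&half, "0.5");
     rtx x = const_double_from_real_value (half, DFmode);
     rtx y = const_double_from_real_value (half, DFmode);
     gcc_assert (x == y);     - same value + mode ==> same rtx

   real_from_string is the existing real.c entry point; the variable names
   and the value 0.5 are invented for the example.  */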

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
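
/* Illustrative sketch (editor's addition): immed_wide_int_const picks the
   representation for the caller.  A value that fits in a HOST_WIDE_INT
   comes back as a shared CONST_INT; anything wider becomes a
   CONST_WIDE_INT (or a CONST_DOUBLE without wide-int support):

     wide_int w = wi::shwi (42, GET_MODE_PRECISION (SImode));
     rtx small = immed_wide_int_const (w, SImode);
     gcc_assert (small == GEN_INT (42));     - fits: cached CONST_INT

   wi::shwi is the existing wide-int constructor; the value 42 is
   arbitrary.  */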

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
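
/* Illustrative sketch (editor's addition): the wrappers above differ only in
   the attribute bits they preset.  A hypothetical spill-slot reference at
   SP+8 could be built as

     rtx addr = plus_constant (Pmode, stack_pointer_rtx, 8);
     rtx slot = gen_frame_mem (SImode, addr);
     - slot is non-trapping and carries the frame alias set

   versus gen_const_mem for a constant-pool load, which additionally sets
   MEM_READONLY_P.  plus_constant is the existing helper from explow.c; the
   offset 8 is arbitrary.  */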

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register that
		should be used in different modes in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
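
/* Illustrative sketch (editor's addition): on a little-endian 64-bit target
   the rules above accept the low SImode half of a DImode pseudo and reject
   a size-changing float subreg (outside of LRA):

     rtx di = gen_reg_rtx (DImode);
     rtx lo = gen_rtx_SUBREG (SImode, di, 0);   - valid lowpart
     gcc_assert (!validate_subreg (SImode, DFmode, NULL_RTX, 0));

   The endianness assumption matters: on big-endian targets the SImode
   lowpart of a DImode value sits at byte offset 4, not 0, which is what
   subreg_lowpart_offset computes.  */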

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the rtxes passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
}
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
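
/* Illustrative sketch (editor's addition): during expansion, when
   generating_concat_p is set, a DCmode pseudo comes back as a CONCAT of two
   DFmode pseudos, per the code above:

     rtx c = gen_reg_rtx (DCmode);
     - c is (concat:DC (reg:DF n) (reg:DF n+1))
     rtx re = XEXP (c, 0), im = XEXP (c, 1);

   so the real and imaginary parts can be allocated independently.  */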

/* Make sure regno_pointer_align and regno_reg_rtx are large enough to have
   elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* For use by the RTL function loader, when mingling with normal
   functions.
   Ensure that label_num is greater than the label num of X, to avoid
   duplicate labels in the generated assembler.  */

void
maybe_set_max_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) >= label_num)
    label_num = CODE_LABEL_NUMBER (x) + 1;
}

\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
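
/* Illustrative sketch (editor's addition): for a VOIDmode CONST_INT the
   caller must supply the inner mode explicitly, which is what
   gen_highpart_mode is for.  Assuming a 64-bit HOST_WIDE_INT and an
   arbitrary constant:

     rtx c = gen_int_mode (0x1122334455667788, DImode);   - VOIDmode CONST_INT
     rtx hi = gen_highpart_mode (SImode, DImode, c);      - 0x11223344
     rtx lo = gen_lowpart_common (SImode, c);             - 0x55667788

   i.e. the two SImode halves are extracted without ever giving the
   CONST_INT itself a mode.  */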

/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

unsigned int
subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes)
{
  if (outer_bytes > inner_bytes)
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}

/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

unsigned int
subreg_size_highpart_offset (unsigned int outer_bytes,
			     unsigned int inner_bytes)
{
  gcc_assert (inner_bytes >= outer_bytes);

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
					(inner_bytes - outer_bytes)
					* BITS_PER_UNIT);
}
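
/* Illustrative worked example (editor's addition): for an SImode part of a
   DImode value (outer_bytes == 4, inner_bytes == 8) the two helpers above
   give

     little-endian:  lowpart offset 0,  highpart offset 4
     big-endian:     lowpart offset 4,  highpart offset 0

   i.e. the lowpart is always the least-significant half, and SUBREG_BYTE
   records where that half starts in memory order.  */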

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
		 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
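
/* Illustrative sketch (editor's addition): splitting a DImode pseudo into
   its two word_mode halves on a 32-bit target, word 0 being the low-order
   word unless WORDS_BIG_ENDIAN:

     rtx di = gen_reg_rtx (DImode);
     rtx w0 = operand_subword_force (di, 0, DImode);
     rtx w1 = operand_subword_force (di, 1, DImode);

   operand_subword may return 0 for an unrepresentable word; the _force
   variant instead copies OP into a fresh pseudo and retries, so w0/w1 are
   always usable.  */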
1646\f
b3ff8d90 1647/* Returns 1 if both MEM_EXPR can be considered equal
1648 and 0 otherwise. */
1649
1650int
52d07779 1651mem_expr_equal_p (const_tree expr1, const_tree expr2)
b3ff8d90 1652{
1653 if (expr1 == expr2)
1654 return 1;
1655
1656 if (! expr1 || ! expr2)
1657 return 0;
1658
1659 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1660 return 0;
1661
3a443843 1662 return operand_equal_p (expr1, expr2, 0);
b3ff8d90 1663}
1664
ad0a178f 1665/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1666 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1667 -1 if not known. */
1668
1669int
7cfdc2f0 1670get_mem_align_offset (rtx mem, unsigned int align)
ad0a178f 1671{
1672 tree expr;
1673 unsigned HOST_WIDE_INT offset;
1674
1675 /* This function can't use
da443c27 1676 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
98ab9e8f 1677 || (MAX (MEM_ALIGN (mem),
957d0361 1678 MAX (align, get_object_alignment (MEM_EXPR (mem))))
ad0a178f 1679 < align))
1680 return -1;
1681 else
da443c27 1682 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
ad0a178f 1683 for two reasons:
1684 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1685 for <variable>. get_inner_reference doesn't handle it and
1686 even if it did, the alignment in that case needs to be determined
1687 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1688 - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
1689 isn't sufficiently aligned, the object it is in might be. */
1690 gcc_assert (MEM_P (mem));
1691 expr = MEM_EXPR (mem);
da443c27 1692 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
ad0a178f 1693 return -1;
1694
da443c27 1695 offset = MEM_OFFSET (mem);
ad0a178f 1696 if (DECL_P (expr))
1697 {
1698 if (DECL_ALIGN (expr) < align)
1699 return -1;
1700 }
1701 else if (INDIRECT_REF_P (expr))
1702 {
1703 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1704 return -1;
1705 }
1706 else if (TREE_CODE (expr) == COMPONENT_REF)
1707 {
1708 while (1)
1709 {
1710 tree inner = TREE_OPERAND (expr, 0);
1711 tree field = TREE_OPERAND (expr, 1);
1712 tree byte_offset = component_ref_field_offset (expr);
1713 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1714
1715 if (!byte_offset
e913b5cd 1716 || !tree_fits_uhwi_p (byte_offset)
1717 || !tree_fits_uhwi_p (bit_offset))
ad0a178f 1718 return -1;
1719
e913b5cd 1720 offset += tree_to_uhwi (byte_offset);
1721 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
ad0a178f 1722
1723 if (inner == NULL_TREE)
1724 {
1725 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1726 < (unsigned int) align)
1727 return -1;
1728 break;
1729 }
1730 else if (DECL_P (inner))
1731 {
1732 if (DECL_ALIGN (inner) < align)
1733 return -1;
1734 break;
1735 }
1736 else if (TREE_CODE (inner) != COMPONENT_REF)
1737 return -1;
1738 expr = inner;
1739 }
1740 }
1741 else
1742 return -1;
1743
1744 return offset & ((align / BITS_PER_UNIT) - 1);
1745}
1746
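/* A minimal illustrative sketch, not part of the original file: compute
   how many bytes must be processed before MEM's address becomes 32-bit
   aligned.  The function name is hypothetical.  */

static HOST_WIDE_INT
example_bytes_to_align32 (rtx mem)
{
  /* OFF == 3 means XEXP (mem, 0) - 3 is 32-bit aligned, so handling
     4 - 3 == 1 byte first leaves an aligned remainder; -1 means the
     offset is not known.  */
  int off = get_mem_align_offset (mem, 32);
  if (off < 0)
    return -1;
  return off ? 4 - off : 0;
}
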
310b57a1 1747/* Given REF (a MEM) and T, either the type of REF or the expression
c6259b83 1748 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f717f77 1749 if we are making a new object of this type. BITPOS is nonzero if
1750 there is an offset outstanding on T that will be applied later. */
c6259b83 1751
1752void
35cb5232 1753set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1754 HOST_WIDE_INT bitpos)
c6259b83 1755{
6f717f77 1756 HOST_WIDE_INT apply_bitpos = 0;
c6259b83 1757 tree type;
d72886b5 1758 struct mem_attrs attrs, *defattrs, *refattrs;
3f06bd1b 1759 addr_space_t as;
c6259b83 1760
1761 /* It can happen that type_for_mode was given a mode for which there
1762 is no language-level type. In which case it returns NULL, which
1763 we can see here. */
1764 if (t == NULL_TREE)
1765 return;
1766
1767 type = TYPE_P (t) ? t : TREE_TYPE (t);
4ccffa39 1768 if (type == error_mark_node)
1769 return;
c6259b83 1770
c6259b83 1771 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1772 wrong answer, as it assumes that DECL_RTL already has the right alias
1773 info. Callers should not set DECL_RTL until after the call to
1774 set_mem_attributes. */
611234b4 1775 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
c6259b83 1776
d72886b5 1777 memset (&attrs, 0, sizeof (attrs));
1778
96216d37 1779 /* Get the alias set from the expression or type (perhaps using a
2a631e19 1780 front-end routine) and use it. */
d72886b5 1781 attrs.alias = get_alias_set (t);
c6259b83 1782
fbc6244b 1783 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
8d350e69 1784 MEM_POINTER (ref) = POINTER_TYPE_P (type);
c6259b83 1785
d8dccfe9 1786 /* Default values from pre-existing memory attributes if present. */
d72886b5 1787 refattrs = MEM_ATTRS (ref);
1788 if (refattrs)
d8dccfe9 1789 {
1790 /* ??? Can this ever happen? Calling this routine on a MEM that
1791 already carries memory attributes should probably be invalid. */
d72886b5 1792 attrs.expr = refattrs->expr;
6d58bcba 1793 attrs.offset_known_p = refattrs->offset_known_p;
d72886b5 1794 attrs.offset = refattrs->offset;
6d58bcba 1795 attrs.size_known_p = refattrs->size_known_p;
d72886b5 1796 attrs.size = refattrs->size;
1797 attrs.align = refattrs->align;
d8dccfe9 1798 }
1799
1800 /* Otherwise, default values from the mode of the MEM reference. */
d72886b5 1801 else
d8dccfe9 1802 {
d72886b5 1803 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1804 gcc_assert (!defattrs->expr);
6d58bcba 1805 gcc_assert (!defattrs->offset_known_p);
d72886b5 1806
d8dccfe9 1807 /* Respect mode size. */
6d58bcba 1808 attrs.size_known_p = defattrs->size_known_p;
d72886b5 1809 attrs.size = defattrs->size;
d8dccfe9 1810 /* ??? Is this really necessary? We probably should always get
1811 the size from the type below. */
1812
1813 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1814 if T is an object, always compute the object alignment below. */
d72886b5 1815 if (TYPE_P (t))
1816 attrs.align = defattrs->align;
1817 else
1818 attrs.align = BITS_PER_UNIT;
d8dccfe9 1819 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1820 e.g. if the type carries an alignment attribute. Should we be
1821 able to simply always use TYPE_ALIGN? */
1822 }
1823
b3b6e4b5 1824 /* We can set the alignment from the type if we are making an object or if
1825 this is an INDIRECT_REF. */
1826 if (objectp || TREE_CODE (t) == INDIRECT_REF)
d72886b5 1827 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
679e0056 1828
96216d37 1829 /* If the size is known, we can set that. */
50ba3acc 1830 tree new_size = TYPE_SIZE_UNIT (type);
96216d37 1831
9eec20bf 1832 /* The address-space is that of the type. */
1833 as = TYPE_ADDR_SPACE (type);
1834
579bccf9 1835 /* If T is not a type, we may be able to deduce some more information about
1836 the expression. */
1837 if (! TYPE_P (t))
2a631e19 1838 {
ae2dd339 1839 tree base;
b04fab2a 1840
2a631e19 1841 if (TREE_THIS_VOLATILE (t))
1842 MEM_VOLATILE_P (ref) = 1;
c6259b83 1843
3c00f11c 1844 /* Now remove any conversions: they don't change what the underlying
1845 object is. Likewise for SAVE_EXPR. */
72dd6141 1846 while (CONVERT_EXPR_P (t)
3c00f11c 1847 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1848 || TREE_CODE (t) == SAVE_EXPR)
2a631e19 1849 t = TREE_OPERAND (t, 0);
1850
73eb0a09 1851 /* Note whether this expression can trap. */
1852 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1853
1854 base = get_base_address (t);
3f06bd1b 1855 if (base)
1856 {
1857 if (DECL_P (base)
1858 && TREE_READONLY (base)
1859 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1860 && !TREE_THIS_VOLATILE (base))
1861 MEM_READONLY_P (ref) = 1;
1862
1863 /* Mark static const strings readonly as well. */
1864 if (TREE_CODE (base) == STRING_CST
1865 && TREE_READONLY (base)
1866 && TREE_STATIC (base))
1867 MEM_READONLY_P (ref) = 1;
1868
9eec20bf 1869 /* Address-space information is on the base object. */
3f06bd1b 1870 if (TREE_CODE (base) == MEM_REF
1871 || TREE_CODE (base) == TARGET_MEM_REF)
1872 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1873 0))));
1874 else
1875 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1876 }
cab98a0d 1877
2b02580f 1878 /* If this expression uses its parent's alias set, mark it so
1879 that we won't change it. */
d400f5e1 1880 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
5cc193e7 1881 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1882
2a631e19 1883 /* If this is a decl, set the attributes of the MEM from it. */
1884 if (DECL_P (t))
1885 {
d72886b5 1886 attrs.expr = t;
6d58bcba 1887 attrs.offset_known_p = true;
1888 attrs.offset = 0;
6f717f77 1889 apply_bitpos = bitpos;
50ba3acc 1890 new_size = DECL_SIZE_UNIT (t);
2a631e19 1891 }
1892
9eec20bf 1893 /* ??? If we end up with a constant here do record a MEM_EXPR. */
ce45a448 1894 else if (CONSTANT_CLASS_P (t))
9eec20bf 1895 ;
b10dbbca 1896
50ba3acc 1897 /* If this is a field reference, record it. */
1898 else if (TREE_CODE (t) == COMPONENT_REF)
b10dbbca 1899 {
d72886b5 1900 attrs.expr = t;
6d58bcba 1901 attrs.offset_known_p = true;
1902 attrs.offset = 0;
6f717f77 1903 apply_bitpos = bitpos;
50ba3acc 1904 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1905 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
b10dbbca 1906 }
1907
1908 /* If this is an array reference, look for an outer field reference. */
1909 else if (TREE_CODE (t) == ARRAY_REF)
1910 {
1911 tree off_tree = size_zero_node;
6b039979 1912 /* We can't modify t, because we use it at the end of the
1913 function. */
1914 tree t2 = t;
b10dbbca 1915
1916 do
1917 {
6b039979 1918 tree index = TREE_OPERAND (t2, 1);
6374121b 1919 tree low_bound = array_ref_low_bound (t2);
1920 tree unit_size = array_ref_element_size (t2);
97f8ce30 1921
1922 /* We assume all arrays have sizes that are a multiple of a byte.
1923 First subtract the lower bound, if any, in the type of the
6374121b 1924 index, then convert to sizetype and multiply by the size of
1925 the array element. */
1926 if (! integer_zerop (low_bound))
faa43f85 1927 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1928 index, low_bound);
97f8ce30 1929
6374121b 1930 off_tree = size_binop (PLUS_EXPR,
535664e3 1931 size_binop (MULT_EXPR,
1932 fold_convert (sizetype,
1933 index),
6374121b 1934 unit_size),
1935 off_tree);
6b039979 1936 t2 = TREE_OPERAND (t2, 0);
b10dbbca 1937 }
6b039979 1938 while (TREE_CODE (t2) == ARRAY_REF);
b10dbbca 1939
9eec20bf 1940 if (DECL_P (t2)
6a57a1e8 1941 || (TREE_CODE (t2) == COMPONENT_REF
1942 /* For trailing arrays t2 doesn't have a size that
1943 covers all valid accesses. */
07110764 1944 && ! array_at_struct_end_p (t)))
b10dbbca 1945 {
d72886b5 1946 attrs.expr = t2;
6d58bcba 1947 attrs.offset_known_p = false;
e913b5cd 1948 if (tree_fits_uhwi_p (off_tree))
6f717f77 1949 {
6d58bcba 1950 attrs.offset_known_p = true;
e913b5cd 1951 attrs.offset = tree_to_uhwi (off_tree);
6f717f77 1952 apply_bitpos = bitpos;
1953 }
b10dbbca 1954 }
9eec20bf 1955 /* Else do not record a MEM_EXPR. */
2d8fe5d0 1956 }
1957
6d72287b 1958 /* If this is an indirect reference, record it. */
182cf5a9 1959 else if (TREE_CODE (t) == MEM_REF
5d9de213 1960 || TREE_CODE (t) == TARGET_MEM_REF)
6d72287b 1961 {
d72886b5 1962 attrs.expr = t;
6d58bcba 1963 attrs.offset_known_p = true;
1964 attrs.offset = 0;
6d72287b 1965 apply_bitpos = bitpos;
1966 }
1967
9eec20bf 1968 /* Compute the alignment. */
1969 unsigned int obj_align;
1970 unsigned HOST_WIDE_INT obj_bitpos;
1971 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1972 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1973 if (obj_bitpos != 0)
ac29ece2 1974 obj_align = least_bit_hwi (obj_bitpos);
9eec20bf 1975 attrs.align = MAX (attrs.align, obj_align);
2a631e19 1976 }
1977
e913b5cd 1978 if (tree_fits_uhwi_p (new_size))
50ba3acc 1979 {
1980 attrs.size_known_p = true;
e913b5cd 1981 attrs.size = tree_to_uhwi (new_size);
50ba3acc 1982 }
1983
e2e205b3 1984 /* If we modified OFFSET based on T, then subtract the outstanding
595f1461 1985 bit position offset. Similarly, increase the size of the accessed
1986 object to contain the negative offset. */
6f717f77 1987 if (apply_bitpos)
595f1461 1988 {
6d58bcba 1989 gcc_assert (attrs.offset_known_p);
1990 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1991 if (attrs.size_known_p)
1992 attrs.size += apply_bitpos / BITS_PER_UNIT;
595f1461 1993 }
6f717f77 1994
2a631e19 1995 /* Now set the attributes we computed above. */
3f06bd1b 1996 attrs.addrspace = as;
d72886b5 1997 set_mem_attrs (ref, &attrs);
c6259b83 1998}
1999
6f717f77 2000void
35cb5232 2001set_mem_attributes (rtx ref, tree t, int objectp)
6f717f77 2002{
2003 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2004}
2005
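/* A minimal illustrative sketch, not part of the original file: build a
   MEM for a declaration and derive all of its attributes from the tree.
   DECL and ADDR are hypothetical; ADDR is assumed to be a valid address
   for DECL's storage.  */

static rtx
example_mem_for_decl (tree decl, rtx addr)
{
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)), addr);
  /* OBJECTP == 1: MEM is a new object of DECL's type, so the alias set,
     alignment, size and MEM_EXPR are all taken from DECL.  */
  set_mem_attributes (mem, decl, 1);
  return mem;
}
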
c6259b83 2006/* Set the alias set of MEM to SET. */
2007
2008void
32c2fdea 2009set_mem_alias_set (rtx mem, alias_set_type set)
c6259b83 2010{
d72886b5 2011 struct mem_attrs attrs;
2012
c6259b83 2013 /* If the new and old alias sets don't conflict, something is wrong. */
1b4345f7 2014 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
d72886b5 2015 attrs = *get_mem_attrs (mem);
2016 attrs.alias = set;
2017 set_mem_attrs (mem, &attrs);
bd1a81f7 2018}
2019
2020/* Set the address space of MEM to ADDRSPACE (target-defined). */
2021
2022void
2023set_mem_addr_space (rtx mem, addr_space_t addrspace)
2024{
d72886b5 2025 struct mem_attrs attrs;
2026
2027 attrs = *get_mem_attrs (mem);
2028 attrs.addrspace = addrspace;
2029 set_mem_attrs (mem, &attrs);
c6259b83 2030}
96216d37 2031
1c4512da 2032/* Set the alignment of MEM to ALIGN bits. */
96216d37 2033
2034void
35cb5232 2035set_mem_align (rtx mem, unsigned int align)
96216d37 2036{
d72886b5 2037 struct mem_attrs attrs;
2038
2039 attrs = *get_mem_attrs (mem);
2040 attrs.align = align;
2041 set_mem_attrs (mem, &attrs);
96216d37 2042}
278fe152 2043
b10dbbca 2044/* Set the expr for MEM to EXPR. */
278fe152 2045
2046void
35cb5232 2047set_mem_expr (rtx mem, tree expr)
278fe152 2048{
d72886b5 2049 struct mem_attrs attrs;
2050
2051 attrs = *get_mem_attrs (mem);
2052 attrs.expr = expr;
2053 set_mem_attrs (mem, &attrs);
278fe152 2054}
b10dbbca 2055
2056/* Set the offset of MEM to OFFSET. */
2057
2058void
da443c27 2059set_mem_offset (rtx mem, HOST_WIDE_INT offset)
b10dbbca 2060{
d72886b5 2061 struct mem_attrs attrs;
2062
2063 attrs = *get_mem_attrs (mem);
6d58bcba 2064 attrs.offset_known_p = true;
2065 attrs.offset = offset;
da443c27 2066 set_mem_attrs (mem, &attrs);
2067}
2068
2069/* Clear the offset of MEM. */
2070
2071void
2072clear_mem_offset (rtx mem)
2073{
2074 struct mem_attrs attrs;
2075
2076 attrs = *get_mem_attrs (mem);
6d58bcba 2077 attrs.offset_known_p = false;
d72886b5 2078 set_mem_attrs (mem, &attrs);
f0500469 2079}
2080
2081/* Set the size of MEM to SIZE. */
2082
2083void
5b2a69fa 2084set_mem_size (rtx mem, HOST_WIDE_INT size)
f0500469 2085{
d72886b5 2086 struct mem_attrs attrs;
2087
2088 attrs = *get_mem_attrs (mem);
6d58bcba 2089 attrs.size_known_p = true;
2090 attrs.size = size;
5b2a69fa 2091 set_mem_attrs (mem, &attrs);
2092}
2093
2094/* Clear the size of MEM. */
2095
2096void
2097clear_mem_size (rtx mem)
2098{
2099 struct mem_attrs attrs;
2100
2101 attrs = *get_mem_attrs (mem);
6d58bcba 2102 attrs.size_known_p = false;
d72886b5 2103 set_mem_attrs (mem, &attrs);
b10dbbca 2104}
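
/* A minimal illustrative sketch, not part of the original file: describe
   a 16-byte scratch buffer by hand with the setters above.  ADDR is a
   hypothetical Pmode address.  Note that set_mem_align takes bits while
   set_mem_size takes bytes.  */

static rtx
example_scratch_mem (rtx addr)
{
  rtx mem = gen_rtx_MEM (BLKmode, addr);
  set_mem_align (mem, 64);		/* Bits.  */
  set_mem_size (mem, 16);		/* Bytes.  */
  set_mem_alias_set (mem, new_alias_set ());
  return mem;
}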
c6259b83 2105\f
96216d37 2106/* Return a memory reference like MEMREF, but with its mode changed to MODE
2107 and its address changed to ADDR. (VOIDmode means don't change the mode.
2108 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
5cc04e45 2109 returned memory location is required to be valid. INPLACE is true if any
2110 changes can be made directly to MEMREF or false if MEMREF must be treated
2111 as immutable.
2112
2113 The memory attributes are not changed. */
15bbde2b 2114
96216d37 2115static rtx
3754d046 2116change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
5cc04e45 2117 bool inplace)
15bbde2b 2118{
bd1a81f7 2119 addr_space_t as;
9ce37fa7 2120 rtx new_rtx;
15bbde2b 2121
611234b4 2122 gcc_assert (MEM_P (memref));
bd1a81f7 2123 as = MEM_ADDR_SPACE (memref);
15bbde2b 2124 if (mode == VOIDmode)
2125 mode = GET_MODE (memref);
2126 if (addr == 0)
2127 addr = XEXP (memref, 0);
3988ef8b 2128 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
bd1a81f7 2129 && (!validate || memory_address_addr_space_p (mode, addr, as)))
3988ef8b 2130 return memref;
15bbde2b 2131
73a18f44 2132 /* Don't validate the address for LRA. LRA can make the address
2133 valid by itself in the most efficient way. */
2134 if (validate && !lra_in_progress)
15bbde2b 2135 {
e4e86ec5 2136 if (reload_in_progress || reload_completed)
bd1a81f7 2137 gcc_assert (memory_address_addr_space_p (mode, addr, as));
e4e86ec5 2138 else
bd1a81f7 2139 addr = memory_address_addr_space (mode, addr, as);
15bbde2b 2140 }
d823ba47 2141
e8976cd7 2142 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2143 return memref;
2144
5cc04e45 2145 if (inplace)
2146 {
2147 XEXP (memref, 0) = addr;
2148 return memref;
2149 }
2150
9ce37fa7 2151 new_rtx = gen_rtx_MEM (mode, addr);
2152 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2153 return new_rtx;
15bbde2b 2154}
537ffcfc 2155
96216d37 2156/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2157 way we are changing MEMREF, so we only preserve the alias set. */
e513d163 2158
2159rtx
3754d046 2160change_address (rtx memref, machine_mode mode, rtx addr)
e513d163 2161{
5cc04e45 2162 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
3754d046 2163 machine_mode mmode = GET_MODE (new_rtx);
d72886b5 2164 struct mem_attrs attrs, *defattrs;
0ab96142 2165
d72886b5 2166 attrs = *get_mem_attrs (memref);
2167 defattrs = mode_mem_attrs[(int) mmode];
6d58bcba 2168 attrs.expr = NULL_TREE;
2169 attrs.offset_known_p = false;
2170 attrs.size_known_p = defattrs->size_known_p;
d72886b5 2171 attrs.size = defattrs->size;
2172 attrs.align = defattrs->align;
6cc60c4d 2173
d28edf0d 2174 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2175 if (new_rtx == memref)
0ab96142 2176 {
d72886b5 2177 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
9ce37fa7 2178 return new_rtx;
0ab96142 2179
9ce37fa7 2180 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2181 MEM_COPY_ATTRIBUTES (new_rtx, memref);
0ab96142 2182 }
d28edf0d 2183
d72886b5 2184 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2185 return new_rtx;
e513d163 2186}
537ffcfc 2187
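/* A minimal illustrative sketch, not part of the original file:
   re-express MEM as a word_mode access through a register holding its
   address.  As described above, every attribute except the alias set
   is reset to the new mode's defaults.  */

static rtx
example_readdress_mem (rtx mem)
{
  rtx addr = force_reg (Pmode, XEXP (mem, 0));
  return change_address (mem, word_mode, addr);
}
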
96216d37 2188/* Return a memory reference like MEMREF, but with its mode changed
2189 to MODE and its address offset by OFFSET bytes. If VALIDATE is
bf42c62d 2190 nonzero, the memory address is forced to be valid.
2d0fd66d 2191 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2192 and the caller is responsible for adjusting MEMREF base register.
2193 If ADJUST_OBJECT is zero, the underlying object associated with the
2194 memory reference is left unchanged and the caller is responsible for
2195 dealing with it. Otherwise, if the new memory reference is outside
226c6baf 2196 the underlying object, even partially, then the object is dropped.
2197 SIZE, if nonzero, is the size of an access in cases where MODE
2198 has no inherent size. */
e4e86ec5 2199
2200rtx
3754d046 2201adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
226c6baf 2202 int validate, int adjust_address, int adjust_object,
2203 HOST_WIDE_INT size)
e4e86ec5 2204{
fb257ae6 2205 rtx addr = XEXP (memref, 0);
9ce37fa7 2206 rtx new_rtx;
f77c4496 2207 scalar_int_mode address_mode;
cfb75cdf 2208 int pbits;
21b8bc7e 2209 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
d72886b5 2210 unsigned HOST_WIDE_INT max_align;
21b8bc7e 2211#ifdef POINTERS_EXTEND_UNSIGNED
f77c4496 2212 scalar_int_mode pointer_mode
21b8bc7e 2213 = targetm.addr_space.pointer_mode (attrs.addrspace);
2214#endif
fb257ae6 2215
4733f549 2216 /* VOIDmode means no mode change for change_address_1. */
2217 if (mode == VOIDmode)
2218 mode = GET_MODE (memref);
2219
226c6baf 2220 /* Take the size of non-BLKmode accesses from the mode. */
2221 defattrs = mode_mem_attrs[(int) mode];
2222 if (defattrs->size_known_p)
2223 size = defattrs->size;
2224
d28edf0d 2225 /* If there are no changes, just return the original memory reference. */
2226 if (mode == GET_MODE (memref) && !offset
226c6baf 2227 && (size == 0 || (attrs.size_known_p && attrs.size == size))
d72886b5 2228 && (!validate || memory_address_addr_space_p (mode, addr,
2229 attrs.addrspace)))
d28edf0d 2230 return memref;
2231
e36c3d58 2232 /* ??? Prefer to create garbage instead of creating shared rtl.
6ef828f9 2233 This may happen even if offset is nonzero -- consider
e36c3d58 2234 (plus (plus reg reg) const_int) -- so do this always. */
2235 addr = copy_rtx (addr);
2236
cfb75cdf 2237 /* Convert a possibly large offset to a signed value within the
2238 range of the target address space. */
87cf5753 2239 address_mode = get_address_mode (memref);
98155838 2240 pbits = GET_MODE_BITSIZE (address_mode);
cfb75cdf 2241 if (HOST_BITS_PER_WIDE_INT > pbits)
2242 {
2243 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2244 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2245 >> shift);
2246 }
2247
2d0fd66d 2248 if (adjust_address)
cd358719 2249 {
2250 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2251 object, we can merge it into the LO_SUM. */
2252 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2253 && offset >= 0
2254 && (unsigned HOST_WIDE_INT) offset
2255 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
98155838 2256 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
29c05e22 2257 plus_constant (address_mode,
2258 XEXP (addr, 1), offset));
21b8bc7e 2259#ifdef POINTERS_EXTEND_UNSIGNED
2260 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2261 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2262 the fact that pointers are not allowed to overflow. */
2263 else if (POINTERS_EXTEND_UNSIGNED > 0
2264 && GET_CODE (addr) == ZERO_EXTEND
2265 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2266 && trunc_int_for_mode (offset, pointer_mode) == offset)
2267 addr = gen_rtx_ZERO_EXTEND (address_mode,
2268 plus_constant (pointer_mode,
2269 XEXP (addr, 0), offset));
2270#endif
cd358719 2271 else
29c05e22 2272 addr = plus_constant (address_mode, addr, offset);
cd358719 2273 }
fb257ae6 2274
5cc04e45 2275 new_rtx = change_address_1 (memref, mode, addr, validate, false);
96216d37 2276
e077413c 2277 /* If the address is a REG, change_address_1 rightfully returns memref,
2278 but this would destroy memref's MEM_ATTRS. */
2279 if (new_rtx == memref && offset != 0)
2280 new_rtx = copy_rtx (new_rtx);
2281
2d0fd66d 2282 /* Conservatively drop the object if we don't know where we start from. */
2283 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2284 {
2285 attrs.expr = NULL_TREE;
2286 attrs.alias = 0;
2287 }
2288
96216d37 2289 /* Compute the new values of the memory attributes due to this adjustment.
2290 We add the offsets and update the alignment. */
6d58bcba 2291 if (attrs.offset_known_p)
2d0fd66d 2292 {
2293 attrs.offset += offset;
2294
2295 /* Drop the object if the new left end is not within its bounds. */
2296 if (adjust_object && attrs.offset < 0)
2297 {
2298 attrs.expr = NULL_TREE;
2299 attrs.alias = 0;
2300 }
2301 }
96216d37 2302
b8098e5b 2303 /* Compute the new alignment by taking the MIN of the alignment and the
2304 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2305 if zero. */
2306 if (offset != 0)
d72886b5 2307 {
ac29ece2 2308 max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
d72886b5 2309 attrs.align = MIN (attrs.align, max_align);
2310 }
96216d37 2311
226c6baf 2312 if (size)
6d58bcba 2313 {
2d0fd66d 2314 /* Drop the object if the new right end is not within its bounds. */
226c6baf 2315 if (adjust_object && (offset + size) > attrs.size)
2d0fd66d 2316 {
2317 attrs.expr = NULL_TREE;
2318 attrs.alias = 0;
2319 }
6d58bcba 2320 attrs.size_known_p = true;
226c6baf 2321 attrs.size = size;
6d58bcba 2322 }
2323 else if (attrs.size_known_p)
2d0fd66d 2324 {
226c6baf 2325 gcc_assert (!adjust_object);
2d0fd66d 2326 attrs.size -= offset;
226c6baf 2327 /* ??? The store_by_pieces machinery generates negative sizes,
2328 so don't assert for that here. */
2d0fd66d 2329 }
5cc193e7 2330
d72886b5 2331 set_mem_attrs (new_rtx, &attrs);
96216d37 2332
9ce37fa7 2333 return new_rtx;
e4e86ec5 2334}
2335
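/* A minimal illustrative sketch, not part of the original file: address
   the two SImode halves of a DImode MEM through the adjust_address
   wrapper from expr.h, which calls adjust_address_1 with VALIDATE = 1,
   ADJUST_ADDRESS = 1, ADJUST_OBJECT = 0 and SIZE = 0.  */

static void
example_split_mem (rtx mem, rtx *lo, rtx *hi)
{
  /* For memory, the low-order word sits at offset 0 only when
     !WORDS_BIG_ENDIAN; this sketch assumes that case.  */
  *lo = adjust_address (mem, SImode, 0);
  *hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));
}
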
bf42c62d 2336/* Return a memory reference like MEMREF, but with its mode changed
2337 to MODE and its address changed to ADDR, which is assumed to be
f0b5f617 2338 MEMREF offset by OFFSET bytes. If VALIDATE is
bf42c62d 2339 nonzero, the memory address is forced to be valid. */
2340
2341rtx
3754d046 2342adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
35cb5232 2343 HOST_WIDE_INT offset, int validate)
bf42c62d 2344{
5cc04e45 2345 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
226c6baf 2346 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
bf42c62d 2347}
2348
2a631e19 2349/* Return a memory reference like MEMREF, but whose address is changed by
2350 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2351 known to be in OFFSET (possibly 1). */
fcdc122e 2352
2353rtx
35cb5232 2354offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
fcdc122e 2355{
9ce37fa7 2356 rtx new_rtx, addr = XEXP (memref, 0);
3754d046 2357 machine_mode address_mode;
6d58bcba 2358 struct mem_attrs attrs, *defattrs;
fac6aae6 2359
d72886b5 2360 attrs = *get_mem_attrs (memref);
87cf5753 2361 address_mode = get_address_mode (memref);
98155838 2362 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2363
d4c5e26d 2364 /* At this point we don't know _why_ the address is invalid. It
917bbcab 2365 could have secondary memory references, multiplies or anything.
fac6aae6 2366
2367 However, if we did go and rearrange things, we can wind up not
2368 being able to recognize the magic around pic_offset_table_rtx.
2369 This stuff is fragile, and is yet another example of why it is
2370 bad to expose PIC machinery too early. */
d72886b5 2371 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2372 attrs.addrspace)
fac6aae6 2373 && GET_CODE (addr) == PLUS
2374 && XEXP (addr, 0) == pic_offset_table_rtx)
2375 {
2376 addr = force_reg (GET_MODE (addr), addr);
98155838 2377 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2378 }
2379
9ce37fa7 2380 update_temp_slot_address (XEXP (memref, 0), new_rtx);
5cc04e45 2381 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
fcdc122e 2382
d28edf0d 2383 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2384 if (new_rtx == memref)
2385 return new_rtx;
d28edf0d 2386
fcdc122e 2387 /* Update the alignment to reflect the offset. Reset the offset, which
2388 we don't know. */
6d58bcba 2389 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2390 attrs.offset_known_p = false;
2391 attrs.size_known_p = defattrs->size_known_p;
2392 attrs.size = defattrs->size;
d72886b5 2393 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2394 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2395 return new_rtx;
fcdc122e 2396}
d4c5e26d 2397
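/* A minimal illustrative sketch, not part of the original file: index
   MEM by a run-time byte offset held in IDX (hypothetical).  POW2 == 4
   records that IDX is known to be a multiple of 4, so 32-bit alignment
   information survives the reset described above.  */

static rtx
example_indexed_mem (rtx mem, rtx idx)
{
  return offset_address (mem, idx, 4);
}
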
537ffcfc 2398/* Return a memory reference like MEMREF, but with its address changed to
2399 ADDR. The caller is asserting that the actual piece of memory pointed
2400 to is the same, just the form of the address is being changed, such as
5cc04e45 2401 by putting something into a register. INPLACE is true if any changes
2402 can be made directly to MEMREF or false if MEMREF must be treated as
2403 immutable. */
537ffcfc 2404
2405rtx
5cc04e45 2406replace_equiv_address (rtx memref, rtx addr, bool inplace)
537ffcfc 2407{
96216d37 2408 /* change_address_1 copies the memory attribute structure without change
2409 and that's exactly what we want here. */
ecfe4ca9 2410 update_temp_slot_address (XEXP (memref, 0), addr);
5cc04e45 2411 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
537ffcfc 2412}
96216d37 2413
e4e86ec5 2414/* Likewise, but the reference is not required to be valid. */
2415
2416rtx
5cc04e45 2417replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
e4e86ec5 2418{
5cc04e45 2419 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
e4e86ec5 2420}
8259ab07 2421
2422/* Return a memory reference like MEMREF, but with its mode widened to
2423 MODE and offset by OFFSET. This would be used by targets that e.g.
2424 cannot issue QImode memory operations and have to use SImode memory
2425 operations plus masking logic. */
2426
2427rtx
3754d046 2428widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
8259ab07 2429{
226c6baf 2430 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
d72886b5 2431 struct mem_attrs attrs;
8259ab07 2432 unsigned int size = GET_MODE_SIZE (mode);
2433
d28edf0d 2434 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2435 if (new_rtx == memref)
2436 return new_rtx;
d28edf0d 2437
d72886b5 2438 attrs = *get_mem_attrs (new_rtx);
2439
8259ab07 2440 /* If we don't know what offset we were at within the expression, then
2441 we can't know if we've overstepped the bounds. */
6d58bcba 2442 if (! attrs.offset_known_p)
d72886b5 2443 attrs.expr = NULL_TREE;
8259ab07 2444
d72886b5 2445 while (attrs.expr)
8259ab07 2446 {
d72886b5 2447 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
8259ab07 2448 {
d72886b5 2449 tree field = TREE_OPERAND (attrs.expr, 1);
2450 tree offset = component_ref_field_offset (attrs.expr);
8259ab07 2451
2452 if (! DECL_SIZE_UNIT (field))
2453 {
d72886b5 2454 attrs.expr = NULL_TREE;
8259ab07 2455 break;
2456 }
2457
2458 /* Is the field at least as large as the access? If so, we are
2459 done; otherwise strip back to the containing structure. */
8359cfb4 2460 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2461 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
6d58bcba 2462 && attrs.offset >= 0)
8259ab07 2463 break;
2464
e913b5cd 2465 if (! tree_fits_uhwi_p (offset))
8259ab07 2466 {
d72886b5 2467 attrs.expr = NULL_TREE;
8259ab07 2468 break;
2469 }
2470
d72886b5 2471 attrs.expr = TREE_OPERAND (attrs.expr, 0);
e913b5cd 2472 attrs.offset += tree_to_uhwi (offset);
2473 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
6d58bcba 2474 / BITS_PER_UNIT);
8259ab07 2475 }
2476 /* Similarly for the decl. */
d72886b5 2477 else if (DECL_P (attrs.expr)
2478 && DECL_SIZE_UNIT (attrs.expr)
2479 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2480 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
6d58bcba 2481 && (! attrs.offset_known_p || attrs.offset >= 0))
8259ab07 2482 break;
2483 else
2484 {
2485 /* The widened memory access overflows the expression, which means
2486 that it could alias another expression. Zap it. */
d72886b5 2487 attrs.expr = NULL_TREE;
8259ab07 2488 break;
2489 }
2490 }
2491
d72886b5 2492 if (! attrs.expr)
6d58bcba 2493 attrs.offset_known_p = false;
8259ab07 2494
2495 /* The widened memory may alias other stuff, so zap the alias set. */
2496 /* ??? Maybe use get_alias_set on any remaining expression. */
d72886b5 2497 attrs.alias = 0;
6d58bcba 2498 attrs.size_known_p = true;
2499 attrs.size = size;
d72886b5 2500 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2501 return new_rtx;
8259ab07 2502}
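
/* A minimal illustrative sketch, not part of the original file: widen a
   byte access to a full SImode word for a target without QImode loads.
   A real user would first round the address down to a word boundary and
   later mask out the wanted byte; both steps are omitted here.  */

static rtx
example_widen_byte (rtx byte_mem)
{
  return widen_memory_access (byte_mem, SImode, 0);
}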
15bbde2b 2503\f
ac681e84 2504/* A fake decl that is used as the MEM_EXPR of spill slots. */
2505static GTY(()) tree spill_slot_decl;
2506
58029e61 2507tree
2508get_spill_slot_decl (bool force_build_p)
ac681e84 2509{
2510 tree d = spill_slot_decl;
2511 rtx rd;
d72886b5 2512 struct mem_attrs attrs;
ac681e84 2513
58029e61 2514 if (d || !force_build_p)
ac681e84 2515 return d;
2516
e60a6f7b 2517 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2518 VAR_DECL, get_identifier ("%sfp"), void_type_node);
ac681e84 2519 DECL_ARTIFICIAL (d) = 1;
2520 DECL_IGNORED_P (d) = 1;
2521 TREE_USED (d) = 1;
ac681e84 2522 spill_slot_decl = d;
2523
2524 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2525 MEM_NOTRAP_P (rd) = 1;
d72886b5 2526 attrs = *mode_mem_attrs[(int) BLKmode];
2527 attrs.alias = new_alias_set ();
2528 attrs.expr = d;
2529 set_mem_attrs (rd, &attrs);
ac681e84 2530 SET_DECL_RTL (d, rd);
2531
2532 return d;
2533}
2534
2535/* Given MEM, a result from assign_stack_local, fill in the memory
2536 attributes as appropriate for a register allocator spill slot.
2537 These slots are not aliasable by other memory. We arrange for
2538 them all to use a single MEM_EXPR, so that the aliasing code can
2539 work properly in the case of shared spill slots. */
2540
2541void
2542set_mem_attrs_for_spill (rtx mem)
2543{
d72886b5 2544 struct mem_attrs attrs;
2545 rtx addr;
ac681e84 2546
d72886b5 2547 attrs = *get_mem_attrs (mem);
2548 attrs.expr = get_spill_slot_decl (true);
2549 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2550 attrs.addrspace = ADDR_SPACE_GENERIC;
ac681e84 2551
2552 /* We expect the incoming memory to be of the form:
2553 (mem:MODE (plus (reg sfp) (const_int offset)))
2554 with perhaps the plus missing for offset = 0. */
2555 addr = XEXP (mem, 0);
6d58bcba 2556 attrs.offset_known_p = true;
2557 attrs.offset = 0;
ac681e84 2558 if (GET_CODE (addr) == PLUS
971ba038 2559 && CONST_INT_P (XEXP (addr, 1)))
6d58bcba 2560 attrs.offset = INTVAL (XEXP (addr, 1));
ac681e84 2561
d72886b5 2562 set_mem_attrs (mem, &attrs);
ac681e84 2563 MEM_NOTRAP_P (mem) = 1;
2564}
2565\f
15bbde2b 2566/* Return a newly created CODE_LABEL rtx with a unique label number. */
2567
be95c7c7 2568rtx_code_label *
35cb5232 2569gen_label_rtx (void)
15bbde2b 2570{
be95c7c7 2571 return as_a <rtx_code_label *> (
2572 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2573 NULL, label_num++, NULL));
15bbde2b 2574}
2575\f
2576/* For procedure integration. */
2577
15bbde2b 2578/* Install new pointers to the first and last insns in the chain.
d4c332ff 2579 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 2580 Used for an inlined procedure after copying the insn chain. */
2581
2582void
57c26b3a 2583set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
15bbde2b 2584{
57c26b3a 2585 rtx_insn *insn;
d4c332ff 2586
06f9d6ef 2587 set_first_insn (first);
2588 set_last_insn (last);
d4c332ff 2589 cur_insn_uid = 0;
2590
9845d120 2591 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2592 {
2593 int debug_count = 0;
2594
2595 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2596 cur_debug_insn_uid = 0;
2597
2598 for (insn = first; insn; insn = NEXT_INSN (insn))
2599 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2600 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2601 else
2602 {
2603 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2604 if (DEBUG_INSN_P (insn))
2605 debug_count++;
2606 }
2607
2608 if (debug_count)
2609 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2610 else
2611 cur_debug_insn_uid++;
2612 }
2613 else
2614 for (insn = first; insn; insn = NEXT_INSN (insn))
2615 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
d4c332ff 2616
2617 cur_insn_uid++;
15bbde2b 2618}
15bbde2b 2619\f
d823ba47 2620/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2621 structure. This routine should only be called once. */
15bbde2b 2622
a40c0eeb 2623static void
58945f46 2624unshare_all_rtl_1 (rtx_insn *insn)
15bbde2b 2625{
2d96a59a 2626 /* Unshare just about everything else. */
1cd4cfea 2627 unshare_all_rtl_in_chain (insn);
d823ba47 2628
15bbde2b 2629 /* Make sure the addresses of stack slots found outside the insn chain
2630 (such as, in DECL_RTL of a variable) are not shared
2631 with the insn chain.
2632
2633 This special care is necessary when the stack slot MEM does not
2634 actually appear in the insn chain. If it does appear, its address
2635 is unshared from all else at that point. */
84f4f7bf 2636 unsigned int i;
2637 rtx temp;
2638 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2639 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
15bbde2b 2640}
2641
d823ba47 2642/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2643 structure, again. This is a fairly expensive thing to do so it
2644 should be done sparingly. */
2645
2646void
58945f46 2647unshare_all_rtl_again (rtx_insn *insn)
2d96a59a 2648{
58945f46 2649 rtx_insn *p;
5244079b 2650 tree decl;
2651
2d96a59a 2652 for (p = insn; p; p = NEXT_INSN (p))
9204e736 2653 if (INSN_P (p))
2d96a59a 2654 {
2655 reset_used_flags (PATTERN (p));
2656 reset_used_flags (REG_NOTES (p));
6d2a4bac 2657 if (CALL_P (p))
2658 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2d96a59a 2659 }
5244079b 2660
01dc9f0c 2661 /* Make sure that virtual stack slots are not shared. */
265be050 2662 set_used_decls (DECL_INITIAL (cfun->decl));
01dc9f0c 2663
5244079b 2664 /* Make sure that virtual parameters are not shared. */
1767a056 2665 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
265be050 2666 set_used_flags (DECL_RTL (decl));
5244079b 2667
84f4f7bf 2668 rtx temp;
2669 unsigned int i;
2670 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2671 reset_used_flags (temp);
5244079b 2672
df329266 2673 unshare_all_rtl_1 (insn);
a40c0eeb 2674}
2675
2a1990e9 2676unsigned int
a40c0eeb 2677unshare_all_rtl (void)
2678{
df329266 2679 unshare_all_rtl_1 (get_insns ());
607381a9 2680
2681 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2682 {
2683 if (DECL_RTL_SET_P (decl))
2684 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2685 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2686 }
2687
2a1990e9 2688 return 0;
2d96a59a 2689}
2690
77fce4cd 2691
1cd4cfea 2692/* Check that ORIG is not marked when it should not be, and mark ORIG
2693 as in use. Recursively does the same for subexpressions. */
2694
2695static void
2696verify_rtx_sharing (rtx orig, rtx insn)
2697{
2698 rtx x = orig;
2699 int i;
2700 enum rtx_code code;
2701 const char *format_ptr;
2702
2703 if (x == 0)
2704 return;
2705
2706 code = GET_CODE (x);
2707
2708 /* These types may be freely shared. */
2709
2710 switch (code)
2711 {
2712 case REG:
688ff29b 2713 case DEBUG_EXPR:
2714 case VALUE:
0349edce 2715 CASE_CONST_ANY:
1cd4cfea 2716 case SYMBOL_REF:
2717 case LABEL_REF:
2718 case CODE_LABEL:
2719 case PC:
2720 case CC0:
1a860023 2721 case RETURN:
9cb2517e 2722 case SIMPLE_RETURN:
1cd4cfea 2723 case SCRATCH:
c09425a0 2724 /* SCRATCHes must be shared because they represent distinct values. */
b291008a 2725 return;
c09425a0 2726 case CLOBBER:
b291008a 2727 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2728 clobbers or clobbers of hard registers that originated as pseudos.
2729 This is needed to allow safe register renaming. */
2b5f32ae 2730 if (REG_P (XEXP (x, 0))
2731 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2732 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
c09425a0 2733 return;
2734 break;
1cd4cfea 2735
2736 case CONST:
3072d30e 2737 if (shared_const_p (orig))
1cd4cfea 2738 return;
2739 break;
2740
2741 case MEM:
2742 /* A MEM is allowed to be shared if its address is constant. */
2743 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2744 || reload_completed || reload_in_progress)
2745 return;
2746
2747 break;
2748
2749 default:
2750 break;
2751 }
2752
2753 /* This rtx may not be shared. If it has already been seen,
2754 report the invalid sharing rather than fixing it up. */
382ecba7 2755 if (flag_checking && RTX_FLAG (x, used))
1cd4cfea 2756 {
0a81f5a0 2757 error ("invalid rtl sharing found in the insn");
1cd4cfea 2758 debug_rtx (insn);
0a81f5a0 2759 error ("shared rtx");
1cd4cfea 2760 debug_rtx (x);
0a81f5a0 2761 internal_error ("internal consistency failure");
1cd4cfea 2762 }
9cee7c3f 2763 gcc_assert (!RTX_FLAG (x, used));
48e1416a 2764
1cd4cfea 2765 RTX_FLAG (x, used) = 1;
2766
8b332087 2767 /* Now scan the subexpressions recursively. */
1cd4cfea 2768
2769 format_ptr = GET_RTX_FORMAT (code);
2770
2771 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2772 {
2773 switch (*format_ptr++)
2774 {
2775 case 'e':
2776 verify_rtx_sharing (XEXP (x, i), insn);
2777 break;
2778
2779 case 'E':
2780 if (XVEC (x, i) != NULL)
2781 {
2782 int j;
2783 int len = XVECLEN (x, i);
2784
2785 for (j = 0; j < len; j++)
2786 {
9cee7c3f 2787 /* We allow sharing of ASM_OPERANDS inside a single
2788 instruction. */
1cd4cfea 2789 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
9cee7c3f 2790 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2791 == ASM_OPERANDS))
1cd4cfea 2792 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2793 else
2794 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2795 }
2796 }
2797 break;
2798 }
2799 }
2800 return;
2801}
2802
1e9af25c 2803/* Reset used-flags for INSN. */
2804
2805static void
2806reset_insn_used_flags (rtx insn)
2807{
2808 gcc_assert (INSN_P (insn));
2809 reset_used_flags (PATTERN (insn));
2810 reset_used_flags (REG_NOTES (insn));
2811 if (CALL_P (insn))
2812 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2813}
2814
7cdd84a2 2815/* Go through all the RTL insn bodies and clear all the USED bits. */
1cd4cfea 2816
7cdd84a2 2817static void
2818reset_all_used_flags (void)
1cd4cfea 2819{
4cd001d5 2820 rtx_insn *p;
1cd4cfea 2821
2822 for (p = get_insns (); p; p = NEXT_INSN (p))
2823 if (INSN_P (p))
2824 {
1e9af25c 2825 rtx pat = PATTERN (p);
2826 if (GET_CODE (pat) != SEQUENCE)
2827 reset_insn_used_flags (p);
2828 else
764f640f 2829 {
1e9af25c 2830 gcc_assert (REG_NOTES (p) == NULL);
2831 for (int i = 0; i < XVECLEN (pat, 0); i++)
11c8949c 2832 {
2833 rtx insn = XVECEXP (pat, 0, i);
2834 if (INSN_P (insn))
2835 reset_insn_used_flags (insn);
2836 }
764f640f 2837 }
1cd4cfea 2838 }
7cdd84a2 2839}
2840
1e9af25c 2841/* Verify sharing in INSN. */
2842
2843static void
2844verify_insn_sharing (rtx insn)
2845{
2846 gcc_assert (INSN_P (insn));
44bf3f4e 2847 verify_rtx_sharing (PATTERN (insn), insn);
2848 verify_rtx_sharing (REG_NOTES (insn), insn);
1e9af25c 2849 if (CALL_P (insn))
44bf3f4e 2850 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
1e9af25c 2851}
2852
7cdd84a2 2853/* Go through all the RTL insn bodies and check that there is no unexpected
2854 sharing in between the subexpressions. */
2855
2856DEBUG_FUNCTION void
2857verify_rtl_sharing (void)
2858{
4cd001d5 2859 rtx_insn *p;
7cdd84a2 2860
2861 timevar_push (TV_VERIFY_RTL_SHARING);
2862
2863 reset_all_used_flags ();
1cd4cfea 2864
2865 for (p = get_insns (); p; p = NEXT_INSN (p))
2866 if (INSN_P (p))
2867 {
1e9af25c 2868 rtx pat = PATTERN (p);
2869 if (GET_CODE (pat) != SEQUENCE)
2870 verify_insn_sharing (p);
2871 else
2872 for (int i = 0; i < XVECLEN (pat, 0); i++)
11c8949c 2873 {
2874 rtx insn = XVECEXP (pat, 0, i);
2875 if (INSN_P (insn))
2876 verify_insn_sharing (insn);
2877 }
1cd4cfea 2878 }
4b366dd3 2879
7cdd84a2 2880 reset_all_used_flags ();
2881
4b366dd3 2882 timevar_pop (TV_VERIFY_RTL_SHARING);
1cd4cfea 2883}
2884
2d96a59a 2885/* Go through all the RTL insn bodies and copy any invalid shared structure.
2886 Assumes the mark bits are cleared at entry. */
2887
1cd4cfea 2888void
4cd001d5 2889unshare_all_rtl_in_chain (rtx_insn *insn)
2d96a59a 2890{
2891 for (; insn; insn = NEXT_INSN (insn))
9204e736 2892 if (INSN_P (insn))
2d96a59a 2893 {
2894 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2895 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
6d2a4bac 2896 if (CALL_P (insn))
2897 CALL_INSN_FUNCTION_USAGE (insn)
2898 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2d96a59a 2899 }
2900}
2901
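/* A minimal illustrative sketch, not part of the original file: a pass
   about to rewrite patterns in place can first break any accidental
   sharing over the whole insn chain.  Assumes the used flags are
   already clear, as the comment above requires.  */

static void
example_unshare_current_function (void)
{
  unshare_all_rtl_in_chain (get_insns ());
}
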
01dc9f0c 2902/* Go through all virtual stack slots of a function and mark them as
265be050 2903 shared. We never replace the DECL_RTLs themselves with a copy,
2904 but expressions mentioned into a DECL_RTL cannot be shared with
2905 expressions in the instruction stream.
2906
2907 Note that reload may convert pseudo registers into memories in-place.
2908 Pseudo registers are always shared, but MEMs never are. Thus if we
2909 reset the used flags on MEMs in the instruction stream, we must set
2910 them again on MEMs that appear in DECL_RTLs. */
2911
01dc9f0c 2912static void
265be050 2913set_used_decls (tree blk)
01dc9f0c 2914{
2915 tree t;
2916
2917 /* Mark decls. */
1767a056 2918 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
0e8e37b2 2919 if (DECL_RTL_SET_P (t))
265be050 2920 set_used_flags (DECL_RTL (t));
01dc9f0c 2921
2922 /* Now process sub-blocks. */
93110716 2923 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
265be050 2924 set_used_decls (t);
01dc9f0c 2925}
2926
15bbde2b 2927/* Mark ORIG as in use, and return a copy of it if it was already in use.
7ba6ce7a 2928 Recursively does the same for subexpressions. Uses
2929 copy_rtx_if_shared_1 to reduce stack space. */
15bbde2b 2930
2931rtx
35cb5232 2932copy_rtx_if_shared (rtx orig)
15bbde2b 2933{
0e0727c4 2934 copy_rtx_if_shared_1 (&orig);
2935 return orig;
2936}
2937
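/* A minimal illustrative sketch, not part of the original file: the
   first call below merely marks X as used; a second call then sees the
   used bit and hands back a copy, unless X has a freely shared code
   such as REG or CONST_INT.  */

static rtx
example_second_use (rtx x)
{
  reset_used_flags (x);
  rtx first = copy_rtx_if_shared (x);	/* Returns X itself.  */
  rtx second = copy_rtx_if_shared (x);	/* Returns a copy if copyable.  */
  gcc_assert (first == x);
  return second;
}
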
7ba6ce7a 2938/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2939 use. Recursively does the same for subexpressions. */
2940
0e0727c4 2941static void
2942copy_rtx_if_shared_1 (rtx *orig1)
2943{
2944 rtx x;
19cb6b50 2945 int i;
2946 enum rtx_code code;
0e0727c4 2947 rtx *last_ptr;
19cb6b50 2948 const char *format_ptr;
15bbde2b 2949 int copied = 0;
0e0727c4 2950 int length;
2951
2952 /* Repeat is used to turn tail-recursion into iteration. */
2953repeat:
2954 x = *orig1;
15bbde2b 2955
2956 if (x == 0)
0e0727c4 2957 return;
15bbde2b 2958
2959 code = GET_CODE (x);
2960
2961 /* These types may be freely shared. */
2962
2963 switch (code)
2964 {
2965 case REG:
688ff29b 2966 case DEBUG_EXPR:
2967 case VALUE:
0349edce 2968 CASE_CONST_ANY:
15bbde2b 2969 case SYMBOL_REF:
1cd4cfea 2970 case LABEL_REF:
15bbde2b 2971 case CODE_LABEL:
2972 case PC:
2973 case CC0:
e0691b9a 2974 case RETURN:
9cb2517e 2975 case SIMPLE_RETURN:
15bbde2b 2976 case SCRATCH:
a92771b8 2977 /* SCRATCHes must be shared because they represent distinct values. */
0e0727c4 2978 return;
c09425a0 2979 case CLOBBER:
b291008a 2980 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2981 clobbers or clobbers of hard registers that originated as pseudos.
2982 This is needed to allow safe register renaming. */
2b5f32ae 2983 if (REG_P (XEXP (x, 0))
2984 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2985 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
c09425a0 2986 return;
2987 break;
15bbde2b 2988
f63d12e3 2989 case CONST:
3072d30e 2990 if (shared_const_p (x))
0e0727c4 2991 return;
f63d12e3 2992 break;
2993
9845d120 2994 case DEBUG_INSN:
15bbde2b 2995 case INSN:
2996 case JUMP_INSN:
2997 case CALL_INSN:
2998 case NOTE:
15bbde2b 2999 case BARRIER:
3000 /* The chain of insns is not being copied. */
0e0727c4 3001 return;
15bbde2b 3002
0dbd1c74 3003 default:
3004 break;
15bbde2b 3005 }
3006
3007 /* This rtx may not be shared. If it has already been seen,
3008 replace it with a copy of itself. */
3009
7c25cb91 3010 if (RTX_FLAG (x, used))
15bbde2b 3011 {
f2d0e9f1 3012 x = shallow_copy_rtx (x);
15bbde2b 3013 copied = 1;
3014 }
7c25cb91 3015 RTX_FLAG (x, used) = 1;
15bbde2b 3016
3017 /* Now scan the subexpressions recursively.
3018 We can store any replaced subexpressions directly into X
3019 since we know X is not shared! Any vectors in X
3020 must be copied if X was copied. */
3021
3022 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 3023 length = GET_RTX_LENGTH (code);
3024 last_ptr = NULL;
48e1416a 3025
0e0727c4 3026 for (i = 0; i < length; i++)
15bbde2b 3027 {
3028 switch (*format_ptr++)
3029 {
3030 case 'e':
0e0727c4 3031 if (last_ptr)
3032 copy_rtx_if_shared_1 (last_ptr);
3033 last_ptr = &XEXP (x, i);
15bbde2b 3034 break;
3035
3036 case 'E':
3037 if (XVEC (x, i) != NULL)
3038 {
19cb6b50 3039 int j;
ffe0869b 3040 int len = XVECLEN (x, i);
48e1416a 3041
8b332087 3042 /* Copy the vector iff we copied the rtx and the length
3043 is nonzero. */
ffe0869b 3044 if (copied && len > 0)
a4070a91 3045 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
48e1416a 3046
d632b59a 3047 /* Call recursively on all the elements inside the vector. */
ffe0869b 3048 for (j = 0; j < len; j++)
0e0727c4 3049 {
3050 if (last_ptr)
3051 copy_rtx_if_shared_1 (last_ptr);
3052 last_ptr = &XVECEXP (x, i, j);
3053 }
15bbde2b 3054 }
3055 break;
3056 }
3057 }
0e0727c4 3058 *orig1 = x;
3059 if (last_ptr)
3060 {
3061 orig1 = last_ptr;
3062 goto repeat;
3063 }
3064 return;
15bbde2b 3065}
3066
709947e6 3067/* Set the USED bit in X and its non-shareable subparts to FLAG. */
15bbde2b 3068
709947e6 3069static void
3070mark_used_flags (rtx x, int flag)
15bbde2b 3071{
19cb6b50 3072 int i, j;
3073 enum rtx_code code;
3074 const char *format_ptr;
0e0727c4 3075 int length;
15bbde2b 3076
0e0727c4 3077 /* Repeat is used to turn tail-recursion into iteration. */
3078repeat:
15bbde2b 3079 if (x == 0)
3080 return;
3081
3082 code = GET_CODE (x);
3083
c3418f42 3084 /* These types may be freely shared so we needn't do any resetting
15bbde2b 3085 for them. */
3086
3087 switch (code)
3088 {
3089 case REG:
688ff29b 3090 case DEBUG_EXPR:
3091 case VALUE:
0349edce 3092 CASE_CONST_ANY:
15bbde2b 3093 case SYMBOL_REF:
3094 case CODE_LABEL:
3095 case PC:
3096 case CC0:
e0691b9a 3097 case RETURN:
9cb2517e 3098 case SIMPLE_RETURN:
15bbde2b 3099 return;
3100
9845d120 3101 case DEBUG_INSN:
15bbde2b 3102 case INSN:
3103 case JUMP_INSN:
3104 case CALL_INSN:
3105 case NOTE:
3106 case LABEL_REF:
3107 case BARRIER:
3108 /* The chain of insns is not being copied. */
3109 return;
d823ba47 3110
0dbd1c74 3111 default:
3112 break;
15bbde2b 3113 }
3114
709947e6 3115 RTX_FLAG (x, used) = flag;
15bbde2b 3116
3117 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 3118 length = GET_RTX_LENGTH (code);
48e1416a 3119
0e0727c4 3120 for (i = 0; i < length; i++)
15bbde2b 3121 {
3122 switch (*format_ptr++)
3123 {
3124 case 'e':
0e0727c4 3125 if (i == length-1)
3126 {
3127 x = XEXP (x, i);
3128 goto repeat;
3129 }
709947e6 3130 mark_used_flags (XEXP (x, i), flag);
15bbde2b 3131 break;
3132
3133 case 'E':
3134 for (j = 0; j < XVECLEN (x, i); j++)
709947e6 3135 mark_used_flags (XVECEXP (x, i, j), flag);
15bbde2b 3136 break;
3137 }
3138 }
3139}
1cd4cfea 3140
709947e6 3141/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1cd4cfea 3142 to look for shared sub-parts. */
3143
3144void
709947e6 3145reset_used_flags (rtx x)
1cd4cfea 3146{
709947e6 3147 mark_used_flags (x, 0);
3148}
1cd4cfea 3149
709947e6 3150/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3151 to look for shared sub-parts. */
1cd4cfea 3152
709947e6 3153void
3154set_used_flags (rtx x)
3155{
3156 mark_used_flags (x, 1);
1cd4cfea 3157}
15bbde2b 3158\f
3159/* Copy X if necessary so that it won't be altered by changes in OTHER.
3160 Return X or the rtx for the pseudo reg the value of X was copied into.
3161 OTHER must be valid as a SET_DEST. */
3162
3163rtx
35cb5232 3164make_safe_from (rtx x, rtx other)
15bbde2b 3165{
3166 while (1)
3167 switch (GET_CODE (other))
3168 {
3169 case SUBREG:
3170 other = SUBREG_REG (other);
3171 break;
3172 case STRICT_LOW_PART:
3173 case SIGN_EXTEND:
3174 case ZERO_EXTEND:
3175 other = XEXP (other, 0);
3176 break;
3177 default:
3178 goto done;
3179 }
3180 done:
e16ceb8e 3181 if ((MEM_P (other)
15bbde2b 3182 && ! CONSTANT_P (x)
8ad4c111 3183 && !REG_P (x)
15bbde2b 3184 && GET_CODE (x) != SUBREG)
8ad4c111 3185 || (REG_P (other)
15bbde2b 3186 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3187 || reg_mentioned_p (other, x))))
3188 {
3189 rtx temp = gen_reg_rtx (GET_MODE (x));
3190 emit_move_insn (temp, x);
3191 return temp;
3192 }
3193 return x;
3194}
3195\f
3196/* Emission of insns (adding them to the doubly-linked list). */
3197
15bbde2b 3198/* Return the last insn emitted, even if it is in a sequence now pushed. */
3199
447ab0fc 3200rtx_insn *
35cb5232 3201get_last_insn_anywhere (void)
15bbde2b 3202{
c36aa54b 3203 struct sequence_stack *seq;
3204 for (seq = get_current_sequence (); seq; seq = seq->next)
3205 if (seq->last != 0)
3206 return seq->last;
15bbde2b 3207 return 0;
3208}
3209
70545de4 3210/* Return the first nonnote insn emitted in the current sequence or
3211 the current function. This routine looks inside SEQUENCEs. */
3212
2eb8c261 3213rtx_insn *
35cb5232 3214get_first_nonnote_insn (void)
70545de4 3215{
4cd001d5 3216 rtx_insn *insn = get_insns ();
f86e856e 3217
3218 if (insn)
3219 {
3220 if (NOTE_P (insn))
3221 for (insn = next_insn (insn);
3222 insn && NOTE_P (insn);
3223 insn = next_insn (insn))
3224 continue;
3225 else
3226 {
1c14a50e 3227 if (NONJUMP_INSN_P (insn)
f86e856e 3228 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4cd001d5 3229 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
f86e856e 3230 }
3231 }
70545de4 3232
3233 return insn;
3234}
3235
3236/* Return the last nonnote insn emitted in the current sequence or
3237 the current function. This routine looks inside SEQUENCEs. */
3238
2eb8c261 3239rtx_insn *
35cb5232 3240get_last_nonnote_insn (void)
70545de4 3241{
4cd001d5 3242 rtx_insn *insn = get_last_insn ();
f86e856e 3243
3244 if (insn)
3245 {
3246 if (NOTE_P (insn))
3247 for (insn = previous_insn (insn);
3248 insn && NOTE_P (insn);
3249 insn = previous_insn (insn))
3250 continue;
3251 else
3252 {
4cd001d5 3253 if (NONJUMP_INSN_P (insn))
3254 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3255 insn = seq->insn (seq->len () - 1);
f86e856e 3256 }
3257 }
70545de4 3258
3259 return insn;
3260}
3261
9845d120 3262/* Return the number of actual (non-debug) insns emitted in this
3263 function. */
3264
3265int
3266get_max_insn_count (void)
3267{
3268 int n = cur_insn_uid;
3269
3270 /* The table size must be stable across -g, to avoid codegen
3271 differences due to debug insns, and not be affected by
3272 -fmin-insn-uid, to avoid excessive table size and to simplify
3273 debugging of -fcompare-debug failures. */
3274 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3275 n -= cur_debug_insn_uid;
3276 else
3277 n -= MIN_NONDEBUG_INSN_UID;
3278
3279 return n;
3280}
3281
15bbde2b 3282\f
3283/* Return the next insn. If it is a SEQUENCE, return the first insn
3284 of the sequence. */
3285
7bac25b3 3286rtx_insn *
50895eab 3287next_insn (rtx_insn *insn)
15bbde2b 3288{
ce4469fa 3289 if (insn)
3290 {
3291 insn = NEXT_INSN (insn);
3292 if (insn && NONJUMP_INSN_P (insn)
3293 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4cd001d5 3294 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
ce4469fa 3295 }
15bbde2b 3296
4cd001d5 3297 return insn;
15bbde2b 3298}
3299
3300/* Return the previous insn. If it is a SEQUENCE, return the last insn
3301 of the sequence. */
3302
7bac25b3 3303rtx_insn *
50895eab 3304previous_insn (rtx_insn *insn)
15bbde2b 3305{
ce4469fa 3306 if (insn)
3307 {
3308 insn = PREV_INSN (insn);
4cd001d5 3309 if (insn && NONJUMP_INSN_P (insn))
3310 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3311 insn = seq->insn (seq->len () - 1);
ce4469fa 3312 }
15bbde2b 3313
4cd001d5 3314 return insn;
15bbde2b 3315}
3316
3317/* Return the next insn after INSN that is not a NOTE. This routine does not
3318 look inside SEQUENCEs. */
3319
7bac25b3 3320rtx_insn *
4066f31e 3321next_nonnote_insn (rtx_insn *insn)
15bbde2b 3322{
ce4469fa 3323 while (insn)
3324 {
3325 insn = NEXT_INSN (insn);
3326 if (insn == 0 || !NOTE_P (insn))
3327 break;
3328 }
15bbde2b 3329
4cd001d5 3330 return insn;
15bbde2b 3331}
3332
c4d13c5c 3333/* Return the next insn after INSN that is not a NOTE, but stop the
3334 search before we enter another basic block. This routine does not
3335 look inside SEQUENCEs. */
3336
7bac25b3 3337rtx_insn *
2eb8c261 3338next_nonnote_insn_bb (rtx_insn *insn)
c4d13c5c 3339{
3340 while (insn)
3341 {
3342 insn = NEXT_INSN (insn);
3343 if (insn == 0 || !NOTE_P (insn))
3344 break;
3345 if (NOTE_INSN_BASIC_BLOCK_P (insn))
7bac25b3 3346 return NULL;
c4d13c5c 3347 }
3348
4cd001d5 3349 return insn;
c4d13c5c 3350}
3351
15bbde2b 3352/* Return the previous insn before INSN that is not a NOTE. This routine does
3353 not look inside SEQUENCEs. */
3354
7bac25b3 3355rtx_insn *
4066f31e 3356prev_nonnote_insn (rtx_insn *insn)
15bbde2b 3357{
ce4469fa 3358 while (insn)
3359 {
3360 insn = PREV_INSN (insn);
3361 if (insn == 0 || !NOTE_P (insn))
3362 break;
3363 }
15bbde2b 3364
4cd001d5 3365 return insn;
15bbde2b 3366}
3367
bcc66782 3368/* Return the previous insn before INSN that is not a NOTE, but stop
3369 the search before we enter another basic block. This routine does
3370 not look inside SEQUENCEs. */
3371
7bac25b3 3372rtx_insn *
89bcfdab 3373prev_nonnote_insn_bb (rtx_insn *insn)
bcc66782 3374{
bcc66782 3376 while (insn)
3377 {
3378 insn = PREV_INSN (insn);
3379 if (insn == 0 || !NOTE_P (insn))
3380 break;
3381 if (NOTE_INSN_BASIC_BLOCK_P (insn))
7bac25b3 3382 return NULL;
bcc66782 3383 }
3384
4cd001d5 3385 return insn;
bcc66782 3386}
3387
9845d120 3388/* Return the next insn after INSN that is not a DEBUG_INSN. This
3389 routine does not look inside SEQUENCEs. */
3390
7bac25b3 3391rtx_insn *
3a8bb9ce 3392next_nondebug_insn (rtx_insn *insn)
9845d120 3393{
3394 while (insn)
3395 {
3396 insn = NEXT_INSN (insn);
3397 if (insn == 0 || !DEBUG_INSN_P (insn))
3398 break;
3399 }
3400
4cd001d5 3401 return insn;
9845d120 3402}
3403
3404/* Return the previous insn before INSN that is not a DEBUG_INSN.
3405 This routine does not look inside SEQUENCEs. */
3406
7bac25b3 3407rtx_insn *
3a8bb9ce 3408prev_nondebug_insn (rtx_insn *insn)
9845d120 3409{
3410 while (insn)
3411 {
3412 insn = PREV_INSN (insn);
3413 if (insn == 0 || !DEBUG_INSN_P (insn))
3414 break;
3415 }
3416
4cd001d5 3417 return insn;
9845d120 3418}
3419
5b8537a8 3420/* Return the next insn after INSN that is neither a NOTE nor a DEBUG_INSN.
3421 This routine does not look inside SEQUENCEs. */
3422
7bac25b3 3423rtx_insn *
15b80194 3424next_nonnote_nondebug_insn (rtx_insn *insn)
5b8537a8 3425{
3426 while (insn)
3427 {
3428 insn = NEXT_INSN (insn);
3429 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3430 break;
3431 }
3432
4cd001d5 3433 return insn;
5b8537a8 3434}
3435
 3436/* Return the previous insn before INSN that is neither a NOTE nor a DEBUG_INSN.
3437 This routine does not look inside SEQUENCEs. */
3438
7bac25b3 3439rtx_insn *
15b80194 3440prev_nonnote_nondebug_insn (rtx_insn *insn)
5b8537a8 3441{
3442 while (insn)
3443 {
3444 insn = PREV_INSN (insn);
3445 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3446 break;
3447 }
3448
4cd001d5 3449 return insn;
5b8537a8 3450}
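
/* A hedged sketch of composing these walkers (the helper name is an
   assumption): fetch the previous insn that is neither a NOTE nor a
   DEBUG_INSN and test whether it is a jump.  */

static bool ATTRIBUTE_UNUSED
preceded_by_jump_p (rtx_insn *insn)
{
  rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
  return prev != NULL && JUMP_P (prev);
}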
3451
15bbde2b 3452/* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
 3453 or 0 if there is none.  This routine does not look inside
a92771b8 3454 SEQUENCEs. */
15bbde2b 3455
7bac25b3 3456rtx_insn *
4cd001d5 3457next_real_insn (rtx uncast_insn)
15bbde2b 3458{
4cd001d5 3459 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3460
ce4469fa 3461 while (insn)
3462 {
3463 insn = NEXT_INSN (insn);
3464 if (insn == 0 || INSN_P (insn))
3465 break;
3466 }
15bbde2b 3467
4cd001d5 3468 return insn;
15bbde2b 3469}
3470
 3471/* Return the previous INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
 3472 or 0 if there is none.  This routine does not look inside
 3473 SEQUENCEs. */
3474
7bac25b3 3475rtx_insn *
4067fcc6 3476prev_real_insn (rtx_insn *insn)
15bbde2b 3477{
ce4469fa 3478 while (insn)
3479 {
3480 insn = PREV_INSN (insn);
3481 if (insn == 0 || INSN_P (insn))
3482 break;
3483 }
15bbde2b 3484
4cd001d5 3485 return insn;
15bbde2b 3486}
3487
d5f9786f 3488/* Return the last CALL_INSN in the current list, or 0 if there is none.
3489 This routine does not look inside SEQUENCEs. */
3490
ec22da62 3491rtx_call_insn *
35cb5232 3492last_call_insn (void)
d5f9786f 3493{
ec22da62 3494 rtx_insn *insn;
d5f9786f 3495
3496 for (insn = get_last_insn ();
6d7dc5b9 3497 insn && !CALL_P (insn);
d5f9786f 3498 insn = PREV_INSN (insn))
3499 ;
3500
ec22da62 3501 return safe_as_a <rtx_call_insn *> (insn);
d5f9786f 3502}
3503
15bbde2b 3504/* Find the next insn after INSN that really does something. This routine
084950ee 3505 does not look inside SEQUENCEs. After reload this also skips over
 3506 standalone USE and CLOBBER insns. */
15bbde2b 3507
2215ca0d 3508int
41503955 3509active_insn_p (const rtx_insn *insn)
2215ca0d 3510{
6d7dc5b9 3511 return (CALL_P (insn) || JUMP_P (insn)
91f71fa3 3512 || JUMP_TABLE_DATA_P (insn) /* FIXME */
6d7dc5b9 3513 || (NONJUMP_INSN_P (insn)
3a66feab 3514 && (! reload_completed
3515 || (GET_CODE (PATTERN (insn)) != USE
3516 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2215ca0d 3517}
3518
7bac25b3 3519rtx_insn *
41503955 3520next_active_insn (rtx_insn *insn)
15bbde2b 3521{
ce4469fa 3522 while (insn)
3523 {
3524 insn = NEXT_INSN (insn);
3525 if (insn == 0 || active_insn_p (insn))
3526 break;
3527 }
15bbde2b 3528
4cd001d5 3529 return insn;
15bbde2b 3530}
3531
3532/* Find the last insn before INSN that really does something. This routine
084950ee 3533 does not look inside SEQUENCEs. After reload this also skips over
 3534 standalone USE and CLOBBER insns. */
15bbde2b 3535
7bac25b3 3536rtx_insn *
41503955 3537prev_active_insn (rtx_insn *insn)
15bbde2b 3538{
ce4469fa 3539 while (insn)
3540 {
3541 insn = PREV_INSN (insn);
3542 if (insn == 0 || active_insn_p (insn))
3543 break;
3544 }
15bbde2b 3545
4cd001d5 3546 return insn;
15bbde2b 3547}
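
/* Illustrative sketch (the helper name is an assumption): because
   active_insn_p rejects notes and, after reload, standalone USE and
   CLOBBER insns, adjacency of code-emitting insns reduces to one call.  */

static bool ATTRIBUTE_UNUSED
active_insns_adjacent_p (rtx_insn *a, rtx_insn *b)
{
  return next_active_insn (a) == b;
}
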
15bbde2b 3548\f
15bbde2b 3549/* Return the next insn that uses CC0 after INSN, which is assumed to
3550 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3551 applied to the result of this function should yield INSN).
3552
3553 Normally, this is simply the next insn. However, if a REG_CC_USER note
3554 is present, it contains the insn that uses CC0.
3555
3556 Return 0 if we can't find the insn. */
3557
0be88abd 3558rtx_insn *
924a5cee 3559next_cc0_user (rtx_insn *insn)
15bbde2b 3560{
b572011e 3561 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 3562
3563 if (note)
0be88abd 3564 return safe_as_a <rtx_insn *> (XEXP (note, 0));
15bbde2b 3565
3566 insn = next_nonnote_insn (insn);
6d7dc5b9 3567 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4cd001d5 3568 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
15bbde2b 3569
9204e736 3570 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
4cd001d5 3571 return insn;
15bbde2b 3572
3573 return 0;
3574}
3575
3576/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3577 note, it is the previous insn. */
3578
0be88abd 3579rtx_insn *
fd8b0a1a 3580prev_cc0_setter (rtx_insn *insn)
15bbde2b 3581{
b572011e 3582 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 3583
3584 if (note)
0be88abd 3585 return safe_as_a <rtx_insn *> (XEXP (note, 0));
15bbde2b 3586
3587 insn = prev_nonnote_insn (insn);
611234b4 3588 gcc_assert (sets_cc0_p (PATTERN (insn)));
15bbde2b 3589
4cd001d5 3590 return insn;
15bbde2b 3591}
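
/* A sketch of the pairing contract documented above, assuming SETTER
   is known to set cc0: next_cc0_user and prev_cc0_setter should
   round-trip whenever a user exists.  The helper is illustrative.  */

static void ATTRIBUTE_UNUSED
verify_cc0_pair (rtx_insn *setter)
{
  rtx_insn *user = next_cc0_user (setter);
  if (user != NULL)
    gcc_checking_assert (prev_cc0_setter (user) == setter);
}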
344dc2fa 3592
698ff1f0 3593/* Return true if X contains an RTX_AUTOINC class rtx whose operand is REG. */
 3594
 3595static bool
4073adaa 3596find_auto_inc (const_rtx x, const_rtx reg)
698ff1f0 3597{
4073adaa 3598 subrtx_iterator::array_type array;
3599 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
698ff1f0 3600 {
4073adaa 3601 const_rtx x = *iter;
3602 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3603 && rtx_equal_p (reg, XEXP (x, 0)))
3604 return true;
698ff1f0 3605 }
4073adaa 3606 return false;
698ff1f0 3607}
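
/* A minimal sketch of the same FOR_EACH_SUBRTX idiom (the helper name
   is an assumption, not file API): return true if X contains a MEM
   subexpression anywhere.  */

static bool ATTRIBUTE_UNUSED
contains_mem_p (const_rtx x)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (MEM_P (*iter))
      return true;
  return false;
}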
698ff1f0 3608
344dc2fa 3609/* Increment the label uses for all labels present in X. */
3610
3611static void
35cb5232 3612mark_label_nuses (rtx x)
344dc2fa 3613{
19cb6b50 3614 enum rtx_code code;
3615 int i, j;
3616 const char *fmt;
344dc2fa 3617
3618 code = GET_CODE (x);
c7799456 3619 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3620 LABEL_NUSES (label_ref_label (x))++;
344dc2fa 3621
3622 fmt = GET_RTX_FORMAT (code);
3623 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3624 {
3625 if (fmt[i] == 'e')
ff385626 3626 mark_label_nuses (XEXP (x, i));
344dc2fa 3627 else if (fmt[i] == 'E')
ff385626 3628 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
344dc2fa 3629 mark_label_nuses (XVECEXP (x, i, j));
3630 }
3631}
3632
15bbde2b 3633\f
3634/* Try splitting insns that can be split for better scheduling.
3635 PAT is the pattern which might split.
3636 TRIAL is the insn providing PAT.
6ef828f9 3637 LAST is nonzero if we should return the last insn of the sequence produced.
15bbde2b 3638
3639 If this routine succeeds in splitting, it returns the first or last
0e69a50a 3640 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 3641 returns TRIAL. If the insn to be returned can be split, it will be. */
3642
bffa1357 3643rtx_insn *
58a87a29 3644try_split (rtx pat, rtx_insn *trial, int last)
15bbde2b 3645{
3b50f202 3646 rtx_insn *before, *after;
4cd001d5 3647 rtx note;
3648 rtx_insn *seq, *tem;
61cb1816 3649 profile_probability probability;
4cd001d5 3650 rtx_insn *insn_last, *insn;
e13693ec 3651 int njumps = 0;
9ed997be 3652 rtx_insn *call_insn = NULL;
3cd757b1 3653
25e880b1 3654 /* We're not good at redistributing frame information. */
3655 if (RTX_FRAME_RELATED_P (trial))
4cd001d5 3656 return trial;
25e880b1 3657
3cd757b1 3658 if (any_condjump_p (trial)
3659 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
61cb1816 3660 split_branch_probability
3661 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3662 else
3663 split_branch_probability = profile_probability::uninitialized ();
3664
3cd757b1 3665 probability = split_branch_probability;
3666
58a87a29 3667 seq = split_insns (pat, trial);
3cd757b1 3668
61cb1816 3669 split_branch_probability = profile_probability::uninitialized ();
15bbde2b 3670
e13693ec 3671 if (!seq)
4cd001d5 3672 return trial;
e13693ec 3673
3674 /* Avoid infinite loop if any insn of the result matches
3675 the original pattern. */
3676 insn_last = seq;
3677 while (1)
15bbde2b 3678 {
e13693ec 3679 if (INSN_P (insn_last)
3680 && rtx_equal_p (PATTERN (insn_last), pat))
4cd001d5 3681 return trial;
e13693ec 3682 if (!NEXT_INSN (insn_last))
3683 break;
3684 insn_last = NEXT_INSN (insn_last);
3685 }
d823ba47 3686
3072d30e 3687 /* We will be adding the new sequence to the function. The splitters
3688 may have introduced invalid RTL sharing, so unshare the sequence now. */
3689 unshare_all_rtl_in_chain (seq);
3690
8f869004 3691 /* Mark labels and copy flags. */
e13693ec 3692 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3693 {
6d7dc5b9 3694 if (JUMP_P (insn))
e13693ec 3695 {
8f869004 3696 if (JUMP_P (trial))
3697 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
e13693ec 3698 mark_jump_label (PATTERN (insn), insn, 0);
3699 njumps++;
61cb1816 3700 if (probability.initialized_p ()
e13693ec 3701 && any_condjump_p (insn)
3702 && !find_reg_note (insn, REG_BR_PROB, 0))
31d3e01c 3703 {
e13693ec 3704 /* We can preserve the REG_BR_PROB notes only if exactly
3705 one jump is created, otherwise the machine description
3706 is responsible for this step using
3707 split_branch_probability variable. */
611234b4 3708 gcc_assert (njumps == 1);
61cb1816 3709 add_reg_br_prob_note (insn, probability);
31d3e01c 3710 }
e13693ec 3711 }
3712 }
3713
3714 /* If we are splitting a CALL_INSN, look for the CALL_INSN
b0bd0491 3715 in SEQ and copy any additional information across. */
6d7dc5b9 3716 if (CALL_P (trial))
e13693ec 3717 {
3718 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
6d7dc5b9 3719 if (CALL_P (insn))
e13693ec 3720 {
4cd001d5 3721 rtx_insn *next;
3722 rtx *p;
b0bd0491 3723
2e3b0d0f 3724 gcc_assert (call_insn == NULL_RTX);
3725 call_insn = insn;
3726
b0bd0491 3727 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3728 target may have explicitly specified. */
3729 p = &CALL_INSN_FUNCTION_USAGE (insn);
0bb5a6cd 3730 while (*p)
3731 p = &XEXP (*p, 1);
3732 *p = CALL_INSN_FUNCTION_USAGE (trial);
b0bd0491 3733
3734 /* If the old call was a sibling call, the new one must
3735 be too. */
e13693ec 3736 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
b0bd0491 3737
3738 /* If the new call is the last instruction in the sequence,
3739 it will effectively replace the old call in-situ. Otherwise
3740 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3741 so that it comes immediately after the new call. */
3742 if (NEXT_INSN (insn))
47e1410d 3743 for (next = NEXT_INSN (trial);
3744 next && NOTE_P (next);
3745 next = NEXT_INSN (next))
3746 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
b0bd0491 3747 {
3748 remove_insn (next);
3749 add_insn_after (next, insn, NULL);
47e1410d 3750 break;
b0bd0491 3751 }
e13693ec 3752 }
3753 }
5262c253 3754
e13693ec 3755 /* Copy notes, particularly those related to the CFG. */
3756 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3757 {
3758 switch (REG_NOTE_KIND (note))
3759 {
3760 case REG_EH_REGION:
e38def9c 3761 copy_reg_eh_region_note_backward (note, insn_last, NULL);
e13693ec 3762 break;
381eb1e7 3763
e13693ec 3764 case REG_NORETURN:
3765 case REG_SETJMP:
4c0315d0 3766 case REG_TM:
698ff1f0 3767 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
381eb1e7 3768 {
6d7dc5b9 3769 if (CALL_P (insn))
a1ddb869 3770 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
381eb1e7 3771 }
e13693ec 3772 break;
5bb27a4b 3773
e13693ec 3774 case REG_NON_LOCAL_GOTO:
698ff1f0 3775 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
31d3e01c 3776 {
6d7dc5b9 3777 if (JUMP_P (insn))
a1ddb869 3778 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
31d3e01c 3779 }
e13693ec 3780 break;
344dc2fa 3781
698ff1f0 3782 case REG_INC:
32aa77d9 3783 if (!AUTO_INC_DEC)
3784 break;
3785
698ff1f0 3786 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3787 {
3788 rtx reg = XEXP (note, 0);
3789 if (!FIND_REG_INC_NOTE (insn, reg)
4073adaa 3790 && find_auto_inc (PATTERN (insn), reg))
a1ddb869 3791 add_reg_note (insn, REG_INC, reg);
698ff1f0 3792 }
3793 break;
698ff1f0 3794
dfe00a8f 3795 case REG_ARGS_SIZE:
32f1a0c8 3796 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
dfe00a8f 3797 break;
3798
2e3b0d0f 3799 case REG_CALL_DECL:
3800 gcc_assert (call_insn != NULL_RTX);
3801 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3802 break;
3803
e13693ec 3804 default:
3805 break;
15bbde2b 3806 }
e13693ec 3807 }
3808
3809 /* If there are LABELS inside the split insns increment the
3810 usage count so we don't delete the label. */
19d2fe05 3811 if (INSN_P (trial))
e13693ec 3812 {
3813 insn = insn_last;
3814 while (insn != NULL_RTX)
15bbde2b 3815 {
19d2fe05 3816 /* JUMP_P insns have already been "marked" above. */
6d7dc5b9 3817 if (NONJUMP_INSN_P (insn))
e13693ec 3818 mark_label_nuses (PATTERN (insn));
15bbde2b 3819
e13693ec 3820 insn = PREV_INSN (insn);
3821 }
15bbde2b 3822 }
3823
3b50f202 3824 before = PREV_INSN (trial);
3825 after = NEXT_INSN (trial);
3826
5169661d 3827 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
e13693ec 3828
3829 delete_insn (trial);
e13693ec 3830
3831 /* Recursively call try_split for each new insn created; by the
3832 time control returns here that insn will be fully split, so
3833 set LAST and continue from the insn after the one returned.
3834 We can't use next_active_insn here since AFTER may be a note.
 3835 Ignore deleted insns, which can occur if not optimizing. */
3836 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
dd1286fb 3837 if (! tem->deleted () && INSN_P (tem))
e13693ec 3838 tem = try_split (PATTERN (tem), tem, 1);
3839
3840 /* Return either the first or the last insn, depending on which was
3841 requested. */
3842 return last
06f9d6ef 3843 ? (after ? PREV_INSN (after) : get_last_insn ())
e13693ec 3844 : NEXT_INSN (before);
15bbde2b 3845}
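
/* A usage sketch for try_split, assuming INSN lives in the current
   chain: ask the machine description to split INSN, receiving the last
   replacement insn, or INSN itself if no splitter applied.  */

static rtx_insn * ATTRIBUTE_UNUSED
split_one_insn (rtx_insn *insn)
{
  return try_split (PATTERN (insn), insn, 1);
}
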
3846\f
3847/* Make and return an INSN rtx, initializing all its slots.
6a84e367 3848 Store PATTERN in the pattern slot. */
15bbde2b 3849
2c57d586 3850rtx_insn *
35cb5232 3851make_insn_raw (rtx pattern)
15bbde2b 3852{
2c57d586 3853 rtx_insn *insn;
15bbde2b 3854
2c57d586 3855 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
15bbde2b 3856
575333f9 3857 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3858 PATTERN (insn) = pattern;
3859 INSN_CODE (insn) = -1;
fc92fa61 3860 REG_NOTES (insn) = NULL;
5169661d 3861 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3862 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3863
fe7f701d 3864#ifdef ENABLE_RTL_CHECKING
3865 if (insn
9204e736 3866 && INSN_P (insn)
fe7f701d 3867 && (returnjump_p (insn)
3868 || (GET_CODE (insn) == SET
3869 && SET_DEST (insn) == pc_rtx)))
3870 {
c3ceba8e 3871 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
fe7f701d 3872 debug_rtx (insn);
3873 }
3874#endif
d823ba47 3875
15bbde2b 3876 return insn;
3877}
3878
9845d120 3879/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3880
2c57d586 3881static rtx_insn *
9845d120 3882make_debug_insn_raw (rtx pattern)
3883{
2c57d586 3884 rtx_debug_insn *insn;
9845d120 3885
2c57d586 3886 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
9845d120 3887 INSN_UID (insn) = cur_debug_insn_uid++;
3888 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3889 INSN_UID (insn) = cur_insn_uid++;
3890
3891 PATTERN (insn) = pattern;
3892 INSN_CODE (insn) = -1;
3893 REG_NOTES (insn) = NULL;
5169661d 3894 INSN_LOCATION (insn) = curr_insn_location ();
9845d120 3895 BLOCK_FOR_INSN (insn) = NULL;
3896
3897 return insn;
3898}
3899
31d3e01c 3900/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
15bbde2b 3901
2c57d586 3902static rtx_insn *
35cb5232 3903make_jump_insn_raw (rtx pattern)
15bbde2b 3904{
2c57d586 3905 rtx_jump_insn *insn;
15bbde2b 3906
2c57d586 3907 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
fc92fa61 3908 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3909
3910 PATTERN (insn) = pattern;
3911 INSN_CODE (insn) = -1;
fc92fa61 3912 REG_NOTES (insn) = NULL;
3913 JUMP_LABEL (insn) = NULL;
5169661d 3914 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3915 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3916
3917 return insn;
3918}
6e911104 3919
31d3e01c 3920/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
6e911104 3921
2c57d586 3922static rtx_insn *
35cb5232 3923make_call_insn_raw (rtx pattern)
6e911104 3924{
2c57d586 3925 rtx_call_insn *insn;
6e911104 3926
2c57d586 3927 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
6e911104 3928 INSN_UID (insn) = cur_insn_uid++;
3929
3930 PATTERN (insn) = pattern;
3931 INSN_CODE (insn) = -1;
6e911104 3932 REG_NOTES (insn) = NULL;
3933 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
5169661d 3934 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3935 BLOCK_FOR_INSN (insn) = NULL;
6e911104 3936
3937 return insn;
3938}
35f3420b 3939
3940/* Like `make_insn_raw' but make a NOTE instead of an insn. */
3941
cef3d8ad 3942static rtx_note *
35f3420b 3943make_note_raw (enum insn_note subtype)
3944{
3945 /* Some notes are never created this way at all. These notes are
3946 only created by patching out insns. */
3947 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3948 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3949
cef3d8ad 3950 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
35f3420b 3951 INSN_UID (note) = cur_insn_uid++;
3952 NOTE_KIND (note) = subtype;
3953 BLOCK_FOR_INSN (note) = NULL;
3954 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3955 return note;
3956}
15bbde2b 3957\f
35f3420b 3958/* Link INSN into the doubly-linked list between PREV and NEXT.
3959 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3960 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3961
3962static inline void
3e75e92b 3963link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
35f3420b 3964{
4a57a2e8 3965 SET_PREV_INSN (insn) = prev;
3966 SET_NEXT_INSN (insn) = next;
35f3420b 3967 if (prev != NULL)
3968 {
4a57a2e8 3969 SET_NEXT_INSN (prev) = insn;
35f3420b 3970 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3971 {
f17e3fff 3972 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3973 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
35f3420b 3974 }
3975 }
3976 if (next != NULL)
3977 {
4a57a2e8 3978 SET_PREV_INSN (next) = insn;
35f3420b 3979 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
f17e3fff 3980 {
3981 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3982 SET_PREV_INSN (sequence->insn (0)) = insn;
3983 }
35f3420b 3984 }
34f5b9ac 3985
3986 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3987 {
f17e3fff 3988 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3989 SET_PREV_INSN (sequence->insn (0)) = prev;
3990 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
34f5b9ac 3991 }
35f3420b 3992}
3993
15bbde2b 3994/* Add INSN to the end of the doubly-linked list.
3995 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3996
3997void
3e75e92b 3998add_insn (rtx_insn *insn)
15bbde2b 3999{
3e75e92b 4000 rtx_insn *prev = get_last_insn ();
35f3420b 4001 link_insn_into_chain (insn, prev, NULL);
06f9d6ef 4002 if (get_insns () == NULL)
4003 set_first_insn (insn);
06f9d6ef 4004 set_last_insn (insn);
15bbde2b 4005}
4006
35f3420b 4007/* Add INSN into the doubly-linked list after insn AFTER. */
15bbde2b 4008
35f3420b 4009static void
3e75e92b 4010add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
15bbde2b 4011{
3e75e92b 4012 rtx_insn *next = NEXT_INSN (after);
15bbde2b 4013
dd1286fb 4014 gcc_assert (!optimize || !after->deleted ());
f65c10c0 4015
35f3420b 4016 link_insn_into_chain (insn, after, next);
15bbde2b 4017
35f3420b 4018 if (next == NULL)
15bbde2b 4019 {
c36aa54b 4020 struct sequence_stack *seq;
4021
4022 for (seq = get_current_sequence (); seq; seq = seq->next)
4023 if (after == seq->last)
4024 {
4025 seq->last = insn;
4026 break;
4027 }
15bbde2b 4028 }
35f3420b 4029}
4030
4031/* Add INSN into the doubly-linked list before insn BEFORE. */
4032
4033static void
3e75e92b 4034add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
35f3420b 4035{
3e75e92b 4036 rtx_insn *prev = PREV_INSN (before);
35f3420b 4037
dd1286fb 4038 gcc_assert (!optimize || !before->deleted ());
35f3420b 4039
4040 link_insn_into_chain (insn, prev, before);
4041
4042 if (prev == NULL)
15bbde2b 4043 {
c36aa54b 4044 struct sequence_stack *seq;
312de84d 4045
c36aa54b 4046 for (seq = get_current_sequence (); seq; seq = seq->next)
4047 if (before == seq->first)
4048 {
4049 seq->first = insn;
4050 break;
4051 }
4052
4053 gcc_assert (seq);
15bbde2b 4054 }
35f3420b 4055}
4056
4057/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
 4058 If BB is NULL, an attempt is made to infer the bb from AFTER.
4059
4060 This and the next function should be the only functions called
4061 to insert an insn once delay slots have been filled since only
4062 they know how to update a SEQUENCE. */
15bbde2b 4063
35f3420b 4064void
3e75e92b 4065add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
35f3420b 4066{
26bb3cb2 4067 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
3e75e92b 4068 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
35f3420b 4069 add_insn_after_nobb (insn, after);
6d7dc5b9 4070 if (!BARRIER_P (after)
4071 && !BARRIER_P (insn)
9dda7915 4072 && (bb = BLOCK_FOR_INSN (after)))
4073 {
4074 set_block_for_insn (insn, bb);
308f9b79 4075 if (INSN_P (insn))
3072d30e 4076 df_insn_rescan (insn);
9dda7915 4077 /* Should not happen as first in the BB is always
3fb1e43b 4078 either NOTE or LABEL. */
5496dbfc 4079 if (BB_END (bb) == after
9dda7915 4080 /* Avoid clobbering of structure when creating new BB. */
6d7dc5b9 4081 && !BARRIER_P (insn)
ad4583d9 4082 && !NOTE_INSN_BASIC_BLOCK_P (insn))
26bb3cb2 4083 BB_END (bb) = insn;
9dda7915 4084 }
15bbde2b 4085}
4086
35f3420b 4087/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
 4088 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4089
4090 This and the previous function should be the only functions called
4091 to insert an insn once delay slots have been filled since only
4092 they know how to update a SEQUENCE. */
312de84d 4093
4094void
3e75e92b 4095add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
312de84d 4096{
3e75e92b 4097 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4098 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
35f3420b 4099 add_insn_before_nobb (insn, before);
312de84d 4100
48e1416a 4101 if (!bb
3072d30e 4102 && !BARRIER_P (before)
4103 && !BARRIER_P (insn))
4104 bb = BLOCK_FOR_INSN (before);
4105
4106 if (bb)
9dda7915 4107 {
4108 set_block_for_insn (insn, bb);
308f9b79 4109 if (INSN_P (insn))
3072d30e 4110 df_insn_rescan (insn);
611234b4 4111 /* Should not happen as first in the BB is always either NOTE or
ba821eb1 4112 LABEL. */
611234b4 4113 gcc_assert (BB_HEAD (bb) != insn
4114 /* Avoid clobbering of structure when creating new BB. */
4115 || BARRIER_P (insn)
ad4583d9 4116 || NOTE_INSN_BASIC_BLOCK_P (insn));
9dda7915 4117 }
312de84d 4118}
4119
3072d30e 4120/* Replace INSN with a deleted instruction note. */
4121
fc3d1695 4122void
4123set_insn_deleted (rtx insn)
3072d30e 4124{
91f71fa3 4125 if (INSN_P (insn))
e149ca56 4126 df_insn_delete (as_a <rtx_insn *> (insn));
3072d30e 4127 PUT_CODE (insn, NOTE);
4128 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4129}
4130
4131
93ff53d3 4132/* Unlink INSN from the insn chain.
4133
4134 This function knows how to handle sequences.
4135
4136 This function does not invalidate data flow information associated with
4137 INSN (i.e. does not call df_insn_delete). That makes this function
 4138 usable for merely disconnecting an insn from the chain and re-emitting
 4139 it elsewhere later.
4140
4141 To later insert INSN elsewhere in the insn chain via add_insn and
4142 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4143 the caller. Nullifying them here breaks many insn chain walks.
4144
4145 To really delete an insn and related DF information, use delete_insn. */
4146
7ddcf2bf 4147void
4cd001d5 4148remove_insn (rtx uncast_insn)
7ddcf2bf 4149{
4cd001d5 4150 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
26bb3cb2 4151 rtx_insn *next = NEXT_INSN (insn);
4152 rtx_insn *prev = PREV_INSN (insn);
e4bf866d 4153 basic_block bb;
4154
7ddcf2bf 4155 if (prev)
4156 {
4a57a2e8 4157 SET_NEXT_INSN (prev) = next;
6d7dc5b9 4158 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
7ddcf2bf 4159 {
f17e3fff 4160 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4161 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
7ddcf2bf 4162 }
4163 }
7ddcf2bf 4164 else
4165 {
c36aa54b 4166 struct sequence_stack *seq;
4167
4168 for (seq = get_current_sequence (); seq; seq = seq->next)
4169 if (insn == seq->first)
7ddcf2bf 4170 {
c36aa54b 4171 seq->first = next;
7ddcf2bf 4172 break;
4173 }
4174
c36aa54b 4175 gcc_assert (seq);
7ddcf2bf 4176 }
4177
4178 if (next)
4179 {
4a57a2e8 4180 SET_PREV_INSN (next) = prev;
6d7dc5b9 4181 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
f17e3fff 4182 {
4183 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4184 SET_PREV_INSN (sequence->insn (0)) = prev;
4185 }
7ddcf2bf 4186 }
7ddcf2bf 4187 else
4188 {
c36aa54b 4189 struct sequence_stack *seq;
4190
4191 for (seq = get_current_sequence (); seq; seq = seq->next)
4192 if (insn == seq->last)
7ddcf2bf 4193 {
c36aa54b 4194 seq->last = prev;
7ddcf2bf 4195 break;
4196 }
4197
c36aa54b 4198 gcc_assert (seq);
7ddcf2bf 4199 }
b983ea33 4200
b983ea33 4201 /* Fix up basic block boundaries, if necessary. */
6d7dc5b9 4202 if (!BARRIER_P (insn)
e4bf866d 4203 && (bb = BLOCK_FOR_INSN (insn)))
4204 {
5496dbfc 4205 if (BB_HEAD (bb) == insn)
e4bf866d 4206 {
f4aee538 4207 /* Never ever delete the basic block note without deleting the
 4208 whole basic block. */
611234b4 4209 gcc_assert (!NOTE_P (insn));
26bb3cb2 4210 BB_HEAD (bb) = next;
e4bf866d 4211 }
5496dbfc 4212 if (BB_END (bb) == insn)
26bb3cb2 4213 BB_END (bb) = prev;
e4bf866d 4214 }
7ddcf2bf 4215}
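
/* A sketch of the detach/re-emit workflow described above remove_insn,
   assuming AFTER is a valid insertion point in the same function:
   unlink INSN, nullify its links as the comment requires, re-link it.  */

static void ATTRIBUTE_UNUSED
move_insn_after_sketch (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, after, NULL);
}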
4216
d5f9786f 4217/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4218
4219void
35cb5232 4220add_function_usage_to (rtx call_insn, rtx call_fusage)
d5f9786f 4221{
611234b4 4222 gcc_assert (call_insn && CALL_P (call_insn));
d5f9786f 4223
4224 /* Put the register usage information on the CALL. If there is already
4225 some usage information, put ours at the end. */
4226 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4227 {
4228 rtx link;
4229
4230 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4231 link = XEXP (link, 1))
4232 ;
4233
4234 XEXP (link, 1) = call_fusage;
4235 }
4236 else
4237 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4238}
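
/* A sketch of building CALL_FUSAGE for the function above, where REG
   is an assumed hard register rtx: record a USE of REG on CALL_INSN.  */

static void ATTRIBUTE_UNUSED
record_call_use_sketch (rtx call_insn, rtx reg)
{
  rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                  gen_rtx_USE (VOIDmode, reg), NULL_RTX);
  add_function_usage_to (call_insn, fusage);
}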
4239
15bbde2b 4240/* Delete all insns made since FROM.
4241 FROM becomes the new last instruction. */
4242
4243void
57c26b3a 4244delete_insns_since (rtx_insn *from)
15bbde2b 4245{
4246 if (from == 0)
06f9d6ef 4247 set_first_insn (0);
15bbde2b 4248 else
4a57a2e8 4249 SET_NEXT_INSN (from) = 0;
06f9d6ef 4250 set_last_insn (from);
15bbde2b 4251}
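
/* The classic rollback idiom built on delete_insns_since; the emitted
   pattern and the recog check are placeholders, not a fixed recipe:
   remember the last insn, emit tentatively, discard on failure.  */

static bool ATTRIBUTE_UNUSED
emit_or_rollback (rtx pat)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn = emit_insn (pat);
  if (recog_memoized (insn) < 0)
    {
      delete_insns_since (last);
      return false;
    }
  return true;
}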
4252
34e2ddcd 4253/* This function is deprecated; please use sequences instead.
4254
4255 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 4256 The insns to be moved are those between FROM and TO.
4257 They are moved to a new position after the insn AFTER.
4258 AFTER must not be FROM or TO or any insn in between.
4259
4260 This function does not know about SEQUENCEs and hence should not be
4261 called after delay-slot filling has been done. */
4262
4263void
57c26b3a 4264reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
15bbde2b 4265{
382ecba7 4266 if (flag_checking)
4267 {
4268 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4269 gcc_assert (after != x);
4270 gcc_assert (after != to);
4271 }
7f6ca11f 4272
15bbde2b 4273 /* Splice this bunch out of where it is now. */
4274 if (PREV_INSN (from))
4a57a2e8 4275 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
15bbde2b 4276 if (NEXT_INSN (to))
4a57a2e8 4277 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
06f9d6ef 4278 if (get_last_insn () == to)
4279 set_last_insn (PREV_INSN (from));
4280 if (get_insns () == from)
4281 set_first_insn (NEXT_INSN (to));
15bbde2b 4282
4283 /* Make the new neighbors point to it and it to them. */
4284 if (NEXT_INSN (after))
4a57a2e8 4285 SET_PREV_INSN (NEXT_INSN (after)) = to;
15bbde2b 4286
4a57a2e8 4287 SET_NEXT_INSN (to) = NEXT_INSN (after);
4288 SET_PREV_INSN (from) = after;
4289 SET_NEXT_INSN (after) = from;
9af5ce0c 4290 if (after == get_last_insn ())
06f9d6ef 4291 set_last_insn (to);
15bbde2b 4292}
4293
9dda7915 4294/* Same as function above, but take care to update BB boundaries. */
4295void
4a3fb716 4296reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
9dda7915 4297{
4a3fb716 4298 rtx_insn *prev = PREV_INSN (from);
9dda7915 4299 basic_block bb, bb2;
4300
4301 reorder_insns_nobb (from, to, after);
4302
6d7dc5b9 4303 if (!BARRIER_P (after)
9dda7915 4304 && (bb = BLOCK_FOR_INSN (after)))
4305 {
e149ca56 4306 rtx_insn *x;
3072d30e 4307 df_set_bb_dirty (bb);
d4c5e26d 4308
6d7dc5b9 4309 if (!BARRIER_P (from)
9dda7915 4310 && (bb2 = BLOCK_FOR_INSN (from)))
4311 {
5496dbfc 4312 if (BB_END (bb2) == to)
26bb3cb2 4313 BB_END (bb2) = prev;
3072d30e 4314 df_set_bb_dirty (bb2);
9dda7915 4315 }
4316
5496dbfc 4317 if (BB_END (bb) == after)
26bb3cb2 4318 BB_END (bb) = to;
9dda7915 4319
4320 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7097dd0c 4321 if (!BARRIER_P (x))
a2bdd643 4322 df_insn_change_bb (x, bb);
9dda7915 4323 }
4324}
4325
15bbde2b 4326\f
31d3e01c 4327/* Emit insn(s) of given code and pattern
4328 at a specified place within the doubly-linked list.
15bbde2b 4329
31d3e01c 4330 All of the emit_foo global entry points accept an object
4331 X which is either an insn list or a PATTERN of a single
4332 instruction.
15bbde2b 4333
31d3e01c 4334 There are thus a few canonical ways to generate code and
4335 emit it at a specific place in the instruction stream. For
4336 example, consider the instruction named SPOT and the fact that
4337 we would like to emit some instructions before SPOT. We might
4338 do it like this:
15bbde2b 4339
31d3e01c 4340 start_sequence ();
4341 ... emit the new instructions ...
4342 insns_head = get_insns ();
4343 end_sequence ();
15bbde2b 4344
31d3e01c 4345 emit_insn_before (insns_head, SPOT);
15bbde2b 4346
31d3e01c 4347 It used to be common to generate SEQUENCE rtl instead, but that
4348 is a relic of the past which no longer occurs. The reason is that
4349 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4350 generated would almost certainly die right after it was created. */
15bbde2b 4351
722334ea 4352static rtx_insn *
5f7c5ddd 4353emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
2c57d586 4354 rtx_insn *(*make_raw) (rtx))
15bbde2b 4355{
2c57d586 4356 rtx_insn *insn;
15bbde2b 4357
611234b4 4358 gcc_assert (before);
31d3e01c 4359
4360 if (x == NULL_RTX)
722334ea 4361 return safe_as_a <rtx_insn *> (last);
31d3e01c 4362
4363 switch (GET_CODE (x))
15bbde2b 4364 {
9845d120 4365 case DEBUG_INSN:
31d3e01c 4366 case INSN:
4367 case JUMP_INSN:
4368 case CALL_INSN:
4369 case CODE_LABEL:
4370 case BARRIER:
4371 case NOTE:
2c57d586 4372 insn = as_a <rtx_insn *> (x);
31d3e01c 4373 while (insn)
4374 {
2c57d586 4375 rtx_insn *next = NEXT_INSN (insn);
3072d30e 4376 add_insn_before (insn, before, bb);
31d3e01c 4377 last = insn;
4378 insn = next;
4379 }
4380 break;
4381
4382#ifdef ENABLE_RTL_CHECKING
4383 case SEQUENCE:
611234b4 4384 gcc_unreachable ();
31d3e01c 4385 break;
4386#endif
4387
4388 default:
5f7c5ddd 4389 last = (*make_raw) (x);
3072d30e 4390 add_insn_before (last, before, bb);
31d3e01c 4391 break;
15bbde2b 4392 }
4393
722334ea 4394 return safe_as_a <rtx_insn *> (last);
15bbde2b 4395}
4396
5f7c5ddd 4397/* Make X be output before the instruction BEFORE. */
4398
722334ea 4399rtx_insn *
c9a09955 4400emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
5f7c5ddd 4401{
4402 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4403}
4404
31d3e01c 4405/* Make an instruction with body X and code JUMP_INSN
15bbde2b 4406 and output it before the instruction BEFORE. */
4407
f9a00e9e 4408rtx_jump_insn *
c9a09955 4409emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
15bbde2b 4410{
f9a00e9e 4411 return as_a <rtx_jump_insn *> (
4412 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4413 make_jump_insn_raw));
15bbde2b 4414}
4415
31d3e01c 4416/* Make an instruction with body X and code CALL_INSN
cd0fe062 4417 and output it before the instruction BEFORE. */
4418
722334ea 4419rtx_insn *
c9a09955 4420emit_call_insn_before_noloc (rtx x, rtx_insn *before)
cd0fe062 4421{
5f7c5ddd 4422 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4423 make_call_insn_raw);
cd0fe062 4424}
4425
9845d120 4426/* Make an instruction with body X and code DEBUG_INSN
4427 and output it before the instruction BEFORE. */
4428
722334ea 4429rtx_insn *
9845d120 4430emit_debug_insn_before_noloc (rtx x, rtx before)
4431{
5f7c5ddd 4432 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4433 make_debug_insn_raw);
9845d120 4434}
4435
15bbde2b 4436/* Make an insn of code BARRIER
71caadc0 4437 and output it before the insn BEFORE. */
15bbde2b 4438
722334ea 4439rtx_barrier *
35cb5232 4440emit_barrier_before (rtx before)
15bbde2b 4441{
722334ea 4442 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
15bbde2b 4443
4444 INSN_UID (insn) = cur_insn_uid++;
4445
3072d30e 4446 add_insn_before (insn, before, NULL);
15bbde2b 4447 return insn;
4448}
4449
71caadc0 4450/* Emit the label LABEL before the insn BEFORE. */
4451
f9a00e9e 4452rtx_code_label *
c9a09955 4453emit_label_before (rtx label, rtx_insn *before)
71caadc0 4454{
596ef494 4455 gcc_checking_assert (INSN_UID (label) == 0);
4456 INSN_UID (label) = cur_insn_uid++;
4457 add_insn_before (label, before, NULL);
f9a00e9e 4458 return as_a <rtx_code_label *> (label);
71caadc0 4459}
15bbde2b 4460\f
31d3e01c 4461/* Helper for emit_insn_after, handles lists of instructions
4462 efficiently. */
15bbde2b 4463
f17e3fff 4464static rtx_insn *
4465emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
15bbde2b 4466{
f17e3fff 4467 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
26bb3cb2 4468 rtx_insn *last;
4469 rtx_insn *after_after;
3072d30e 4470 if (!bb && !BARRIER_P (after))
4471 bb = BLOCK_FOR_INSN (after);
15bbde2b 4472
3072d30e 4473 if (bb)
15bbde2b 4474 {
3072d30e 4475 df_set_bb_dirty (bb);
31d3e01c 4476 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
6d7dc5b9 4477 if (!BARRIER_P (last))
3072d30e 4478 {
4479 set_block_for_insn (last, bb);
4480 df_insn_rescan (last);
4481 }
 /* The loop above stops before the final insn in the list;
 give that insn the same treatment here. */
6d7dc5b9 4482 if (!BARRIER_P (last))
3072d30e 4483 {
4484 set_block_for_insn (last, bb);
4485 df_insn_rescan (last);
4486 }
5496dbfc 4487 if (BB_END (bb) == after)
26bb3cb2 4488 BB_END (bb) = last;
15bbde2b 4489 }
4490 else
31d3e01c 4491 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4492 continue;
4493
4494 after_after = NEXT_INSN (after);
4495
4a57a2e8 4496 SET_NEXT_INSN (after) = first;
4497 SET_PREV_INSN (first) = after;
4498 SET_NEXT_INSN (last) = after_after;
31d3e01c 4499 if (after_after)
4a57a2e8 4500 SET_PREV_INSN (after_after) = last;
31d3e01c 4501
9af5ce0c 4502 if (after == get_last_insn ())
06f9d6ef 4503 set_last_insn (last);
e1ab7874 4504
31d3e01c 4505 return last;
4506}
4507
722334ea 4508static rtx_insn *
f17e3fff 4509emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
2c57d586 4510 rtx_insn *(*make_raw)(rtx))
31d3e01c 4511{
f17e3fff 4512 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4513 rtx_insn *last = after;
31d3e01c 4514
611234b4 4515 gcc_assert (after);
31d3e01c 4516
4517 if (x == NULL_RTX)
f17e3fff 4518 return last;
31d3e01c 4519
4520 switch (GET_CODE (x))
15bbde2b 4521 {
9845d120 4522 case DEBUG_INSN:
31d3e01c 4523 case INSN:
4524 case JUMP_INSN:
4525 case CALL_INSN:
4526 case CODE_LABEL:
4527 case BARRIER:
4528 case NOTE:
26bb3cb2 4529 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
31d3e01c 4530 break;
4531
4532#ifdef ENABLE_RTL_CHECKING
4533 case SEQUENCE:
611234b4 4534 gcc_unreachable ();
31d3e01c 4535 break;
4536#endif
4537
4538 default:
5f7c5ddd 4539 last = (*make_raw) (x);
3072d30e 4540 add_insn_after (last, after, bb);
31d3e01c 4541 break;
15bbde2b 4542 }
4543
f17e3fff 4544 return last;
15bbde2b 4545}
4546
5f7c5ddd 4547/* Make X be output after the insn AFTER and set its basic block. If
4548 BB is NULL, an attempt is made to infer the BB from AFTER. */
4549
722334ea 4550rtx_insn *
5f7c5ddd 4551emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4552{
4553 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4554}
4555
1bea98fb 4556
31d3e01c 4557/* Make an insn of code JUMP_INSN with body X
15bbde2b 4558 and output it after the insn AFTER. */
4559
f9a00e9e 4560rtx_jump_insn *
0891f67c 4561emit_jump_insn_after_noloc (rtx x, rtx after)
15bbde2b 4562{
f9a00e9e 4563 return as_a <rtx_jump_insn *> (
4564 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
31d3e01c 4565}
4566
4567/* Make an instruction with body X and code CALL_INSN
4568 and output it after the instruction AFTER. */
4569
722334ea 4570rtx_insn *
0891f67c 4571emit_call_insn_after_noloc (rtx x, rtx after)
31d3e01c 4572{
5f7c5ddd 4573 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
15bbde2b 4574}
4575
9845d120 4576/* Make an instruction with body X and code DEBUG_INSN
4577 and output it after the instruction AFTER. */
4578
722334ea 4579rtx_insn *
9845d120 4580emit_debug_insn_after_noloc (rtx x, rtx after)
4581{
5f7c5ddd 4582 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
9845d120 4583}
4584
15bbde2b 4585/* Make an insn of code BARRIER
4586 and output it after the insn AFTER. */
4587
722334ea 4588rtx_barrier *
35cb5232 4589emit_barrier_after (rtx after)
15bbde2b 4590{
722334ea 4591 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
15bbde2b 4592
4593 INSN_UID (insn) = cur_insn_uid++;
4594
3072d30e 4595 add_insn_after (insn, after, NULL);
15bbde2b 4596 return insn;
4597}
4598
4599/* Emit the label LABEL after the insn AFTER. */
4600
722334ea 4601rtx_insn *
c9a09955 4602emit_label_after (rtx label, rtx_insn *after)
15bbde2b 4603{
596ef494 4604 gcc_checking_assert (INSN_UID (label) == 0);
4605 INSN_UID (label) = cur_insn_uid++;
4606 add_insn_after (label, after, NULL);
722334ea 4607 return as_a <rtx_insn *> (label);
15bbde2b 4608}
35f3420b 4609\f
4610/* Notes require a bit of special handling: Some notes need to have their
4611 BLOCK_FOR_INSN set, others should never have it set, and some should
4612 have it set or clear depending on the context. */
4613
4614/* Return true iff a note of kind SUBTYPE should be emitted with routines
4615 that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the
4616 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4617
4618static bool
4619note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4620{
4621 switch (subtype)
4622 {
4623 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4624 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4625 return true;
4626
4627 /* Notes for var tracking and EH region markers can appear between or
4628 inside basic blocks. If the caller is emitting on the basic block
4629 boundary, do not set BLOCK_FOR_INSN on the new note. */
4630 case NOTE_INSN_VAR_LOCATION:
4631 case NOTE_INSN_CALL_ARG_LOCATION:
4632 case NOTE_INSN_EH_REGION_BEG:
4633 case NOTE_INSN_EH_REGION_END:
4634 return on_bb_boundary_p;
4635
4636 /* Otherwise, BLOCK_FOR_INSN must be set. */
4637 default:
4638 return false;
4639 }
4640}
15bbde2b 4641
4642/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4643
cef3d8ad 4644rtx_note *
4d86329d 4645emit_note_after (enum insn_note subtype, rtx_insn *after)
15bbde2b 4646{
cef3d8ad 4647 rtx_note *note = make_note_raw (subtype);
35f3420b 4648 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4649 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4650
4651 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4652 add_insn_after_nobb (note, after);
4653 else
4654 add_insn_after (note, after, bb);
4655 return note;
4656}
4657
4658/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4659
cef3d8ad 4660rtx_note *
1dc26636 4661emit_note_before (enum insn_note subtype, rtx_insn *before)
35f3420b 4662{
cef3d8ad 4663 rtx_note *note = make_note_raw (subtype);
35f3420b 4664 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4665 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4666
4667 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4668 add_insn_before_nobb (note, before);
4669 else
4670 add_insn_before (note, before, bb);
15bbde2b 4671 return note;
4672}
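
/* An illustrative consequence of note_outside_basic_block_p, with BB
   an assumed basic block: an EH region note emitted at the block end
   is deliberately left without BLOCK_FOR_INSN.  */

static void ATTRIBUTE_UNUSED
emit_boundary_note_sketch (basic_block bb)
{
  rtx_note *note = emit_note_after (NOTE_INSN_EH_REGION_BEG, BB_END (bb));
  gcc_checking_assert (BLOCK_FOR_INSN (note) == NULL);
}
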
15bbde2b 4673\f
ede4ebcb 4674/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4675 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4676
722334ea 4677static rtx_insn *
4cd001d5 4678emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
2c57d586 4679 rtx_insn *(*make_raw) (rtx))
d321a68b 4680{
4cd001d5 4681 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
9ed997be 4682 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
d321a68b 4683
0891f67c 4684 if (pattern == NULL_RTX || !loc)
9ed997be 4685 return last;
ca154f3f 4686
31d3e01c 4687 after = NEXT_INSN (after);
4688 while (1)
4689 {
57e999d9 4690 if (active_insn_p (after)
4691 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4692 && !INSN_LOCATION (after))
5169661d 4693 INSN_LOCATION (after) = loc;
31d3e01c 4694 if (after == last)
4695 break;
4696 after = NEXT_INSN (after);
4697 }
9ed997be 4698 return last;
d321a68b 4699}
4700
ede4ebcb 4701/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4702 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4703 any DEBUG_INSNs. */
4704
722334ea 4705static rtx_insn *
4cd001d5 4706emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
2c57d586 4707 rtx_insn *(*make_raw) (rtx))
0891f67c 4708{
4cd001d5 4709 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4710 rtx_insn *prev = after;
9845d120 4711
ede4ebcb 4712 if (skip_debug_insns)
4713 while (DEBUG_INSN_P (prev))
4714 prev = PREV_INSN (prev);
9845d120 4715
4716 if (INSN_P (prev))
5169661d 4717 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
ede4ebcb 4718 make_raw);
0891f67c 4719 else
ede4ebcb 4720 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0891f67c 4721}
4722
5169661d 4723/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4724rtx_insn *
ede4ebcb 4725emit_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4726{
ede4ebcb 4727 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4728}
31d3e01c 4729
5169661d 4730/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
722334ea 4731rtx_insn *
ede4ebcb 4732emit_insn_after (rtx pattern, rtx after)
4733{
4734 return emit_pattern_after (pattern, after, true, make_insn_raw);
4735}
ca154f3f 4736
5169661d 4737/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
f9a00e9e 4738rtx_jump_insn *
ede4ebcb 4739emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4740{
f9a00e9e 4741 return as_a <rtx_jump_insn *> (
4742 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
d321a68b 4743}
4744
5169661d 4745/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
f9a00e9e 4746rtx_jump_insn *
0891f67c 4747emit_jump_insn_after (rtx pattern, rtx after)
4748{
f9a00e9e 4749 return as_a <rtx_jump_insn *> (
4750 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
0891f67c 4751}
4752
5169661d 4753/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4754rtx_insn *
35cb5232 4755emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4756{
ede4ebcb 4757 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
d321a68b 4758}
4759
5169661d 4760/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
722334ea 4761rtx_insn *
0891f67c 4762emit_call_insn_after (rtx pattern, rtx after)
4763{
ede4ebcb 4764 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
0891f67c 4765}
4766
5169661d 4767/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4768rtx_insn *
9845d120 4769emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4770{
ede4ebcb 4771 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
9845d120 4772}
4773
5169661d 4774/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
722334ea 4775rtx_insn *
9845d120 4776emit_debug_insn_after (rtx pattern, rtx after)
4777{
ede4ebcb 4778 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
9845d120 4779}
4780
ede4ebcb 4781/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4782 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4783 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4784 CALL_INSN, etc. */
4785
722334ea 4786static rtx_insn *
4cd001d5 4787emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
2c57d586 4788 rtx_insn *(*make_raw) (rtx))
d321a68b 4789{
4cd001d5 4790 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4791 rtx_insn *first = PREV_INSN (before);
4792 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4793 insnp ? before : NULL_RTX,
4794 NULL, make_raw);
0891f67c 4795
4796 if (pattern == NULL_RTX || !loc)
4cd001d5 4797 return last;
0891f67c 4798
4486418e 4799 if (!first)
4800 first = get_insns ();
4801 else
4802 first = NEXT_INSN (first);
0891f67c 4803 while (1)
4804 {
57e999d9 4805 if (active_insn_p (first)
4806 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4807 && !INSN_LOCATION (first))
5169661d 4808 INSN_LOCATION (first) = loc;
0891f67c 4809 if (first == last)
4810 break;
4811 first = NEXT_INSN (first);
4812 }
4cd001d5 4813 return last;
0891f67c 4814}
4815
ede4ebcb 4816/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4817 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4818 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4819 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4820
722334ea 4821static rtx_insn *
4cd001d5 4822emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
2c57d586 4823 bool insnp, rtx_insn *(*make_raw) (rtx))
0891f67c 4824{
4cd001d5 4825 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4826 rtx_insn *next = before;
9845d120 4827
ede4ebcb 4828 if (skip_debug_insns)
4829 while (DEBUG_INSN_P (next))
4830 next = PREV_INSN (next);
9845d120 4831
4832 if (INSN_P (next))
5169661d 4833 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
ede4ebcb 4834 insnp, make_raw);
0891f67c 4835 else
ede4ebcb 4836 return emit_pattern_before_noloc (pattern, before,
db7dd023 4837 insnp ? before : NULL_RTX,
ede4ebcb 4838 NULL, make_raw);
0891f67c 4839}
4840
5169661d 4841/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4842rtx_insn *
c9a09955 4843emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
0891f67c 4844{
ede4ebcb 4845 return emit_pattern_before_setloc (pattern, before, loc, true,
4846 make_insn_raw);
4847}
0891f67c 4848
5169661d 4849/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
722334ea 4850rtx_insn *
ede4ebcb 4851emit_insn_before (rtx pattern, rtx before)
4852{
4853 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4854}
0891f67c 4855
5169661d 4856/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
f9a00e9e 4857rtx_jump_insn *
c9a09955 4858emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
ede4ebcb 4859{
f9a00e9e 4860 return as_a <rtx_jump_insn *> (
4861 emit_pattern_before_setloc (pattern, before, loc, false,
4862 make_jump_insn_raw));
0891f67c 4863}
4864
5169661d 4865/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
f9a00e9e 4866rtx_jump_insn *
0891f67c 4867emit_jump_insn_before (rtx pattern, rtx before)
4868{
f9a00e9e 4869 return as_a <rtx_jump_insn *> (
4870 emit_pattern_before (pattern, before, true, false,
4871 make_jump_insn_raw));
0891f67c 4872}
4873
5169661d 4874/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4875rtx_insn *
c9a09955 4876emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
0891f67c 4877{
ede4ebcb 4878 return emit_pattern_before_setloc (pattern, before, loc, false,
4879 make_call_insn_raw);
d321a68b 4880}
0891f67c 4881
ede4ebcb 4882/* Like emit_call_insn_before_noloc,
5169661d 4883 but set INSN_LOCATION according to BEFORE. */
722334ea 4884rtx_insn *
c9a09955 4885emit_call_insn_before (rtx pattern, rtx_insn *before)
0891f67c 4886{
ede4ebcb 4887 return emit_pattern_before (pattern, before, true, false,
4888 make_call_insn_raw);
0891f67c 4889}
9845d120 4890
5169661d 4891/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
722334ea 4892rtx_insn *
9845d120 4893emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4894{
ede4ebcb 4895 return emit_pattern_before_setloc (pattern, before, loc, false,
4896 make_debug_insn_raw);
9845d120 4897}
4898
ede4ebcb 4899/* Like emit_debug_insn_before_noloc,
5169661d 4900 but set INSN_LOCATION according to BEFORE. */
722334ea 4901rtx_insn *
5518cf83 4902emit_debug_insn_before (rtx pattern, rtx_insn *before)
9845d120 4903{
ede4ebcb 4904 return emit_pattern_before (pattern, before, false, false,
4905 make_debug_insn_raw);
9845d120 4906}
d321a68b 4907\f
31d3e01c 4908/* Take X and emit it at the end of the doubly-linked
4909 INSN list.
15bbde2b 4910
4911 Returns the last insn emitted. */
4912
722334ea 4913rtx_insn *
35cb5232 4914emit_insn (rtx x)
15bbde2b 4915{
722334ea 4916 rtx_insn *last = get_last_insn ();
4917 rtx_insn *insn;
15bbde2b 4918
31d3e01c 4919 if (x == NULL_RTX)
4920 return last;
15bbde2b 4921
31d3e01c 4922 switch (GET_CODE (x))
4923 {
9845d120 4924 case DEBUG_INSN:
31d3e01c 4925 case INSN:
4926 case JUMP_INSN:
4927 case CALL_INSN:
4928 case CODE_LABEL:
4929 case BARRIER:
4930 case NOTE:
722334ea 4931 insn = as_a <rtx_insn *> (x);
31d3e01c 4932 while (insn)
15bbde2b 4933 {
722334ea 4934 rtx_insn *next = NEXT_INSN (insn);
15bbde2b 4935 add_insn (insn);
31d3e01c 4936 last = insn;
4937 insn = next;
15bbde2b 4938 }
31d3e01c 4939 break;
15bbde2b 4940
31d3e01c 4941#ifdef ENABLE_RTL_CHECKING
91f71fa3 4942 case JUMP_TABLE_DATA:
31d3e01c 4943 case SEQUENCE:
611234b4 4944 gcc_unreachable ();
31d3e01c 4945 break;
4946#endif
15bbde2b 4947
31d3e01c 4948 default:
4949 last = make_insn_raw (x);
4950 add_insn (last);
4951 break;
15bbde2b 4952 }
4953
4954 return last;
4955}
4956
9845d120 4957/* Make an insn of code DEBUG_INSN with pattern X
4958 and add it to the end of the doubly-linked list. */
4959
722334ea 4960rtx_insn *
9845d120 4961emit_debug_insn (rtx x)
4962{
722334ea 4963 rtx_insn *last = get_last_insn ();
4964 rtx_insn *insn;
9845d120 4965
4966 if (x == NULL_RTX)
4967 return last;
4968
4969 switch (GET_CODE (x))
4970 {
4971 case DEBUG_INSN:
4972 case INSN:
4973 case JUMP_INSN:
4974 case CALL_INSN:
4975 case CODE_LABEL:
4976 case BARRIER:
4977 case NOTE:
722334ea 4978 insn = as_a <rtx_insn *> (x);
9845d120 4979 while (insn)
4980 {
722334ea 4981 rtx_insn *next = NEXT_INSN (insn);
9845d120 4982 add_insn (insn);
4983 last = insn;
4984 insn = next;
4985 }
4986 break;
4987
4988#ifdef ENABLE_RTL_CHECKING
91f71fa3 4989 case JUMP_TABLE_DATA:
9845d120 4990 case SEQUENCE:
4991 gcc_unreachable ();
4992 break;
4993#endif
4994
4995 default:
4996 last = make_debug_insn_raw (x);
4997 add_insn (last);
4998 break;
4999 }
5000
5001 return last;
5002}
5003
31d3e01c 5004/* Make an insn of code JUMP_INSN with pattern X
5005 and add it to the end of the doubly-linked list. */
15bbde2b 5006
722334ea 5007rtx_insn *
35cb5232 5008emit_jump_insn (rtx x)
15bbde2b 5009{
722334ea 5010 rtx_insn *last = NULL;
5011 rtx_insn *insn;
15bbde2b 5012
31d3e01c 5013 switch (GET_CODE (x))
15bbde2b 5014 {
9845d120 5015 case DEBUG_INSN:
31d3e01c 5016 case INSN:
5017 case JUMP_INSN:
5018 case CALL_INSN:
5019 case CODE_LABEL:
5020 case BARRIER:
5021 case NOTE:
722334ea 5022 insn = as_a <rtx_insn *> (x);
31d3e01c 5023 while (insn)
5024 {
722334ea 5025 rtx_insn *next = NEXT_INSN (insn);
31d3e01c 5026 add_insn (insn);
5027 last = insn;
5028 insn = next;
5029 }
5030 break;
b36b07d8 5031
31d3e01c 5032#ifdef ENABLE_RTL_CHECKING
91f71fa3 5033 case JUMP_TABLE_DATA:
31d3e01c 5034 case SEQUENCE:
611234b4 5035 gcc_unreachable ();
31d3e01c 5036 break;
5037#endif
b36b07d8 5038
31d3e01c 5039 default:
5040 last = make_jump_insn_raw (x);
5041 add_insn (last);
5042 break;
9dda7915 5043 }
b36b07d8 5044
5045 return last;
5046}
5047
31d3e01c 5048/* Make an insn of code CALL_INSN with pattern X
15bbde2b 5049 and add it to the end of the doubly-linked list. */
5050
722334ea 5051rtx_insn *
35cb5232 5052emit_call_insn (rtx x)
15bbde2b 5053{
722334ea 5054 rtx_insn *insn;
31d3e01c 5055
5056 switch (GET_CODE (x))
15bbde2b 5057 {
9845d120 5058 case DEBUG_INSN:
31d3e01c 5059 case INSN:
5060 case JUMP_INSN:
5061 case CALL_INSN:
5062 case CODE_LABEL:
5063 case BARRIER:
5064 case NOTE:
5065 insn = emit_insn (x);
5066 break;
15bbde2b 5067
31d3e01c 5068#ifdef ENABLE_RTL_CHECKING
5069 case SEQUENCE:
91f71fa3 5070 case JUMP_TABLE_DATA:
611234b4 5071 gcc_unreachable ();
31d3e01c 5072 break;
5073#endif
15bbde2b 5074
31d3e01c 5075 default:
5076 insn = make_call_insn_raw (x);
15bbde2b 5077 add_insn (insn);
31d3e01c 5078 break;
15bbde2b 5079 }
31d3e01c 5080
5081 return insn;
15bbde2b 5082}
5083
5084/* Add the label LABEL to the end of the doubly-linked list. */
5085
f9a00e9e 5086rtx_code_label *
5087emit_label (rtx uncast_label)
15bbde2b 5088{
f9a00e9e 5089 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5090
596ef494 5091 gcc_checking_assert (INSN_UID (label) == 0);
5092 INSN_UID (label) = cur_insn_uid++;
f9a00e9e 5093 add_insn (label);
5094 return label;
15bbde2b 5095}
5096
91f71fa3 5097/* Make an insn of code JUMP_TABLE_DATA
5098 and add it to the end of the doubly-linked list. */
5099
e41badc0 5100rtx_jump_table_data *
91f71fa3 5101emit_jump_table_data (rtx table)
5102{
e41badc0 5103 rtx_jump_table_data *jump_table_data =
5104 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
91f71fa3 5105 INSN_UID (jump_table_data) = cur_insn_uid++;
5106 PATTERN (jump_table_data) = table;
5107 BLOCK_FOR_INSN (jump_table_data) = NULL;
5108 add_insn (jump_table_data);
5109 return jump_table_data;
5110}
5111
15bbde2b 5112/* Make an insn of code BARRIER
5113 and add it to the end of the doubly-linked list. */
5114
722334ea 5115rtx_barrier *
35cb5232 5116emit_barrier (void)
15bbde2b 5117{
722334ea 5118 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
15bbde2b 5119 INSN_UID (barrier) = cur_insn_uid++;
5120 add_insn (barrier);
5121 return barrier;
5122}
5123
2f57e3d9 5124/* Emit a copy of note ORIG. */
35cb5232 5125
cef3d8ad 5126rtx_note *
5127emit_note_copy (rtx_note *orig)
2f57e3d9 5128{
35f3420b 5129 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
cef3d8ad 5130 rtx_note *note = make_note_raw (kind);
2f57e3d9 5131 NOTE_DATA (note) = NOTE_DATA (orig);
2f57e3d9 5132 add_insn (note);
31b97e8f 5133 return note;
15bbde2b 5134}
5135
31b97e8f 5136/* Make an insn of code NOTE or type NOTE_NO
5137 and add it to the end of the doubly-linked list. */
15bbde2b 5138
cef3d8ad 5139rtx_note *
ad4583d9 5140emit_note (enum insn_note kind)
15bbde2b 5141{
cef3d8ad 5142 rtx_note *note = make_note_raw (kind);
15bbde2b 5143 add_insn (note);
5144 return note;
5145}
5146
18b42941 5147/* Emit a clobber of lvalue X. */
5148
722334ea 5149rtx_insn *
18b42941 5150emit_clobber (rtx x)
5151{
5152 /* CONCATs should not appear in the insn stream. */
5153 if (GET_CODE (x) == CONCAT)
5154 {
5155 emit_clobber (XEXP (x, 0));
5156 return emit_clobber (XEXP (x, 1));
5157 }
5158 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5159}
5160
5161/* Return a sequence of insns to clobber lvalue X. */
5162
722334ea 5163rtx_insn *
18b42941 5164gen_clobber (rtx x)
5165{
722334ea 5166 rtx_insn *seq;
18b42941 5167
5168 start_sequence ();
5169 emit_clobber (x);
5170 seq = get_insns ();
5171 end_sequence ();
5172 return seq;
5173}
5174
5175/* Emit a use of rvalue X. */
5176
722334ea 5177rtx_insn *
18b42941 5178emit_use (rtx x)
5179{
5180 /* CONCATs should not appear in the insn stream. */
5181 if (GET_CODE (x) == CONCAT)
5182 {
5183 emit_use (XEXP (x, 0));
5184 return emit_use (XEXP (x, 1));
5185 }
5186 return emit_insn (gen_rtx_USE (VOIDmode, x));
5187}
5188
5189/* Return a sequence of insns to use rvalue X. */
5190
722334ea 5191rtx_insn *
18b42941 5192gen_use (rtx x)
5193{
722334ea 5194 rtx_insn *seq;
18b42941 5195
5196 start_sequence ();
5197 emit_use (x);
5198 seq = get_insns ();
5199 end_sequence ();
5200 return seq;
5201}
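
/* A minimal sketch of the CONCAT handling above, assuming R1 and R2
   are DFmode pseudos forming a complex value: a CONCAT must not
   appear in the insn stream, so a single call emits two USE insns,
   one per component:

     emit_use (gen_rtx_CONCAT (DCmode, r1, r2));

   emit_clobber splits CONCATs the same way.  */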
5202
3a286419 5203/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5204 Return the set in INSN that such notes describe, or NULL if the notes
5205 have no meaning for INSN. */
5206
5207rtx
5208set_for_reg_notes (rtx insn)
5209{
5210 rtx pat, reg;
5211
5212 if (!INSN_P (insn))
5213 return NULL_RTX;
5214
5215 pat = PATTERN (insn);
5216 if (GET_CODE (pat) == PARALLEL)
5217 {
5218 /* We do not use single_set because that ignores SETs of unused
5219 registers. REG_EQUAL and REG_EQUIV notes really do require the
5220 PARALLEL to have a single SET. */
5221 if (multiple_sets (insn))
5222 return NULL_RTX;
5223 pat = XVECEXP (pat, 0, 0);
5224 }
5225
5226 if (GET_CODE (pat) != SET)
5227 return NULL_RTX;
5228
5229 reg = SET_DEST (pat);
5230
5231 /* Notes apply to the contents of a STRICT_LOW_PART. */
f2c7e335 5232 if (GET_CODE (reg) == STRICT_LOW_PART
5233 || GET_CODE (reg) == ZERO_EXTRACT)
3a286419 5234 reg = XEXP (reg, 0);
5235
5236 /* Check that we have a register. */
5237 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5238 return NULL_RTX;
5239
5240 return pat;
5241}
5242
f1934a33 5243/* Place a note of KIND on insn INSN with DATUM as the datum. If a
6312a35e 5244 note of this type already exists, its datum is replaced. */
f1934a33 5245
c080d8f0 5246rtx
35cb5232 5247set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
f1934a33 5248{
5249 rtx note = find_reg_note (insn, kind, NULL_RTX);
5250
7e6224ab 5251 switch (kind)
5252 {
5253 case REG_EQUAL:
5254 case REG_EQUIV:
7b0b2add 5255 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5256 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
3a286419 5257 return NULL_RTX;
7e6224ab 5258
5259 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5260 It serves no useful purpose and breaks eliminate_regs. */
5261 if (GET_CODE (datum) == ASM_OPERANDS)
5262 return NULL_RTX;
2f8cf22c 5263
5264 /* Notes with side effects are dangerous. Even if the side-effect
5265 initially mirrors one in PATTERN (INSN), later optimizations
5266 might alter the way that the final register value is calculated
5267 and so move or alter the side-effect in some way. The note would
5268 then no longer be a valid substitution for SET_SRC. */
5269 if (side_effects_p (datum))
5270 return NULL_RTX;
7e6224ab 5271 break;
5272
5273 default:
5274 break;
5275 }
c080d8f0 5276
3a286419 5277 if (note)
5278 XEXP (note, 0) = datum;
5279 else
5280 {
5281 add_reg_note (insn, kind, datum);
5282 note = REG_NOTES (insn);
5283 }
3072d30e 5284
5285 switch (kind)
c080d8f0 5286 {
3072d30e 5287 case REG_EQUAL:
5288 case REG_EQUIV:
e149ca56 5289 df_notes_rescan (as_a <rtx_insn *> (insn));
3072d30e 5290 break;
5291 default:
5292 break;
c080d8f0 5293 }
f1934a33 5294
3a286419 5295 return note;
f1934a33 5296}
41cf444a 5297
5298/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5299rtx
5300set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5301{
3a286419 5302 rtx set = set_for_reg_notes (insn);
41cf444a 5303
5304 if (set && SET_DEST (set) == dst)
5305 return set_unique_reg_note (insn, kind, datum);
5306 return NULL_RTX;
5307}
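
/* A minimal sketch, assuming INSN is a single-set insn whose SET_SRC
   computes the same value as (plus:SI r1 r2): recording the
   equivalence lets later passes substitute the simpler form:

     set_unique_reg_note (insn, REG_EQUAL,
			  gen_rtx_PLUS (SImode, r1, r2));

   A NULL return means the note was refused, for example because
   DATUM has side effects or INSN has no suitable SET.  */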
15bbde2b 5308\f
16d83c02 5309/* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5310 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5311 is true.
5312
15bbde2b 5313 If X is a label, it is simply added into the insn chain. */
5314
722334ea 5315rtx_insn *
16d83c02 5316emit (rtx x, bool allow_barrier_p)
15bbde2b 5317{
5318 enum rtx_code code = classify_insn (x);
5319
611234b4 5320 switch (code)
15bbde2b 5321 {
611234b4 5322 case CODE_LABEL:
5323 return emit_label (x);
5324 case INSN:
5325 return emit_insn (x);
5326 case JUMP_INSN:
5327 {
722334ea 5328 rtx_insn *insn = emit_jump_insn (x);
16d83c02 5329 if (allow_barrier_p
5330 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
611234b4 5331 return emit_barrier ();
5332 return insn;
5333 }
5334 case CALL_INSN:
5335 return emit_call_insn (x);
9845d120 5336 case DEBUG_INSN:
5337 return emit_debug_insn (x);
611234b4 5338 default:
5339 gcc_unreachable ();
15bbde2b 5340 }
15bbde2b 5341}
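
/* A minimal sketch, assuming LABEL is an existing code label: emit
   classifies the bare pattern itself, so the caller need not know in
   advance that it is building a jump.  The call below should create
   a JUMP_INSN and, because the jump is unconditional and
   ALLOW_BARRIER_P is true, a following BARRIER:

     emit (gen_rtx_SET (pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)),
	   true);  */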
5342\f
1f3233d1 5343/* Space for free sequence stack entries. */
7035b2ab 5344static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
1f3233d1 5345
735f4358 5346/* Begin emitting insns to a sequence. If this sequence will contain
5347 something that might cause the compiler to pop arguments to function
5348 calls (because those pops have previously been deferred; see
5349 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5350 before calling this function. That will ensure that the deferred
5351 pops are not accidentally emitted in the middle of this sequence. */
15bbde2b 5352
5353void
35cb5232 5354start_sequence (void)
15bbde2b 5355{
5356 struct sequence_stack *tem;
5357
1f3233d1 5358 if (free_sequence_stack != NULL)
5359 {
5360 tem = free_sequence_stack;
5361 free_sequence_stack = tem->next;
5362 }
5363 else
25a27413 5364 tem = ggc_alloc<sequence_stack> ();
15bbde2b 5365
c36aa54b 5366 tem->next = get_current_sequence ()->next;
06f9d6ef 5367 tem->first = get_insns ();
5368 tem->last = get_last_insn ();
c36aa54b 5369 get_current_sequence ()->next = tem;
15bbde2b 5370
06f9d6ef 5371 set_first_insn (0);
5372 set_last_insn (0);
15bbde2b 5373}
5374
b49854c6 5375/* Set up the insn chain starting with FIRST as the current sequence,
5376 saving the previously current one. See the documentation for
5377 start_sequence for more information about how to use this function. */
15bbde2b 5378
5379void
57c26b3a 5380push_to_sequence (rtx_insn *first)
15bbde2b 5381{
57c26b3a 5382 rtx_insn *last;
15bbde2b 5383
5384 start_sequence ();
5385
3c802a1e 5386 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5387 ;
15bbde2b 5388
06f9d6ef 5389 set_first_insn (first);
5390 set_last_insn (last);
15bbde2b 5391}
5392
28bf151d 5393/* Like push_to_sequence, but take the last insn as an argument to avoid
5394 looping through the list. */
5395
5396void
57c26b3a 5397push_to_sequence2 (rtx_insn *first, rtx_insn *last)
28bf151d 5398{
5399 start_sequence ();
5400
06f9d6ef 5401 set_first_insn (first);
5402 set_last_insn (last);
28bf151d 5403}
5404
ab74c92f 5405/* Set up the outer-level insn chain
5406 as the current sequence, saving the previously current one. */
5407
5408void
35cb5232 5409push_topmost_sequence (void)
ab74c92f 5410{
c36aa54b 5411 struct sequence_stack *top;
ab74c92f 5412
5413 start_sequence ();
5414
c36aa54b 5415 top = get_topmost_sequence ();
06f9d6ef 5416 set_first_insn (top->first);
5417 set_last_insn (top->last);
ab74c92f 5418}
5419
5420/* After emitting to the outer-level insn chain, update the outer-level
5421 insn chain, and restore the previous saved state. */
5422
5423void
35cb5232 5424pop_topmost_sequence (void)
ab74c92f 5425{
c36aa54b 5426 struct sequence_stack *top;
ab74c92f 5427
c36aa54b 5428 top = get_topmost_sequence ();
06f9d6ef 5429 top->first = get_insns ();
5430 top->last = get_last_insn ();
ab74c92f 5431
5432 end_sequence ();
5433}
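
/* A minimal sketch, assuming DEST and SRC are valid operands: emit an
   insn into the function's outermost chain even while a nested
   sequence is being built, leaving that nested sequence untouched:

     push_topmost_sequence ();
     emit_move_insn (dest, src);
     pop_topmost_sequence ();  */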
5434
15bbde2b 5435/* After emitting to a sequence, restore previous saved state.
5436
b49854c6 5437 To get the contents of the sequence just made, you must call
31d3e01c 5438 `get_insns' *before* calling here.
b49854c6 5439
5440 If the compiler might have deferred popping arguments while
5441 generating this sequence, and this sequence will not be immediately
5442 inserted into the instruction stream, use do_pending_stack_adjust
31d3e01c 5443 before calling get_insns. That will ensure that the deferred
b49854c6 5444 pops are inserted into this sequence, and not into some random
5445 location in the instruction stream. See INHIBIT_DEFER_POP for more
5446 information about deferred popping of arguments. */
15bbde2b 5447
5448void
35cb5232 5449end_sequence (void)
15bbde2b 5450{
c36aa54b 5451 struct sequence_stack *tem = get_current_sequence ()->next;
15bbde2b 5452
06f9d6ef 5453 set_first_insn (tem->first);
5454 set_last_insn (tem->last);
c36aa54b 5455 get_current_sequence ()->next = tem->next;
15bbde2b 5456
1f3233d1 5457 memset (tem, 0, sizeof (*tem));
5458 tem->next = free_sequence_stack;
5459 free_sequence_stack = tem;
15bbde2b 5460}
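
/* A minimal sketch of the whole idiom, assuming DEST and SRC are
   valid operands: collect insns on the side, then splice them into
   the main chain in one step:

     start_sequence ();
     emit_move_insn (dest, src);
     rtx_insn *seq = get_insns ();
     end_sequence ();
     emit_insn (seq);  */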
5461
5462/* Return 1 if currently emitting into a sequence. */
5463
5464int
35cb5232 5465in_sequence_p (void)
15bbde2b 5466{
c36aa54b 5467 return get_current_sequence ()->next != 0;
15bbde2b 5468}
15bbde2b 5469\f
02ebfa52 5470/* Put the various virtual registers into REGNO_REG_RTX. */
5471
2f3874ce 5472static void
b079a207 5473init_virtual_regs (void)
02ebfa52 5474{
b079a207 5475 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5476 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5477 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5478 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5479 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
60778e62 5480 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5481 = virtual_preferred_stack_boundary_rtx;
0a893c29 5482}
5483
928d57e3 5484\f
5485/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5486static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5487static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5488static int copy_insn_n_scratches;
5489
5490/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5491 copied an ASM_OPERANDS.
5492 In that case, it is the original input-operand vector. */
5493static rtvec orig_asm_operands_vector;
5494
5495/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5496 copied an ASM_OPERANDS.
5497 In that case, it is the copied input-operand vector. */
5498static rtvec copy_asm_operands_vector;
5499
5500/* Likewise for the constraints vector. */
5501static rtvec orig_asm_constraints_vector;
5502static rtvec copy_asm_constraints_vector;
5503
5504/* Recursively create a new copy of an rtx for copy_insn.
5505 This function differs from copy_rtx in that it handles SCRATCHes and
5506 ASM_OPERANDs properly.
 5507 Normally, this function is not used directly; use copy_insn as the front end.
5508 However, you could first copy an insn pattern with copy_insn and then use
5509 this function afterwards to properly copy any REG_NOTEs containing
5510 SCRATCHes. */
5511
5512rtx
35cb5232 5513copy_insn_1 (rtx orig)
928d57e3 5514{
19cb6b50 5515 rtx copy;
5516 int i, j;
5517 RTX_CODE code;
5518 const char *format_ptr;
928d57e3 5519
25e880b1 5520 if (orig == NULL)
5521 return NULL;
5522
928d57e3 5523 code = GET_CODE (orig);
5524
5525 switch (code)
5526 {
5527 case REG:
d7fce3c8 5528 case DEBUG_EXPR:
0349edce 5529 CASE_CONST_ANY:
928d57e3 5530 case SYMBOL_REF:
5531 case CODE_LABEL:
5532 case PC:
5533 case CC0:
e0691b9a 5534 case RETURN:
9cb2517e 5535 case SIMPLE_RETURN:
928d57e3 5536 return orig;
c09425a0 5537 case CLOBBER:
b291008a 5538 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5539 clobbers or clobbers of hard registers that originated as pseudos.
5540 This is needed to allow safe register renaming. */
2b5f32ae 5541 if (REG_P (XEXP (orig, 0))
5542 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5543 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
c09425a0 5544 return orig;
5545 break;
928d57e3 5546
5547 case SCRATCH:
5548 for (i = 0; i < copy_insn_n_scratches; i++)
5549 if (copy_insn_scratch_in[i] == orig)
5550 return copy_insn_scratch_out[i];
5551 break;
5552
5553 case CONST:
3072d30e 5554 if (shared_const_p (orig))
928d57e3 5555 return orig;
5556 break;
d823ba47 5557
928d57e3 5558 /* A MEM with a constant address is not sharable. The problem is that
5559 the constant address may need to be reloaded. If the mem is shared,
5560 then reloading one copy of this mem will cause all copies to appear
5561 to have been reloaded. */
5562
5563 default:
5564 break;
5565 }
5566
f2d0e9f1 5567 /* Copy the various flags, fields, and other information. We assume
5568 that all fields need copying, and then clear the fields that should
928d57e3 5569 not be copied. That is the sensible default behavior, and forces
5570 us to explicitly document why we are *not* copying a flag. */
f2d0e9f1 5571 copy = shallow_copy_rtx (orig);
928d57e3 5572
928d57e3 5573 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
6720e96c 5574 if (INSN_P (orig))
928d57e3 5575 {
7c25cb91 5576 RTX_FLAG (copy, jump) = 0;
5577 RTX_FLAG (copy, call) = 0;
5578 RTX_FLAG (copy, frame_related) = 0;
928d57e3 5579 }
d823ba47 5580
928d57e3 5581 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5582
5583 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
f2d0e9f1 5584 switch (*format_ptr++)
5585 {
5586 case 'e':
5587 if (XEXP (orig, i) != NULL)
5588 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5589 break;
928d57e3 5590
f2d0e9f1 5591 case 'E':
5592 case 'V':
5593 if (XVEC (orig, i) == orig_asm_constraints_vector)
5594 XVEC (copy, i) = copy_asm_constraints_vector;
5595 else if (XVEC (orig, i) == orig_asm_operands_vector)
5596 XVEC (copy, i) = copy_asm_operands_vector;
5597 else if (XVEC (orig, i) != NULL)
5598 {
5599 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5600 for (j = 0; j < XVECLEN (copy, i); j++)
5601 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5602 }
5603 break;
928d57e3 5604
f2d0e9f1 5605 case 't':
5606 case 'w':
5607 case 'i':
5608 case 's':
5609 case 'S':
5610 case 'u':
5611 case '0':
5612 /* These are left unchanged. */
5613 break;
928d57e3 5614
f2d0e9f1 5615 default:
5616 gcc_unreachable ();
5617 }
928d57e3 5618
5619 if (code == SCRATCH)
5620 {
5621 i = copy_insn_n_scratches++;
611234b4 5622 gcc_assert (i < MAX_RECOG_OPERANDS);
928d57e3 5623 copy_insn_scratch_in[i] = orig;
5624 copy_insn_scratch_out[i] = copy;
5625 }
5626 else if (code == ASM_OPERANDS)
5627 {
d91f2122 5628 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5629 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5630 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5631 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
928d57e3 5632 }
5633
5634 return copy;
5635}
5636
5637/* Create a new copy of an rtx.
5638 This function differs from copy_rtx in that it handles SCRATCHes and
5639 ASM_OPERANDs properly.
5640 INSN doesn't really have to be a full INSN; it could be just the
5641 pattern. */
5642rtx
35cb5232 5643copy_insn (rtx insn)
928d57e3 5644{
5645 copy_insn_n_scratches = 0;
5646 orig_asm_operands_vector = 0;
5647 orig_asm_constraints_vector = 0;
5648 copy_asm_operands_vector = 0;
5649 copy_asm_constraints_vector = 0;
5650 return copy_insn_1 (insn);
5651}
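
/* A minimal sketch of the difference from copy_rtx: given a pattern

     (parallel [(set (reg:SI 100) (const_int 0))
		(clobber (scratch:SI))])

   copy_rtx shares the SCRATCH between original and copy, whereas

     rtx copy = copy_insn (PATTERN (insn));

   gives the copy a fresh SCRATCH (consistently, one new SCRATCH per
   original SCRATCH), so the two patterns stay independent.  */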
02ebfa52 5652
a9abe1f1 5653/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
 5654 on the assumption that INSN itself remains in its original place. */
5655
575a12f2 5656rtx_insn *
5657copy_delay_slot_insn (rtx_insn *insn)
a9abe1f1 5658{
5659 /* Copy INSN with its rtx_code, all its notes, location etc. */
575a12f2 5660 insn = as_a <rtx_insn *> (copy_rtx (insn));
a9abe1f1 5661 INSN_UID (insn) = cur_insn_uid++;
5662 return insn;
5663}
5664
15bbde2b 5665/* Initialize data structures and variables in this file
5666 before generating rtl for each function. */
5667
5668void
35cb5232 5669init_emit (void)
15bbde2b 5670{
06f9d6ef 5671 set_first_insn (NULL);
5672 set_last_insn (NULL);
9845d120 5673 if (MIN_NONDEBUG_INSN_UID)
5674 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5675 else
5676 cur_insn_uid = 1;
5677 cur_debug_insn_uid = 1;
15bbde2b 5678 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
15bbde2b 5679 first_label_num = label_num;
c36aa54b 5680 get_current_sequence ()->next = NULL;
15bbde2b 5681
15bbde2b 5682 /* Init the tables that describe all the pseudo regs. */
5683
fd6ffb7c 5684 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
15bbde2b 5685
fd6ffb7c 5686 crtl->emit.regno_pointer_align
2457c754 5687 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
d4c332ff 5688
cd769037 5689 regno_reg_rtx
5690 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
fcdc122e 5691
936082bb 5692 /* Put copies of all the hard registers into regno_reg_rtx. */
90295bd2 5693 memcpy (regno_reg_rtx,
679bcc8d 5694 initial_regno_reg_rtx,
90295bd2 5695 FIRST_PSEUDO_REGISTER * sizeof (rtx));
936082bb 5696
15bbde2b 5697 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
b079a207 5698 init_virtual_regs ();
888e0d33 5699
5700 /* Indicate that the virtual registers and stack locations are
5701 all pointers. */
e61a0a7f 5702 REG_POINTER (stack_pointer_rtx) = 1;
5703 REG_POINTER (frame_pointer_rtx) = 1;
5704 REG_POINTER (hard_frame_pointer_rtx) = 1;
5705 REG_POINTER (arg_pointer_rtx) = 1;
888e0d33 5706
e61a0a7f 5707 REG_POINTER (virtual_incoming_args_rtx) = 1;
5708 REG_POINTER (virtual_stack_vars_rtx) = 1;
5709 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5710 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5711 REG_POINTER (virtual_cfa_rtx) = 1;
89525da0 5712
d4c332ff 5713#ifdef STACK_BOUNDARY
80909c64 5714 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5715 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5716 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5717 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5718
213d1448 5719 /* ??? These are problematic (for example, 3 out of 4 are wrong on
5720 32-bit SPARC and cannot all be fixed because of the ABI). */
80909c64 5721 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5722 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5723 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5724 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
213d1448 5725
80909c64 5726 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
d4c332ff 5727#endif
5728
89525da0 5729#ifdef INIT_EXPANDERS
5730 INIT_EXPANDERS;
5731#endif
15bbde2b 5732}
5733
6e68dcb2 5734/* Generate a vector constant for mode MODE and constant value CONSTANT. */
886cfd4f 5735
5736static rtx
3754d046 5737gen_const_vector (machine_mode mode, int constant)
886cfd4f 5738{
5739 rtx tem;
5740 rtvec v;
5741 int units, i;
3754d046 5742 machine_mode inner;
886cfd4f 5743
5744 units = GET_MODE_NUNITS (mode);
5745 inner = GET_MODE_INNER (mode);
5746
069b07bf 5747 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5748
886cfd4f 5749 v = rtvec_alloc (units);
5750
6e68dcb2 5751 /* We need to call this function after we set the scalar const_tiny_rtx
5752 entries. */
5753 gcc_assert (const_tiny_rtx[constant][(int) inner]);
886cfd4f 5754
5755 for (i = 0; i < units; ++i)
6e68dcb2 5756 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
886cfd4f 5757
9426b612 5758 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
886cfd4f 5759 return tem;
5760}
5761
9426b612 5762/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
6e68dcb2 5763 all elements are zero, and the one vector when all elements are one. */
9426b612 5764rtx
3754d046 5765gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
9426b612 5766{
3754d046 5767 machine_mode inner = GET_MODE_INNER (mode);
6e68dcb2 5768 int nunits = GET_MODE_NUNITS (mode);
5769 rtx x;
9426b612 5770 int i;
5771
6e68dcb2 5772 /* Check to see if all of the elements have the same value. */
5773 x = RTVEC_ELT (v, nunits - 1);
5774 for (i = nunits - 2; i >= 0; i--)
5775 if (RTVEC_ELT (v, i) != x)
5776 break;
5777
5778 /* If the values are all the same, check to see if we can use one of the
5779 standard constant vectors. */
5780 if (i == -1)
5781 {
5782 if (x == CONST0_RTX (inner))
5783 return CONST0_RTX (mode);
5784 else if (x == CONST1_RTX (inner))
5785 return CONST1_RTX (mode);
ba8dfb08 5786 else if (x == CONSTM1_RTX (inner))
5787 return CONSTM1_RTX (mode);
6e68dcb2 5788 }
5789
5790 return gen_rtx_raw_CONST_VECTOR (mode, v);
9426b612 5791}
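
/* A minimal sketch, assuming the target supports V4SImode: an
   all-zero vector folds to the shared constant instead of allocating
   a fresh CONST_VECTOR:

     rtvec v = rtvec_alloc (4);
     for (int j = 0; j < 4; j++)
       RTVEC_ELT (v, j) = const0_rtx;
     rtx x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here X should be pointer-equal to CONST0_RTX (V4SImode).  */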
5792
6d8b68a3 5793/* Initialize global register information required by all functions. */
5794
5795void
5796init_emit_regs (void)
5797{
5798 int i;
3754d046 5799 machine_mode mode;
d83fcaa1 5800 mem_attrs *attrs;
6d8b68a3 5801
5802 /* Reset register attributes */
f863a586 5803 reg_attrs_htab->empty ();
6d8b68a3 5804
5805 /* We need reg_raw_mode, so initialize the modes now. */
5806 init_reg_modes_target ();
5807
5808 /* Assign register numbers to the globally defined register rtx. */
6d8b68a3 5809 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5810 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5811 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5812 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5813 virtual_incoming_args_rtx =
5814 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5815 virtual_stack_vars_rtx =
5816 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5817 virtual_stack_dynamic_rtx =
5818 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5819 virtual_outgoing_args_rtx =
5820 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5821 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
60778e62 5822 virtual_preferred_stack_boundary_rtx =
5823 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6d8b68a3 5824
5825 /* Initialize RTL for commonly used hard registers. These are
5826 copied into regno_reg_rtx as we begin to compile each function. */
5827 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
679bcc8d 5828 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6d8b68a3 5829
5830#ifdef RETURN_ADDRESS_POINTER_REGNUM
5831 return_address_pointer_rtx
5832 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5833#endif
5834
639f32a2 5835 pic_offset_table_rtx = NULL_RTX;
6d8b68a3 5836 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5837 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
d83fcaa1 5838
5839 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5840 {
3754d046 5841 mode = (machine_mode) i;
25a27413 5842 attrs = ggc_cleared_alloc<mem_attrs> ();
d83fcaa1 5843 attrs->align = BITS_PER_UNIT;
5844 attrs->addrspace = ADDR_SPACE_GENERIC;
5845 if (mode != BLKmode)
5846 {
6d58bcba 5847 attrs->size_known_p = true;
5848 attrs->size = GET_MODE_SIZE (mode);
d83fcaa1 5849 if (STRICT_ALIGNMENT)
5850 attrs->align = GET_MODE_ALIGNMENT (mode);
5851 }
5852 mode_mem_attrs[i] = attrs;
5853 }
6d8b68a3 5854}
5855
8059b95a 5856/* Initialize global machine_mode variables. */
5857
5858void
5859init_derived_machine_modes (void)
5860{
af8303fa 5861 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
5862 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
8059b95a 5863 {
af8303fa 5864 scalar_int_mode mode = mode_iter.require ();
5865
8059b95a 5866 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
af8303fa 5867 && !opt_byte_mode.exists ())
5868 opt_byte_mode = mode;
8059b95a 5869
5870 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
af8303fa 5871 && !opt_word_mode.exists ())
5872 opt_word_mode = mode;
8059b95a 5873 }
5874
af8303fa 5875 byte_mode = opt_byte_mode.require ();
5876 word_mode = opt_word_mode.require ();
44504d18 5877 ptr_mode = int_mode_for_size (POINTER_SIZE, 0).require ();
8059b95a 5878}
5879
01703575 5880/* Create some permanent unique rtl objects shared between all functions. */
15bbde2b 5881
5882void
01703575 5883init_emit_once (void)
15bbde2b 5884{
5885 int i;
3754d046 5886 machine_mode mode;
99d671f4 5887 scalar_float_mode double_mode;
15bbde2b 5888
e913b5cd 5889 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5890 CONST_FIXED, and memory attribute hash tables. */
f863a586 5891 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
c6259b83 5892
e913b5cd 5893#if TARGET_SUPPORTS_WIDE_INT
f863a586 5894 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
e913b5cd 5895#endif
f863a586 5896 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
2ff23ed0 5897
f863a586 5898 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
e397ad8e 5899
f863a586 5900 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
77695070 5901
57c097d5 5902#ifdef INIT_EXPANDERS
ab5beff9 5903 /* This is to initialize {init|mark|free}_machine_status before the first
5904 call to push_function_context_to. This is needed by the Chill front
3fb1e43b 5905 end which calls push_function_context_to before the first call to
57c097d5 5906 init_function_start. */
5907 INIT_EXPANDERS;
5908#endif
5909
15bbde2b 5910 /* Create the unique rtx's for certain rtx codes and operand values. */
5911
48a7e3d1 5912 /* Process stack-limiting command-line options. */
5913 if (opt_fstack_limit_symbol_arg != NULL)
5914 stack_limit_rtx
5915 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
5916 if (opt_fstack_limit_register_no >= 0)
5917 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
5918
8fd5918e 5919 /* Don't use gen_rtx_CONST_INT here, since gen_rtx_CONST_INT would
7014838c 5920 itself try to use these variables. */
15bbde2b 5921 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
d823ba47 5922 const_int_rtx[i + MAX_SAVED_CONST_INT] =
a717d5b4 5923 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
15bbde2b 5924
1a60f06a 5925 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5926 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
57c097d5 5927 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
1a60f06a 5928 else
3ad7bb1c 5929 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
15bbde2b 5930
99d671f4 5931 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
8059b95a 5932
cc69d08a 5933 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5934 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5935 real_from_integer (&dconst2, double_mode, 2, SIGNED);
3fa759a9 5936
5937 dconstm1 = dconst1;
5938 dconstm1.sign = 1;
77e89269 5939
5940 dconsthalf = dconst1;
9d96125b 5941 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
15bbde2b 5942
ba8dfb08 5943 for (i = 0; i < 3; i++)
15bbde2b 5944 {
3fa759a9 5945 const REAL_VALUE_TYPE *const r =
badfe841 5946 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5947
19a4dce4 5948 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
069b07bf 5949 const_tiny_rtx[i][(int) mode] =
d5f9611d 5950 const_double_from_real_value (*r, mode);
069b07bf 5951
19a4dce4 5952 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
2ff23ed0 5953 const_tiny_rtx[i][(int) mode] =
d5f9611d 5954 const_double_from_real_value (*r, mode);
15bbde2b 5955
b572011e 5956 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
15bbde2b 5957
19a4dce4 5958 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
b572011e 5959 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
7540dcc4 5960
8c20007a 5961 for (mode = MIN_MODE_PARTIAL_INT;
5962 mode <= MAX_MODE_PARTIAL_INT;
3754d046 5963 mode = (machine_mode)((int)(mode) + 1))
7540dcc4 5964 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
15bbde2b 5965 }
5966
ba8dfb08 5967 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5968
19a4dce4 5969 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
ba8dfb08 5970 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5971
8c20007a 5972 for (mode = MIN_MODE_PARTIAL_INT;
5973 mode <= MAX_MODE_PARTIAL_INT;
3754d046 5974 mode = (machine_mode)((int)(mode) + 1))
dd276d20 5975 const_tiny_rtx[3][(int) mode] = constm1_rtx;
19a4dce4 5976
5977 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
4248fc32 5978 {
5979 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5980 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5981 }
5982
19a4dce4 5983 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
4248fc32 5984 {
5985 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5986 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5987 }
5988
19a4dce4 5989 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6e68dcb2 5990 {
5991 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5992 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
ba8dfb08 5993 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6e68dcb2 5994 }
886cfd4f 5995
19a4dce4 5996 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6e68dcb2 5997 {
5998 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5999 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6000 }
886cfd4f 6001
19a4dce4 6002 FOR_EACH_MODE_IN_CLASS (mode, MODE_FRACT)
06f0b99c 6003 {
9af5ce0c 6004 FCONST0 (mode).data.high = 0;
6005 FCONST0 (mode).data.low = 0;
6006 FCONST0 (mode).mode = mode;
e397ad8e 6007 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6008 FCONST0 (mode), mode);
06f0b99c 6009 }
6010
19a4dce4 6011 FOR_EACH_MODE_IN_CLASS (mode, MODE_UFRACT)
06f0b99c 6012 {
9af5ce0c 6013 FCONST0 (mode).data.high = 0;
6014 FCONST0 (mode).data.low = 0;
6015 FCONST0 (mode).mode = mode;
e397ad8e 6016 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6017 FCONST0 (mode), mode);
06f0b99c 6018 }
6019
19a4dce4 6020 FOR_EACH_MODE_IN_CLASS (mode, MODE_ACCUM)
06f0b99c 6021 {
9af5ce0c 6022 FCONST0 (mode).data.high = 0;
6023 FCONST0 (mode).data.low = 0;
6024 FCONST0 (mode).mode = mode;
e397ad8e 6025 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6026 FCONST0 (mode), mode);
06f0b99c 6027
6028 /* We store the value 1. */
9af5ce0c 6029 FCONST1 (mode).data.high = 0;
6030 FCONST1 (mode).data.low = 0;
6031 FCONST1 (mode).mode = mode;
6032 FCONST1 (mode).data
d67b7119 6033 = double_int_one.lshift (GET_MODE_FBIT (mode),
6034 HOST_BITS_PER_DOUBLE_INT,
6035 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 6036 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6037 FCONST1 (mode), mode);
06f0b99c 6038 }
6039
19a4dce4 6040 FOR_EACH_MODE_IN_CLASS (mode, MODE_UACCUM)
06f0b99c 6041 {
9af5ce0c 6042 FCONST0 (mode).data.high = 0;
6043 FCONST0 (mode).data.low = 0;
6044 FCONST0 (mode).mode = mode;
e397ad8e 6045 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6046 FCONST0 (mode), mode);
06f0b99c 6047
6048 /* We store the value 1. */
9af5ce0c 6049 FCONST1 (mode).data.high = 0;
6050 FCONST1 (mode).data.low = 0;
6051 FCONST1 (mode).mode = mode;
6052 FCONST1 (mode).data
d67b7119 6053 = double_int_one.lshift (GET_MODE_FBIT (mode),
6054 HOST_BITS_PER_DOUBLE_INT,
6055 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 6056 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6057 FCONST1 (mode), mode);
6058 }
6059
19a4dce4 6060 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
e397ad8e 6061 {
6062 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6063 }
6064
19a4dce4 6065 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
e397ad8e 6066 {
6067 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6068 }
6069
19a4dce4 6070 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
e397ad8e 6071 {
6072 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6073 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6074 }
6075
19a4dce4 6076 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
e397ad8e 6077 {
6078 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6079 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
06f0b99c 6080 }
6081
0fd4500a 6082 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
3754d046 6083 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
0fd4500a 6084 const_tiny_rtx[0][i] = const0_rtx;
15bbde2b 6085
065336b4 6086 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6087 if (STORE_FLAG_VALUE == 1)
6088 const_tiny_rtx[1][(int) BImode] = const1_rtx;
7d7b0bac 6089
19a4dce4 6090 FOR_EACH_MODE_IN_CLASS (mode, MODE_POINTER_BOUNDS)
058a1b7a 6091 {
6092 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6093 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6094 }
6095
7d7b0bac 6096 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6097 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6098 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6099 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
f9a00e9e 6100 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6101 /*prev_insn=*/NULL,
6102 /*next_insn=*/NULL,
6103 /*bb=*/NULL,
6104 /*pattern=*/NULL_RTX,
6105 /*location=*/-1,
6106 CODE_FOR_nothing,
6107 /*reg_notes=*/NULL_RTX);
15bbde2b 6108}
ac6c481d 6109\f
cd0fe062 6110/* Produce an exact duplicate of insn INSN after AFTER.
6111 Take care to update libcall regions if present. */
6112
722334ea 6113rtx_insn *
5e9c670f 6114emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
cd0fe062 6115{
722334ea 6116 rtx_insn *new_rtx;
6117 rtx link;
cd0fe062 6118
6119 switch (GET_CODE (insn))
6120 {
6121 case INSN:
9ce37fa7 6122 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 6123 break;
6124
6125 case JUMP_INSN:
9ce37fa7 6126 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
01762951 6127 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
cd0fe062 6128 break;
6129
9845d120 6130 case DEBUG_INSN:
6131 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6132 break;
6133
cd0fe062 6134 case CALL_INSN:
9ce37fa7 6135 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 6136 if (CALL_INSN_FUNCTION_USAGE (insn))
9ce37fa7 6137 CALL_INSN_FUNCTION_USAGE (new_rtx)
cd0fe062 6138 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
9ce37fa7 6139 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6140 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6141 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
48e1416a 6142 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
9c2a0c05 6143 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
cd0fe062 6144 break;
6145
6146 default:
611234b4 6147 gcc_unreachable ();
cd0fe062 6148 }
6149
6150 /* Update LABEL_NUSES. */
9ce37fa7 6151 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
cd0fe062 6152
5169661d 6153 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
ab87d1bc 6154
98116afd 6155 /* If the old insn is frame related, then so is the new one. This is
6156 primarily needed for IA-64 unwind info which marks epilogue insns,
6157 which may be duplicated by the basic block reordering code. */
9ce37fa7 6158 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
98116afd 6159
bb99ba64 6160 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6161 rtx *ptail = &REG_NOTES (new_rtx);
6162 while (*ptail != NULL_RTX)
6163 ptail = &XEXP (*ptail, 1);
6164
19d2fe05 6165 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6166 will make them. REG_LABEL_TARGETs are created there too, but are
6167 supposed to be sticky, so we copy them. */
cd0fe062 6168 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
19d2fe05 6169 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
cd0fe062 6170 {
bb99ba64 6171 *ptail = duplicate_reg_note (link);
6172 ptail = &XEXP (*ptail, 1);
cd0fe062 6173 }
6174
9ce37fa7 6175 INSN_CODE (new_rtx) = INSN_CODE (insn);
6176 return new_rtx;
cd0fe062 6177}
1f3233d1 6178
7035b2ab 6179static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
c09425a0 6180rtx
3754d046 6181gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
c09425a0 6182{
6183 if (hard_reg_clobbers[mode][regno])
6184 return hard_reg_clobbers[mode][regno];
6185 else
6186 return (hard_reg_clobbers[mode][regno] =
6187 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6188}
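
/* A minimal sketch, assuming hard register 0 is valid in SImode on
   the target: repeated requests yield one shared rtx, so such
   clobbers can be compared by pointer:

     rtx c1 = gen_hard_reg_clobber (SImode, 0);
     rtx c2 = gen_hard_reg_clobber (SImode, 0);
     gcc_assert (c1 == c2);  */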
6189
5169661d 6190location_t prologue_location;
6191location_t epilogue_location;
23a070f3 6192
6193/* Hold current location information and last location information, so the
6194 datastructures are built lazily only when some instructions in given
6195 place are needed. */
c7abeac5 6196static location_t curr_location;
23a070f3 6197
5169661d 6198/* Allocate the insn location data structure. */
23a070f3 6199void
5169661d 6200insn_locations_init (void)
23a070f3 6201{
5169661d 6202 prologue_location = epilogue_location = 0;
23a070f3 6203 curr_location = UNKNOWN_LOCATION;
23a070f3 6204}
6205
6206/* At the end of emit stage, clear current location. */
6207void
5169661d 6208insn_locations_finalize (void)
23a070f3 6209{
5169661d 6210 epilogue_location = curr_location;
6211 curr_location = UNKNOWN_LOCATION;
23a070f3 6212}
6213
6214/* Set current location. */
6215void
5169661d 6216set_curr_insn_location (location_t location)
23a070f3 6217{
23a070f3 6218 curr_location = location;
6219}
6220
6221/* Get current location. */
6222location_t
5169661d 6223curr_insn_location (void)
23a070f3 6224{
6225 return curr_location;
6226}
6227
23a070f3 6228/* Return the lexical scope block INSN belongs to. */
6229tree
5e9c670f 6230insn_scope (const rtx_insn *insn)
23a070f3 6231{
5169661d 6232 return LOCATION_BLOCK (INSN_LOCATION (insn));
23a070f3 6233}
6234
6235/* Return line number of the statement that produced this insn. */
6236int
5e9c670f 6237insn_line (const rtx_insn *insn)
23a070f3 6238{
5169661d 6239 return LOCATION_LINE (INSN_LOCATION (insn));
23a070f3 6240}
6241
6242/* Return source file of the statement that produced this insn. */
6243const char *
5e9c670f 6244insn_file (const rtx_insn *insn)
23a070f3 6245{
5169661d 6246 return LOCATION_FILE (INSN_LOCATION (insn));
23a070f3 6247}
30c3c442 6248
0e7ae557 6249/* Return expanded location of the statement that produced this insn. */
6250expanded_location
5e9c670f 6251insn_location (const rtx_insn *insn)
0e7ae557 6252{
6253 return expand_location (INSN_LOCATION (insn));
6254}
6255
30c3c442 6256/* Return true if memory model MODEL requires a pre-operation (release-style)
6257 barrier or a post-operation (acquire-style) barrier. While not universal,
6258 this function matches the behavior of several targets. */
6259
6260bool
6261need_atomic_barrier_p (enum memmodel model, bool pre)
6262{
e205c62d 6263 switch (model & MEMMODEL_BASE_MASK)
30c3c442 6264 {
6265 case MEMMODEL_RELAXED:
6266 case MEMMODEL_CONSUME:
6267 return false;
6268 case MEMMODEL_RELEASE:
6269 return pre;
6270 case MEMMODEL_ACQUIRE:
6271 return !pre;
6272 case MEMMODEL_ACQ_REL:
6273 case MEMMODEL_SEQ_CST:
6274 return true;
6275 default:
6276 gcc_unreachable ();
6277 }
6278}
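
/* A minimal sketch of how a target expander might use this; the
   gen_memory_barrier call stands in for whatever barrier pattern the
   target provides, and ATOMIC_OP for the operation being expanded:

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier ());
     emit_insn (atomic_op);
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier ());  */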
2add0b64 6279
6280/* Initialize fields of rtl_data related to stack alignment. */
6281
6282void
6283rtl_data::init_stack_alignment ()
6284{
6285 stack_alignment_needed = STACK_BOUNDARY;
6286 max_used_stack_slot_alignment = STACK_BOUNDARY;
6287 stack_alignment_estimated = 0;
6288 preferred_stack_boundary = STACK_BOUNDARY;
6289}
6290
30c3c442 6291\f
1f3233d1 6292#include "gt-emit-rtl.h"