/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "params.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

machine_mode byte_mode;   /* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;   /* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;    /* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able
   to deal with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

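/* Editor's illustration (not from the original file): identical reg_attrs
   are shared through reg_attrs_htab, so attaching the same decl/offset
   pair twice yields pointer-equal attribute structures.  A minimal
   sketch, assuming a valid tree DECL is in scope:

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);
     gcc_assert (a == b);

   This sharing is what lets reg_attr_hasher::equal stay a cheap
   field-by-field comparison.  */
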
#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs[regno][mode]
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

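/* Editor's note (illustration, not in the original file): prefer
   gen_int_mode over bare GEN_INT whenever the mode is known, since it
   canonicalizes the constant by sign-extending from the mode's width.
   On the usual targets where QImode is 8 bits:

     rtx a = gen_int_mode (0xff, QImode);
     gcc_assert (a == constm1_rtx);

   The assert holds because 0xff truncated to QImode and sign-extended
   back is -1, and small CONST_INTs are interned in const_int_rtx[] by
   gen_rtx_CONST_INT above.  */
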
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

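/* Editor's illustration (not part of the original source): a typical way
   to materialize a floating-point constant, here 0.5 in DFmode, using
   the dconsthalf constant defined above:

     rtx half = const_double_from_real_value (dconsthalf, DFmode);

   Because lookup_const_double interns the node, repeated calls with the
   same value and mode return the same shared rtx.  */
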
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

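/* Editor's illustration (not in the original file): immed_wide_int_const
   is the width-agnostic way to materialize an integer constant.  A
   sketch, assuming a 64-bit HOST_WIDE_INT so that this 128-bit value
   (2 to the power 100) needs two blocks:

     wide_int w = wi::lshift (wi::one (128), 100);
     rtx x = immed_wide_int_const (w, TImode);

   Values that fit in one block fall through to gen_int_mode and come
   back as ordinary shared CONST_INTs.  */
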
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
		  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

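/* Editor's illustration (not part of the original source): because of the
   special cases above, well-known Pmode hard registers come back as the
   shared global nodes, so pointer comparison is meaningful for them:

     rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
     gcc_assert (sp == stack_pointer_rtx);

   Other (mode, regno) pairs get a freshly allocated REG on each call.  */
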
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

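/* Editor's illustration (not in the original file): gen_const_mem is the
   usual way to reference read-only, non-trapping memory such as a
   constant table; the symbol name below is hypothetical:

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "some_table");
     rtx mem = gen_const_mem (SImode, sym);

   The MEM_READONLY_P / MEM_NOTRAP_P flags let alias analysis and the
   scheduler treat the reference more aggressively than a plain MEM.  */
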
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register
		that should be used in a different mode in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

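/* Editor's illustration (not part of the original source): taking the
   SImode lowpart of a DImode pseudo is a valid subreg, and
   gen_rtx_SUBREG below asserts validate_subreg, so invalid pairs are
   caught at creation time:

     rtx r  = gen_reg_rtx (DImode);
     rtx lo = gen_rtx_SUBREG (SImode, r,
			      subreg_lowpart_offset (SImode, DImode));

   A size-changing float subreg such as (subreg:SI (reg:DF) 0) would
   fail the FLOAT_MODE_P check above outside of LRA.  */
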
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
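
/* Editor's illustration (not in the original file): gen_lowpart_SUBREG
   computes the endian-correct lowpart byte offset for you:

     rtx d = gen_reg_rtx (DImode);
     rtx s = gen_lowpart_SUBREG (SImode, d);

   On a little-endian target this yields (subreg:SI (reg:DI) 0); on a
   big-endian 64-bit target it yields (subreg:SI (reg:DI) 4).  */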

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

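/* Editor's illustration (not part of the original source): gen_rtvec is
   typically used to build the body of a PARALLEL, e.g. a set together
   with a clobber of a scratch; reg1 and reg2 are hypothetical rtxes:

     rtx body
       = gen_rtx_PARALLEL (VOIDmode,
			   gen_rtvec (2,
				      gen_rtx_SET (reg1, reg2),
				      gen_rtx_CLOBBER (VOIDmode,
						       gen_rtx_SCRATCH (SImode))));
*/
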
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
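
/* Editor's illustration (not in the original file): unlike a SUBREG byte
   offset, this value can be negative for a paradoxical lowpart.  On a
   big-endian 64-bit target:

     byte_lowpart_offset (SImode, DImode) == 4
     byte_lowpart_offset (DImode, SImode) == -4

   while both are 0 on a little-endian target.  */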
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

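/* Editor's illustration (not part of the original source): with
   generating_concat_p set, as during expansion, a complex-mode pseudo
   comes back as a CONCAT of two independent pseudos:

     rtx c = gen_reg_rtx (DCmode);

   yields (concat:DC (reg:DF N) (reg:DF N+1)), so the real and imaginary
   halves can be allocated to non-adjacent registers.  */
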
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
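
/* Editor's illustration (not in the original file): gen_lowpart_common
   handles only the always-safe cases and returns 0 otherwise, so callers
   must be prepared for failure; some_di_reg is a hypothetical DImode rtx:

     rtx lo = gen_lowpart_common (SImode, some_di_reg);
     if (lo == 0)
       ... fall back, e.g. via gen_lowpart or a forced register ...
*/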
\f
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
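
/* Editor's illustration (not part of the original source): on a
   little-endian target subreg_lowpart_offset (SImode, DImode) is 0,
   while on a 64-bit big-endian target it is 4, since the low-order
   SImode part lives in the high-addressed half of the DImode value.  */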

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
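
/* Editor's illustration (not in the original file): a paradoxical subreg
   is wider than its operand, e.g.

     (subreg:DI (reg:SI 100) 0)

   for which paradoxical_subreg_p returns true; the offset of such a
   subreg is always 0 (see validate_subreg above).  */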
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
		 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
1641}
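/* Sketch of the contrast with plain operand_subword (illustrative
   only; the example_ name is hypothetical): the _force variant may
   copy OP into a pseudo first, so the caller needs no null check.  */

static rtx
example_force_low_word (rtx op, machine_mode mode)
{
  rtx word = operand_subword_force (op, 0, mode);
  gcc_assert (word != 0);	/* Guaranteed by the function above.  */
  return word;
}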
1642\f
2b3493c8
AK
1643/* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
1644 0 otherwise. */
1645
1646int
4f588890 1647mem_expr_equal_p (const_tree expr1, const_tree expr2)
2b3493c8
AK
1648{
1649 if (expr1 == expr2)
1650 return 1;
1651
1652 if (! expr1 || ! expr2)
1653 return 0;
1654
1655 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1656 return 0;
1657
55b34b5f 1658 return operand_equal_p (expr1, expr2, 0);
2b3493c8
AK
1659}
1660
805903b5
JJ
1661/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1662 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1663 -1 if not known. */
1664
1665int
d9223014 1666get_mem_align_offset (rtx mem, unsigned int align)
805903b5
JJ
1667{
1668 tree expr;
1669 unsigned HOST_WIDE_INT offset;
1670
1671 /* This function can't use
527210c4 1672 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
e80c2726 1673 || (MAX (MEM_ALIGN (mem),
0eb77834 1674 MAX (align, get_object_alignment (MEM_EXPR (mem))))
805903b5
JJ
1675 < align))
1676 return -1;
1677 else
527210c4 1678 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
805903b5
JJ
1679 for two reasons:
1680 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1681 for <variable>. get_inner_reference doesn't handle it and
1682 even if it did, the alignment in that case needs to be determined
1683 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1684 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1685 isn't sufficiently aligned, the object it is in might be. */
1686 gcc_assert (MEM_P (mem));
1687 expr = MEM_EXPR (mem);
527210c4 1688 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
805903b5
JJ
1689 return -1;
1690
527210c4 1691 offset = MEM_OFFSET (mem);
805903b5
JJ
1692 if (DECL_P (expr))
1693 {
1694 if (DECL_ALIGN (expr) < align)
1695 return -1;
1696 }
1697 else if (INDIRECT_REF_P (expr))
1698 {
1699 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1700 return -1;
1701 }
1702 else if (TREE_CODE (expr) == COMPONENT_REF)
1703 {
1704 while (1)
1705 {
1706 tree inner = TREE_OPERAND (expr, 0);
1707 tree field = TREE_OPERAND (expr, 1);
1708 tree byte_offset = component_ref_field_offset (expr);
1709 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1710
1711 if (!byte_offset
cc269bb6
RS
1712 || !tree_fits_uhwi_p (byte_offset)
1713 || !tree_fits_uhwi_p (bit_offset))
805903b5
JJ
1714 return -1;
1715
ae7e9ddd
RS
1716 offset += tree_to_uhwi (byte_offset);
1717 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
805903b5
JJ
1718
1719 if (inner == NULL_TREE)
1720 {
1721 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1722 < (unsigned int) align)
1723 return -1;
1724 break;
1725 }
1726 else if (DECL_P (inner))
1727 {
1728 if (DECL_ALIGN (inner) < align)
1729 return -1;
1730 break;
1731 }
1732 else if (TREE_CODE (inner) != COMPONENT_REF)
1733 return -1;
1734 expr = inner;
1735 }
1736 }
1737 else
1738 return -1;
1739
1740 return offset & ((align / BITS_PER_UNIT) - 1);
1741}
1742
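/* Illustrative predicate built on the function above (hypothetical
   example_ name): a return value of 0 means the address of MEM itself
   is known to be 32-bit aligned; -1 means nothing is known.  */

static bool
example_mem_32bit_aligned_p (rtx mem)
{
  return get_mem_align_offset (mem, 32) == 0;
}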
6926c713 1743/* Given REF (a MEM) and T, either the type of REF or the expression
173b24b9 1744 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1745 if we are making a new object of this type. BITPOS is nonzero if
1746 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1747
1748void
502b8322
AJ
1749set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1750 HOST_WIDE_INT bitpos)
173b24b9 1751{
6f1087be 1752 HOST_WIDE_INT apply_bitpos = 0;
173b24b9 1753 tree type;
f12144dd 1754 struct mem_attrs attrs, *defattrs, *refattrs;
f18a7b25 1755 addr_space_t as;
173b24b9
RK
1756
1757 /* It can happen that type_for_mode was given a mode for which there
1758 is no language-level type. In which case it returns NULL, which
1759 we can see here. */
1760 if (t == NULL_TREE)
1761 return;
1762
1763 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1764 if (type == error_mark_node)
1765 return;
173b24b9 1766
173b24b9
RK
1767 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1768 wrong answer, as it assumes that DECL_RTL already has the right alias
1769 info. Callers should not set DECL_RTL until after the call to
1770 set_mem_attributes. */
5b0264cb 1771 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1772
f12144dd
RS
1773 memset (&attrs, 0, sizeof (attrs));
1774
738cc472 1775 /* Get the alias set from the expression or type (perhaps using a
8ac61af7 1776 front-end routine) and use it. */
f12144dd 1777 attrs.alias = get_alias_set (t);
173b24b9 1778
a5e9c810 1779 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
f8ad8d7c 1780 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1781
268f7033 1782 /* Default values from pre-existing memory attributes if present. */
f12144dd
RS
1783 refattrs = MEM_ATTRS (ref);
1784 if (refattrs)
268f7033
UW
1785 {
1786 /* ??? Can this ever happen? Calling this routine on a MEM that
1787 already carries memory attributes should probably be invalid. */
f12144dd 1788 attrs.expr = refattrs->expr;
754c3d5d 1789 attrs.offset_known_p = refattrs->offset_known_p;
f12144dd 1790 attrs.offset = refattrs->offset;
754c3d5d 1791 attrs.size_known_p = refattrs->size_known_p;
f12144dd
RS
1792 attrs.size = refattrs->size;
1793 attrs.align = refattrs->align;
268f7033
UW
1794 }
1795
1796 /* Otherwise, default values from the mode of the MEM reference. */
f12144dd 1797 else
268f7033 1798 {
f12144dd
RS
1799 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1800 gcc_assert (!defattrs->expr);
754c3d5d 1801 gcc_assert (!defattrs->offset_known_p);
f12144dd 1802
268f7033 1803 /* Respect mode size. */
754c3d5d 1804 attrs.size_known_p = defattrs->size_known_p;
f12144dd 1805 attrs.size = defattrs->size;
268f7033
UW
1806 /* ??? Is this really necessary? We probably should always get
1807 the size from the type below. */
1808
1809 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1810 if T is an object, always compute the object alignment below. */
f12144dd
RS
1811 if (TYPE_P (t))
1812 attrs.align = defattrs->align;
1813 else
1814 attrs.align = BITS_PER_UNIT;
268f7033
UW
1815 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1816 e.g. if the type carries an alignment attribute. Should we be
1817 able to simply always use TYPE_ALIGN? */
1818 }
1819
25b75a48
BE
1820 /* We can set the alignment from the type if we are making an object or if
1821 this is an INDIRECT_REF. */
1822 if (objectp || TREE_CODE (t) == INDIRECT_REF)
f12144dd 1823 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
a80903ff 1824
738cc472 1825 /* If the size is known, we can set that. */
a787ccc3 1826 tree new_size = TYPE_SIZE_UNIT (type);
738cc472 1827
30b0317c
RB
1828 /* The address-space is that of the type. */
1829 as = TYPE_ADDR_SPACE (type);
1830
80965c18
RK
1831 /* If T is not a type, we may be able to deduce some more information about
1832 the expression. */
1833 if (! TYPE_P (t))
8ac61af7 1834 {
8476af98 1835 tree base;
389fdba0 1836
8ac61af7
RK
1837 if (TREE_THIS_VOLATILE (t))
1838 MEM_VOLATILE_P (ref) = 1;
173b24b9 1839
c56e3582
RK
1840 /* Now remove any conversions: they don't change what the underlying
1841 object is. Likewise for SAVE_EXPR. */
1043771b 1842 while (CONVERT_EXPR_P (t)
c56e3582
RK
1843 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1844 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1845 t = TREE_OPERAND (t, 0);
1846
4994da65
RG
1847 /* Note whether this expression can trap. */
1848 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1849
1850 base = get_base_address (t);
f18a7b25
MJ
1851 if (base)
1852 {
1853 if (DECL_P (base)
1854 && TREE_READONLY (base)
1855 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1856 && !TREE_THIS_VOLATILE (base))
1857 MEM_READONLY_P (ref) = 1;
1858
1859 /* Mark static const strings readonly as well. */
1860 if (TREE_CODE (base) == STRING_CST
1861 && TREE_READONLY (base)
1862 && TREE_STATIC (base))
1863 MEM_READONLY_P (ref) = 1;
1864
30b0317c 1865 /* Address-space information is on the base object. */
f18a7b25
MJ
1866 if (TREE_CODE (base) == MEM_REF
1867 || TREE_CODE (base) == TARGET_MEM_REF)
1868 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1869 0))));
1870 else
1871 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1872 }
ba30e50d 1873
2039d7aa
RH
1874 /* If this expression uses its parent's alias set, mark it such
1875 that we won't change it. */
b4ada065 1876 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
10b76d73
RK
1877 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1878
8ac61af7
RK
1879 /* If this is a decl, set the attributes of the MEM from it. */
1880 if (DECL_P (t))
1881 {
f12144dd 1882 attrs.expr = t;
754c3d5d
RS
1883 attrs.offset_known_p = true;
1884 attrs.offset = 0;
6f1087be 1885 apply_bitpos = bitpos;
a787ccc3 1886 new_size = DECL_SIZE_UNIT (t);
8ac61af7
RK
1887 }
1888
30b0317c 1889 /* ??? If we end up with a constant here do record a MEM_EXPR. */
6615c446 1890 else if (CONSTANT_CLASS_P (t))
30b0317c 1891 ;
998d7deb 1892
a787ccc3
RS
1893 /* If this is a field reference, record it. */
1894 else if (TREE_CODE (t) == COMPONENT_REF)
998d7deb 1895 {
f12144dd 1896 attrs.expr = t;
754c3d5d
RS
1897 attrs.offset_known_p = true;
1898 attrs.offset = 0;
6f1087be 1899 apply_bitpos = bitpos;
a787ccc3
RS
1900 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1901 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
998d7deb
RH
1902 }
1903
1904 /* If this is an array reference, look for an outer field reference. */
1905 else if (TREE_CODE (t) == ARRAY_REF)
1906 {
1907 tree off_tree = size_zero_node;
1b1838b6
JW
1908 /* We can't modify t, because we use it at the end of the
1909 function. */
1910 tree t2 = t;
998d7deb
RH
1911
1912 do
1913 {
1b1838b6 1914 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1915 tree low_bound = array_ref_low_bound (t2);
1916 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1917
1918 /* We assume all arrays have sizes that are a multiple of a byte.
1919 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1920 index, then convert to sizetype and multiply by the size of
1921 the array element. */
1922 if (! integer_zerop (low_bound))
4845b383
KH
1923 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1924 index, low_bound);
2567406a 1925
44de5aeb 1926 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
1927 size_binop (MULT_EXPR,
1928 fold_convert (sizetype,
1929 index),
44de5aeb
RK
1930 unit_size),
1931 off_tree);
1b1838b6 1932 t2 = TREE_OPERAND (t2, 0);
998d7deb 1933 }
1b1838b6 1934 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1935
30b0317c
RB
1936 if (DECL_P (t2)
1937 || TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1938 {
f12144dd 1939 attrs.expr = t2;
754c3d5d 1940 attrs.offset_known_p = false;
cc269bb6 1941 if (tree_fits_uhwi_p (off_tree))
6f1087be 1942 {
754c3d5d 1943 attrs.offset_known_p = true;
ae7e9ddd 1944 attrs.offset = tree_to_uhwi (off_tree);
6f1087be
RH
1945 apply_bitpos = bitpos;
1946 }
998d7deb 1947 }
30b0317c 1948 /* Else do not record a MEM_EXPR. */
c67a1cf6
RH
1949 }
1950
56c47f22 1951 /* If this is an indirect reference, record it. */
70f34814 1952 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 1953 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 1954 {
f12144dd 1955 attrs.expr = t;
754c3d5d
RS
1956 attrs.offset_known_p = true;
1957 attrs.offset = 0;
56c47f22
RG
1958 apply_bitpos = bitpos;
1959 }
1960
30b0317c
RB
1961 /* Compute the alignment. */
1962 unsigned int obj_align;
1963 unsigned HOST_WIDE_INT obj_bitpos;
1964 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1965 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1966 if (obj_bitpos != 0)
146ec50f 1967 obj_align = least_bit_hwi (obj_bitpos);
30b0317c 1968 attrs.align = MAX (attrs.align, obj_align);
8ac61af7
RK
1969 }
1970
cc269bb6 1971 if (tree_fits_uhwi_p (new_size))
a787ccc3
RS
1972 {
1973 attrs.size_known_p = true;
ae7e9ddd 1974 attrs.size = tree_to_uhwi (new_size);
a787ccc3
RS
1975 }
1976
15c812e3 1977 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1978 bit position offset. Similarly, increase the size of the accessed
1979 object to contain the negative offset. */
6f1087be 1980 if (apply_bitpos)
8c317c5f 1981 {
754c3d5d
RS
1982 gcc_assert (attrs.offset_known_p);
1983 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1984 if (attrs.size_known_p)
1985 attrs.size += apply_bitpos / BITS_PER_UNIT;
8c317c5f 1986 }
6f1087be 1987
8ac61af7 1988 /* Now set the attributes we computed above. */
f18a7b25 1989 attrs.addrspace = as;
f12144dd 1990 set_mem_attrs (ref, &attrs);
173b24b9
RK
1991}
1992
6f1087be 1993void
502b8322 1994set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
1995{
1996 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1997}
1998
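/* A minimal sketch of the calling convention documented above
   (illustrative; DECL and ADDR are hypothetical inputs): the MEM is
   built and given attributes *before* it is installed as DECL_RTL,
   matching the assertion about DECL_RTL_IF_SET.  */

static void
example_attach_decl_attrs (tree decl, rtx addr)
{
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)), addr);
  set_mem_attributes (mem, decl, 1);	/* OBJECTP: a new object.  */
  SET_DECL_RTL (decl, mem);		/* Only safe after the call.  */
}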
173b24b9
RK
1999/* Set the alias set of MEM to SET. */
2000
2001void
4862826d 2002set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 2003{
f12144dd
RS
2004 struct mem_attrs attrs;
2005
173b24b9 2006 /* If the new and old alias sets don't conflict, something is wrong. */
77a74ed7 2007 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
f12144dd
RS
2008 attrs = *get_mem_attrs (mem);
2009 attrs.alias = set;
2010 set_mem_attrs (mem, &attrs);
09e881c9
BE
2011}
2012
2013/* Set the address space of MEM to ADDRSPACE (target-defined). */
2014
2015void
2016set_mem_addr_space (rtx mem, addr_space_t addrspace)
2017{
f12144dd
RS
2018 struct mem_attrs attrs;
2019
2020 attrs = *get_mem_attrs (mem);
2021 attrs.addrspace = addrspace;
2022 set_mem_attrs (mem, &attrs);
173b24b9 2023}
738cc472 2024
d022d93e 2025/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
2026
2027void
502b8322 2028set_mem_align (rtx mem, unsigned int align)
738cc472 2029{
f12144dd
RS
2030 struct mem_attrs attrs;
2031
2032 attrs = *get_mem_attrs (mem);
2033 attrs.align = align;
2034 set_mem_attrs (mem, &attrs);
738cc472 2035}
1285011e 2036
998d7deb 2037/* Set the expr for MEM to EXPR. */
1285011e
RK
2038
2039void
502b8322 2040set_mem_expr (rtx mem, tree expr)
1285011e 2041{
f12144dd
RS
2042 struct mem_attrs attrs;
2043
2044 attrs = *get_mem_attrs (mem);
2045 attrs.expr = expr;
2046 set_mem_attrs (mem, &attrs);
1285011e 2047}
998d7deb
RH
2048
2049/* Set the offset of MEM to OFFSET. */
2050
2051void
527210c4 2052set_mem_offset (rtx mem, HOST_WIDE_INT offset)
998d7deb 2053{
f12144dd
RS
2054 struct mem_attrs attrs;
2055
2056 attrs = *get_mem_attrs (mem);
754c3d5d
RS
2057 attrs.offset_known_p = true;
2058 attrs.offset = offset;
527210c4
RS
2059 set_mem_attrs (mem, &attrs);
2060}
2061
2062/* Clear the offset of MEM. */
2063
2064void
2065clear_mem_offset (rtx mem)
2066{
2067 struct mem_attrs attrs;
2068
2069 attrs = *get_mem_attrs (mem);
754c3d5d 2070 attrs.offset_known_p = false;
f12144dd 2071 set_mem_attrs (mem, &attrs);
35aff10b
AM
2072}
2073
2074/* Set the size of MEM to SIZE. */
2075
2076void
f5541398 2077set_mem_size (rtx mem, HOST_WIDE_INT size)
35aff10b 2078{
f12144dd
RS
2079 struct mem_attrs attrs;
2080
2081 attrs = *get_mem_attrs (mem);
754c3d5d
RS
2082 attrs.size_known_p = true;
2083 attrs.size = size;
f5541398
RS
2084 set_mem_attrs (mem, &attrs);
2085}
2086
2087/* Clear the size of MEM. */
2088
2089void
2090clear_mem_size (rtx mem)
2091{
2092 struct mem_attrs attrs;
2093
2094 attrs = *get_mem_attrs (mem);
754c3d5d 2095 attrs.size_known_p = false;
f12144dd 2096 set_mem_attrs (mem, &attrs);
998d7deb 2097}
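/* Usage sketch for the setters above (illustrative; hypothetical
   example_ name): each call copies the current attribute block,
   changes one field and re-shares it through set_mem_attrs.  */

static void
example_tighten_mem (rtx mem)
{
  set_mem_align (mem, 64);	/* Alignment is given in bits...  */
  set_mem_size (mem, 8);	/* ...but the size is in bytes.  */
  clear_mem_offset (mem);	/* Offset within MEM_EXPR now unknown.  */
}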
173b24b9 2098\f
738cc472
RK
2099/* Return a memory reference like MEMREF, but with its mode changed to MODE
2100 and its address changed to ADDR. (VOIDmode means don't change the mode.
2101 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
23b33725
RS
2102 returned memory location is required to be valid. INPLACE is true if any
2103 changes can be made directly to MEMREF or false if MEMREF must be treated
2104 as immutable.
2105
2106 The memory attributes are not changed. */
23b2ce53 2107
738cc472 2108static rtx
ef4bddc2 2109change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
23b33725 2110 bool inplace)
23b2ce53 2111{
09e881c9 2112 addr_space_t as;
60564289 2113 rtx new_rtx;
23b2ce53 2114
5b0264cb 2115 gcc_assert (MEM_P (memref));
09e881c9 2116 as = MEM_ADDR_SPACE (memref);
23b2ce53
RS
2117 if (mode == VOIDmode)
2118 mode = GET_MODE (memref);
2119 if (addr == 0)
2120 addr = XEXP (memref, 0);
a74ff877 2121 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
09e881c9 2122 && (!validate || memory_address_addr_space_p (mode, addr, as)))
a74ff877 2123 return memref;
23b2ce53 2124
91c5ee5b
VM
2125 /* Don't validate address for LRA. LRA can make the address valid
2126 by itself in most efficient way. */
2127 if (validate && !lra_in_progress)
23b2ce53 2128 {
f1ec5147 2129 if (reload_in_progress || reload_completed)
09e881c9 2130 gcc_assert (memory_address_addr_space_p (mode, addr, as));
f1ec5147 2131 else
09e881c9 2132 addr = memory_address_addr_space (mode, addr, as);
23b2ce53 2133 }
750c9258 2134
9b04c6a8
RK
2135 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2136 return memref;
2137
23b33725
RS
2138 if (inplace)
2139 {
2140 XEXP (memref, 0) = addr;
2141 return memref;
2142 }
2143
60564289
KG
2144 new_rtx = gen_rtx_MEM (mode, addr);
2145 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2146 return new_rtx;
23b2ce53 2147}
792760b9 2148
738cc472
RK
2149/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2150 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
2151
2152rtx
ef4bddc2 2153change_address (rtx memref, machine_mode mode, rtx addr)
f4ef873c 2154{
23b33725 2155 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
ef4bddc2 2156 machine_mode mmode = GET_MODE (new_rtx);
f12144dd 2157 struct mem_attrs attrs, *defattrs;
4e44c1ef 2158
f12144dd
RS
2159 attrs = *get_mem_attrs (memref);
2160 defattrs = mode_mem_attrs[(int) mmode];
754c3d5d
RS
2161 attrs.expr = NULL_TREE;
2162 attrs.offset_known_p = false;
2163 attrs.size_known_p = defattrs->size_known_p;
f12144dd
RS
2164 attrs.size = defattrs->size;
2165 attrs.align = defattrs->align;
c2f7bcc3 2166
fdb1c7b3 2167 /* If there are no changes, just return the original memory reference. */
60564289 2168 if (new_rtx == memref)
4e44c1ef 2169 {
f12144dd 2170 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
60564289 2171 return new_rtx;
4e44c1ef 2172
60564289
KG
2173 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2174 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 2175 }
fdb1c7b3 2176
f12144dd 2177 set_mem_attrs (new_rtx, &attrs);
60564289 2178 return new_rtx;
f4ef873c 2179}
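/* Illustrative call (not from this file): view a BLKmode reference as
   a word_mode access at an address already computed into ADDR_REG.
   Per the comment above, only the alias set is preserved; expr,
   offset and size are reset.  */

static rtx
example_word_view (rtx blk_mem, rtx addr_reg)
{
  return change_address (blk_mem, word_mode, addr_reg);
}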
792760b9 2180
738cc472
RK
2181/* Return a memory reference like MEMREF, but with its mode changed
2182 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6 2183 nonzero, the memory address is forced to be valid.
5ef0b50d
EB
2184 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2185 and the caller is responsible for adjusting the MEMREF base register.
2186 If ADJUST_OBJECT is zero, the underlying object associated with the
2187 memory reference is left unchanged and the caller is responsible for
2188 dealing with it. Otherwise, if the new memory reference is outside
5f2cbd0d
RS
2189 the underlying object, even partially, then the object is dropped.
2190 SIZE, if nonzero, is the size of an access in cases where MODE
2191 has no inherent size. */
f1ec5147
RK
2192
2193rtx
ef4bddc2 2194adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
5f2cbd0d
RS
2195 int validate, int adjust_address, int adjust_object,
2196 HOST_WIDE_INT size)
f1ec5147 2197{
823e3574 2198 rtx addr = XEXP (memref, 0);
60564289 2199 rtx new_rtx;
ef4bddc2 2200 machine_mode address_mode;
a6fe9ed4 2201 int pbits;
0207fa90 2202 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
f12144dd 2203 unsigned HOST_WIDE_INT max_align;
0207fa90 2204#ifdef POINTERS_EXTEND_UNSIGNED
ef4bddc2 2205 machine_mode pointer_mode
0207fa90
EB
2206 = targetm.addr_space.pointer_mode (attrs.addrspace);
2207#endif
823e3574 2208
ee88e690
EB
2209 /* VOIDmode means no mode change for change_address_1. */
2210 if (mode == VOIDmode)
2211 mode = GET_MODE (memref);
2212
5f2cbd0d
RS
2213 /* Take the size of non-BLKmode accesses from the mode. */
2214 defattrs = mode_mem_attrs[(int) mode];
2215 if (defattrs->size_known_p)
2216 size = defattrs->size;
2217
fdb1c7b3
JH
2218 /* If there are no changes, just return the original memory reference. */
2219 if (mode == GET_MODE (memref) && !offset
5f2cbd0d 2220 && (size == 0 || (attrs.size_known_p && attrs.size == size))
f12144dd
RS
2221 && (!validate || memory_address_addr_space_p (mode, addr,
2222 attrs.addrspace)))
fdb1c7b3
JH
2223 return memref;
2224
d14419e4 2225 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 2226 This may happen even if offset is nonzero -- consider
d14419e4
RH
2227 (plus (plus reg reg) const_int) -- so do this always. */
2228 addr = copy_rtx (addr);
2229
a6fe9ed4
JM
2230 /* Convert a possibly large offset to a signed value within the
2231 range of the target address space. */
372d6395 2232 address_mode = get_address_mode (memref);
d4ebfa65 2233 pbits = GET_MODE_BITSIZE (address_mode);
a6fe9ed4
JM
2234 if (HOST_BITS_PER_WIDE_INT > pbits)
2235 {
2236 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2237 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2238 >> shift);
2239 }
2240
5ef0b50d 2241 if (adjust_address)
4a78c787
RH
2242 {
2243 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2244 object, we can merge it into the LO_SUM. */
2245 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2246 && offset >= 0
2247 && (unsigned HOST_WIDE_INT) offset
2248 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
d4ebfa65 2249 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
0a81f074
RS
2250 plus_constant (address_mode,
2251 XEXP (addr, 1), offset));
0207fa90
EB
2252#ifdef POINTERS_EXTEND_UNSIGNED
2253 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2254 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2255 the fact that pointers are not allowed to overflow. */
2256 else if (POINTERS_EXTEND_UNSIGNED > 0
2257 && GET_CODE (addr) == ZERO_EXTEND
2258 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2259 && trunc_int_for_mode (offset, pointer_mode) == offset)
2260 addr = gen_rtx_ZERO_EXTEND (address_mode,
2261 plus_constant (pointer_mode,
2262 XEXP (addr, 0), offset));
2263#endif
4a78c787 2264 else
0a81f074 2265 addr = plus_constant (address_mode, addr, offset);
4a78c787 2266 }
823e3574 2267
23b33725 2268 new_rtx = change_address_1 (memref, mode, addr, validate, false);
738cc472 2269
09efeca1
PB
2270 /* If the address is a REG, change_address_1 rightfully returns memref,
2271 but this would destroy memref's MEM_ATTRS. */
2272 if (new_rtx == memref && offset != 0)
2273 new_rtx = copy_rtx (new_rtx);
2274
5ef0b50d
EB
2275 /* Conservatively drop the object if we don't know where we start from. */
2276 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2277 {
2278 attrs.expr = NULL_TREE;
2279 attrs.alias = 0;
2280 }
2281
738cc472
RK
2282 /* Compute the new values of the memory attributes due to this adjustment.
2283 We add the offsets and update the alignment. */
754c3d5d 2284 if (attrs.offset_known_p)
5ef0b50d
EB
2285 {
2286 attrs.offset += offset;
2287
2288 /* Drop the object if the new left end is not within its bounds. */
2289 if (adjust_object && attrs.offset < 0)
2290 {
2291 attrs.expr = NULL_TREE;
2292 attrs.alias = 0;
2293 }
2294 }
738cc472 2295
03bf2c23
RK
2296 /* Compute the new alignment by taking the MIN of the alignment and the
2297 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2298 is zero. */
2299 if (offset != 0)
f12144dd 2300 {
146ec50f 2301 max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
f12144dd
RS
2302 attrs.align = MIN (attrs.align, max_align);
2303 }
738cc472 2304
5f2cbd0d 2305 if (size)
754c3d5d 2306 {
5ef0b50d 2307 /* Drop the object if the new right end is not within its bounds. */
5f2cbd0d 2308 if (adjust_object && (offset + size) > attrs.size)
5ef0b50d
EB
2309 {
2310 attrs.expr = NULL_TREE;
2311 attrs.alias = 0;
2312 }
754c3d5d 2313 attrs.size_known_p = true;
5f2cbd0d 2314 attrs.size = size;
754c3d5d
RS
2315 }
2316 else if (attrs.size_known_p)
5ef0b50d 2317 {
5f2cbd0d 2318 gcc_assert (!adjust_object);
5ef0b50d 2319 attrs.size -= offset;
5f2cbd0d
RS
2320 /* ??? The store_by_pieces machinery generates negative sizes,
2321 so don't assert for that here. */
5ef0b50d 2322 }
10b76d73 2323
f12144dd 2324 set_mem_attrs (new_rtx, &attrs);
738cc472 2325
60564289 2326 return new_rtx;
f1ec5147
RK
2327}
2328
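/* Callers normally reach adjust_address_1 through the adjust_address
   and adjust_address_nv convenience macros (the latter is used by
   operand_subword above).  A minimal sketch with a hypothetical
   example_ name, assuming MEM is a double-word memory reference being
   split into word-sized halves.  */

static void
example_split_mem (rtx mem, rtx *lo, rtx *hi)
{
  *lo = adjust_address (mem, word_mode, 0);
  *hi = adjust_address (mem, word_mode, UNITS_PER_WORD);
}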
630036c6
JJ
2329/* Return a memory reference like MEMREF, but with its mode changed
2330 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 2331 MEMREF offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
2332 nonzero, the memory address is forced to be valid. */
2333
2334rtx
ef4bddc2 2335adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
502b8322 2336 HOST_WIDE_INT offset, int validate)
630036c6 2337{
23b33725 2338 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
5f2cbd0d 2339 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
630036c6
JJ
2340}
2341
8ac61af7
RK
2342/* Return a memory reference like MEMREF, but whose address is changed by
2343 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2344 known to be in OFFSET (possibly 1). */
0d4903b8
RK
2345
2346rtx
502b8322 2347offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 2348{
60564289 2349 rtx new_rtx, addr = XEXP (memref, 0);
ef4bddc2 2350 machine_mode address_mode;
754c3d5d 2351 struct mem_attrs attrs, *defattrs;
e3c8ea67 2352
f12144dd 2353 attrs = *get_mem_attrs (memref);
372d6395 2354 address_mode = get_address_mode (memref);
d4ebfa65 2355 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67 2356
68252e27 2357 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 2358 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
2359
2360 However, if we did go and rearrange things, we can wind up not
2361 being able to recognize the magic around pic_offset_table_rtx.
2362 This stuff is fragile, and is yet another example of why it is
2363 bad to expose PIC machinery too early. */
f12144dd
RS
2364 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2365 attrs.addrspace)
e3c8ea67
RH
2366 && GET_CODE (addr) == PLUS
2367 && XEXP (addr, 0) == pic_offset_table_rtx)
2368 {
2369 addr = force_reg (GET_MODE (addr), addr);
d4ebfa65 2370 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67
RH
2371 }
2372
60564289 2373 update_temp_slot_address (XEXP (memref, 0), new_rtx);
23b33725 2374 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
0d4903b8 2375
fdb1c7b3 2376 /* If there are no changes, just return the original memory reference. */
60564289
KG
2377 if (new_rtx == memref)
2378 return new_rtx;
fdb1c7b3 2379
0d4903b8
RK
2380 /* Update the alignment to reflect the offset. Reset the offset, which
2381 we don't know. */
754c3d5d
RS
2382 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2383 attrs.offset_known_p = false;
2384 attrs.size_known_p = defattrs->size_known_p;
2385 attrs.size = defattrs->size;
f12144dd
RS
2386 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2387 set_mem_attrs (new_rtx, &attrs);
60564289 2388 return new_rtx;
0d4903b8 2389}
68252e27 2390
792760b9
RK
2391/* Return a memory reference like MEMREF, but with its address changed to
2392 ADDR. The caller is asserting that the actual piece of memory pointed
2393 to is the same, just the form of the address is being changed, such as
23b33725
RS
2394 by putting something into a register. INPLACE is true if any changes
2395 can be made directly to MEMREF or false if MEMREF must be treated as
2396 immutable. */
792760b9
RK
2397
2398rtx
23b33725 2399replace_equiv_address (rtx memref, rtx addr, bool inplace)
792760b9 2400{
738cc472
RK
2401 /* change_address_1 copies the memory attribute structure without change
2402 and that's exactly what we want here. */
40c0668b 2403 update_temp_slot_address (XEXP (memref, 0), addr);
23b33725 2404 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
792760b9 2405}
738cc472 2406
f1ec5147
RK
2407/* Likewise, but the reference is not required to be valid. */
2408
2409rtx
23b33725 2410replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
f1ec5147 2411{
23b33725 2412 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
f1ec5147 2413}
e7dfe4bb
RH
2414
2415/* Return a memory reference like MEMREF, but with its mode widened to
2416 MODE and offset by OFFSET. This would be used by targets that e.g.
2417 cannot issue QImode memory operations and have to use SImode memory
2418 operations plus masking logic. */
2419
2420rtx
ef4bddc2 2421widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2422{
5f2cbd0d 2423 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
f12144dd 2424 struct mem_attrs attrs;
e7dfe4bb
RH
2425 unsigned int size = GET_MODE_SIZE (mode);
2426
fdb1c7b3 2427 /* If there are no changes, just return the original memory reference. */
60564289
KG
2428 if (new_rtx == memref)
2429 return new_rtx;
fdb1c7b3 2430
f12144dd
RS
2431 attrs = *get_mem_attrs (new_rtx);
2432
e7dfe4bb
RH
2433 /* If we don't know what offset we were at within the expression, then
2434 we can't know if we've overstepped the bounds. */
754c3d5d 2435 if (! attrs.offset_known_p)
f12144dd 2436 attrs.expr = NULL_TREE;
e7dfe4bb 2437
f12144dd 2438 while (attrs.expr)
e7dfe4bb 2439 {
f12144dd 2440 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
e7dfe4bb 2441 {
f12144dd
RS
2442 tree field = TREE_OPERAND (attrs.expr, 1);
2443 tree offset = component_ref_field_offset (attrs.expr);
e7dfe4bb
RH
2444
2445 if (! DECL_SIZE_UNIT (field))
2446 {
f12144dd 2447 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2448 break;
2449 }
2450
2451 /* Is the field at least as large as the access? If so, ok,
2452 otherwise strip back to the containing structure. */
03667700
RK
2453 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2454 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
754c3d5d 2455 && attrs.offset >= 0)
e7dfe4bb
RH
2456 break;
2457
cc269bb6 2458 if (! tree_fits_uhwi_p (offset))
e7dfe4bb 2459 {
f12144dd 2460 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2461 break;
2462 }
2463
f12144dd 2464 attrs.expr = TREE_OPERAND (attrs.expr, 0);
ae7e9ddd
RS
2465 attrs.offset += tree_to_uhwi (offset);
2466 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
754c3d5d 2467 / BITS_PER_UNIT);
e7dfe4bb
RH
2468 }
2469 /* Similarly for the decl. */
f12144dd
RS
2470 else if (DECL_P (attrs.expr)
2471 && DECL_SIZE_UNIT (attrs.expr)
2472 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2473 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
754c3d5d 2474 && (! attrs.offset_known_p || attrs.offset >= 0))
e7dfe4bb
RH
2475 break;
2476 else
2477 {
2478 /* The widened memory access overflows the expression, which means
2479 that it could alias another expression. Zap it. */
f12144dd 2480 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2481 break;
2482 }
2483 }
2484
f12144dd 2485 if (! attrs.expr)
754c3d5d 2486 attrs.offset_known_p = false;
e7dfe4bb
RH
2487
2488 /* The widened memory may alias other stuff, so zap the alias set. */
2489 /* ??? Maybe use get_alias_set on any remaining expression. */
f12144dd 2490 attrs.alias = 0;
754c3d5d
RS
2491 attrs.size_known_p = true;
2492 attrs.size = size;
f12144dd 2493 set_mem_attrs (new_rtx, &attrs);
60564289 2494 return new_rtx;
e7dfe4bb 2495}
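/* A sketch of the scenario named above (illustrative only): a target
   without byte loads widens a QImode access to SImode and masks the
   result afterwards.  The caller must know the wider access stays
   within a valid object; otherwise MEM_EXPR is zapped as described.  */

static rtx
example_widen_byte (rtx byte_mem)
{
  return widen_memory_access (byte_mem, SImode, 0);
}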
23b2ce53 2496\f
f6129d66
RH
2497/* A fake decl that is used as the MEM_EXPR of spill slots. */
2498static GTY(()) tree spill_slot_decl;
2499
3d7e23f6
RH
2500tree
2501get_spill_slot_decl (bool force_build_p)
f6129d66
RH
2502{
2503 tree d = spill_slot_decl;
2504 rtx rd;
f12144dd 2505 struct mem_attrs attrs;
f6129d66 2506
3d7e23f6 2507 if (d || !force_build_p)
f6129d66
RH
2508 return d;
2509
c2255bc4
AH
2510 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2511 VAR_DECL, get_identifier ("%sfp"), void_type_node);
f6129d66
RH
2512 DECL_ARTIFICIAL (d) = 1;
2513 DECL_IGNORED_P (d) = 1;
2514 TREE_USED (d) = 1;
f6129d66
RH
2515 spill_slot_decl = d;
2516
2517 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2518 MEM_NOTRAP_P (rd) = 1;
f12144dd
RS
2519 attrs = *mode_mem_attrs[(int) BLKmode];
2520 attrs.alias = new_alias_set ();
2521 attrs.expr = d;
2522 set_mem_attrs (rd, &attrs);
f6129d66
RH
2523 SET_DECL_RTL (d, rd);
2524
2525 return d;
2526}
2527
2528/* Given MEM, a result from assign_stack_local, fill in the memory
2529 attributes as appropriate for a register allocator spill slot.
2530 These slots are not aliasable by other memory. We arrange for
2531 them all to use a single MEM_EXPR, so that the aliasing code can
2532 work properly in the case of shared spill slots. */
2533
2534void
2535set_mem_attrs_for_spill (rtx mem)
2536{
f12144dd
RS
2537 struct mem_attrs attrs;
2538 rtx addr;
f6129d66 2539
f12144dd
RS
2540 attrs = *get_mem_attrs (mem);
2541 attrs.expr = get_spill_slot_decl (true);
2542 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2543 attrs.addrspace = ADDR_SPACE_GENERIC;
f6129d66
RH
2544
2545 /* We expect the incoming memory to be of the form:
2546 (mem:MODE (plus (reg sfp) (const_int offset)))
2547 with perhaps the plus missing for offset = 0. */
2548 addr = XEXP (mem, 0);
754c3d5d
RS
2549 attrs.offset_known_p = true;
2550 attrs.offset = 0;
f6129d66 2551 if (GET_CODE (addr) == PLUS
481683e1 2552 && CONST_INT_P (XEXP (addr, 1)))
754c3d5d 2553 attrs.offset = INTVAL (XEXP (addr, 1));
f6129d66 2554
f12144dd 2555 set_mem_attrs (mem, &attrs);
f6129d66
RH
2556 MEM_NOTRAP_P (mem) = 1;
2557}
2558\f
23b2ce53
RS
2559/* Return a newly created CODE_LABEL rtx with a unique label number. */
2560
7dcc3ab5 2561rtx_code_label *
502b8322 2562gen_label_rtx (void)
23b2ce53 2563{
7dcc3ab5
DM
2564 return as_a <rtx_code_label *> (
2565 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2566 NULL, label_num++, NULL));
23b2ce53
RS
2567}
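/* Minimal usage sketch (not from this file; hypothetical example_
   name): the usual emit-time pattern of branching around a store
   through a fresh label.  */

static void
example_skip_store (rtx dest)
{
  rtx_code_label *over = gen_label_rtx ();
  emit_jump (over);			/* Branch around the store.  */
  emit_move_insn (dest, const0_rtx);
  emit_label (over);			/* Define the label here.  */
}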
2568\f
2569/* For procedure integration. */
2570
23b2ce53 2571/* Install new pointers to the first and last insns in the chain.
86fe05e0 2572 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2573 Used for an inline-procedure after copying the insn chain. */
2574
2575void
fee3e72c 2576set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
23b2ce53 2577{
fee3e72c 2578 rtx_insn *insn;
86fe05e0 2579
5936d944
JH
2580 set_first_insn (first);
2581 set_last_insn (last);
86fe05e0
RK
2582 cur_insn_uid = 0;
2583
b5b8b0ac
AO
2584 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2585 {
2586 int debug_count = 0;
2587
2588 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2589 cur_debug_insn_uid = 0;
2590
2591 for (insn = first; insn; insn = NEXT_INSN (insn))
2592 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2593 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2594 else
2595 {
2596 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2597 if (DEBUG_INSN_P (insn))
2598 debug_count++;
2599 }
2600
2601 if (debug_count)
2602 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2603 else
2604 cur_debug_insn_uid++;
2605 }
2606 else
2607 for (insn = first; insn; insn = NEXT_INSN (insn))
2608 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
86fe05e0
RK
2609
2610 cur_insn_uid++;
23b2ce53 2611}
23b2ce53 2612\f
750c9258 2613/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2614 structure. This routine should only be called once. */
23b2ce53 2615
fd743bc1 2616static void
6bb9bf63 2617unshare_all_rtl_1 (rtx_insn *insn)
23b2ce53 2618{
d1b81779 2619 /* Unshare just about everything else. */
2c07f13b 2620 unshare_all_rtl_in_chain (insn);
750c9258 2621
23b2ce53
RS
2622 /* Make sure the addresses of stack slots found outside the insn chain
2623 (such as, in DECL_RTL of a variable) are not shared
2624 with the insn chain.
2625
2626 This special care is necessary when the stack slot MEM does not
2627 actually appear in the insn chain. If it does appear, its address
2628 is unshared from all else at that point. */
8c39f8ae
TS
2629 unsigned int i;
2630 rtx temp;
2631 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2632 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
23b2ce53
RS
2633}
2634
750c9258 2635/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2636 structure, again. This is a fairly expensive thing to do so it
2637 should be done sparingly. */
2638
2639void
6bb9bf63 2640unshare_all_rtl_again (rtx_insn *insn)
d1b81779 2641{
6bb9bf63 2642 rtx_insn *p;
624c87aa
RE
2643 tree decl;
2644
d1b81779 2645 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2646 if (INSN_P (p))
d1b81779
GK
2647 {
2648 reset_used_flags (PATTERN (p));
2649 reset_used_flags (REG_NOTES (p));
776bebcd
JJ
2650 if (CALL_P (p))
2651 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
d1b81779 2652 }
624c87aa 2653
2d4aecb3 2654 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2655 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2656
624c87aa 2657 /* Make sure that virtual parameters are not shared. */
910ad8de 2658 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
5eb2a9f2 2659 set_used_flags (DECL_RTL (decl));
624c87aa 2660
8c39f8ae
TS
2661 rtx temp;
2662 unsigned int i;
2663 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2664 reset_used_flags (temp);
624c87aa 2665
b4aaa77b 2666 unshare_all_rtl_1 (insn);
fd743bc1
PB
2667}
2668
c2924966 2669unsigned int
fd743bc1
PB
2670unshare_all_rtl (void)
2671{
b4aaa77b 2672 unshare_all_rtl_1 (get_insns ());
c2924966 2673 return 0;
d1b81779
GK
2674}
2675
ef330312 2676
2c07f13b
JH
2677/* Check that ORIG is not marked when it should not be, and mark ORIG as in
2678 use. Recursively does the same for subexpressions. */
2679
2680static void
2681verify_rtx_sharing (rtx orig, rtx insn)
2682{
2683 rtx x = orig;
2684 int i;
2685 enum rtx_code code;
2686 const char *format_ptr;
2687
2688 if (x == 0)
2689 return;
2690
2691 code = GET_CODE (x);
2692
2693 /* These types may be freely shared. */
2694
2695 switch (code)
2696 {
2697 case REG:
0ca5af51
AO
2698 case DEBUG_EXPR:
2699 case VALUE:
d8116890 2700 CASE_CONST_ANY:
2c07f13b
JH
2701 case SYMBOL_REF:
2702 case LABEL_REF:
2703 case CODE_LABEL:
2704 case PC:
2705 case CC0:
3810076b 2706 case RETURN:
26898771 2707 case SIMPLE_RETURN:
2c07f13b 2708 case SCRATCH:
3e89ed8d 2709 /* SCRATCH must be shared because each one represents a distinct value. */
c5c5ba89 2710 return;
3e89ed8d 2711 case CLOBBER:
c5c5ba89
JH
2712 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2713 clobbers or clobbers of hard registers that originated as pseudos.
2714 This is needed to allow safe register renaming. */
2715 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2716 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
3e89ed8d
JH
2717 return;
2718 break;
2c07f13b
JH
2719
2720 case CONST:
6fb5fa3c 2721 if (shared_const_p (orig))
2c07f13b
JH
2722 return;
2723 break;
2724
2725 case MEM:
2726 /* A MEM is allowed to be shared if its address is constant. */
2727 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2728 || reload_completed || reload_in_progress)
2729 return;
2730
2731 break;
2732
2733 default:
2734 break;
2735 }
2736
2737 /* This rtx may not be shared. If it has already been seen,
2738 replace it with a copy of itself. */
b2b29377 2739 if (flag_checking && RTX_FLAG (x, used))
2c07f13b 2740 {
ab532386 2741 error ("invalid rtl sharing found in the insn");
2c07f13b 2742 debug_rtx (insn);
ab532386 2743 error ("shared rtx");
2c07f13b 2744 debug_rtx (x);
ab532386 2745 internal_error ("internal consistency failure");
2c07f13b 2746 }
1a2caa7a 2747 gcc_assert (!RTX_FLAG (x, used));
b8698a0f 2748
2c07f13b
JH
2749 RTX_FLAG (x, used) = 1;
2750
6614fd40 2751 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2752
2753 format_ptr = GET_RTX_FORMAT (code);
2754
2755 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2756 {
2757 switch (*format_ptr++)
2758 {
2759 case 'e':
2760 verify_rtx_sharing (XEXP (x, i), insn);
2761 break;
2762
2763 case 'E':
2764 if (XVEC (x, i) != NULL)
2765 {
2766 int j;
2767 int len = XVECLEN (x, i);
2768
2769 for (j = 0; j < len; j++)
2770 {
1a2caa7a
NS
2771 /* We allow sharing of ASM_OPERANDS inside single
2772 instruction. */
2c07f13b 2773 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2774 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2775 == ASM_OPERANDS))
2c07f13b
JH
2776 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2777 else
2778 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2779 }
2780 }
2781 break;
2782 }
2783 }
2784 return;
2785}
2786
0e0f87d4
SB
2787/* Reset used-flags for INSN. */
2788
2789static void
2790reset_insn_used_flags (rtx insn)
2791{
2792 gcc_assert (INSN_P (insn));
2793 reset_used_flags (PATTERN (insn));
2794 reset_used_flags (REG_NOTES (insn));
2795 if (CALL_P (insn))
2796 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2797}
2798
a24243a0 2799/* Go through all the RTL insn bodies and clear all the USED bits. */
2c07f13b 2800
a24243a0
AK
2801static void
2802reset_all_used_flags (void)
2c07f13b 2803{
dc01c3d1 2804 rtx_insn *p;
2c07f13b
JH
2805
2806 for (p = get_insns (); p; p = NEXT_INSN (p))
2807 if (INSN_P (p))
2808 {
0e0f87d4
SB
2809 rtx pat = PATTERN (p);
2810 if (GET_CODE (pat) != SEQUENCE)
2811 reset_insn_used_flags (p);
2812 else
2954a813 2813 {
0e0f87d4
SB
2814 gcc_assert (REG_NOTES (p) == NULL);
2815 for (int i = 0; i < XVECLEN (pat, 0); i++)
748e88da
JDA
2816 {
2817 rtx insn = XVECEXP (pat, 0, i);
2818 if (INSN_P (insn))
2819 reset_insn_used_flags (insn);
2820 }
2954a813 2821 }
2c07f13b 2822 }
a24243a0
AK
2823}
2824
0e0f87d4
SB
2825/* Verify sharing in INSN. */
2826
2827static void
2828verify_insn_sharing (rtx insn)
2829{
2830 gcc_assert (INSN_P (insn));
2831 reset_used_flags (PATTERN (insn));
2832 reset_used_flags (REG_NOTES (insn));
2833 if (CALL_P (insn))
2834 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2835}
2836
a24243a0
AK
2837/* Go through all the RTL insn bodies and check that there is no unexpected
2838 sharing in between the subexpressions. */
2839
2840DEBUG_FUNCTION void
2841verify_rtl_sharing (void)
2842{
dc01c3d1 2843 rtx_insn *p;
a24243a0
AK
2844
2845 timevar_push (TV_VERIFY_RTL_SHARING);
2846
2847 reset_all_used_flags ();
2c07f13b
JH
2848
2849 for (p = get_insns (); p; p = NEXT_INSN (p))
2850 if (INSN_P (p))
2851 {
0e0f87d4
SB
2852 rtx pat = PATTERN (p);
2853 if (GET_CODE (pat) != SEQUENCE)
2854 verify_insn_sharing (p);
2855 else
2856 for (int i = 0; i < XVECLEN (pat, 0); i++)
748e88da
JDA
2857 {
2858 rtx insn = XVECEXP (pat, 0, i);
2859 if (INSN_P (insn))
2860 verify_insn_sharing (insn);
2861 }
2c07f13b 2862 }
a222c01a 2863
a24243a0
AK
2864 reset_all_used_flags ();
2865
a222c01a 2866 timevar_pop (TV_VERIFY_RTL_SHARING);
2c07f13b
JH
2867}
2868
d1b81779
GK
2869/* Go through all the RTL insn bodies and copy any invalid shared structure.
2870 Assumes the mark bits are cleared at entry. */
2871
2c07f13b 2872void
dc01c3d1 2873unshare_all_rtl_in_chain (rtx_insn *insn)
d1b81779
GK
2874{
2875 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2876 if (INSN_P (insn))
d1b81779
GK
2877 {
2878 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2879 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
776bebcd
JJ
2880 if (CALL_P (insn))
2881 CALL_INSN_FUNCTION_USAGE (insn)
2882 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
d1b81779
GK
2883 }
2884}
2885
2d4aecb3 2886/* Go through all virtual stack slots of a function and mark them as
5eb2a9f2
RS
2887 shared. We never replace the DECL_RTLs themselves with a copy,
2888 but expressions mentioned into a DECL_RTL cannot be shared with
2889 expressions in the instruction stream.
2890
2891 Note that reload may convert pseudo registers into memories in-place.
2892 Pseudo registers are always shared, but MEMs never are. Thus if we
2893 reset the used flags on MEMs in the instruction stream, we must set
2894 them again on MEMs that appear in DECL_RTLs. */
2895
2d4aecb3 2896static void
5eb2a9f2 2897set_used_decls (tree blk)
2d4aecb3
AO
2898{
2899 tree t;
2900
2901 /* Mark decls. */
910ad8de 2902 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
19e7881c 2903 if (DECL_RTL_SET_P (t))
5eb2a9f2 2904 set_used_flags (DECL_RTL (t));
2d4aecb3
AO
2905
2906 /* Now process sub-blocks. */
87caf699 2907 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2908 set_used_decls (t);
2d4aecb3
AO
2909}
2910
23b2ce53 2911/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2912 Recursively does the same for subexpressions. Uses
2913 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2914
2915rtx
502b8322 2916copy_rtx_if_shared (rtx orig)
23b2ce53 2917{
32b32b16
AP
2918 copy_rtx_if_shared_1 (&orig);
2919 return orig;
2920}
2921
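/* The unsharing protocol in miniature (illustrative; hypothetical
   example_ name): USED bits must be cleared first, otherwise every
   subexpression looks shared and would be copied needlessly.  */

static void
example_unshare_pattern (rtx_insn *insn)
{
  reset_used_flags (PATTERN (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
}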
ff954f39
AP
2922/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2923 use. Recursively does the same for subexpressions. */
2924
32b32b16
AP
2925static void
2926copy_rtx_if_shared_1 (rtx *orig1)
2927{
2928 rtx x;
b3694847
SS
2929 int i;
2930 enum rtx_code code;
32b32b16 2931 rtx *last_ptr;
b3694847 2932 const char *format_ptr;
23b2ce53 2933 int copied = 0;
32b32b16
AP
2934 int length;
2935
2936 /* Repeat is used to turn tail-recursion into iteration. */
2937repeat:
2938 x = *orig1;
23b2ce53
RS
2939
2940 if (x == 0)
32b32b16 2941 return;
23b2ce53
RS
2942
2943 code = GET_CODE (x);
2944
2945 /* These types may be freely shared. */
2946
2947 switch (code)
2948 {
2949 case REG:
0ca5af51
AO
2950 case DEBUG_EXPR:
2951 case VALUE:
d8116890 2952 CASE_CONST_ANY:
23b2ce53 2953 case SYMBOL_REF:
2c07f13b 2954 case LABEL_REF:
23b2ce53
RS
2955 case CODE_LABEL:
2956 case PC:
2957 case CC0:
276e0224 2958 case RETURN:
26898771 2959 case SIMPLE_RETURN:
23b2ce53 2960 case SCRATCH:
0f41302f 2961 /* SCRATCH must be shared because each one represents a distinct value. */
32b32b16 2962 return;
3e89ed8d 2963 case CLOBBER:
c5c5ba89
JH
2964 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2965 clobbers or clobbers of hard registers that originated as pseudos.
2966 This is needed to allow safe register renaming. */
2967 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2968 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
3e89ed8d
JH
2969 return;
2970 break;
23b2ce53 2971
b851ea09 2972 case CONST:
6fb5fa3c 2973 if (shared_const_p (x))
32b32b16 2974 return;
b851ea09
RK
2975 break;
2976
b5b8b0ac 2977 case DEBUG_INSN:
23b2ce53
RS
2978 case INSN:
2979 case JUMP_INSN:
2980 case CALL_INSN:
2981 case NOTE:
23b2ce53
RS
2982 case BARRIER:
2983 /* The chain of insns is not being copied. */
32b32b16 2984 return;
23b2ce53 2985
e9a25f70
JL
2986 default:
2987 break;
23b2ce53
RS
2988 }
2989
2990 /* This rtx may not be shared. If it has already been seen,
2991 replace it with a copy of itself. */
2992
2adc7f12 2993 if (RTX_FLAG (x, used))
23b2ce53 2994 {
aacd3885 2995 x = shallow_copy_rtx (x);
23b2ce53
RS
2996 copied = 1;
2997 }
2adc7f12 2998 RTX_FLAG (x, used) = 1;
23b2ce53
RS
2999
3000 /* Now scan the subexpressions recursively.
3001 We can store any replaced subexpressions directly into X
3002 since we know X is not shared! Any vectors in X
3003 must be copied if X was copied. */
3004
3005 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
3006 length = GET_RTX_LENGTH (code);
3007 last_ptr = NULL;
b8698a0f 3008
32b32b16 3009 for (i = 0; i < length; i++)
23b2ce53
RS
3010 {
3011 switch (*format_ptr++)
3012 {
3013 case 'e':
32b32b16
AP
3014 if (last_ptr)
3015 copy_rtx_if_shared_1 (last_ptr);
3016 last_ptr = &XEXP (x, i);
23b2ce53
RS
3017 break;
3018
3019 case 'E':
3020 if (XVEC (x, i) != NULL)
3021 {
b3694847 3022 int j;
f0722107 3023 int len = XVECLEN (x, i);
b8698a0f 3024
6614fd40
KH
3025 /* Copy the vector iff I copied the rtx and the length
3026 is nonzero. */
f0722107 3027 if (copied && len > 0)
8f985ec4 3028 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
b8698a0f 3029
5d3cc252 3030 /* Recurse on everything inside the vector. */
f0722107 3031 for (j = 0; j < len; j++)
32b32b16
AP
3032 {
3033 if (last_ptr)
3034 copy_rtx_if_shared_1 (last_ptr);
3035 last_ptr = &XVECEXP (x, i, j);
3036 }
23b2ce53
RS
3037 }
3038 break;
3039 }
3040 }
32b32b16
AP
3041 *orig1 = x;
3042 if (last_ptr)
3043 {
3044 orig1 = last_ptr;
3045 goto repeat;
3046 }
3047 return;
23b2ce53
RS
3048}
3049
76369a82 3050/* Set the USED bit in X and its non-shareable subparts to FLAG. */
23b2ce53 3051
76369a82
NF
3052static void
3053mark_used_flags (rtx x, int flag)
23b2ce53 3054{
b3694847
SS
3055 int i, j;
3056 enum rtx_code code;
3057 const char *format_ptr;
32b32b16 3058 int length;
23b2ce53 3059
32b32b16
AP
3060 /* Repeat is used to turn tail-recursion into iteration. */
3061repeat:
23b2ce53
RS
3062 if (x == 0)
3063 return;
3064
3065 code = GET_CODE (x);
3066
9faa82d8 3067 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
3068 for them. */
3069
3070 switch (code)
3071 {
3072 case REG:
0ca5af51
AO
3073 case DEBUG_EXPR:
3074 case VALUE:
d8116890 3075 CASE_CONST_ANY:
23b2ce53
RS
3076 case SYMBOL_REF:
3077 case CODE_LABEL:
3078 case PC:
3079 case CC0:
276e0224 3080 case RETURN:
26898771 3081 case SIMPLE_RETURN:
23b2ce53
RS
3082 return;
3083
b5b8b0ac 3084 case DEBUG_INSN:
23b2ce53
RS
3085 case INSN:
3086 case JUMP_INSN:
3087 case CALL_INSN:
3088 case NOTE:
3089 case LABEL_REF:
3090 case BARRIER:
3091 /* The chain of insns is not being copied. */
3092 return;
750c9258 3093
e9a25f70
JL
3094 default:
3095 break;
23b2ce53
RS
3096 }
3097
76369a82 3098 RTX_FLAG (x, used) = flag;
23b2ce53
RS
3099
3100 format_ptr = GET_RTX_FORMAT (code);
32b32b16 3101 length = GET_RTX_LENGTH (code);
b8698a0f 3102
32b32b16 3103 for (i = 0; i < length; i++)
23b2ce53
RS
3104 {
3105 switch (*format_ptr++)
3106 {
3107 case 'e':
32b32b16
AP
3108 if (i == length-1)
3109 {
3110 x = XEXP (x, i);
3111 goto repeat;
3112 }
76369a82 3113 mark_used_flags (XEXP (x, i), flag);
23b2ce53
RS
3114 break;
3115
3116 case 'E':
3117 for (j = 0; j < XVECLEN (x, i); j++)
76369a82 3118 mark_used_flags (XVECEXP (x, i, j), flag);
23b2ce53
RS
3119 break;
3120 }
3121 }
3122}
2c07f13b 3123
76369a82 3124/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2c07f13b
JH
3125 to look for shared sub-parts. */
3126
3127void
76369a82 3128reset_used_flags (rtx x)
2c07f13b 3129{
76369a82
NF
3130 mark_used_flags (x, 0);
3131}
2c07f13b 3132
76369a82
NF
3133/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3134 to look for shared sub-parts. */
2c07f13b 3135
76369a82
NF
3136void
3137set_used_flags (rtx x)
3138{
3139 mark_used_flags (x, 1);
2c07f13b 3140}
23b2ce53
RS
3141\f
3142/* Copy X if necessary so that it won't be altered by changes in OTHER.
3143 Return X or the rtx for the pseudo reg the value of X was copied into.
3144 OTHER must be valid as a SET_DEST. */
3145
3146rtx
502b8322 3147make_safe_from (rtx x, rtx other)
23b2ce53
RS
3148{
3149 while (1)
3150 switch (GET_CODE (other))
3151 {
3152 case SUBREG:
3153 other = SUBREG_REG (other);
3154 break;
3155 case STRICT_LOW_PART:
3156 case SIGN_EXTEND:
3157 case ZERO_EXTEND:
3158 other = XEXP (other, 0);
3159 break;
3160 default:
3161 goto done;
3162 }
3163 done:
3c0cb5de 3164 if ((MEM_P (other)
23b2ce53 3165 && ! CONSTANT_P (x)
f8cfc6aa 3166 && !REG_P (x)
23b2ce53 3167 && GET_CODE (x) != SUBREG)
f8cfc6aa 3168 || (REG_P (other)
23b2ce53
RS
3169 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3170 || reg_mentioned_p (other, x))))
3171 {
3172 rtx temp = gen_reg_rtx (GET_MODE (x));
3173 emit_move_insn (temp, x);
3174 return temp;
3175 }
3176 return x;
3177}
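/* Illustrative call (hypothetical example_ name): before storing into
   DEST, protect a source value that mentions DEST so the store cannot
   clobber it mid-computation.  The result is SRC itself, or a fresh
   pseudo holding SRC's value.  */

static rtx
example_guard_source (rtx src, rtx dest)
{
  return make_safe_from (src, dest);
}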
3178\f
3179/* Emission of insns (adding them to the doubly-linked list). */
3180
23b2ce53
RS
3181/* Return the last insn emitted, even if it is in a sequence now pushed. */
3182
db76cf1e 3183rtx_insn *
502b8322 3184get_last_insn_anywhere (void)
23b2ce53 3185{
614d5bd8
AM
3186 struct sequence_stack *seq;
3187 for (seq = get_current_sequence (); seq; seq = seq->next)
3188 if (seq->last != 0)
3189 return seq->last;
23b2ce53
RS
3190 return 0;
3191}
3192
2a496e8b
JDA
3193/* Return the first nonnote insn emitted in the current sequence or current
3194 function. This routine looks inside SEQUENCEs. */
3195
e4685bc8 3196rtx_insn *
502b8322 3197get_first_nonnote_insn (void)
2a496e8b 3198{
dc01c3d1 3199 rtx_insn *insn = get_insns ();
91373fe8
JDA
3200
3201 if (insn)
3202 {
3203 if (NOTE_P (insn))
3204 for (insn = next_insn (insn);
3205 insn && NOTE_P (insn);
3206 insn = next_insn (insn))
3207 continue;
3208 else
3209 {
2ca202e7 3210 if (NONJUMP_INSN_P (insn)
91373fe8 3211 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3212 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
91373fe8
JDA
3213 }
3214 }
2a496e8b
JDA
3215
3216 return insn;
3217}
3218
3219/* Return the last nonnote insn emitted in the current sequence or current
3220 function. This routine looks inside SEQUENCEs. */
3221
e4685bc8 3222rtx_insn *
502b8322 3223get_last_nonnote_insn (void)
2a496e8b 3224{
dc01c3d1 3225 rtx_insn *insn = get_last_insn ();
91373fe8
JDA
3226
3227 if (insn)
3228 {
3229 if (NOTE_P (insn))
3230 for (insn = previous_insn (insn);
3231 insn && NOTE_P (insn);
3232 insn = previous_insn (insn))
3233 continue;
3234 else
3235 {
dc01c3d1
DM
3236 if (NONJUMP_INSN_P (insn))
3237 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3238 insn = seq->insn (seq->len () - 1);
91373fe8
JDA
3239 }
3240 }
2a496e8b
JDA
3241
3242 return insn;
3243}
3244
b5b8b0ac
AO
3245/* Return the number of actual (non-debug) insns emitted in this
3246 function. */
3247
3248int
3249get_max_insn_count (void)
3250{
3251 int n = cur_insn_uid;
3252
3253 /* The table size must be stable across -g, to avoid codegen
3254 differences due to debug insns, and not be affected by
3255 -fmin-insn-uid, to avoid excessive table size and to simplify
3256 debugging of -fcompare-debug failures. */
3257 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3258 n -= cur_debug_insn_uid;
3259 else
3260 n -= MIN_NONDEBUG_INSN_UID;
3261
3262 return n;
3263}
3264
\f
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	  insn = seq->insn (seq->len () - 1);
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn_bb (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }

  return insn;
}

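/* Illustrative sketch, not part of emit-rtl.c: the walkers above are the
   usual way to look at an insn's real neighbors while ignoring notes and
   debug insns.  "reg_set_by_real_predecessor_p" is a hypothetical helper.  */

static bool
reg_set_by_real_predecessor_p (rtx_insn *insn, rtx reg)
{
  rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
  return prev && INSN_P (prev) && reg_set_p (reg, prev);
}
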
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}

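/* Illustrative sketch, not part of emit-rtl.c: counting the real insns
   of the current function by stepping with next_real_insn.  */

static int
count_real_insns (void)
{
  int count = 0;
  for (rtx_insn *insn = get_insns (); insn; insn = next_real_insn (insn))
    if (INSN_P (insn))
      count++;
  return count;
}
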
/* Return nonzero if INSN really does something.  Calls, jumps and jump
   table data always qualify; a NONJUMP_INSN qualifies unless, after
   reload, its pattern is a standalone USE or CLOBBER.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
	  || (NONJUMP_INSN_P (insn)
	      && (! reload_completed
		  || (GET_CODE (PATTERN (insn)) != USE
		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
next_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
prev_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}
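
/* Illustrative sketch, not part of emit-rtl.c: after reload, USE and
   CLOBBER insns carry no machine code, so a peephole-style scan steps
   with next_active_insn.  "next_two_active_insns" is a hypothetical
   helper.  */

static bool
next_two_active_insns (rtx_insn *insn, rtx_insn **first, rtx_insn **second)
{
  *first = next_active_insn (insn);
  *second = *first ? next_active_insn (*first) : NULL;
  return *second != NULL;
}
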
\f
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}

/* Return true if X contains a RTX_AUTOINC class rtx whose operand
   matches REG.  */

static int
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
	  && rtx_equal_p (reg, XEXP (x, 0)))
	return true;
    }
  return false;
}

/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
    LABEL_NUSES (LABEL_REF_LABEL (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}
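
/* Illustrative sketch, not part of emit-rtl.c: find_auto_inc above shows
   the standard FOR_EACH_SUBRTX walk, and the same shape answers most
   "does X contain ..." questions.  "contains_mem_p" is a hypothetical
   helper.  */

static bool
contains_mem_p (const_rtx x)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (MEM_P (*iter))
      return true;
  return false;
}
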

\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before = PREV_INSN (trial);
  rtx_insn *after = NEXT_INSN (trial);
  rtx note;
  rtx_insn *seq, *tem;
  int probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = XINT (note, 0);
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  if (JUMP_P (trial))
	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability != -1
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_int_reg_note (insn, REG_BR_PROB, probability);
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx_insn *next;
	    rtx *p;

	    gcc_assert (call_insn == NULL_RTX);
	    call_insn = insn;

	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
	       target may have explicitly specified.  */
	    p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);

	    /* If the old call was a sibling call, the new one must
	       be too.  */
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

	    /* If the new call is the last instruction in the sequence,
	       it will effectively replace the old call in-situ.  Otherwise
	       we must move any following NOTE_INSN_CALL_ARG_LOCATION note
	       so that it comes immediately after the new call.  */
	    if (NEXT_INSN (insn))
	      for (next = NEXT_INSN (trial);
		   next && NOTE_P (next);
		   next = NEXT_INSN (next))
		if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
		  {
		    remove_insn (next);
		    add_insn_after (next, insn, NULL);
		    break;
		  }
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	case REG_TM:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_INC:
	  if (!AUTO_INC_DEC)
	    break;

	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && find_auto_inc (PATTERN (insn), reg))
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;

	case REG_ARGS_SIZE:
	  fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
	  break;

	case REG_CALL_DECL:
	  gcc_assert (call_insn != NULL_RTX);
	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
	 ? (after ? PREV_INSN (after) : get_last_insn ())
	 : NEXT_INSN (before);
}
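
/* Illustrative sketch, not part of emit-rtl.c: roughly how a pass such
   as split_all_insns drives try_split over a whole function, under the
   simplifying assumption that every insn may be offered for splitting.  */

static void
split_every_insn (void)
{
  rtx_insn *insn, *next;
  for (insn = get_insns (); insn; insn = next)
    {
      /* Capture the successor first; try_split may delete INSN.  */
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	/* try_split returns TRIAL unchanged when no splitter applies.  */
	try_split (PATTERN (insn), insn, 0);
    }
}
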
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
\f
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P
   objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be
   NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
	}
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = insn;
	}
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (after == seq->last)
	  {
	    seq->last = insn;
	    break;
	  }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (before == seq->first)
	  {
	    seq->first = insn;
	    break;
	  }

      gcc_assert (seq);
    }
}

/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}

/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}


/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. it does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain so that it can be
   re-emitted elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here would break many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->first)
	  {
	    seq->first = next;
	    break;
	  }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = prev;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->last)
	  {
	    seq->last = prev;
	    break;
	  }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}

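/* Illustrative sketch, not part of emit-rtl.c: detaching an insn and
   re-inserting it elsewhere, following the contract described above.
   "move_insn_after" is a hypothetical helper.  */

static void
move_insn_after (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);
  /* remove_insn leaves PREV_INSN/NEXT_INSN intact; clear them before
     relinking, as the comment above requires.  */
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, after, NULL);
}
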
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}

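/* Illustrative sketch, not part of emit-rtl.c: delete_insns_since is the
   usual way to roll back speculatively emitted code, e.g. in an expander
   that may discover mid-way that its strategy fails.  The try_emit
   callback is a hypothetical parameter.  */

static bool
emit_with_rollback (bool (*try_emit) (rtx, rtx), rtx target, rtx source)
{
  rtx_insn *last = get_last_insn ();
  if (try_emit (target, source))
    return true;
  /* Throw away everything emitted since LAST.  */
  delete_insns_since (last);
  return false;
}
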
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  if (flag_checking)
    {
      for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
	gcc_assert (after != x);
      gcc_assert (after != to);
    }

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}

\f
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn_before (insn, before, bb);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}

/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
					   make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_code_label *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_code_label *> (label);
}
\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
			  rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}

/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}


/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_jump_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
\f
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	return true;

      /* Notes for var tracking and EH region markers can appear between or
	 inside basic blocks.  If the caller is emitting on the basic block
	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_CALL_ARG_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
	return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
	return false;
    }
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx_insn *after)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
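
/* Illustrative sketch, not part of emit-rtl.c: bracketing a range of
   insns with EH region notes.  When FIRST is a block head, the helper
   above keeps BLOCK_FOR_INSN clear on the new notes.  "REGION_NR" is a
   hypothetical region number.  */

static void
bracket_with_eh_region (rtx_insn *first, rtx_insn *last, int region_nr)
{
  rtx_note *beg = emit_note_before (NOTE_INSN_EH_REGION_BEG, first);
  rtx_note *end = emit_note_after (NOTE_INSN_EH_REGION_END, last);
  NOTE_EH_HANDLER (beg) = region_nr;
  NOTE_EH_HANDLER (end) = region_nr;
}
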
\f
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
	  && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_jump_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_jump_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}

/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
			    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL_RTX,
					      NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first)
	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
	  && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL_RTX,
				      NULL, make_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_jump_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before_setloc (pattern, before, loc, false,
				    make_jump_insn_raw));
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_jump_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before (pattern, before, true, false,
			     make_jump_insn_raw));
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_debug_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
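
/* Illustrative sketch, not part of emit-rtl.c: inserting a compensation
   insn while keeping the debug location of the insn it sits next to.
   "compensation_pattern" is a hypothetical rtx.  */

static void
insert_compensation_before (rtx compensation_pattern, rtx_insn *where)
{
  /* emit_insn_before would infer the location from WHERE; the _setloc
     variant makes the choice explicit.  */
  emit_insn_before_setloc (compensation_pattern, where,
			   INSN_LOCATION (where));
}
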
\f
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

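/* Illustrative sketch, not part of emit-rtl.c: the classic way to build
   a small insn stream, e.g. inside an expander before reload.
   gen_reg_rtx, gen_rtx_SET and plus_constant are the usual emit-time
   helpers; "emit_add_three" is a hypothetical example.  */

static rtx
emit_add_three (rtx op, machine_mode mode)
{
  rtx tmp = gen_reg_rtx (mode);
  /* A single SET pattern; emit_insn wraps it in a fresh INSN.  */
  emit_insn (gen_rtx_SET (tmp, plus_constant (mode, op, 3)));
  return tmp;
}
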
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}

/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

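/* A usage sketch (illustrative only, not part of the original file):
   an unconditional jump is normally followed by a barrier so that the
   insn chain records that control cannot fall through.  Assuming a
   target that provides the standard "jump" named pattern via gen_jump:

     rtx_code_label *over = gen_label_rtx ();
     emit_jump_insn (gen_jump (over));
     emit_barrier ();
     // ... insns that are skipped ...
     emit_label (over);
*/
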
/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

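/* Usage sketch (illustrative only): emit_use keeps a value artificially
   live up to a point, while emit_clobber tells the optimizers that the
   value is overwritten there.  For some pseudo REG one might write:

     rtx reg = gen_reg_rtx (word_mode);
     // ... code that sets and uses REG ...
     emit_use (reg);      // REG is considered live up to here
     emit_clobber (reg);  // REG's previous value is dead past here
*/
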
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
      if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}

/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
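
/* Usage sketch (illustrative only): after expanding a computation into
   several insns, expanders often attach a REG_EQUAL note to the final
   move so later passes know the overall value being computed:

     rtx_insn *last = emit_move_insn (target, temp);
     set_unique_reg_note (last, REG_EQUAL, gen_rtx_MULT (mode, a, b));

   TARGET, TEMP, MODE, A and B stand for whatever operands the expander
   is working with.  */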

/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
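
/* Illustrative sketch: because emit dispatches on classify_insn, a caller
   can hand it an arbitrary pattern without knowing its kind up front:

     emit (gen_rtx_SET (reg, const0_rtx), true);

   classifies the SET as an ordinary INSN and routes it to emit_insn;
   REG is a placeholder operand here.  */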

/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

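/* Illustrative sketch: push_topmost_sequence and pop_topmost_sequence let
   code that is currently emitting into a nested sequence append insns to
   the function's outer-level chain instead:

     push_topmost_sequence ();
     emit_insn (pat);   // goes onto the outer-level chain
     pop_topmost_sequence ();

   PAT stands for whatever pattern the caller wants emitted.  */
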
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
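
/* Canonical usage sketch for the sequence stack (illustrative only):

     start_sequence ();
     emit_move_insn (dst, src);        // emit into the detached chain
     rtx_insn *seq = get_insns ();     // grab it before end_sequence
     end_sequence ();
     emit_insn_before (seq, where);    // splice it where it is needed

   DST, SRC and WHERE are placeholders for the caller's operands.  */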

/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}


/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
	 pseudo reg clobbers or clobbers of hard registers that originated
	 as pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}

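/* Usage sketch (illustrative only): copy_insn is the right entry point for
   duplicating a pattern that may contain SCRATCHes or inline-asm operand
   vectors, e.g. when re-emitting an existing insn elsewhere:

     rtx_insn *new_insn
       = emit_insn_after (copy_insn (PATTERN (old_insn)), after);

   Unlike copy_rtx, it keeps multiple references to one SCRATCH or to one
   asm operand vector consistent within the copy, as described above.  */
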
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */
rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
	return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

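/* Illustrative sketch: the canonicalization above means that building an
   all-zeros vector yields the shared CONST0_RTX object, so pointer
   equality can test for the standard constants (assuming the target
   supports V4SImode):

     rtvec v = rtvec_alloc (4);
     for (int i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     rtx x = gen_rtx_CONST_VECTOR (V4SImode, v);
     // now x == CONST0_RTX (V4SImode)
*/
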
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
      const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   CODE_FOR_nothing,
				   /*reg_notes=*/NULL_RTX);
}

/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

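/* Usage sketch (illustrative only): callers such as basic-block
   duplication code use this to clone an insn while preserving its notes
   and flags:

     rtx_insn *copy = emit_copy_of_insn_after (insn, tail);

   which places the duplicate immediately after TAIL; INSN and TAIL are
   placeholders for the caller's instructions.  */
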
/* Cache of CLOBBER rtxes for the hard registers, indexed by mode and
   register number, so that repeated requests can share one object.  */
static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

/* Locations used for the prologue and epilogue of the function.  */
location_t prologue_location;
location_t epilogue_location;

/* Hold the current location and the last location, so that the
   data structures are built lazily, only when instructions at a given
   location are actually needed.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
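
/* Illustrative sketch: a target expander for an atomic store might use
   this helper to decide where explicit fences are required:

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier ());   // pre-op (release-style) fence
     emit_move_insn (mem, val);
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier ());   // post-op (acquire-style) fence

   gen_memory_barrier stands for whatever barrier pattern the target
   provides; MEM, VAL and MODEL are the expander's operands.  */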

#include "gt-emit-rtl.h"