1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010
5 Free Software Foundation, Inc.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 /* Middle-to-low level generation of rtx code and insns.
25
26 This file contains support functions for creating rtl expressions
27 and manipulating them in the doubly-linked chain of insns.
28
29 The patterns of the insns are created by machine-dependent
30 routines in insn-emit.c, which is generated automatically from
31 the machine description. These routines make the individual rtx's
32 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
33 which are automatically generated from rtl.def; what is machine
34 dependent is the kind of rtx's they make and what arguments they
35 use. */
36
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "toplev.h"
42 #include "rtl.h"
43 #include "tree.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "function.h"
47 #include "expr.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "hashtab.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "real.h"
54 #include "fixed-value.h"
55 #include "bitmap.h"
56 #include "basic-block.h"
57 #include "ggc.h"
58 #include "debug.h"
59 #include "langhooks.h"
60 #include "tree-pass.h"
61 #include "df.h"
62 #include "params.h"
63 #include "target.h"
64
65 /* Commonly used modes. */
66
67 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
68 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
69 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
70 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
71
72 /* Datastructures maintained for currently processed function in RTL form. */
73
74 struct rtl_data x_rtl;
75
76 /* Indexed by pseudo register number, gives the rtx for that pseudo.
77 Allocated in parallel with regno_pointer_align.
78 FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
79 with a length attribute nested in top-level structures. */
80
81 rtx * regno_reg_rtx;
82
83 /* This is *not* reset after each function. It gives each CODE_LABEL
84 in the entire compilation a unique label number. */
85
86 static GTY(()) int label_num = 1;
87
88 /* Commonly used rtx's, so that we only need space for one copy.
89 These are initialized once for the entire compilation.
90 All of these are unique; no other rtx-object will be equal to any
91 of these. */
92
93 rtx global_rtl[GR_MAX];
94
95 /* Commonly used RTL for hard registers. These objects are not necessarily
96 unique, so we allocate them separately from global_rtl. They are
97 initialized once per compilation unit, then copied into regno_reg_rtx
98 at the beginning of each function. */
99 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
100
101 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
102 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
103 record a copy of const[012]_rtx. */
104
105 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
106
107 rtx const_true_rtx;
108
109 REAL_VALUE_TYPE dconst0;
110 REAL_VALUE_TYPE dconst1;
111 REAL_VALUE_TYPE dconst2;
112 REAL_VALUE_TYPE dconstm1;
113 REAL_VALUE_TYPE dconsthalf;
114
115 /* Record fixed-point constant 0 and 1. */
116 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
117 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
118
119 /* All references to the following fixed hard registers go through
120 these unique rtl objects. On machines where the frame-pointer and
121 arg-pointer are the same register, they use the same unique object.
122
123 After register allocation, other rtl objects which used to be pseudo-regs
124 may be clobbered to refer to the frame-pointer register.
125 But references that were originally to the frame-pointer can be
126 distinguished from the others because they contain frame_pointer_rtx.
127
128 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
129 tricky: until register elimination has taken place hard_frame_pointer_rtx
130 should be used if it is being set, and frame_pointer_rtx otherwise. After
131 register elimination hard_frame_pointer_rtx should always be used.
132 On machines where the two registers are the same (most machines), these
133 are the same rtx.
134
135 In an inline procedure, the stack and frame pointer rtxs may not be
136 used for anything else. */
137 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
138
139 /* This is used to implement __builtin_return_address for some machines.
140 See for instance the MIPS port. */
141 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
142
143 /* We make one copy of (const_int C) where C is in
144 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
145 to save space during the compilation and simplify comparisons of
146 integers. */
147
148 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
149
150 /* A hash table storing CONST_INTs whose absolute value is greater
151 than MAX_SAVED_CONST_INT. */
152
153 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
154 htab_t const_int_htab;
155
156 /* A hash table storing memory attribute structures. */
157 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
158 htab_t mem_attrs_htab;
159
160 /* A hash table storing register attribute structures. */
161 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
162 htab_t reg_attrs_htab;
163
164 /* A hash table storing all CONST_DOUBLEs. */
165 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
166 htab_t const_double_htab;
167
168 /* A hash table storing all CONST_FIXEDs. */
169 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
170 htab_t const_fixed_htab;
171
172 #define first_insn (crtl->emit.x_first_insn)
173 #define last_insn (crtl->emit.x_last_insn)
174 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
175 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
176 #define last_location (crtl->emit.x_last_location)
177 #define first_label_num (crtl->emit.x_first_label_num)
178
179 static rtx make_call_insn_raw (rtx);
180 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
181 static void set_used_decls (tree);
182 static void mark_label_nuses (rtx);
183 static hashval_t const_int_htab_hash (const void *);
184 static int const_int_htab_eq (const void *, const void *);
185 static hashval_t const_double_htab_hash (const void *);
186 static int const_double_htab_eq (const void *, const void *);
187 static rtx lookup_const_double (rtx);
188 static hashval_t const_fixed_htab_hash (const void *);
189 static int const_fixed_htab_eq (const void *, const void *);
190 static rtx lookup_const_fixed (rtx);
191 static hashval_t mem_attrs_htab_hash (const void *);
192 static int mem_attrs_htab_eq (const void *, const void *);
193 static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
194 addr_space_t, enum machine_mode);
195 static hashval_t reg_attrs_htab_hash (const void *);
196 static int reg_attrs_htab_eq (const void *, const void *);
197 static reg_attrs *get_reg_attrs (tree, int);
198 static rtx gen_const_vector (enum machine_mode, int);
199 static void copy_rtx_if_shared_1 (rtx *orig);
200
201 /* Probability of the conditional branch currently being processed by try_split.
202 Set to -1 otherwise. */
203 int split_branch_probability = -1;
204 \f
205 /* Returns a hash code for X (which is really a CONST_INT). */
206
207 static hashval_t
208 const_int_htab_hash (const void *x)
209 {
210 return (hashval_t) INTVAL ((const_rtx) x);
211 }
212
213 /* Returns nonzero if the value represented by X (which is really a
214 CONST_INT) is the same as that given by Y (which is really a
215 HOST_WIDE_INT *). */
216
217 static int
218 const_int_htab_eq (const void *x, const void *y)
219 {
220 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
221 }
222
223 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
224 static hashval_t
225 const_double_htab_hash (const void *x)
226 {
227 const_rtx const value = (const_rtx) x;
228 hashval_t h;
229
230 if (GET_MODE (value) == VOIDmode)
231 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
232 else
233 {
234 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
235 /* MODE is used in the comparison, so it should be in the hash. */
236 h ^= GET_MODE (value);
237 }
238 return h;
239 }
240
241 /* Returns nonzero if the value represented by X (really a ...)
242 is the same as that represented by Y (really a ...) */
243 static int
244 const_double_htab_eq (const void *x, const void *y)
245 {
246 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
247
248 if (GET_MODE (a) != GET_MODE (b))
249 return 0;
250 if (GET_MODE (a) == VOIDmode)
251 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
252 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
253 else
254 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
255 CONST_DOUBLE_REAL_VALUE (b));
256 }
257
258 /* Returns a hash code for X (which is really a CONST_FIXED). */
259
260 static hashval_t
261 const_fixed_htab_hash (const void *x)
262 {
263 const_rtx const value = (const_rtx) x;
264 hashval_t h;
265
266 h = fixed_hash (CONST_FIXED_VALUE (value));
267 /* MODE is used in the comparison, so it should be in the hash. */
268 h ^= GET_MODE (value);
269 return h;
270 }
271
272 /* Returns nonzero if the value represented by X (really a ...)
273 is the same as that represented by Y (really a ...). */
274
275 static int
276 const_fixed_htab_eq (const void *x, const void *y)
277 {
278 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
279
280 if (GET_MODE (a) != GET_MODE (b))
281 return 0;
282 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
283 }
284
285 /* Returns a hash code for X (which is really a mem_attrs *). */
286
287 static hashval_t
288 mem_attrs_htab_hash (const void *x)
289 {
290 const mem_attrs *const p = (const mem_attrs *) x;
291
292 return (p->alias ^ (p->align * 1000)
293 ^ (p->addrspace * 4000)
294 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
295 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
296 ^ (size_t) iterative_hash_expr (p->expr, 0));
297 }
298
299 /* Returns nonzero if the value represented by X (which is really a
300 mem_attrs *) is the same as that given by Y (which is also really a
301 mem_attrs *). */
302
303 static int
304 mem_attrs_htab_eq (const void *x, const void *y)
305 {
306 const mem_attrs *const p = (const mem_attrs *) x;
307 const mem_attrs *const q = (const mem_attrs *) y;
308
309 return (p->alias == q->alias && p->offset == q->offset
310 && p->size == q->size && p->align == q->align
311 && p->addrspace == q->addrspace
312 && (p->expr == q->expr
313 || (p->expr != NULL_TREE && q->expr != NULL_TREE
314 && operand_equal_p (p->expr, q->expr, 0))));
315 }
316
317 /* Allocate a new mem_attrs structure and insert it into the hash table if
318 one identical to it is not already in the table. We are doing this for
319 MEM of mode MODE. */
320
321 static mem_attrs *
322 get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
323 unsigned int align, addr_space_t addrspace, enum machine_mode mode)
324 {
325 mem_attrs attrs;
326 void **slot;
327
328 /* If everything is the default, we can just return zero.
329 This must match what the corresponding MEM_* macros return when the
330 field is not present. */
331 if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
332 && (size == 0
333 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
334 && (STRICT_ALIGNMENT && mode != BLKmode
335 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
336 return 0;
337
338 attrs.alias = alias;
339 attrs.expr = expr;
340 attrs.offset = offset;
341 attrs.size = size;
342 attrs.align = align;
343 attrs.addrspace = addrspace;
344
345 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
346 if (*slot == 0)
347 {
348 *slot = ggc_alloc (sizeof (mem_attrs));
349 memcpy (*slot, &attrs, sizeof (mem_attrs));
350 }
351
352 return (mem_attrs *) *slot;
353 }
354
355 /* Returns a hash code for X (which is really a reg_attrs *). */
356
357 static hashval_t
358 reg_attrs_htab_hash (const void *x)
359 {
360 const reg_attrs *const p = (const reg_attrs *) x;
361
362 return ((p->offset * 1000) ^ (long) p->decl);
363 }
364
365 /* Returns nonzero if the value represented by X (which is really a
366 reg_attrs *) is the same as that given by Y (which is also really a
367 reg_attrs *). */
368
369 static int
370 reg_attrs_htab_eq (const void *x, const void *y)
371 {
372 const reg_attrs *const p = (const reg_attrs *) x;
373 const reg_attrs *const q = (const reg_attrs *) y;
374
375 return (p->decl == q->decl && p->offset == q->offset);
376 }
377 /* Allocate a new reg_attrs structure and insert it into the hash table if
378 one identical to it is not already in the table. We are doing this for
379 a REG with decl DECL and offset OFFSET. */
380
381 static reg_attrs *
382 get_reg_attrs (tree decl, int offset)
383 {
384 reg_attrs attrs;
385 void **slot;
386
387 /* If everything is the default, we can just return zero. */
388 if (decl == 0 && offset == 0)
389 return 0;
390
391 attrs.decl = decl;
392 attrs.offset = offset;
393
394 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
395 if (*slot == 0)
396 {
397 *slot = ggc_alloc (sizeof (reg_attrs));
398 memcpy (*slot, &attrs, sizeof (reg_attrs));
399 }
400
401 return (reg_attrs *) *slot;
402 }
403
404
405 #if !HAVE_blockage
406 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
407 across this insn. */
408
409 rtx
410 gen_blockage (void)
411 {
412 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
413 MEM_VOLATILE_P (x) = true;
414 return x;
415 }
416 #endif
417
418
419 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
420 don't attempt to share with the various global pieces of rtl (such as
421 frame_pointer_rtx). */
422
423 rtx
424 gen_raw_REG (enum machine_mode mode, int regno)
425 {
426 rtx x = gen_rtx_raw_REG (mode, regno);
427 ORIGINAL_REGNO (x) = regno;
428 return x;
429 }
430
431 /* There are some RTL codes that require special attention; the generation
432 functions do the raw handling. If you add to this list, modify
433 special_rtx in gengenrtl.c as well. */
434
435 rtx
436 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
437 {
438 void **slot;
439
440 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
441 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
442
443 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
444 if (const_true_rtx && arg == STORE_FLAG_VALUE)
445 return const_true_rtx;
446 #endif
447
448 /* Look up the CONST_INT in the hash table. */
449 slot = htab_find_slot_with_hash (const_int_htab, &arg,
450 (hashval_t) arg, INSERT);
451 if (*slot == 0)
452 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
453
454 return (rtx) *slot;
455 }
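/* Illustrative sketch (not part of the original code): how the sharing
   above looks to a caller.  Values in the saved range come from the
   preallocated const_int_rtx[] array, larger ones are hash-consed, so
   CONST_INTs can be compared by pointer.  The variables are hypothetical. */
#if 0
rtx a = GEN_INT (0);                      /* same object as const0_rtx */
rtx b = gen_rtx_CONST_INT (VOIDmode, 0);  /* GEN_INT expands to this   */
gcc_assert (a == b && a == const0_rtx);

rtx c = GEN_INT (123456);                 /* outside the saved range...   */
rtx d = GEN_INT (123456);                 /* ...but still a shared object */
gcc_assert (c == d);
#endif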
456
457 rtx
458 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
459 {
460 return GEN_INT (trunc_int_for_mode (c, mode));
461 }
462
463 /* CONST_DOUBLEs might be created from pairs of integers, or from
464 REAL_VALUE_TYPEs. Also, their length is known only at run time,
465 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
466
467 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
468 hash table. If so, return its counterpart; otherwise add it
469 to the hash table and return it. */
470 static rtx
471 lookup_const_double (rtx real)
472 {
473 void **slot = htab_find_slot (const_double_htab, real, INSERT);
474 if (*slot == 0)
475 *slot = real;
476
477 return (rtx) *slot;
478 }
479
480 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
481 VALUE in mode MODE. */
482 rtx
483 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
484 {
485 rtx real = rtx_alloc (CONST_DOUBLE);
486 PUT_MODE (real, mode);
487
488 real->u.rv = value;
489
490 return lookup_const_double (real);
491 }
492
493 /* Determine whether FIXED, a CONST_FIXED, already exists in the
494 hash table. If so, return its counterpart; otherwise add it
495 to the hash table and return it. */
496
497 static rtx
498 lookup_const_fixed (rtx fixed)
499 {
500 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
501 if (*slot == 0)
502 *slot = fixed;
503
504 return (rtx) *slot;
505 }
506
507 /* Return a CONST_FIXED rtx for a fixed-point value specified by
508 VALUE in mode MODE. */
509
510 rtx
511 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
512 {
513 rtx fixed = rtx_alloc (CONST_FIXED);
514 PUT_MODE (fixed, mode);
515
516 fixed->u.fv = value;
517
518 return lookup_const_fixed (fixed);
519 }
520
521 /* Return a CONST_DOUBLE or CONST_INT for a value specified as
522 a double_int. */
523
524 rtx
525 immed_double_int_const (double_int i, enum machine_mode mode)
526 {
527 return immed_double_const (i.low, i.high, mode);
528 }
529
530 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
531 of ints: I0 is the low-order word and I1 is the high-order word.
532 Do not use this routine for non-integer modes; convert to
533 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
534
535 rtx
536 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
537 {
538 rtx value;
539 unsigned int i;
540
541 /* There are the following cases (note that there are no modes with
542 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
543
544 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
545 gen_int_mode.
546 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
547 the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
548 of copies of the sign bit, and the signs of i0 and i1 are the same), then
549 we return a CONST_INT for i0.
550 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
551 if (mode != VOIDmode)
552 {
553 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
554 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
555 /* We can get a 0 for an error mark. */
556 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
557 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
558
559 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
560 return gen_int_mode (i0, mode);
561
562 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
563 }
564
565 /* If this integer fits in one word, return a CONST_INT. */
566 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
567 return GEN_INT (i0);
568
569 /* We use VOIDmode for integers. */
570 value = rtx_alloc (CONST_DOUBLE);
571 PUT_MODE (value, VOIDmode);
572
573 CONST_DOUBLE_LOW (value) = i0;
574 CONST_DOUBLE_HIGH (value) = i1;
575
576 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
577 XWINT (value, i) = 0;
578
579 return lookup_const_double (value);
580 }
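/* Illustrative sketch (not part of the original code): the "fits in one
   word" test used above, written as a stand-alone predicate.  I1 is the
   high-order word; it must consist entirely of copies of I0's sign bit
   for the pair to be representable as a single CONST_INT. */
#if 0
static int
double_fits_single_word_p (HOST_WIDE_INT i0, HOST_WIDE_INT i1)
{
  return ((i1 == 0 && i0 >= 0)       /* non-negative: high word all zeros */
	  || (i1 == -1 && i0 < 0));  /* negative: high word all ones      */
}
#endif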
581
582 rtx
583 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
584 {
585 /* In case the MD file explicitly references the frame pointer, have
586 all such references point to the same frame pointer. This is
587 used during frame pointer elimination to distinguish the explicit
588 references to these registers from pseudos that happened to be
589 assigned to them.
590
591 If we have eliminated the frame pointer or arg pointer, we will
592 be using it as a normal register, for example as a spill
593 register. In such cases, we might be accessing it in a mode that
594 is not Pmode and therefore cannot use the pre-allocated rtx.
595
596 Also don't do this when we are making new REGs in reload, since
597 we don't want to get confused with the real pointers. */
598
599 if (mode == Pmode && !reload_in_progress)
600 {
601 if (regno == FRAME_POINTER_REGNUM
602 && (!reload_completed || frame_pointer_needed))
603 return frame_pointer_rtx;
604 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
605 if (regno == HARD_FRAME_POINTER_REGNUM
606 && (!reload_completed || frame_pointer_needed))
607 return hard_frame_pointer_rtx;
608 #endif
609 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
610 if (regno == ARG_POINTER_REGNUM)
611 return arg_pointer_rtx;
612 #endif
613 #ifdef RETURN_ADDRESS_POINTER_REGNUM
614 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
615 return return_address_pointer_rtx;
616 #endif
617 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
618 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
619 return pic_offset_table_rtx;
620 if (regno == STACK_POINTER_REGNUM)
621 return stack_pointer_rtx;
622 }
623
624 #if 0
625 /* If the per-function register table has been set up, try to re-use
626 an existing entry in that table to avoid useless generation of RTL.
627
628 This code is disabled for now until we can fix the various backends
629 which depend on having non-shared hard registers in some cases. Long
630 term we want to re-enable this code as it can significantly cut down
631 on the amount of useless RTL that gets generated.
632
633 We'll also need to fix some code that runs after reload that wants to
634 set ORIGINAL_REGNO. */
635
636 if (cfun
637 && cfun->emit
638 && regno_reg_rtx
639 && regno < FIRST_PSEUDO_REGISTER
640 && reg_raw_mode[regno] == mode)
641 return regno_reg_rtx[regno];
642 #endif
643
644 return gen_raw_REG (mode, regno);
645 }
646
647 rtx
648 gen_rtx_MEM (enum machine_mode mode, rtx addr)
649 {
650 rtx rt = gen_rtx_raw_MEM (mode, addr);
651
652 /* This field is not cleared by the mere allocation of the rtx, so
653 we clear it here. */
654 MEM_ATTRS (rt) = 0;
655
656 return rt;
657 }
658
659 /* Generate a memory referring to non-trapping constant memory. */
660
661 rtx
662 gen_const_mem (enum machine_mode mode, rtx addr)
663 {
664 rtx mem = gen_rtx_MEM (mode, addr);
665 MEM_READONLY_P (mem) = 1;
666 MEM_NOTRAP_P (mem) = 1;
667 return mem;
668 }
669
670 /* Generate a MEM referring to fixed portions of the frame, e.g., register
671 save areas. */
672
673 rtx
674 gen_frame_mem (enum machine_mode mode, rtx addr)
675 {
676 rtx mem = gen_rtx_MEM (mode, addr);
677 MEM_NOTRAP_P (mem) = 1;
678 set_mem_alias_set (mem, get_frame_alias_set ());
679 return mem;
680 }
681
682 /* Generate a MEM referring to a temporary use of the stack, not part
683 of the fixed stack frame. For example, something which is pushed
684 by a target splitter. */
685 rtx
686 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
687 {
688 rtx mem = gen_rtx_MEM (mode, addr);
689 MEM_NOTRAP_P (mem) = 1;
690 if (!cfun->calls_alloca)
691 set_mem_alias_set (mem, get_frame_alias_set ());
692 return mem;
693 }
694
695 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
696 this construct would be valid, and false otherwise. */
697
698 bool
699 validate_subreg (enum machine_mode omode, enum machine_mode imode,
700 const_rtx reg, unsigned int offset)
701 {
702 unsigned int isize = GET_MODE_SIZE (imode);
703 unsigned int osize = GET_MODE_SIZE (omode);
704
705 /* All subregs must be aligned. */
706 if (offset % osize != 0)
707 return false;
708
709 /* The subreg offset cannot be outside the inner object. */
710 if (offset >= isize)
711 return false;
712
713 /* ??? This should not be here. Temporarily continue to allow word_mode
714 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
715 Generally, backends are doing something sketchy but it'll take time to
716 fix them all. */
717 if (omode == word_mode)
718 ;
719 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
720 is the culprit here, and not the backends. */
721 else if (osize >= UNITS_PER_WORD && isize >= osize)
722 ;
723 /* Allow component subregs of complex and vector. Though given the below
724 extraction rules, it's not always clear what that means. */
725 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
726 && GET_MODE_INNER (imode) == omode)
727 ;
728 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
729 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
730 represent this. It's questionable if this ought to be represented at
731 all -- why can't this all be hidden in post-reload splitters that make
732 arbitrary mode changes to the registers themselves? */
733 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
734 ;
735 /* Subregs involving floating point modes are not allowed to
736 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
737 (subreg:SI (reg:DF) 0) isn't. */
738 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
739 {
740 if (isize != osize)
741 return false;
742 }
743
744 /* Paradoxical subregs must have offset zero. */
745 if (osize > isize)
746 return offset == 0;
747
748 /* This is a normal subreg. Verify that the offset is representable. */
749
750 /* For hard registers, we already have most of these rules collected in
751 subreg_offset_representable_p. */
752 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
753 {
754 unsigned int regno = REGNO (reg);
755
756 #ifdef CANNOT_CHANGE_MODE_CLASS
757 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
758 && GET_MODE_INNER (imode) == omode)
759 ;
760 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
761 return false;
762 #endif
763
764 return subreg_offset_representable_p (regno, imode, offset, omode);
765 }
766
767 /* For pseudo registers, we want most of the same checks. Namely:
768 If the register is no larger than a word, the subreg must be the lowpart.
769 If the register is larger than a word, the subreg must be the lowpart
770 of a subword. A subreg does *not* perform arbitrary bit extraction.
771 Given that we've already checked mode/offset alignment, we only have
772 to check subword subregs here. */
773 if (osize < UNITS_PER_WORD)
774 {
775 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
776 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
777 if (offset % UNITS_PER_WORD != low_off)
778 return false;
779 }
780 return true;
781 }
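/* Illustrative examples (not part of the original code), assuming a
   32-bit little-endian target with UNITS_PER_WORD == 4 and REG a pseudo:
   a subword subreg must select the lowpart of one of the inner words;
   it cannot perform arbitrary byte extraction. */
#if 0
validate_subreg (SImode, DImode, reg, 0);  /* true:  first word           */
validate_subreg (SImode, DImode, reg, 4);  /* true:  second word          */
validate_subreg (HImode, DImode, reg, 0);  /* true:  lowpart of a word    */
validate_subreg (HImode, DImode, reg, 2);  /* false: not a word's lowpart */
#endif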
782
783 rtx
784 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
785 {
786 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
787 return gen_rtx_raw_SUBREG (mode, reg, offset);
788 }
789
790 /* Generate a SUBREG representing the least-significant part of REG if MODE
791 is smaller than mode of REG, otherwise paradoxical SUBREG. */
792
793 rtx
794 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
795 {
796 enum machine_mode inmode;
797
798 inmode = GET_MODE (reg);
799 if (inmode == VOIDmode)
800 inmode = mode;
801 return gen_rtx_SUBREG (mode, reg,
802 subreg_lowpart_offset (mode, inmode));
803 }
804 \f
805
806 /* Create an rtvec and store within it the RTXen passed in the arguments. */
807
808 rtvec
809 gen_rtvec (int n, ...)
810 {
811 int i;
812 rtvec rt_val;
813 va_list p;
814
815 va_start (p, n);
816
817 /* Don't allocate an empty rtvec... */
818 if (n == 0)
819 return NULL_RTVEC;
820
821 rt_val = rtvec_alloc (n);
822
823 for (i = 0; i < n; i++)
824 rt_val->elem[i] = va_arg (p, rtx);
825
826 va_end (p);
827 return rt_val;
828 }
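/* Illustrative sketch (not part of the original code; PAT0 and PAT1 are
   hypothetical side-effect rtxs): a typical use is collecting the
   elements of a PARALLEL. */
#if 0
rtvec v = gen_rtvec (2, pat0, pat1);
rtx par = gen_rtx_PARALLEL (VOIDmode, v);
#endif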
829
830 rtvec
831 gen_rtvec_v (int n, rtx *argp)
832 {
833 int i;
834 rtvec rt_val;
835
836 /* Don't allocate an empty rtvec... */
837 if (n == 0)
838 return NULL_RTVEC;
839
840 rt_val = rtvec_alloc (n);
841
842 for (i = 0; i < n; i++)
843 rt_val->elem[i] = *argp++;
844
845 return rt_val;
846 }
847 \f
848 /* Return the number of bytes between the start of an OUTER_MODE
849 in-memory value and the start of an INNER_MODE in-memory value,
850 given that the former is a lowpart of the latter. It may be a
851 paradoxical lowpart, in which case the offset will be negative
852 on big-endian targets. */
853
854 int
855 byte_lowpart_offset (enum machine_mode outer_mode,
856 enum machine_mode inner_mode)
857 {
858 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
859 return subreg_lowpart_offset (outer_mode, inner_mode);
860 else
861 return -subreg_lowpart_offset (inner_mode, outer_mode);
862 }
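/* Illustrative examples (not part of the original code), assuming 4-byte
   SImode and 8-byte DImode.  The narrowing direction is an ordinary
   lowpart offset; the widening (paradoxical) direction is negated. */
#if 0
byte_lowpart_offset (SImode, DImode);  /* little-endian: 0, big-endian:  4 */
byte_lowpart_offset (DImode, SImode);  /* little-endian: 0, big-endian: -4 */
#endif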
863 \f
864 /* Generate a REG rtx for a new pseudo register of mode MODE.
865 This pseudo is assigned the next sequential register number. */
866
867 rtx
868 gen_reg_rtx (enum machine_mode mode)
869 {
870 rtx val;
871 unsigned int align = GET_MODE_ALIGNMENT (mode);
872
873 gcc_assert (can_create_pseudo_p ());
874
875 /* If a virtual register with bigger mode alignment is generated,
876 increase stack alignment estimation because it might be spilled
877 to stack later. */
878 if (SUPPORTS_STACK_ALIGNMENT
879 && crtl->stack_alignment_estimated < align
880 && !crtl->stack_realign_processed)
881 {
882 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
883 if (crtl->stack_alignment_estimated < min_align)
884 crtl->stack_alignment_estimated = min_align;
885 }
886
887 if (generating_concat_p
888 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
889 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
890 {
891 /* For complex modes, don't make a single pseudo.
892 Instead, make a CONCAT of two pseudos.
893 This allows noncontiguous allocation of the real and imaginary parts,
894 which makes much better code. Besides, allocating DCmode
895 pseudos overstrains reload on some machines like the 386. */
896 rtx realpart, imagpart;
897 enum machine_mode partmode = GET_MODE_INNER (mode);
898
899 realpart = gen_reg_rtx (partmode);
900 imagpart = gen_reg_rtx (partmode);
901 return gen_rtx_CONCAT (mode, realpart, imagpart);
902 }
903
904 /* Make sure regno_pointer_align and regno_reg_rtx are large
905 enough to have an element for this pseudo reg number. */
906
907 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
908 {
909 int old_size = crtl->emit.regno_pointer_align_length;
910 char *tmp;
911 rtx *new1;
912
913 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
914 memset (tmp + old_size, 0, old_size);
915 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
916
917 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
918 memset (new1 + old_size, 0, old_size * sizeof (rtx));
919 regno_reg_rtx = new1;
920
921 crtl->emit.regno_pointer_align_length = old_size * 2;
922 }
923
924 val = gen_raw_REG (mode, reg_rtx_no);
925 regno_reg_rtx[reg_rtx_no++] = val;
926 return val;
927 }
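/* Illustrative sketch (not part of the original code): with
   generating_concat_p set during expansion, asking for a complex pseudo
   yields a CONCAT of two scalar pseudos rather than one wide register. */
#if 0
rtx c = gen_reg_rtx (DCmode);  /* (concat:DC (reg:DF N) (reg:DF N+1)) */
#endif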
928
929 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
930 to the REG_OFFSET. */
931
932 static void
933 update_reg_offset (rtx new_rtx, rtx reg, int offset)
934 {
935 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
936 REG_OFFSET (reg) + offset);
937 }
938
939 /* Generate a register with same attributes as REG, but with OFFSET
940 added to the REG_OFFSET. */
941
942 rtx
943 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
944 int offset)
945 {
946 rtx new_rtx = gen_rtx_REG (mode, regno);
947
948 update_reg_offset (new_rtx, reg, offset);
949 return new_rtx;
950 }
951
952 /* Generate a new pseudo-register with the same attributes as REG, but
953 with OFFSET added to the REG_OFFSET. */
954
955 rtx
956 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
957 {
958 rtx new_rtx = gen_reg_rtx (mode);
959
960 update_reg_offset (new_rtx, reg, offset);
961 return new_rtx;
962 }
963
964 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
965 new register is a (possibly paradoxical) lowpart of the old one. */
966
967 void
968 adjust_reg_mode (rtx reg, enum machine_mode mode)
969 {
970 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
971 PUT_MODE (reg, mode);
972 }
973
974 /* Copy REG's attributes from X, if X has any attributes. If REG and X
975 have different modes, REG is a (possibly paradoxical) lowpart of X. */
976
977 void
978 set_reg_attrs_from_value (rtx reg, rtx x)
979 {
980 int offset;
981
982 /* Hard registers can be reused for multiple purposes within the same
983 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
984 on them is wrong. */
985 if (HARD_REGISTER_P (reg))
986 return;
987
988 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
989 if (MEM_P (x))
990 {
991 if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
992 REG_ATTRS (reg)
993 = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
994 if (MEM_POINTER (x))
995 mark_reg_pointer (reg, 0);
996 }
997 else if (REG_P (x))
998 {
999 if (REG_ATTRS (x))
1000 update_reg_offset (reg, x, offset);
1001 if (REG_POINTER (x))
1002 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1003 }
1004 }
1005
1006 /* Generate a REG rtx for a new pseudo register, copying the mode
1007 and attributes from X. */
1008
1009 rtx
1010 gen_reg_rtx_and_attrs (rtx x)
1011 {
1012 rtx reg = gen_reg_rtx (GET_MODE (x));
1013 set_reg_attrs_from_value (reg, x);
1014 return reg;
1015 }
1016
1017 /* Set the register attributes for registers contained in PARM_RTX.
1018 Use needed values from memory attributes of MEM. */
1019
1020 void
1021 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1022 {
1023 if (REG_P (parm_rtx))
1024 set_reg_attrs_from_value (parm_rtx, mem);
1025 else if (GET_CODE (parm_rtx) == PARALLEL)
1026 {
1027 /* Check for a NULL entry in the first slot, used to indicate that the
1028 parameter goes both on the stack and in registers. */
1029 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1030 for (; i < XVECLEN (parm_rtx, 0); i++)
1031 {
1032 rtx x = XVECEXP (parm_rtx, 0, i);
1033 if (REG_P (XEXP (x, 0)))
1034 REG_ATTRS (XEXP (x, 0))
1035 = get_reg_attrs (MEM_EXPR (mem),
1036 INTVAL (XEXP (x, 1)));
1037 }
1038 }
1039 }
1040
1041 /* Set the REG_ATTRS for registers in value X, given that X represents
1042 decl T. */
1043
1044 void
1045 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1046 {
1047 if (GET_CODE (x) == SUBREG)
1048 {
1049 gcc_assert (subreg_lowpart_p (x));
1050 x = SUBREG_REG (x);
1051 }
1052 if (REG_P (x))
1053 REG_ATTRS (x)
1054 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1055 DECL_MODE (t)));
1056 if (GET_CODE (x) == CONCAT)
1057 {
1058 if (REG_P (XEXP (x, 0)))
1059 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1060 if (REG_P (XEXP (x, 1)))
1061 REG_ATTRS (XEXP (x, 1))
1062 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1063 }
1064 if (GET_CODE (x) == PARALLEL)
1065 {
1066 int i, start;
1067
1068 /* Check for a NULL entry, used to indicate that the parameter goes
1069 both on the stack and in registers. */
1070 if (XEXP (XVECEXP (x, 0, 0), 0))
1071 start = 0;
1072 else
1073 start = 1;
1074
1075 for (i = start; i < XVECLEN (x, 0); i++)
1076 {
1077 rtx y = XVECEXP (x, 0, i);
1078 if (REG_P (XEXP (y, 0)))
1079 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1080 }
1081 }
1082 }
1083
1084 /* Assign the RTX X to declaration T. */
1085
1086 void
1087 set_decl_rtl (tree t, rtx x)
1088 {
1089 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1090 if (x)
1091 set_reg_attrs_for_decl_rtl (t, x);
1092 }
1093
1094 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1095 if the ABI requires the parameter to be passed by reference. */
1096
1097 void
1098 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1099 {
1100 DECL_INCOMING_RTL (t) = x;
1101 if (x && !by_reference_p)
1102 set_reg_attrs_for_decl_rtl (t, x);
1103 }
1104
1105 /* Identify REG (which may be a CONCAT) as a user register. */
1106
1107 void
1108 mark_user_reg (rtx reg)
1109 {
1110 if (GET_CODE (reg) == CONCAT)
1111 {
1112 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1113 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1114 }
1115 else
1116 {
1117 gcc_assert (REG_P (reg));
1118 REG_USERVAR_P (reg) = 1;
1119 }
1120 }
1121
1122 /* Identify REG as a probable pointer register and show its alignment
1123 as ALIGN, if nonzero. */
1124
1125 void
1126 mark_reg_pointer (rtx reg, int align)
1127 {
1128 if (! REG_POINTER (reg))
1129 {
1130 REG_POINTER (reg) = 1;
1131
1132 if (align)
1133 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1134 }
1135 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1136 /* We can no longer be sure just how aligned this pointer is. */
1137 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1138 }
1139
1140 /* Return 1 plus largest pseudo reg number used in the current function. */
1141
1142 int
1143 max_reg_num (void)
1144 {
1145 return reg_rtx_no;
1146 }
1147
1148 /* Return 1 + the largest label number used so far in the current function. */
1149
1150 int
1151 max_label_num (void)
1152 {
1153 return label_num;
1154 }
1155
1156 /* Return first label number used in this function (if any were used). */
1157
1158 int
1159 get_first_label_num (void)
1160 {
1161 return first_label_num;
1162 }
1163
1164 /* If the rtx for label was created during the expansion of a nested
1165 function, then first_label_num won't include this label number.
1166 Fix this now so that array indices work later. */
1167
1168 void
1169 maybe_set_first_label_num (rtx x)
1170 {
1171 if (CODE_LABEL_NUMBER (x) < first_label_num)
1172 first_label_num = CODE_LABEL_NUMBER (x);
1173 }
1174 \f
1175 /* Return a value representing some low-order bits of X, where the number
1176 of low-order bits is given by MODE. Note that no conversion is done
1177 between floating-point and fixed-point values; rather, the bit
1178 representation is returned.
1179
1180 This function handles the cases in common between gen_lowpart, below,
1181 and two variants in cse.c and combine.c. These are the cases that can
1182 be safely handled at all points in the compilation.
1183
1184 If this is not a case we can handle, return 0. */
1185
1186 rtx
1187 gen_lowpart_common (enum machine_mode mode, rtx x)
1188 {
1189 int msize = GET_MODE_SIZE (mode);
1190 int xsize;
1191 int offset = 0;
1192 enum machine_mode innermode;
1193
1194 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1195 so we have to make one up. Yuk. */
1196 innermode = GET_MODE (x);
1197 if (CONST_INT_P (x)
1198 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1199 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1200 else if (innermode == VOIDmode)
1201 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1202
1203 xsize = GET_MODE_SIZE (innermode);
1204
1205 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1206
1207 if (innermode == mode)
1208 return x;
1209
1210 /* MODE must occupy no more words than the mode of X. */
1211 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1212 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1213 return 0;
1214
1215 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1216 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1217 return 0;
1218
1219 offset = subreg_lowpart_offset (mode, innermode);
1220
1221 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1222 && (GET_MODE_CLASS (mode) == MODE_INT
1223 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1224 {
1225 /* If we are getting the low-order part of something that has been
1226 sign- or zero-extended, we can either just use the object being
1227 extended or make a narrower extension. If we want an even smaller
1228 piece than the size of the object being extended, call ourselves
1229 recursively.
1230
1231 This case is used mostly by combine and cse. */
1232
1233 if (GET_MODE (XEXP (x, 0)) == mode)
1234 return XEXP (x, 0);
1235 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1236 return gen_lowpart_common (mode, XEXP (x, 0));
1237 else if (msize < xsize)
1238 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1239 }
1240 else if (GET_CODE (x) == SUBREG || REG_P (x)
1241 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1242 || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
1243 return simplify_gen_subreg (mode, x, innermode, offset);
1244
1245 /* Otherwise, we can't do this. */
1246 return 0;
1247 }
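/* Illustrative example (not part of the original code): constants fall
   through to simplify_gen_subreg, so taking a narrow lowpart of a
   CONST_INT simply truncates it to the requested width. */
#if 0
gen_lowpart_common (QImode, GEN_INT (0x1234));  /* (const_int 0x34) */
#endif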
1248 \f
1249 rtx
1250 gen_highpart (enum machine_mode mode, rtx x)
1251 {
1252 unsigned int msize = GET_MODE_SIZE (mode);
1253 rtx result;
1254
1255 /* This case loses if X is a subreg. To catch bugs early,
1256 complain if an invalid MODE is used even in other cases. */
1257 gcc_assert (msize <= UNITS_PER_WORD
1258 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1259
1260 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1261 subreg_highpart_offset (mode, GET_MODE (x)));
1262 gcc_assert (result);
1263
1264 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1265 the target if we have a MEM. gen_highpart must return a valid operand,
1266 emitting code if necessary to do so. */
1267 if (MEM_P (result))
1268 {
1269 result = validize_mem (result);
1270 gcc_assert (result);
1271 }
1272
1273 return result;
1274 }
1275
1276 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1277 be VOIDmode constant. */
1278 rtx
1279 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1280 {
1281 if (GET_MODE (exp) != VOIDmode)
1282 {
1283 gcc_assert (GET_MODE (exp) == innermode);
1284 return gen_highpart (outermode, exp);
1285 }
1286 return simplify_gen_subreg (outermode, exp, innermode,
1287 subreg_highpart_offset (outermode, innermode));
1288 }
1289
1290 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1291
1292 unsigned int
1293 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1294 {
1295 unsigned int offset = 0;
1296 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1297
1298 if (difference > 0)
1299 {
1300 if (WORDS_BIG_ENDIAN)
1301 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1302 if (BYTES_BIG_ENDIAN)
1303 offset += difference % UNITS_PER_WORD;
1304 }
1305
1306 return offset;
1307 }
1308
1309 /* Return offset in bytes to get OUTERMODE high part
1310 of the value in mode INNERMODE stored in memory in target format. */
1311 unsigned int
1312 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1313 {
1314 unsigned int offset = 0;
1315 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1316
1317 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1318
1319 if (difference > 0)
1320 {
1321 if (! WORDS_BIG_ENDIAN)
1322 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1323 if (! BYTES_BIG_ENDIAN)
1324 offset += difference % UNITS_PER_WORD;
1325 }
1326
1327 return offset;
1328 }
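/* Illustrative examples (not part of the original code), assuming 4-byte
   SImode and 8-byte DImode.  On a big-endian target the low-order half is
   stored last, so the lowpart offset is 4 and the highpart offset is 0;
   on a little-endian target the two results are swapped. */
#if 0
subreg_lowpart_offset (SImode, DImode);   /* LE: 0, BE: 4 */
subreg_highpart_offset (SImode, DImode);  /* LE: 4, BE: 0 */
#endif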
1329
1330 /* Return 1 iff X, assumed to be a SUBREG,
1331 refers to the least significant part of its containing reg.
1332 If X is not a SUBREG, always return 1 (it is its own low part!). */
1333
1334 int
1335 subreg_lowpart_p (const_rtx x)
1336 {
1337 if (GET_CODE (x) != SUBREG)
1338 return 1;
1339 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1340 return 0;
1341
1342 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1343 == SUBREG_BYTE (x));
1344 }
1345 \f
1346 /* Return subword OFFSET of operand OP.
1347 The word number, OFFSET, is interpreted as the word number starting
1348 at the low-order address. OFFSET 0 is the low-order word if not
1349 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1350
1351 If we cannot extract the required word, we return zero. Otherwise,
1352 an rtx corresponding to the requested word will be returned.
1353
1354 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1355 reload has completed, a valid address will always be returned. After
1356 reload, if a valid address cannot be returned, we return zero.
1357
1358 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1359 it is the responsibility of the caller.
1360
1361 MODE is the mode of OP in case it is a CONST_INT.
1362
1363 ??? This is still rather broken for some cases. The problem for the
1364 moment is that all callers of this thing provide no 'goal mode' to
1365 tell us to work with. This exists because all callers were written
1366 in a word based SUBREG world.
1367 Now use of this function can be deprecated by simplify_subreg in most
1368 cases.
1369 */
1370
1371 rtx
1372 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1373 {
1374 if (mode == VOIDmode)
1375 mode = GET_MODE (op);
1376
1377 gcc_assert (mode != VOIDmode);
1378
1379 /* If OP is narrower than a word, fail. */
1380 if (mode != BLKmode
1381 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1382 return 0;
1383
1384 /* If we want a word outside OP, return zero. */
1385 if (mode != BLKmode
1386 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1387 return const0_rtx;
1388
1389 /* Form a new MEM at the requested address. */
1390 if (MEM_P (op))
1391 {
1392 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1393
1394 if (! validate_address)
1395 return new_rtx;
1396
1397 else if (reload_completed)
1398 {
1399 if (! strict_memory_address_addr_space_p (word_mode,
1400 XEXP (new_rtx, 0),
1401 MEM_ADDR_SPACE (op)))
1402 return 0;
1403 }
1404 else
1405 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1406 }
1407
1408 /* Rest can be handled by simplify_subreg. */
1409 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1410 }
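/* Illustrative sketch (not part of the original code; OP is a hypothetical
   DImode operand on a 32-bit target): splitting an operand into its two
   constituent words. */
#if 0
rtx w0 = operand_subword (op, 0, 1, DImode);
rtx w1 = operand_subword (op, 1, 1, DImode);
/* With !WORDS_BIG_ENDIAN, w0 is the low-order and w1 the high-order word;
   with WORDS_BIG_ENDIAN it is the other way around. */
#endif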
1411
1412 /* Similar to `operand_subword', but never return 0. If we can't
1413 extract the required subword, put OP into a register and try again.
1414 The second attempt must succeed. We always validate the address in
1415 this case.
1416
1417 MODE is the mode of OP, in case it is CONST_INT. */
1418
1419 rtx
1420 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1421 {
1422 rtx result = operand_subword (op, offset, 1, mode);
1423
1424 if (result)
1425 return result;
1426
1427 if (mode != BLKmode && mode != VOIDmode)
1428 {
1429 /* If this is a register that cannot be accessed by words, copy it
1430 to a pseudo register. */
1431 if (REG_P (op))
1432 op = copy_to_reg (op);
1433 else
1434 op = force_reg (mode, op);
1435 }
1436
1437 result = operand_subword (op, offset, 1, mode);
1438 gcc_assert (result);
1439
1440 return result;
1441 }
1442 \f
1443 /* Returns 1 if the two MEM_EXPRs, EXPR1 and EXPR2, can be considered
1444 equal, and 0 otherwise. */
1445
1446 int
1447 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1448 {
1449 if (expr1 == expr2)
1450 return 1;
1451
1452 if (! expr1 || ! expr2)
1453 return 0;
1454
1455 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1456 return 0;
1457
1458 return operand_equal_p (expr1, expr2, 0);
1459 }
1460
1461 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1462 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1463 -1 if not known. */
1464
1465 int
1466 get_mem_align_offset (rtx mem, unsigned int align)
1467 {
1468 tree expr;
1469 unsigned HOST_WIDE_INT offset;
1470
1471 /* This function can't use
1472 if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
1473 || !CONST_INT_P (MEM_OFFSET (mem))
1474 || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
1475 < align))
1476 return -1;
1477 else
1478 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
1479 for two reasons:
1480 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1481 for <variable>. get_inner_reference doesn't handle it and
1482 even if it did, the alignment in that case needs to be determined
1483 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1484 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1485 isn't sufficiently aligned, the object it is in might be. */
1486 gcc_assert (MEM_P (mem));
1487 expr = MEM_EXPR (mem);
1488 if (expr == NULL_TREE
1489 || MEM_OFFSET (mem) == NULL_RTX
1490 || !CONST_INT_P (MEM_OFFSET (mem)))
1491 return -1;
1492
1493 offset = INTVAL (MEM_OFFSET (mem));
1494 if (DECL_P (expr))
1495 {
1496 if (DECL_ALIGN (expr) < align)
1497 return -1;
1498 }
1499 else if (INDIRECT_REF_P (expr))
1500 {
1501 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1502 return -1;
1503 }
1504 else if (TREE_CODE (expr) == COMPONENT_REF)
1505 {
1506 while (1)
1507 {
1508 tree inner = TREE_OPERAND (expr, 0);
1509 tree field = TREE_OPERAND (expr, 1);
1510 tree byte_offset = component_ref_field_offset (expr);
1511 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1512
1513 if (!byte_offset
1514 || !host_integerp (byte_offset, 1)
1515 || !host_integerp (bit_offset, 1))
1516 return -1;
1517
1518 offset += tree_low_cst (byte_offset, 1);
1519 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1520
1521 if (inner == NULL_TREE)
1522 {
1523 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1524 < (unsigned int) align)
1525 return -1;
1526 break;
1527 }
1528 else if (DECL_P (inner))
1529 {
1530 if (DECL_ALIGN (inner) < align)
1531 return -1;
1532 break;
1533 }
1534 else if (TREE_CODE (inner) != COMPONENT_REF)
1535 return -1;
1536 expr = inner;
1537 }
1538 }
1539 else
1540 return -1;
1541
1542 return offset & ((align / BITS_PER_UNIT) - 1);
1543 }
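/* Illustrative example (not part of the original code; MEM is
   hypothetical): when MEM_EXPR is a decl aligned to at least ALIGN bits
   and MEM_OFFSET is 6, the result is the offset reduced modulo the
   alignment in bytes. */
#if 0
get_mem_align_offset (mem, 64);  /* 6 & (64 / BITS_PER_UNIT - 1) == 6 */
#endif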
1544
1545 /* Given REF (a MEM) and T, either the type of REF or the expression
1546 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1547 if we are making a new object of this type. BITPOS is nonzero if
1548 there is an offset outstanding on T that will be applied later. */
1549
1550 void
1551 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1552 HOST_WIDE_INT bitpos)
1553 {
1554 alias_set_type alias = MEM_ALIAS_SET (ref);
1555 tree expr = MEM_EXPR (ref);
1556 rtx offset = MEM_OFFSET (ref);
1557 rtx size = MEM_SIZE (ref);
1558 unsigned int align = MEM_ALIGN (ref);
1559 HOST_WIDE_INT apply_bitpos = 0;
1560 tree type;
1561
1562 /* It can happen that type_for_mode was given a mode for which there
1563 is no language-level type, in which case it returns NULL, which
1564 we can see here. */
1565 if (t == NULL_TREE)
1566 return;
1567
1568 type = TYPE_P (t) ? t : TREE_TYPE (t);
1569 if (type == error_mark_node)
1570 return;
1571
1572 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1573 wrong answer, as it assumes that DECL_RTL already has the right alias
1574 info. Callers should not set DECL_RTL until after the call to
1575 set_mem_attributes. */
1576 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1577
1578 /* Get the alias set from the expression or type (perhaps using a
1579 front-end routine) and use it. */
1580 alias = get_alias_set (t);
1581
1582 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1583 MEM_IN_STRUCT_P (ref)
1584 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
1585 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1586
1587 /* If we are making an object of this type, or if this is a DECL, we know
1588 that it is a scalar if the type is not an aggregate. */
1589 if ((objectp || DECL_P (t))
1590 && ! AGGREGATE_TYPE_P (type)
1591 && TREE_CODE (type) != COMPLEX_TYPE)
1592 MEM_SCALAR_P (ref) = 1;
1593
1594 /* We can set the alignment from the type if we are making an object,
1595 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1596 if (objectp || TREE_CODE (t) == INDIRECT_REF
1597 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1598 || TYPE_ALIGN_OK (type))
1599 align = MAX (align, TYPE_ALIGN (type));
1600 else
1601 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1602 {
1603 if (integer_zerop (TREE_OPERAND (t, 1)))
1604 /* We don't know anything about the alignment. */
1605 align = BITS_PER_UNIT;
1606 else
1607 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1608 }
1609
1610 /* If the size is known, we can set that. */
1611 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1612 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1613
1614 /* If T is not a type, we may be able to deduce some more information about
1615 the expression. */
1616 if (! TYPE_P (t))
1617 {
1618 tree base;
1619 bool align_computed = false;
1620
1621 if (TREE_THIS_VOLATILE (t))
1622 MEM_VOLATILE_P (ref) = 1;
1623
1624 /* Now remove any conversions: they don't change what the underlying
1625 object is. Likewise for SAVE_EXPR. */
1626 while (CONVERT_EXPR_P (t)
1627 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1628 || TREE_CODE (t) == SAVE_EXPR)
1629 t = TREE_OPERAND (t, 0);
1630
1631 /* We may look through structure-like accesses for the purposes of
1632 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1633 base = t;
1634 while (TREE_CODE (base) == COMPONENT_REF
1635 || TREE_CODE (base) == REALPART_EXPR
1636 || TREE_CODE (base) == IMAGPART_EXPR
1637 || TREE_CODE (base) == BIT_FIELD_REF)
1638 base = TREE_OPERAND (base, 0);
1639
1640 if (DECL_P (base))
1641 {
1642 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1643 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1644 else
1645 MEM_NOTRAP_P (ref) = 1;
1646 }
1647 else
1648 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1649
1650 base = get_base_address (base);
1651 if (base && DECL_P (base)
1652 && TREE_READONLY (base)
1653 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1654 {
1655 tree base_type = TREE_TYPE (base);
1656 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1657 || DECL_ARTIFICIAL (base));
1658 MEM_READONLY_P (ref) = 1;
1659 }
1660
1661 /* If this expression uses its parent's alias set, mark it such
1662 that we won't change it. */
1663 if (component_uses_parent_alias_set (t))
1664 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1665
1666 /* If this is a decl, set the attributes of the MEM from it. */
1667 if (DECL_P (t))
1668 {
1669 expr = t;
1670 offset = const0_rtx;
1671 apply_bitpos = bitpos;
1672 size = (DECL_SIZE_UNIT (t)
1673 && host_integerp (DECL_SIZE_UNIT (t), 1)
1674 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1675 align = DECL_ALIGN (t);
1676 align_computed = true;
1677 }
1678
1679 /* If this is a constant, we know the alignment. */
1680 else if (CONSTANT_CLASS_P (t))
1681 {
1682 align = TYPE_ALIGN (type);
1683 #ifdef CONSTANT_ALIGNMENT
1684 align = CONSTANT_ALIGNMENT (t, align);
1685 #endif
1686 align_computed = true;
1687 }
1688
1689 /* If this is a field reference and not a bit-field, record it. */
1690 /* ??? There is some information that can be gleaned from bit-fields,
1691 such as the word offset in the structure that might be modified.
1692 But skip it for now. */
1693 else if (TREE_CODE (t) == COMPONENT_REF
1694 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1695 {
1696 expr = t;
1697 offset = const0_rtx;
1698 apply_bitpos = bitpos;
1699 /* ??? Any reason the field size would be different than
1700 the size we got from the type? */
1701 }
1702
1703 /* If this is an array reference, look for an outer field reference. */
1704 else if (TREE_CODE (t) == ARRAY_REF)
1705 {
1706 tree off_tree = size_zero_node;
1707 /* We can't modify t, because we use it at the end of the
1708 function. */
1709 tree t2 = t;
1710
1711 do
1712 {
1713 tree index = TREE_OPERAND (t2, 1);
1714 tree low_bound = array_ref_low_bound (t2);
1715 tree unit_size = array_ref_element_size (t2);
1716
1717 /* We assume all arrays have sizes that are a multiple of a byte.
1718 First subtract the lower bound, if any, in the type of the
1719 index, then convert to sizetype and multiply by the size of
1720 the array element. */
1721 if (! integer_zerop (low_bound))
1722 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1723 index, low_bound);
1724
1725 off_tree = size_binop (PLUS_EXPR,
1726 size_binop (MULT_EXPR,
1727 fold_convert (sizetype,
1728 index),
1729 unit_size),
1730 off_tree);
1731 t2 = TREE_OPERAND (t2, 0);
1732 }
1733 while (TREE_CODE (t2) == ARRAY_REF);
1734
1735 if (DECL_P (t2))
1736 {
1737 expr = t2;
1738 offset = NULL;
1739 if (host_integerp (off_tree, 1))
1740 {
1741 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1742 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1743 align = DECL_ALIGN (t2);
1744 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1745 align = aoff;
1746 align_computed = true;
1747 offset = GEN_INT (ioff);
1748 apply_bitpos = bitpos;
1749 }
1750 }
1751 else if (TREE_CODE (t2) == COMPONENT_REF)
1752 {
1753 expr = t2;
1754 offset = NULL;
1755 if (host_integerp (off_tree, 1))
1756 {
1757 offset = GEN_INT (tree_low_cst (off_tree, 1));
1758 apply_bitpos = bitpos;
1759 }
1760 /* ??? Any reason the field size would be different than
1761 the size we got from the type? */
1762 }
1763
1764 /* If this is an indirect reference, record it. */
1765 else if (TREE_CODE (t) == INDIRECT_REF
1766 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1767 {
1768 expr = t;
1769 offset = const0_rtx;
1770 apply_bitpos = bitpos;
1771 }
1772 }
1773
1774 /* If this is an indirect reference, record it. */
1775 else if (TREE_CODE (t) == INDIRECT_REF
1776 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1777 {
1778 expr = t;
1779 offset = const0_rtx;
1780 apply_bitpos = bitpos;
1781 }
1782
1783 if (!align_computed && !INDIRECT_REF_P (t))
1784 {
1785 unsigned int obj_align
1786 = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
1787 align = MAX (align, obj_align);
1788 }
1789 }
1790
1791 /* If we modified OFFSET based on T, then subtract the outstanding
1792 bit position offset. Similarly, increase the size of the accessed
1793 object to contain the negative offset. */
1794 if (apply_bitpos)
1795 {
1796 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1797 if (size)
1798 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1799 }
1800
1801 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1802 {
1803 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1804 we're overlapping. */
1805 offset = NULL;
1806 expr = NULL;
1807 }
1808
1809 /* Now set the attributes we computed above. */
1810 MEM_ATTRS (ref)
1811 = get_mem_attrs (alias, expr, offset, size, align,
1812 TYPE_ADDR_SPACE (type), GET_MODE (ref));
1813
1814 /* If this is already known to be a scalar or aggregate, we are done. */
1815 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1816 return;
1817
1818 /* If it is a reference into an aggregate, this is part of an aggregate.
1819 Otherwise we don't know. */
1820 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1821 || TREE_CODE (t) == ARRAY_RANGE_REF
1822 || TREE_CODE (t) == BIT_FIELD_REF)
1823 MEM_IN_STRUCT_P (ref) = 1;
1824 }
1825
1826 void
1827 set_mem_attributes (rtx ref, tree t, int objectp)
1828 {
1829 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1830 }
1831
1832 /* Set the alias set of MEM to SET. */
1833
1834 void
1835 set_mem_alias_set (rtx mem, alias_set_type set)
1836 {
1837 #ifdef ENABLE_CHECKING
1838 /* If the new and old alias sets don't conflict, something is wrong. */
1839 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1840 #endif
1841
1842 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1843 MEM_SIZE (mem), MEM_ALIGN (mem),
1844 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1845 }
1846
1847 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1848
1849 void
1850 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1851 {
1852 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1853 MEM_OFFSET (mem), MEM_SIZE (mem),
1854 MEM_ALIGN (mem), addrspace, GET_MODE (mem));
1855 }
1856
1857 /* Set the alignment of MEM to ALIGN bits. */
1858
1859 void
1860 set_mem_align (rtx mem, unsigned int align)
1861 {
1862 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1863 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1864 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1865 }
1866
1867 /* Set the expr for MEM to EXPR. */
1868
1869 void
1870 set_mem_expr (rtx mem, tree expr)
1871 {
1872 MEM_ATTRS (mem)
1873 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1874 MEM_SIZE (mem), MEM_ALIGN (mem),
1875 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1876 }
1877
1878 /* Set the offset of MEM to OFFSET. */
1879
1880 void
1881 set_mem_offset (rtx mem, rtx offset)
1882 {
1883 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1884 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1885 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1886 }
1887
1888 /* Set the size of MEM to SIZE. */
1889
1890 void
1891 set_mem_size (rtx mem, rtx size)
1892 {
1893 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1894 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1895 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1896 }
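
/* Usage sketch: a hypothetical helper combining the set_mem_* accessors
   above to annotate a freshly built MEM.  The SImode mode, the 4-byte size
   and the 32-bit alignment are example values, not requirements; the helper
   name is made up for illustration.  */
#if 0
static rtx
example_annotated_mem (rtx addr)
{
  rtx mem = gen_rtx_MEM (SImode, addr);

  set_mem_size (mem, GEN_INT (4));	/* The access is 4 bytes wide.  */
  set_mem_align (mem, 32);		/* And known to be 32-bit aligned.  */
  return mem;
}
#endif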
1897 \f
1898 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1899 and its address changed to ADDR. (VOIDmode means don't change the mode.
1900 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1901 returned memory location is required to be valid. The memory
1902 attributes are not changed. */
1903
1904 static rtx
1905 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1906 {
1907 addr_space_t as;
1908 rtx new_rtx;
1909
1910 gcc_assert (MEM_P (memref));
1911 as = MEM_ADDR_SPACE (memref);
1912 if (mode == VOIDmode)
1913 mode = GET_MODE (memref);
1914 if (addr == 0)
1915 addr = XEXP (memref, 0);
1916 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1917 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1918 return memref;
1919
1920 if (validate)
1921 {
1922 if (reload_in_progress || reload_completed)
1923 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1924 else
1925 addr = memory_address_addr_space (mode, addr, as);
1926 }
1927
1928 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1929 return memref;
1930
1931 new_rtx = gen_rtx_MEM (mode, addr);
1932 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1933 return new_rtx;
1934 }
1935
1936 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1937 way we are changing MEMREF, so we only preserve the alias set. */
1938
1939 rtx
1940 change_address (rtx memref, enum machine_mode mode, rtx addr)
1941 {
1942 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1943 enum machine_mode mmode = GET_MODE (new_rtx);
1944 unsigned int align;
1945
1946 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1947 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1948
1949 /* If there are no changes, just return the original memory reference. */
1950 if (new_rtx == memref)
1951 {
1952 if (MEM_ATTRS (memref) == 0
1953 || (MEM_EXPR (memref) == NULL
1954 && MEM_OFFSET (memref) == NULL
1955 && MEM_SIZE (memref) == size
1956 && MEM_ALIGN (memref) == align))
1957 return new_rtx;
1958
1959 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1960 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1961 }
1962
1963 MEM_ATTRS (new_rtx)
1964 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
1965 MEM_ADDR_SPACE (memref), mmode);
1966
1967 return new_rtx;
1968 }
1969
1970 /* Return a memory reference like MEMREF, but with its mode changed
1971 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1972 nonzero, the memory address is forced to be valid.
1973 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1974 and the caller is responsible for adjusting the MEMREF base register. */
1975
1976 rtx
1977 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1978 int validate, int adjust)
1979 {
1980 rtx addr = XEXP (memref, 0);
1981 rtx new_rtx;
1982 rtx memoffset = MEM_OFFSET (memref);
1983 rtx size = 0;
1984 unsigned int memalign = MEM_ALIGN (memref);
1985 addr_space_t as = MEM_ADDR_SPACE (memref);
1986 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
1987 int pbits;
1988
1989 /* If there are no changes, just return the original memory reference. */
1990 if (mode == GET_MODE (memref) && !offset
1991 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1992 return memref;
1993
1994 /* ??? Prefer to create garbage instead of creating shared rtl.
1995 This may happen even if offset is nonzero -- consider
1996 (plus (plus reg reg) const_int) -- so do this always. */
1997 addr = copy_rtx (addr);
1998
1999 /* Convert a possibly large offset to a signed value within the
2000 range of the target address space. */
2001 pbits = GET_MODE_BITSIZE (address_mode);
2002 if (HOST_BITS_PER_WIDE_INT > pbits)
2003 {
2004 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2005 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2006 >> shift);
2007 }
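/* For example (hypothetical values): with a 64-bit HOST_WIDE_INT and a
32-bit address space, an incoming offset of 0xffffffff is reduced to -1
by the shift pair above. */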
2008
2009 if (adjust)
2010 {
2011 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2012 object, we can merge it into the LO_SUM. */
2013 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2014 && offset >= 0
2015 && (unsigned HOST_WIDE_INT) offset
2016 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2017 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2018 plus_constant (XEXP (addr, 1), offset));
2019 else
2020 addr = plus_constant (addr, offset);
2021 }
2022
2023 new_rtx = change_address_1 (memref, mode, addr, validate);
2024
2025 /* If the address is a REG, change_address_1 rightfully returns memref,
2026 but setting the new MEM_ATTRS below would clobber MEMREF's, so copy it. */
2027 if (new_rtx == memref && offset != 0)
2028 new_rtx = copy_rtx (new_rtx);
2029
2030 /* Compute the new values of the memory attributes due to this adjustment.
2031 We add the offsets and update the alignment. */
2032 if (memoffset)
2033 memoffset = GEN_INT (offset + INTVAL (memoffset));
2034
2035 /* Compute the new alignment by taking the MIN of the alignment and the
2036 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2037 is zero. */
2038 if (offset != 0)
2039 memalign
2040 = MIN (memalign,
2041 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2042
2043 /* We can compute the size in a number of ways. */
2044 if (GET_MODE (new_rtx) != BLKmode)
2045 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2046 else if (MEM_SIZE (memref))
2047 size = plus_constant (MEM_SIZE (memref), -offset);
2048
2049 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2050 memoffset, size, memalign, as,
2051 GET_MODE (new_rtx));
2052
2053 /* At some point, we should validate that this offset is within the object,
2054 if all the appropriate values are known. */
2055 return new_rtx;
2056 }
2057
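
/* Usage sketch: adjusting an existing SImode MEM to address the byte at
   offset 3 within it.  The helper name, offset and modes are example values;
   callers typically reach adjust_address_1 through wrapper macros rather
   than calling it directly, but the direct call keeps the sketch
   self-contained.  */
#if 0
static rtx
example_byte_of_word (rtx si_mem)
{
  /* MEM_OFFSET, MEM_SIZE and MEM_ALIGN of the result are updated for the
     3-byte displacement; e.g. a 32-bit-aligned MEM yields an 8-bit-aligned
     one, since the lowest set bit of 3 is 1 byte.  */
  return adjust_address_1 (si_mem, QImode, 3, 1, 1);
}
#endif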
2058 /* Return a memory reference like MEMREF, but with its mode changed
2059 to MODE and its address changed to ADDR, which is assumed to be
2060 MEMREF offset by OFFSET bytes. If VALIDATE is
2061 nonzero, the memory address is forced to be valid. */
2062
2063 rtx
2064 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2065 HOST_WIDE_INT offset, int validate)
2066 {
2067 memref = change_address_1 (memref, VOIDmode, addr, validate);
2068 return adjust_address_1 (memref, mode, offset, validate, 0);
2069 }
2070
2071 /* Return a memory reference like MEMREF, but whose address is changed by
2072 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2073 known to be in OFFSET (possibly 1). */
2074
2075 rtx
2076 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2077 {
2078 rtx new_rtx, addr = XEXP (memref, 0);
2079 addr_space_t as = MEM_ADDR_SPACE (memref);
2080 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2081
2082 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2083
2084 /* At this point we don't know _why_ the address is invalid. It
2085 could have secondary memory references, multiplies or anything.
2086
2087 However, if we did go and rearrange things, we can wind up not
2088 being able to recognize the magic around pic_offset_table_rtx.
2089 This stuff is fragile, and is yet another example of why it is
2090 bad to expose PIC machinery too early. */
2091 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
2092 && GET_CODE (addr) == PLUS
2093 && XEXP (addr, 0) == pic_offset_table_rtx)
2094 {
2095 addr = force_reg (GET_MODE (addr), addr);
2096 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2097 }
2098
2099 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2100 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2101
2102 /* If there are no changes, just return the original memory reference. */
2103 if (new_rtx == memref)
2104 return new_rtx;
2105
2106 /* Update the alignment to reflect the offset. Reset the offset, which
2107 we don't know. */
2108 MEM_ATTRS (new_rtx)
2109 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2110 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2111 as, GET_MODE (new_rtx));
2112 return new_rtx;
2113 }
2114
2115 /* Return a memory reference like MEMREF, but with its address changed to
2116 ADDR. The caller is asserting that the actual piece of memory pointed
2117 to is the same, just the form of the address is being changed, such as
2118 by putting something into a register. */
2119
2120 rtx
2121 replace_equiv_address (rtx memref, rtx addr)
2122 {
2123 /* change_address_1 copies the memory attribute structure without change
2124 and that's exactly what we want here. */
2125 update_temp_slot_address (XEXP (memref, 0), addr);
2126 return change_address_1 (memref, VOIDmode, addr, 1);
2127 }
2128
2129 /* Likewise, but the reference is not required to be valid. */
2130
2131 rtx
2132 replace_equiv_address_nv (rtx memref, rtx addr)
2133 {
2134 return change_address_1 (memref, VOIDmode, addr, 0);
2135 }
2136
2137 /* Return a memory reference like MEMREF, but with its mode widened to
2138 MODE and offset by OFFSET. This would be used by targets that e.g.
2139 cannot issue QImode memory operations and have to use SImode memory
2140 operations plus masking logic. */
2141
2142 rtx
2143 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2144 {
2145 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2146 tree expr = MEM_EXPR (new_rtx);
2147 rtx memoffset = MEM_OFFSET (new_rtx);
2148 unsigned int size = GET_MODE_SIZE (mode);
2149
2150 /* If there are no changes, just return the original memory reference. */
2151 if (new_rtx == memref)
2152 return new_rtx;
2153
2154 /* If we don't know what offset we were at within the expression, then
2155 we can't know if we've overstepped the bounds. */
2156 if (! memoffset)
2157 expr = NULL_TREE;
2158
2159 while (expr)
2160 {
2161 if (TREE_CODE (expr) == COMPONENT_REF)
2162 {
2163 tree field = TREE_OPERAND (expr, 1);
2164 tree offset = component_ref_field_offset (expr);
2165
2166 if (! DECL_SIZE_UNIT (field))
2167 {
2168 expr = NULL_TREE;
2169 break;
2170 }
2171
2172 /* Is the field at least as large as the access? If so, ok,
2173 otherwise strip back to the containing structure. */
2174 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2175 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2176 && INTVAL (memoffset) >= 0)
2177 break;
2178
2179 if (! host_integerp (offset, 1))
2180 {
2181 expr = NULL_TREE;
2182 break;
2183 }
2184
2185 expr = TREE_OPERAND (expr, 0);
2186 memoffset
2187 = (GEN_INT (INTVAL (memoffset)
2188 + tree_low_cst (offset, 1)
2189 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2190 / BITS_PER_UNIT)));
2191 }
2192 /* Similarly for the decl. */
2193 else if (DECL_P (expr)
2194 && DECL_SIZE_UNIT (expr)
2195 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2196 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2197 && (! memoffset || INTVAL (memoffset) >= 0))
2198 break;
2199 else
2200 {
2201 /* The widened memory access overflows the expression, which means
2202 that it could alias another expression. Zap it. */
2203 expr = NULL_TREE;
2204 break;
2205 }
2206 }
2207
2208 if (! expr)
2209 memoffset = NULL_RTX;
2210
2211 /* The widened memory may alias other stuff, so zap the alias set. */
2212 /* ??? Maybe use get_alias_set on any remaining expression. */
2213
2214 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2215 MEM_ALIGN (new_rtx),
2216 MEM_ADDR_SPACE (new_rtx), mode);
2217
2218 return new_rtx;
2219 }
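
/* Usage sketch: widening a byte access for a hypothetical target that can
   only issue word-sized loads, as described above.  The helper name is made
   up; the caller remains responsible for the masking/shifting logic.  */
#if 0
static rtx
example_widen_byte (rtx qi_mem)
{
  /* Read the whole word containing the byte; the alias set of the result
     is cleared because the wider access may overlap other objects.  */
  return widen_memory_access (qi_mem, SImode, 0);
}
#endif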
2220 \f
2221 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2222 static GTY(()) tree spill_slot_decl;
2223
2224 tree
2225 get_spill_slot_decl (bool force_build_p)
2226 {
2227 tree d = spill_slot_decl;
2228 rtx rd;
2229
2230 if (d || !force_build_p)
2231 return d;
2232
2233 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2234 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2235 DECL_ARTIFICIAL (d) = 1;
2236 DECL_IGNORED_P (d) = 1;
2237 TREE_USED (d) = 1;
2238 TREE_THIS_NOTRAP (d) = 1;
2239 spill_slot_decl = d;
2240
2241 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2242 MEM_NOTRAP_P (rd) = 1;
2243 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2244 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
2245 SET_DECL_RTL (d, rd);
2246
2247 return d;
2248 }
2249
2250 /* Given MEM, a result from assign_stack_local, fill in the memory
2251 attributes as appropriate for a register allocator spill slot.
2252 These slots are not aliasable by other memory. We arrange for
2253 them all to use a single MEM_EXPR, so that the aliasing code can
2254 work properly in the case of shared spill slots. */
2255
2256 void
2257 set_mem_attrs_for_spill (rtx mem)
2258 {
2259 alias_set_type alias;
2260 rtx addr, offset;
2261 tree expr;
2262
2263 expr = get_spill_slot_decl (true);
2264 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2265
2266 /* We expect the incoming memory to be of the form:
2267 (mem:MODE (plus (reg sfp) (const_int offset)))
2268 with perhaps the plus missing for offset = 0. */
2269 addr = XEXP (mem, 0);
2270 offset = const0_rtx;
2271 if (GET_CODE (addr) == PLUS
2272 && CONST_INT_P (XEXP (addr, 1)))
2273 offset = XEXP (addr, 1);
2274
2275 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2276 MEM_SIZE (mem), MEM_ALIGN (mem),
2277 ADDR_SPACE_GENERIC, GET_MODE (mem));
2278 MEM_NOTRAP_P (mem) = 1;
2279 }
2280 \f
2281 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2282
2283 rtx
2284 gen_label_rtx (void)
2285 {
2286 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2287 NULL, label_num++, NULL);
2288 }
2289 \f
2290 /* For procedure integration. */
2291
2292 /* Install new pointers to the first and last insns in the chain.
2293 Also, set cur_insn_uid to one higher than the last in use.
2294 Used for an inline-procedure after copying the insn chain. */
2295
2296 void
2297 set_new_first_and_last_insn (rtx first, rtx last)
2298 {
2299 rtx insn;
2300
2301 first_insn = first;
2302 last_insn = last;
2303 cur_insn_uid = 0;
2304
2305 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2306 {
2307 int debug_count = 0;
2308
2309 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2310 cur_debug_insn_uid = 0;
2311
2312 for (insn = first; insn; insn = NEXT_INSN (insn))
2313 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2314 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2315 else
2316 {
2317 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2318 if (DEBUG_INSN_P (insn))
2319 debug_count++;
2320 }
2321
2322 if (debug_count)
2323 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2324 else
2325 cur_debug_insn_uid++;
2326 }
2327 else
2328 for (insn = first; insn; insn = NEXT_INSN (insn))
2329 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2330
2331 cur_insn_uid++;
2332 }
2333 \f
2334 /* Go through all the RTL insn bodies and copy any invalid shared
2335 structure. This routine should only be called once. */
2336
2337 static void
2338 unshare_all_rtl_1 (rtx insn)
2339 {
2340 /* Unshare just about everything else. */
2341 unshare_all_rtl_in_chain (insn);
2342
2343 /* Make sure the addresses of stack slots found outside the insn chain
2344 (such as, in DECL_RTL of a variable) are not shared
2345 with the insn chain.
2346
2347 This special care is necessary when the stack slot MEM does not
2348 actually appear in the insn chain. If it does appear, its address
2349 is unshared from all else at that point. */
2350 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2351 }
2352
2353 /* Go through all the RTL insn bodies and copy any invalid shared
2354 structure, again. This is a fairly expensive thing to do so it
2355 should be done sparingly. */
2356
2357 void
2358 unshare_all_rtl_again (rtx insn)
2359 {
2360 rtx p;
2361 tree decl;
2362
2363 for (p = insn; p; p = NEXT_INSN (p))
2364 if (INSN_P (p))
2365 {
2366 reset_used_flags (PATTERN (p));
2367 reset_used_flags (REG_NOTES (p));
2368 }
2369
2370 /* Make sure that virtual stack slots are not shared. */
2371 set_used_decls (DECL_INITIAL (cfun->decl));
2372
2373 /* Make sure that virtual parameters are not shared. */
2374 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2375 set_used_flags (DECL_RTL (decl));
2376
2377 reset_used_flags (stack_slot_list);
2378
2379 unshare_all_rtl_1 (insn);
2380 }
2381
2382 unsigned int
2383 unshare_all_rtl (void)
2384 {
2385 unshare_all_rtl_1 (get_insns ());
2386 return 0;
2387 }
2388
2389 struct rtl_opt_pass pass_unshare_all_rtl =
2390 {
2391 {
2392 RTL_PASS,
2393 "unshare", /* name */
2394 NULL, /* gate */
2395 unshare_all_rtl, /* execute */
2396 NULL, /* sub */
2397 NULL, /* next */
2398 0, /* static_pass_number */
2399 TV_NONE, /* tv_id */
2400 0, /* properties_required */
2401 0, /* properties_provided */
2402 0, /* properties_destroyed */
2403 0, /* todo_flags_start */
2404 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2405 }
2406 };
2407
2408
2409 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2410 Recursively does the same for subexpressions. */
2411
2412 static void
2413 verify_rtx_sharing (rtx orig, rtx insn)
2414 {
2415 rtx x = orig;
2416 int i;
2417 enum rtx_code code;
2418 const char *format_ptr;
2419
2420 if (x == 0)
2421 return;
2422
2423 code = GET_CODE (x);
2424
2425 /* These types may be freely shared. */
2426
2427 switch (code)
2428 {
2429 case REG:
2430 case DEBUG_EXPR:
2431 case VALUE:
2432 case CONST_INT:
2433 case CONST_DOUBLE:
2434 case CONST_FIXED:
2435 case CONST_VECTOR:
2436 case SYMBOL_REF:
2437 case LABEL_REF:
2438 case CODE_LABEL:
2439 case PC:
2440 case CC0:
2441 case SCRATCH:
2442 return;
2443 /* SCRATCH rtxs must be shared because each represents a distinct value. */
2444 case CLOBBER:
2445 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2446 return;
2447 break;
2448
2449 case CONST:
2450 if (shared_const_p (orig))
2451 return;
2452 break;
2453
2454 case MEM:
2455 /* A MEM is allowed to be shared if its address is constant. */
2456 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2457 || reload_completed || reload_in_progress)
2458 return;
2459
2460 break;
2461
2462 default:
2463 break;
2464 }
2465
2466 /* This rtx may not be shared. If it has already been seen,
2467 that is invalid rtl sharing; report it. */
2468 #ifdef ENABLE_CHECKING
2469 if (RTX_FLAG (x, used))
2470 {
2471 error ("invalid rtl sharing found in the insn");
2472 debug_rtx (insn);
2473 error ("shared rtx");
2474 debug_rtx (x);
2475 internal_error ("internal consistency failure");
2476 }
2477 #endif
2478 gcc_assert (!RTX_FLAG (x, used));
2479
2480 RTX_FLAG (x, used) = 1;
2481
2482 /* Now scan the subexpressions recursively. */
2483
2484 format_ptr = GET_RTX_FORMAT (code);
2485
2486 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2487 {
2488 switch (*format_ptr++)
2489 {
2490 case 'e':
2491 verify_rtx_sharing (XEXP (x, i), insn);
2492 break;
2493
2494 case 'E':
2495 if (XVEC (x, i) != NULL)
2496 {
2497 int j;
2498 int len = XVECLEN (x, i);
2499
2500 for (j = 0; j < len; j++)
2501 {
2502 /* We allow sharing of ASM_OPERANDS inside a single
2503 instruction. */
2504 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2505 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2506 == ASM_OPERANDS))
2507 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2508 else
2509 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2510 }
2511 }
2512 break;
2513 }
2514 }
2515 return;
2516 }
2517
2518 /* Go through all the RTL insn bodies and check that there is no unexpected
2519 sharing in between the subexpressions. */
2520
2521 void
2522 verify_rtl_sharing (void)
2523 {
2524 rtx p;
2525
2526 for (p = get_insns (); p; p = NEXT_INSN (p))
2527 if (INSN_P (p))
2528 {
2529 reset_used_flags (PATTERN (p));
2530 reset_used_flags (REG_NOTES (p));
2531 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2532 {
2533 int i;
2534 rtx q, sequence = PATTERN (p);
2535
2536 for (i = 0; i < XVECLEN (sequence, 0); i++)
2537 {
2538 q = XVECEXP (sequence, 0, i);
2539 gcc_assert (INSN_P (q));
2540 reset_used_flags (PATTERN (q));
2541 reset_used_flags (REG_NOTES (q));
2542 }
2543 }
2544 }
2545
2546 for (p = get_insns (); p; p = NEXT_INSN (p))
2547 if (INSN_P (p))
2548 {
2549 verify_rtx_sharing (PATTERN (p), p);
2550 verify_rtx_sharing (REG_NOTES (p), p);
2551 }
2552 }
2553
2554 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2555 Assumes the mark bits are cleared at entry. */
2556
2557 void
2558 unshare_all_rtl_in_chain (rtx insn)
2559 {
2560 for (; insn; insn = NEXT_INSN (insn))
2561 if (INSN_P (insn))
2562 {
2563 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2564 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2565 }
2566 }
2567
2568 /* Go through all virtual stack slots of a function and mark them as
2569 shared. We never replace the DECL_RTLs themselves with a copy,
2570 but expressions mentioned in a DECL_RTL cannot be shared with
2571 expressions in the instruction stream.
2572
2573 Note that reload may convert pseudo registers into memories in-place.
2574 Pseudo registers are always shared, but MEMs never are. Thus if we
2575 reset the used flags on MEMs in the instruction stream, we must set
2576 them again on MEMs that appear in DECL_RTLs. */
2577
2578 static void
2579 set_used_decls (tree blk)
2580 {
2581 tree t;
2582
2583 /* Mark decls. */
2584 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2585 if (DECL_RTL_SET_P (t))
2586 set_used_flags (DECL_RTL (t));
2587
2588 /* Now process sub-blocks. */
2589 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2590 set_used_decls (t);
2591 }
2592
2593 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2594 Recursively does the same for subexpressions. Uses
2595 copy_rtx_if_shared_1 to reduce stack space. */
2596
2597 rtx
2598 copy_rtx_if_shared (rtx orig)
2599 {
2600 copy_rtx_if_shared_1 (&orig);
2601 return orig;
2602 }
2603
2604 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2605 use. Recursively does the same for subexpressions. */
2606
2607 static void
2608 copy_rtx_if_shared_1 (rtx *orig1)
2609 {
2610 rtx x;
2611 int i;
2612 enum rtx_code code;
2613 rtx *last_ptr;
2614 const char *format_ptr;
2615 int copied = 0;
2616 int length;
2617
2618 /* Repeat is used to turn tail-recursion into iteration. */
2619 repeat:
2620 x = *orig1;
2621
2622 if (x == 0)
2623 return;
2624
2625 code = GET_CODE (x);
2626
2627 /* These types may be freely shared. */
2628
2629 switch (code)
2630 {
2631 case REG:
2632 case DEBUG_EXPR:
2633 case VALUE:
2634 case CONST_INT:
2635 case CONST_DOUBLE:
2636 case CONST_FIXED:
2637 case CONST_VECTOR:
2638 case SYMBOL_REF:
2639 case LABEL_REF:
2640 case CODE_LABEL:
2641 case PC:
2642 case CC0:
2643 case SCRATCH:
2644 /* SCRATCH rtxs must be shared because each represents a distinct value. */
2645 return;
2646 case CLOBBER:
2647 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2648 return;
2649 break;
2650
2651 case CONST:
2652 if (shared_const_p (x))
2653 return;
2654 break;
2655
2656 case DEBUG_INSN:
2657 case INSN:
2658 case JUMP_INSN:
2659 case CALL_INSN:
2660 case NOTE:
2661 case BARRIER:
2662 /* The chain of insns is not being copied. */
2663 return;
2664
2665 default:
2666 break;
2667 }
2668
2669 /* This rtx may not be shared. If it has already been seen,
2670 replace it with a copy of itself. */
2671
2672 if (RTX_FLAG (x, used))
2673 {
2674 x = shallow_copy_rtx (x);
2675 copied = 1;
2676 }
2677 RTX_FLAG (x, used) = 1;
2678
2679 /* Now scan the subexpressions recursively.
2680 We can store any replaced subexpressions directly into X
2681 since we know X is not shared! Any vectors in X
2682 must be copied if X was copied. */
2683
2684 format_ptr = GET_RTX_FORMAT (code);
2685 length = GET_RTX_LENGTH (code);
2686 last_ptr = NULL;
2687
2688 for (i = 0; i < length; i++)
2689 {
2690 switch (*format_ptr++)
2691 {
2692 case 'e':
2693 if (last_ptr)
2694 copy_rtx_if_shared_1 (last_ptr);
2695 last_ptr = &XEXP (x, i);
2696 break;
2697
2698 case 'E':
2699 if (XVEC (x, i) != NULL)
2700 {
2701 int j;
2702 int len = XVECLEN (x, i);
2703
2704 /* Copy the vector iff I copied the rtx and the length
2705 is nonzero. */
2706 if (copied && len > 0)
2707 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2708
2709 /* Call recursively on all inside the vector. */
2710 for (j = 0; j < len; j++)
2711 {
2712 if (last_ptr)
2713 copy_rtx_if_shared_1 (last_ptr);
2714 last_ptr = &XVECEXP (x, i, j);
2715 }
2716 }
2717 break;
2718 }
2719 }
2720 *orig1 = x;
2721 if (last_ptr)
2722 {
2723 orig1 = last_ptr;
2724 goto repeat;
2725 }
2726 return;
2727 }
2728
2729 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2730 to look for shared sub-parts. */
2731
2732 void
2733 reset_used_flags (rtx x)
2734 {
2735 int i, j;
2736 enum rtx_code code;
2737 const char *format_ptr;
2738 int length;
2739
2740 /* Repeat is used to turn tail-recursion into iteration. */
2741 repeat:
2742 if (x == 0)
2743 return;
2744
2745 code = GET_CODE (x);
2746
2747 /* These types may be freely shared so we needn't do any resetting
2748 for them. */
2749
2750 switch (code)
2751 {
2752 case REG:
2753 case DEBUG_EXPR:
2754 case VALUE:
2755 case CONST_INT:
2756 case CONST_DOUBLE:
2757 case CONST_FIXED:
2758 case CONST_VECTOR:
2759 case SYMBOL_REF:
2760 case CODE_LABEL:
2761 case PC:
2762 case CC0:
2763 return;
2764
2765 case DEBUG_INSN:
2766 case INSN:
2767 case JUMP_INSN:
2768 case CALL_INSN:
2769 case NOTE:
2770 case LABEL_REF:
2771 case BARRIER:
2772 /* The chain of insns is not being copied. */
2773 return;
2774
2775 default:
2776 break;
2777 }
2778
2779 RTX_FLAG (x, used) = 0;
2780
2781 format_ptr = GET_RTX_FORMAT (code);
2782 length = GET_RTX_LENGTH (code);
2783
2784 for (i = 0; i < length; i++)
2785 {
2786 switch (*format_ptr++)
2787 {
2788 case 'e':
2789 if (i == length-1)
2790 {
2791 x = XEXP (x, i);
2792 goto repeat;
2793 }
2794 reset_used_flags (XEXP (x, i));
2795 break;
2796
2797 case 'E':
2798 for (j = 0; j < XVECLEN (x, i); j++)
2799 reset_used_flags (XVECEXP (x, i, j));
2800 break;
2801 }
2802 }
2803 }
2804
2805 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2806 to look for shared sub-parts. */
2807
2808 void
2809 set_used_flags (rtx x)
2810 {
2811 int i, j;
2812 enum rtx_code code;
2813 const char *format_ptr;
2814
2815 if (x == 0)
2816 return;
2817
2818 code = GET_CODE (x);
2819
2820 /* These types may be freely shared so we needn't do any resetting
2821 for them. */
2822
2823 switch (code)
2824 {
2825 case REG:
2826 case DEBUG_EXPR:
2827 case VALUE:
2828 case CONST_INT:
2829 case CONST_DOUBLE:
2830 case CONST_FIXED:
2831 case CONST_VECTOR:
2832 case SYMBOL_REF:
2833 case CODE_LABEL:
2834 case PC:
2835 case CC0:
2836 return;
2837
2838 case DEBUG_INSN:
2839 case INSN:
2840 case JUMP_INSN:
2841 case CALL_INSN:
2842 case NOTE:
2843 case LABEL_REF:
2844 case BARRIER:
2845 /* The chain of insns is not being copied. */
2846 return;
2847
2848 default:
2849 break;
2850 }
2851
2852 RTX_FLAG (x, used) = 1;
2853
2854 format_ptr = GET_RTX_FORMAT (code);
2855 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2856 {
2857 switch (*format_ptr++)
2858 {
2859 case 'e':
2860 set_used_flags (XEXP (x, i));
2861 break;
2862
2863 case 'E':
2864 for (j = 0; j < XVECLEN (x, i); j++)
2865 set_used_flags (XVECEXP (x, i, j));
2866 break;
2867 }
2868 }
2869 }
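
/* Usage sketch of the marking protocol implemented by the routines above:
   clear the used bits over some region of RTL, then let copy_rtx_if_shared
   duplicate anything encountered twice.  The helper name is hypothetical;
   unshare_all_rtl_again does the equivalent for whole functions.  */
#if 0
static void
example_unshare_one_insn (rtx insn)
{
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
  REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
}
#endif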
2870 \f
2871 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2872 Return X or the rtx for the pseudo reg the value of X was copied into.
2873 OTHER must be valid as a SET_DEST. */
2874
2875 rtx
2876 make_safe_from (rtx x, rtx other)
2877 {
2878 while (1)
2879 switch (GET_CODE (other))
2880 {
2881 case SUBREG:
2882 other = SUBREG_REG (other);
2883 break;
2884 case STRICT_LOW_PART:
2885 case SIGN_EXTEND:
2886 case ZERO_EXTEND:
2887 other = XEXP (other, 0);
2888 break;
2889 default:
2890 goto done;
2891 }
2892 done:
2893 if ((MEM_P (other)
2894 && ! CONSTANT_P (x)
2895 && !REG_P (x)
2896 && GET_CODE (x) != SUBREG)
2897 || (REG_P (other)
2898 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2899 || reg_mentioned_p (other, x))))
2900 {
2901 rtx temp = gen_reg_rtx (GET_MODE (x));
2902 emit_move_insn (temp, x);
2903 return temp;
2904 }
2905 return x;
2906 }
2907 \f
2908 /* Emission of insns (adding them to the doubly-linked list). */
2909
2910 /* Return the first insn of the current sequence or current function. */
2911
2912 rtx
2913 get_insns (void)
2914 {
2915 return first_insn;
2916 }
2917
2918 /* Specify a new insn as the first in the chain. */
2919
2920 void
2921 set_first_insn (rtx insn)
2922 {
2923 gcc_assert (!PREV_INSN (insn));
2924 first_insn = insn;
2925 }
2926
2927 /* Return the last insn emitted in current sequence or current function. */
2928
2929 rtx
2930 get_last_insn (void)
2931 {
2932 return last_insn;
2933 }
2934
2935 /* Specify a new insn as the last in the chain. */
2936
2937 void
2938 set_last_insn (rtx insn)
2939 {
2940 gcc_assert (!NEXT_INSN (insn));
2941 last_insn = insn;
2942 }
2943
2944 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2945
2946 rtx
2947 get_last_insn_anywhere (void)
2948 {
2949 struct sequence_stack *stack;
2950 if (last_insn)
2951 return last_insn;
2952 for (stack = seq_stack; stack; stack = stack->next)
2953 if (stack->last != 0)
2954 return stack->last;
2955 return 0;
2956 }
2957
2958 /* Return the first nonnote insn emitted in current sequence or current
2959 function. This routine looks inside SEQUENCEs. */
2960
2961 rtx
2962 get_first_nonnote_insn (void)
2963 {
2964 rtx insn = first_insn;
2965
2966 if (insn)
2967 {
2968 if (NOTE_P (insn))
2969 for (insn = next_insn (insn);
2970 insn && NOTE_P (insn);
2971 insn = next_insn (insn))
2972 continue;
2973 else
2974 {
2975 if (NONJUMP_INSN_P (insn)
2976 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2977 insn = XVECEXP (PATTERN (insn), 0, 0);
2978 }
2979 }
2980
2981 return insn;
2982 }
2983
2984 /* Return the last nonnote insn emitted in current sequence or current
2985 function. This routine looks inside SEQUENCEs. */
2986
2987 rtx
2988 get_last_nonnote_insn (void)
2989 {
2990 rtx insn = last_insn;
2991
2992 if (insn)
2993 {
2994 if (NOTE_P (insn))
2995 for (insn = previous_insn (insn);
2996 insn && NOTE_P (insn);
2997 insn = previous_insn (insn))
2998 continue;
2999 else
3000 {
3001 if (NONJUMP_INSN_P (insn)
3002 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3003 insn = XVECEXP (PATTERN (insn), 0,
3004 XVECLEN (PATTERN (insn), 0) - 1);
3005 }
3006 }
3007
3008 return insn;
3009 }
3010
3011 /* Return a number larger than any instruction's uid in this function. */
3012
3013 int
3014 get_max_uid (void)
3015 {
3016 return cur_insn_uid;
3017 }
3018
3019 /* Return the number of actual (non-debug) insns emitted in this
3020 function. */
3021
3022 int
3023 get_max_insn_count (void)
3024 {
3025 int n = cur_insn_uid;
3026
3027 /* The table size must be stable across -g, to avoid codegen
3028 differences due to debug insns, and not be affected by
3029 -fmin-insn-uid, to avoid excessive table size and to simplify
3030 debugging of -fcompare-debug failures. */
3031 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3032 n -= cur_debug_insn_uid;
3033 else
3034 n -= MIN_NONDEBUG_INSN_UID;
3035
3036 return n;
3037 }
3038
3039 \f
3040 /* Return the next insn. If it is a SEQUENCE, return the first insn
3041 of the sequence. */
3042
3043 rtx
3044 next_insn (rtx insn)
3045 {
3046 if (insn)
3047 {
3048 insn = NEXT_INSN (insn);
3049 if (insn && NONJUMP_INSN_P (insn)
3050 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3051 insn = XVECEXP (PATTERN (insn), 0, 0);
3052 }
3053
3054 return insn;
3055 }
3056
3057 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3058 of the sequence. */
3059
3060 rtx
3061 previous_insn (rtx insn)
3062 {
3063 if (insn)
3064 {
3065 insn = PREV_INSN (insn);
3066 if (insn && NONJUMP_INSN_P (insn)
3067 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3068 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3069 }
3070
3071 return insn;
3072 }
3073
3074 /* Return the next insn after INSN that is not a NOTE. This routine does not
3075 look inside SEQUENCEs. */
3076
3077 rtx
3078 next_nonnote_insn (rtx insn)
3079 {
3080 while (insn)
3081 {
3082 insn = NEXT_INSN (insn);
3083 if (insn == 0 || !NOTE_P (insn))
3084 break;
3085 }
3086
3087 return insn;
3088 }
3089
3090 /* Return the next insn after INSN that is not a NOTE, but stop the
3091 search before we enter another basic block. This routine does not
3092 look inside SEQUENCEs. */
3093
3094 rtx
3095 next_nonnote_insn_bb (rtx insn)
3096 {
3097 while (insn)
3098 {
3099 insn = NEXT_INSN (insn);
3100 if (insn == 0 || !NOTE_P (insn))
3101 break;
3102 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3103 return NULL_RTX;
3104 }
3105
3106 return insn;
3107 }
3108
3109 /* Return the previous insn before INSN that is not a NOTE. This routine does
3110 not look inside SEQUENCEs. */
3111
3112 rtx
3113 prev_nonnote_insn (rtx insn)
3114 {
3115 while (insn)
3116 {
3117 insn = PREV_INSN (insn);
3118 if (insn == 0 || !NOTE_P (insn))
3119 break;
3120 }
3121
3122 return insn;
3123 }
3124
3125 /* Return the previous insn before INSN that is not a NOTE, but stop
3126 the search before we enter another basic block. This routine does
3127 not look inside SEQUENCEs. */
3128
3129 rtx
3130 prev_nonnote_insn_bb (rtx insn)
3131 {
3132 while (insn)
3133 {
3134 insn = PREV_INSN (insn);
3135 if (insn == 0 || !NOTE_P (insn))
3136 break;
3137 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3138 return NULL_RTX;
3139 }
3140
3141 return insn;
3142 }
3143
3144 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3145 routine does not look inside SEQUENCEs. */
3146
3147 rtx
3148 next_nondebug_insn (rtx insn)
3149 {
3150 while (insn)
3151 {
3152 insn = NEXT_INSN (insn);
3153 if (insn == 0 || !DEBUG_INSN_P (insn))
3154 break;
3155 }
3156
3157 return insn;
3158 }
3159
3160 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3161 This routine does not look inside SEQUENCEs. */
3162
3163 rtx
3164 prev_nondebug_insn (rtx insn)
3165 {
3166 while (insn)
3167 {
3168 insn = PREV_INSN (insn);
3169 if (insn == 0 || !DEBUG_INSN_P (insn))
3170 break;
3171 }
3172
3173 return insn;
3174 }
3175
3176 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3177 or 0, if there is none. This routine does not look inside
3178 SEQUENCEs. */
3179
3180 rtx
3181 next_real_insn (rtx insn)
3182 {
3183 while (insn)
3184 {
3185 insn = NEXT_INSN (insn);
3186 if (insn == 0 || INSN_P (insn))
3187 break;
3188 }
3189
3190 return insn;
3191 }
3192
3193 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3194 or 0, if there is none. This routine does not look inside
3195 SEQUENCEs. */
3196
3197 rtx
3198 prev_real_insn (rtx insn)
3199 {
3200 while (insn)
3201 {
3202 insn = PREV_INSN (insn);
3203 if (insn == 0 || INSN_P (insn))
3204 break;
3205 }
3206
3207 return insn;
3208 }
3209
3210 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3211 This routine does not look inside SEQUENCEs. */
3212
3213 rtx
3214 last_call_insn (void)
3215 {
3216 rtx insn;
3217
3218 for (insn = get_last_insn ();
3219 insn && !CALL_P (insn);
3220 insn = PREV_INSN (insn))
3221 ;
3222
3223 return insn;
3224 }
3225
3226 /* Find the next insn after INSN that really does something. This routine
3227 does not look inside SEQUENCEs. After reload this also skips over
3228 standalone USE and CLOBBER insns. */
3229
3230 int
3231 active_insn_p (const_rtx insn)
3232 {
3233 return (CALL_P (insn) || JUMP_P (insn)
3234 || (NONJUMP_INSN_P (insn)
3235 && (! reload_completed
3236 || (GET_CODE (PATTERN (insn)) != USE
3237 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3238 }
3239
3240 rtx
3241 next_active_insn (rtx insn)
3242 {
3243 while (insn)
3244 {
3245 insn = NEXT_INSN (insn);
3246 if (insn == 0 || active_insn_p (insn))
3247 break;
3248 }
3249
3250 return insn;
3251 }
3252
3253 /* Find the last insn before INSN that really does something. This routine
3254 does not look inside SEQUENCEs. After reload this also skips over
3255 standalone USE and CLOBBER insns. */
3256
3257 rtx
3258 prev_active_insn (rtx insn)
3259 {
3260 while (insn)
3261 {
3262 insn = PREV_INSN (insn);
3263 if (insn == 0 || active_insn_p (insn))
3264 break;
3265 }
3266
3267 return insn;
3268 }
3269
3270 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3271
3272 rtx
3273 next_label (rtx insn)
3274 {
3275 while (insn)
3276 {
3277 insn = NEXT_INSN (insn);
3278 if (insn == 0 || LABEL_P (insn))
3279 break;
3280 }
3281
3282 return insn;
3283 }
3284
3285 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3286
3287 rtx
3288 prev_label (rtx insn)
3289 {
3290 while (insn)
3291 {
3292 insn = PREV_INSN (insn);
3293 if (insn == 0 || LABEL_P (insn))
3294 break;
3295 }
3296
3297 return insn;
3298 }
3299
3300 /* Return the last label to mark the same position as LABEL. Return null
3301 if LABEL itself is null. */
3302
3303 rtx
3304 skip_consecutive_labels (rtx label)
3305 {
3306 rtx insn;
3307
3308 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3309 if (LABEL_P (insn))
3310 label = insn;
3311
3312 return label;
3313 }
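
/* Usage sketch of the walkers above: find the first "real" insn after a
   label, skipping notes and (after reload) bare USE/CLOBBER insns.  The
   helper name is made up for illustration.  */
#if 0
static rtx
example_first_work_after_label (rtx label)
{
  rtx insn = next_nonnote_insn (label);	/* Skip notes only.  */

  if (insn && !active_insn_p (insn))
    insn = next_active_insn (insn);	/* Skip USE/CLOBBER as well.  */
  return insn;
}
#endif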
3314 \f
3315 #ifdef HAVE_cc0
3316 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3317 and REG_CC_USER notes so we can find it. */
3318
3319 void
3320 link_cc0_insns (rtx insn)
3321 {
3322 rtx user = next_nonnote_insn (insn);
3323
3324 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3325 user = XVECEXP (PATTERN (user), 0, 0);
3326
3327 add_reg_note (user, REG_CC_SETTER, insn);
3328 add_reg_note (insn, REG_CC_USER, user);
3329 }
3330
3331 /* Return the next insn that uses CC0 after INSN, which is assumed to
3332 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3333 applied to the result of this function should yield INSN).
3334
3335 Normally, this is simply the next insn. However, if a REG_CC_USER note
3336 is present, it contains the insn that uses CC0.
3337
3338 Return 0 if we can't find the insn. */
3339
3340 rtx
3341 next_cc0_user (rtx insn)
3342 {
3343 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3344
3345 if (note)
3346 return XEXP (note, 0);
3347
3348 insn = next_nonnote_insn (insn);
3349 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3350 insn = XVECEXP (PATTERN (insn), 0, 0);
3351
3352 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3353 return insn;
3354
3355 return 0;
3356 }
3357
3358 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3359 note, it is the previous insn. */
3360
3361 rtx
3362 prev_cc0_setter (rtx insn)
3363 {
3364 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3365
3366 if (note)
3367 return XEXP (note, 0);
3368
3369 insn = prev_nonnote_insn (insn);
3370 gcc_assert (sets_cc0_p (PATTERN (insn)));
3371
3372 return insn;
3373 }
3374 #endif
3375
3376 #ifdef AUTO_INC_DEC
3377 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3378
3379 static int
3380 find_auto_inc (rtx *xp, void *data)
3381 {
3382 rtx x = *xp;
3383 rtx reg = (rtx) data;
3384
3385 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3386 return 0;
3387
3388 switch (GET_CODE (x))
3389 {
3390 case PRE_DEC:
3391 case PRE_INC:
3392 case POST_DEC:
3393 case POST_INC:
3394 case PRE_MODIFY:
3395 case POST_MODIFY:
3396 if (rtx_equal_p (reg, XEXP (x, 0)))
3397 return 1;
3398 break;
3399
3400 default:
3401 gcc_unreachable ();
3402 }
3403 return -1;
3404 }
3405 #endif
3406
3407 /* Increment the label uses for all labels present in rtx. */
3408
3409 static void
3410 mark_label_nuses (rtx x)
3411 {
3412 enum rtx_code code;
3413 int i, j;
3414 const char *fmt;
3415
3416 code = GET_CODE (x);
3417 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3418 LABEL_NUSES (XEXP (x, 0))++;
3419
3420 fmt = GET_RTX_FORMAT (code);
3421 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3422 {
3423 if (fmt[i] == 'e')
3424 mark_label_nuses (XEXP (x, i));
3425 else if (fmt[i] == 'E')
3426 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3427 mark_label_nuses (XVECEXP (x, i, j));
3428 }
3429 }
3430
3431 \f
3432 /* Try splitting insns that can be split for better scheduling.
3433 PAT is the pattern which might split.
3434 TRIAL is the insn providing PAT.
3435 LAST is nonzero if we should return the last insn of the sequence produced.
3436
3437 If this routine succeeds in splitting, it returns the first or last
3438 replacement insn depending on the value of LAST. Otherwise, it
3439 returns TRIAL. If the insn to be returned can be split, it will be. */
3440
3441 rtx
3442 try_split (rtx pat, rtx trial, int last)
3443 {
3444 rtx before = PREV_INSN (trial);
3445 rtx after = NEXT_INSN (trial);
3446 int has_barrier = 0;
3447 rtx note, seq, tem;
3448 int probability;
3449 rtx insn_last, insn;
3450 int njumps = 0;
3451
3452 /* We're not good at redistributing frame information. */
3453 if (RTX_FRAME_RELATED_P (trial))
3454 return trial;
3455
3456 if (any_condjump_p (trial)
3457 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3458 split_branch_probability = INTVAL (XEXP (note, 0));
3459 probability = split_branch_probability;
3460
3461 seq = split_insns (pat, trial);
3462
3463 split_branch_probability = -1;
3464
3465 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3466 We may need to handle this specially. */
3467 if (after && BARRIER_P (after))
3468 {
3469 has_barrier = 1;
3470 after = NEXT_INSN (after);
3471 }
3472
3473 if (!seq)
3474 return trial;
3475
3476 /* Avoid infinite loop if any insn of the result matches
3477 the original pattern. */
3478 insn_last = seq;
3479 while (1)
3480 {
3481 if (INSN_P (insn_last)
3482 && rtx_equal_p (PATTERN (insn_last), pat))
3483 return trial;
3484 if (!NEXT_INSN (insn_last))
3485 break;
3486 insn_last = NEXT_INSN (insn_last);
3487 }
3488
3489 /* We will be adding the new sequence to the function. The splitters
3490 may have introduced invalid RTL sharing, so unshare the sequence now. */
3491 unshare_all_rtl_in_chain (seq);
3492
3493 /* Mark labels. */
3494 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3495 {
3496 if (JUMP_P (insn))
3497 {
3498 mark_jump_label (PATTERN (insn), insn, 0);
3499 njumps++;
3500 if (probability != -1
3501 && any_condjump_p (insn)
3502 && !find_reg_note (insn, REG_BR_PROB, 0))
3503 {
3504 /* We can preserve the REG_BR_PROB notes only if exactly
3505 one jump is created, otherwise the machine description
3506 is responsible for this step using
3507 the split_branch_probability variable. */
3508 gcc_assert (njumps == 1);
3509 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3510 }
3511 }
3512 }
3513
3514 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3515 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3516 if (CALL_P (trial))
3517 {
3518 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3519 if (CALL_P (insn))
3520 {
3521 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3522 while (*p)
3523 p = &XEXP (*p, 1);
3524 *p = CALL_INSN_FUNCTION_USAGE (trial);
3525 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3526
3527 /* Update the debug information for the CALL_INSN. */
3528 if (flag_enable_icf_debug)
3529 (*debug_hooks->copy_call_info) (trial, insn);
3530 }
3531 }
3532
3533 /* Copy notes, particularly those related to the CFG. */
3534 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3535 {
3536 switch (REG_NOTE_KIND (note))
3537 {
3538 case REG_EH_REGION:
3539 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3540 break;
3541
3542 case REG_NORETURN:
3543 case REG_SETJMP:
3544 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3545 {
3546 if (CALL_P (insn))
3547 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3548 }
3549 break;
3550
3551 case REG_NON_LOCAL_GOTO:
3552 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3553 {
3554 if (JUMP_P (insn))
3555 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3556 }
3557 break;
3558
3559 #ifdef AUTO_INC_DEC
3560 case REG_INC:
3561 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3562 {
3563 rtx reg = XEXP (note, 0);
3564 if (!FIND_REG_INC_NOTE (insn, reg)
3565 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3566 add_reg_note (insn, REG_INC, reg);
3567 }
3568 break;
3569 #endif
3570
3571 default:
3572 break;
3573 }
3574 }
3575
3576 /* If there are LABELS inside the split insns, increment the
3577 usage count so we don't delete the label. */
3578 if (INSN_P (trial))
3579 {
3580 insn = insn_last;
3581 while (insn != NULL_RTX)
3582 {
3583 /* JUMP_P insns have already been "marked" above. */
3584 if (NONJUMP_INSN_P (insn))
3585 mark_label_nuses (PATTERN (insn));
3586
3587 insn = PREV_INSN (insn);
3588 }
3589 }
3590
3591 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3592
3593 delete_insn (trial);
3594 if (has_barrier)
3595 emit_barrier_after (tem);
3596
3597 /* Recursively call try_split for each new insn created; by the
3598 time control returns here that insn will be fully split, so
3599 set LAST and continue from the insn after the one returned.
3600 We can't use next_active_insn here since AFTER may be a note.
3601 Ignore deleted insns, which can occur if not optimizing. */
3602 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3603 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3604 tem = try_split (PATTERN (tem), tem, 1);
3605
3606 /* Return either the first or the last insn, depending on which was
3607 requested. */
3608 return last
3609 ? (after ? PREV_INSN (after) : last_insn)
3610 : NEXT_INSN (before);
3611 }
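
/* Usage sketch: attempting to split every insn in place, roughly what a
   splitting pass might do.  The helper name is hypothetical.  If no
   splitter matches, try_split simply returns the original insn, so the
   loop is safe either way; newly created insns are already split
   recursively by try_split itself.  */
#if 0
static void
example_split_all (void)
{
  rtx insn, next;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	try_split (PATTERN (insn), insn, 1);
    }
}
#endif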
3612 \f
3613 /* Make and return an INSN rtx, initializing all its slots.
3614 Store PATTERN in the pattern slot. */
3615
3616 rtx
3617 make_insn_raw (rtx pattern)
3618 {
3619 rtx insn;
3620
3621 insn = rtx_alloc (INSN);
3622
3623 INSN_UID (insn) = cur_insn_uid++;
3624 PATTERN (insn) = pattern;
3625 INSN_CODE (insn) = -1;
3626 REG_NOTES (insn) = NULL;
3627 INSN_LOCATOR (insn) = curr_insn_locator ();
3628 BLOCK_FOR_INSN (insn) = NULL;
3629
3630 #ifdef ENABLE_RTL_CHECKING
3631 if (insn
3632 && INSN_P (insn)
3633 && (returnjump_p (insn)
3634 || (GET_CODE (insn) == SET
3635 && SET_DEST (insn) == pc_rtx)))
3636 {
3637 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3638 debug_rtx (insn);
3639 }
3640 #endif
3641
3642 return insn;
3643 }
3644
3645 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3646
3647 rtx
3648 make_debug_insn_raw (rtx pattern)
3649 {
3650 rtx insn;
3651
3652 insn = rtx_alloc (DEBUG_INSN);
3653 INSN_UID (insn) = cur_debug_insn_uid++;
3654 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3655 INSN_UID (insn) = cur_insn_uid++;
3656
3657 PATTERN (insn) = pattern;
3658 INSN_CODE (insn) = -1;
3659 REG_NOTES (insn) = NULL;
3660 INSN_LOCATOR (insn) = curr_insn_locator ();
3661 BLOCK_FOR_INSN (insn) = NULL;
3662
3663 return insn;
3664 }
3665
3666 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3667
3668 rtx
3669 make_jump_insn_raw (rtx pattern)
3670 {
3671 rtx insn;
3672
3673 insn = rtx_alloc (JUMP_INSN);
3674 INSN_UID (insn) = cur_insn_uid++;
3675
3676 PATTERN (insn) = pattern;
3677 INSN_CODE (insn) = -1;
3678 REG_NOTES (insn) = NULL;
3679 JUMP_LABEL (insn) = NULL;
3680 INSN_LOCATOR (insn) = curr_insn_locator ();
3681 BLOCK_FOR_INSN (insn) = NULL;
3682
3683 return insn;
3684 }
3685
3686 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3687
3688 static rtx
3689 make_call_insn_raw (rtx pattern)
3690 {
3691 rtx insn;
3692
3693 insn = rtx_alloc (CALL_INSN);
3694 INSN_UID (insn) = cur_insn_uid++;
3695
3696 PATTERN (insn) = pattern;
3697 INSN_CODE (insn) = -1;
3698 REG_NOTES (insn) = NULL;
3699 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3700 INSN_LOCATOR (insn) = curr_insn_locator ();
3701 BLOCK_FOR_INSN (insn) = NULL;
3702
3703 return insn;
3704 }
3705 \f
3706 /* Add INSN to the end of the doubly-linked list.
3707 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3708
3709 void
3710 add_insn (rtx insn)
3711 {
3712 PREV_INSN (insn) = last_insn;
3713 NEXT_INSN (insn) = 0;
3714
3715 if (NULL != last_insn)
3716 NEXT_INSN (last_insn) = insn;
3717
3718 if (NULL == first_insn)
3719 first_insn = insn;
3720
3721 last_insn = insn;
3722 }
3723
3724 /* Add INSN into the doubly-linked list after insn AFTER. This and
3725 the next should be the only functions called to insert an insn once
3726 delay slots have been filled since only they know how to update a
3727 SEQUENCE. */
3728
3729 void
3730 add_insn_after (rtx insn, rtx after, basic_block bb)
3731 {
3732 rtx next = NEXT_INSN (after);
3733
3734 gcc_assert (!optimize || !INSN_DELETED_P (after));
3735
3736 NEXT_INSN (insn) = next;
3737 PREV_INSN (insn) = after;
3738
3739 if (next)
3740 {
3741 PREV_INSN (next) = insn;
3742 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3743 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3744 }
3745 else if (last_insn == after)
3746 last_insn = insn;
3747 else
3748 {
3749 struct sequence_stack *stack = seq_stack;
3750 /* Scan all pending sequences too. */
3751 for (; stack; stack = stack->next)
3752 if (after == stack->last)
3753 {
3754 stack->last = insn;
3755 break;
3756 }
3757
3758 gcc_assert (stack);
3759 }
3760
3761 if (!BARRIER_P (after)
3762 && !BARRIER_P (insn)
3763 && (bb = BLOCK_FOR_INSN (after)))
3764 {
3765 set_block_for_insn (insn, bb);
3766 if (INSN_P (insn))
3767 df_insn_rescan (insn);
3768 /* Should not happen as first in the BB is always
3769 either NOTE or LABEL. */
3770 if (BB_END (bb) == after
3771 /* Avoid clobbering of structure when creating new BB. */
3772 && !BARRIER_P (insn)
3773 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3774 BB_END (bb) = insn;
3775 }
3776
3777 NEXT_INSN (after) = insn;
3778 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3779 {
3780 rtx sequence = PATTERN (after);
3781 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3782 }
3783 }
3784
3785 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3786 the previous should be the only functions called to insert an insn
3787 once delay slots have been filled since only they know how to
3788 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3789 bb from before. */
3790
3791 void
3792 add_insn_before (rtx insn, rtx before, basic_block bb)
3793 {
3794 rtx prev = PREV_INSN (before);
3795
3796 gcc_assert (!optimize || !INSN_DELETED_P (before));
3797
3798 PREV_INSN (insn) = prev;
3799 NEXT_INSN (insn) = before;
3800
3801 if (prev)
3802 {
3803 NEXT_INSN (prev) = insn;
3804 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3805 {
3806 rtx sequence = PATTERN (prev);
3807 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3808 }
3809 }
3810 else if (first_insn == before)
3811 first_insn = insn;
3812 else
3813 {
3814 struct sequence_stack *stack = seq_stack;
3815 /* Scan all pending sequences too. */
3816 for (; stack; stack = stack->next)
3817 if (before == stack->first)
3818 {
3819 stack->first = insn;
3820 break;
3821 }
3822
3823 gcc_assert (stack);
3824 }
3825
3826 if (!bb
3827 && !BARRIER_P (before)
3828 && !BARRIER_P (insn))
3829 bb = BLOCK_FOR_INSN (before);
3830
3831 if (bb)
3832 {
3833 set_block_for_insn (insn, bb);
3834 if (INSN_P (insn))
3835 df_insn_rescan (insn);
3836 /* Should not happen as first in the BB is always either NOTE or
3837 LABEL. */
3838 gcc_assert (BB_HEAD (bb) != insn
3839 /* Avoid clobbering of structure when creating new BB. */
3840 || BARRIER_P (insn)
3841 || NOTE_INSN_BASIC_BLOCK_P (insn));
3842 }
3843
3844 PREV_INSN (before) = insn;
3845 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3846 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3847 }
3848
3849
3850 /* Replace insn with a deleted instruction note. */
3851
3852 void
3853 set_insn_deleted (rtx insn)
3854 {
3855 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3856 PUT_CODE (insn, NOTE);
3857 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3858 }
3859
3860
3861 /* Remove an insn from its doubly-linked list. This function knows how
3862 to handle sequences. */
3863 void
3864 remove_insn (rtx insn)
3865 {
3866 rtx next = NEXT_INSN (insn);
3867 rtx prev = PREV_INSN (insn);
3868 basic_block bb;
3869
3870 /* Later in the code, the block will be marked dirty. */
3871 df_insn_delete (NULL, INSN_UID (insn));
3872
3873 if (prev)
3874 {
3875 NEXT_INSN (prev) = next;
3876 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3877 {
3878 rtx sequence = PATTERN (prev);
3879 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3880 }
3881 }
3882 else if (first_insn == insn)
3883 first_insn = next;
3884 else
3885 {
3886 struct sequence_stack *stack = seq_stack;
3887 /* Scan all pending sequences too. */
3888 for (; stack; stack = stack->next)
3889 if (insn == stack->first)
3890 {
3891 stack->first = next;
3892 break;
3893 }
3894
3895 gcc_assert (stack);
3896 }
3897
3898 if (next)
3899 {
3900 PREV_INSN (next) = prev;
3901 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3902 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3903 }
3904 else if (last_insn == insn)
3905 last_insn = prev;
3906 else
3907 {
3908 struct sequence_stack *stack = seq_stack;
3909 /* Scan all pending sequences too. */
3910 for (; stack; stack = stack->next)
3911 if (insn == stack->last)
3912 {
3913 stack->last = prev;
3914 break;
3915 }
3916
3917 gcc_assert (stack);
3918 }
3919 if (!BARRIER_P (insn)
3920 && (bb = BLOCK_FOR_INSN (insn)))
3921 {
3922 if (INSN_P (insn))
3923 df_set_bb_dirty (bb);
3924 if (BB_HEAD (bb) == insn)
3925 {
3926 /* Never ever delete the basic block note without deleting the
3927 whole basic block. */
3928 gcc_assert (!NOTE_P (insn));
3929 BB_HEAD (bb) = next;
3930 }
3931 if (BB_END (bb) == insn)
3932 BB_END (bb) = prev;
3933 }
3934 }
3935
3936 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3937
3938 void
3939 add_function_usage_to (rtx call_insn, rtx call_fusage)
3940 {
3941 gcc_assert (call_insn && CALL_P (call_insn));
3942
3943 /* Put the register usage information on the CALL. If there is already
3944 some usage information, put ours at the end. */
3945 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3946 {
3947 rtx link;
3948
3949 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3950 link = XEXP (link, 1))
3951 ;
3952
3953 XEXP (link, 1) = call_fusage;
3954 }
3955 else
3956 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3957 }
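
/* For illustration, a caller might build the usage list out of USE
   expressions and then attach it to the call; some_reg below is just a
   placeholder rtx register, and helpers such as use_reg in expr.c wrap
   this same pattern:

     rtx fusage = NULL_RTX;
     fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                 gen_rtx_USE (VOIDmode, some_reg), fusage);
     add_function_usage_to (call_insn, fusage);  */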
3958
3959 /* Delete all insns made since FROM.
3960 FROM becomes the new last instruction. */
3961
3962 void
3963 delete_insns_since (rtx from)
3964 {
3965 if (from == 0)
3966 first_insn = 0;
3967 else
3968 NEXT_INSN (from) = 0;
3969 last_insn = from;
3970 }
3971
3972 /* This function is deprecated, please use sequences instead.
3973
3974 Move a consecutive bunch of insns to a different place in the chain.
3975 The insns to be moved are those between FROM and TO.
3976 They are moved to a new position after the insn AFTER.
3977 AFTER must not be FROM or TO or any insn in between.
3978
3979 This function does not know about SEQUENCEs and hence should not be
3980 called after delay-slot filling has been done. */
3981
3982 void
3983 reorder_insns_nobb (rtx from, rtx to, rtx after)
3984 {
3985 /* Splice this bunch out of where it is now. */
3986 if (PREV_INSN (from))
3987 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3988 if (NEXT_INSN (to))
3989 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3990 if (last_insn == to)
3991 last_insn = PREV_INSN (from);
3992 if (first_insn == from)
3993 first_insn = NEXT_INSN (to);
3994
3995 /* Make the new neighbors point to it and it to them. */
3996 if (NEXT_INSN (after))
3997 PREV_INSN (NEXT_INSN (after)) = to;
3998
3999 NEXT_INSN (to) = NEXT_INSN (after);
4000 PREV_INSN (from) = after;
4001 NEXT_INSN (after) = from;
4002 if (after == last_insn)
4003 last_insn = to;
4004 }
4005
4006 /* Same as the function above, but take care to update BB boundaries. */
4007 void
4008 reorder_insns (rtx from, rtx to, rtx after)
4009 {
4010 rtx prev = PREV_INSN (from);
4011 basic_block bb, bb2;
4012
4013 reorder_insns_nobb (from, to, after);
4014
4015 if (!BARRIER_P (after)
4016 && (bb = BLOCK_FOR_INSN (after)))
4017 {
4018 rtx x;
4019 df_set_bb_dirty (bb);
4020
4021 if (!BARRIER_P (from)
4022 && (bb2 = BLOCK_FOR_INSN (from)))
4023 {
4024 if (BB_END (bb2) == to)
4025 BB_END (bb2) = prev;
4026 df_set_bb_dirty (bb2);
4027 }
4028
4029 if (BB_END (bb) == after)
4030 BB_END (bb) = to;
4031
4032 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4033 if (!BARRIER_P (x))
4034 df_insn_change_bb (x, bb);
4035 }
4036 }
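
/* An illustrative use (i1, i2 and p are placeholder insns): move the insns
   I1 through I2 so that they instead follow insn P, where P must not lie
   within the I1..I2 range:

     reorder_insns (i1, i2, p);

   The BB boundaries of both the source and destination blocks are updated. */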
4037
4038 \f
4039 /* Emit insn(s) of given code and pattern
4040 at a specified place within the doubly-linked list.
4041
4042 All of the emit_foo global entry points accept an object
4043 X which is either an insn list or a PATTERN of a single
4044 instruction.
4045
4046 There are thus a few canonical ways to generate code and
4047 emit it at a specific place in the instruction stream. For
4048 example, suppose we would like to emit some instructions
4049 before the instruction named SPOT. We might
4050 do it like this:
4051
4052 start_sequence ();
4053 ... emit the new instructions ...
4054 insns_head = get_insns ();
4055 end_sequence ();
4056
4057 emit_insn_before (insns_head, SPOT);
4058
4059 It used to be common to generate SEQUENCE rtl instead, but that
4060 is a relic of the past which no longer occurs. The reason is that
4061 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4062 generated would almost certainly die right after it was created. */
4063
4064 /* Make X be output before the instruction BEFORE. */
4065
4066 rtx
4067 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4068 {
4069 rtx last = before;
4070 rtx insn;
4071
4072 gcc_assert (before);
4073
4074 if (x == NULL_RTX)
4075 return last;
4076
4077 switch (GET_CODE (x))
4078 {
4079 case DEBUG_INSN:
4080 case INSN:
4081 case JUMP_INSN:
4082 case CALL_INSN:
4083 case CODE_LABEL:
4084 case BARRIER:
4085 case NOTE:
4086 insn = x;
4087 while (insn)
4088 {
4089 rtx next = NEXT_INSN (insn);
4090 add_insn_before (insn, before, bb);
4091 last = insn;
4092 insn = next;
4093 }
4094 break;
4095
4096 #ifdef ENABLE_RTL_CHECKING
4097 case SEQUENCE:
4098 gcc_unreachable ();
4099 break;
4100 #endif
4101
4102 default:
4103 last = make_insn_raw (x);
4104 add_insn_before (last, before, bb);
4105 break;
4106 }
4107
4108 return last;
4109 }
4110
4111 /* Make an instruction with body X and code JUMP_INSN
4112 and output it before the instruction BEFORE. */
4113
4114 rtx
4115 emit_jump_insn_before_noloc (rtx x, rtx before)
4116 {
4117 rtx insn, last = NULL_RTX;
4118
4119 gcc_assert (before);
4120
4121 switch (GET_CODE (x))
4122 {
4123 case DEBUG_INSN:
4124 case INSN:
4125 case JUMP_INSN:
4126 case CALL_INSN:
4127 case CODE_LABEL:
4128 case BARRIER:
4129 case NOTE:
4130 insn = x;
4131 while (insn)
4132 {
4133 rtx next = NEXT_INSN (insn);
4134 add_insn_before (insn, before, NULL);
4135 last = insn;
4136 insn = next;
4137 }
4138 break;
4139
4140 #ifdef ENABLE_RTL_CHECKING
4141 case SEQUENCE:
4142 gcc_unreachable ();
4143 break;
4144 #endif
4145
4146 default:
4147 last = make_jump_insn_raw (x);
4148 add_insn_before (last, before, NULL);
4149 break;
4150 }
4151
4152 return last;
4153 }
4154
4155 /* Make an instruction with body X and code CALL_INSN
4156 and output it before the instruction BEFORE. */
4157
4158 rtx
4159 emit_call_insn_before_noloc (rtx x, rtx before)
4160 {
4161 rtx last = NULL_RTX, insn;
4162
4163 gcc_assert (before);
4164
4165 switch (GET_CODE (x))
4166 {
4167 case DEBUG_INSN:
4168 case INSN:
4169 case JUMP_INSN:
4170 case CALL_INSN:
4171 case CODE_LABEL:
4172 case BARRIER:
4173 case NOTE:
4174 insn = x;
4175 while (insn)
4176 {
4177 rtx next = NEXT_INSN (insn);
4178 add_insn_before (insn, before, NULL);
4179 last = insn;
4180 insn = next;
4181 }
4182 break;
4183
4184 #ifdef ENABLE_RTL_CHECKING
4185 case SEQUENCE:
4186 gcc_unreachable ();
4187 break;
4188 #endif
4189
4190 default:
4191 last = make_call_insn_raw (x);
4192 add_insn_before (last, before, NULL);
4193 break;
4194 }
4195
4196 return last;
4197 }
4198
4199 /* Make an instruction with body X and code DEBUG_INSN
4200 and output it before the instruction BEFORE. */
4201
4202 rtx
4203 emit_debug_insn_before_noloc (rtx x, rtx before)
4204 {
4205 rtx last = NULL_RTX, insn;
4206
4207 gcc_assert (before);
4208
4209 switch (GET_CODE (x))
4210 {
4211 case DEBUG_INSN:
4212 case INSN:
4213 case JUMP_INSN:
4214 case CALL_INSN:
4215 case CODE_LABEL:
4216 case BARRIER:
4217 case NOTE:
4218 insn = x;
4219 while (insn)
4220 {
4221 rtx next = NEXT_INSN (insn);
4222 add_insn_before (insn, before, NULL);
4223 last = insn;
4224 insn = next;
4225 }
4226 break;
4227
4228 #ifdef ENABLE_RTL_CHECKING
4229 case SEQUENCE:
4230 gcc_unreachable ();
4231 break;
4232 #endif
4233
4234 default:
4235 last = make_debug_insn_raw (x);
4236 add_insn_before (last, before, NULL);
4237 break;
4238 }
4239
4240 return last;
4241 }
4242
4243 /* Make an insn of code BARRIER
4244 and output it before the insn BEFORE. */
4245
4246 rtx
4247 emit_barrier_before (rtx before)
4248 {
4249 rtx insn = rtx_alloc (BARRIER);
4250
4251 INSN_UID (insn) = cur_insn_uid++;
4252
4253 add_insn_before (insn, before, NULL);
4254 return insn;
4255 }
4256
4257 /* Emit the label LABEL before the insn BEFORE. */
4258
4259 rtx
4260 emit_label_before (rtx label, rtx before)
4261 {
4262 /* This can be called twice for the same label as a result of the
4263 confusion that follows a syntax error! So make it harmless. */
4264 if (INSN_UID (label) == 0)
4265 {
4266 INSN_UID (label) = cur_insn_uid++;
4267 add_insn_before (label, before, NULL);
4268 }
4269
4270 return label;
4271 }
4272
4273 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4274
4275 rtx
4276 emit_note_before (enum insn_note subtype, rtx before)
4277 {
4278 rtx note = rtx_alloc (NOTE);
4279 INSN_UID (note) = cur_insn_uid++;
4280 NOTE_KIND (note) = subtype;
4281 BLOCK_FOR_INSN (note) = NULL;
4282 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4283
4284 add_insn_before (note, before, NULL);
4285 return note;
4286 }
4287 \f
4288 /* Helper for emit_insn_after; handles lists of instructions
4289 efficiently. */
4290
4291 static rtx
4292 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4293 {
4294 rtx last;
4295 rtx after_after;
4296 if (!bb && !BARRIER_P (after))
4297 bb = BLOCK_FOR_INSN (after);
4298
4299 if (bb)
4300 {
4301 df_set_bb_dirty (bb);
4302 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4303 if (!BARRIER_P (last))
4304 {
4305 set_block_for_insn (last, bb);
4306 df_insn_rescan (last);
4307 }
4308 if (!BARRIER_P (last))
4309 {
4310 set_block_for_insn (last, bb);
4311 df_insn_rescan (last);
4312 }
4313 if (BB_END (bb) == after)
4314 BB_END (bb) = last;
4315 }
4316 else
4317 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4318 continue;
4319
4320 after_after = NEXT_INSN (after);
4321
4322 NEXT_INSN (after) = first;
4323 PREV_INSN (first) = after;
4324 NEXT_INSN (last) = after_after;
4325 if (after_after)
4326 PREV_INSN (after_after) = last;
4327
4328 if (after == last_insn)
4329 last_insn = last;
4330
4331 return last;
4332 }
4333
4334 /* Make X be output after the insn AFTER and set the BB of insn. If
4335 BB is NULL, an attempt is made to infer the BB from AFTER. */
4336
4337 rtx
4338 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4339 {
4340 rtx last = after;
4341
4342 gcc_assert (after);
4343
4344 if (x == NULL_RTX)
4345 return last;
4346
4347 switch (GET_CODE (x))
4348 {
4349 case DEBUG_INSN:
4350 case INSN:
4351 case JUMP_INSN:
4352 case CALL_INSN:
4353 case CODE_LABEL:
4354 case BARRIER:
4355 case NOTE:
4356 last = emit_insn_after_1 (x, after, bb);
4357 break;
4358
4359 #ifdef ENABLE_RTL_CHECKING
4360 case SEQUENCE:
4361 gcc_unreachable ();
4362 break;
4363 #endif
4364
4365 default:
4366 last = make_insn_raw (x);
4367 add_insn_after (last, after, bb);
4368 break;
4369 }
4370
4371 return last;
4372 }
4373
4374
4375 /* Make an insn of code JUMP_INSN with body X
4376 and output it after the insn AFTER. */
4377
4378 rtx
4379 emit_jump_insn_after_noloc (rtx x, rtx after)
4380 {
4381 rtx last;
4382
4383 gcc_assert (after);
4384
4385 switch (GET_CODE (x))
4386 {
4387 case DEBUG_INSN:
4388 case INSN:
4389 case JUMP_INSN:
4390 case CALL_INSN:
4391 case CODE_LABEL:
4392 case BARRIER:
4393 case NOTE:
4394 last = emit_insn_after_1 (x, after, NULL);
4395 break;
4396
4397 #ifdef ENABLE_RTL_CHECKING
4398 case SEQUENCE:
4399 gcc_unreachable ();
4400 break;
4401 #endif
4402
4403 default:
4404 last = make_jump_insn_raw (x);
4405 add_insn_after (last, after, NULL);
4406 break;
4407 }
4408
4409 return last;
4410 }
4411
4412 /* Make an instruction with body X and code CALL_INSN
4413 and output it after the instruction AFTER. */
4414
4415 rtx
4416 emit_call_insn_after_noloc (rtx x, rtx after)
4417 {
4418 rtx last;
4419
4420 gcc_assert (after);
4421
4422 switch (GET_CODE (x))
4423 {
4424 case DEBUG_INSN:
4425 case INSN:
4426 case JUMP_INSN:
4427 case CALL_INSN:
4428 case CODE_LABEL:
4429 case BARRIER:
4430 case NOTE:
4431 last = emit_insn_after_1 (x, after, NULL);
4432 break;
4433
4434 #ifdef ENABLE_RTL_CHECKING
4435 case SEQUENCE:
4436 gcc_unreachable ();
4437 break;
4438 #endif
4439
4440 default:
4441 last = make_call_insn_raw (x);
4442 add_insn_after (last, after, NULL);
4443 break;
4444 }
4445
4446 return last;
4447 }
4448
4449 /* Make an instruction with body X and code DEBUG_INSN
4450 and output it after the instruction AFTER. */
4451
4452 rtx
4453 emit_debug_insn_after_noloc (rtx x, rtx after)
4454 {
4455 rtx last;
4456
4457 gcc_assert (after);
4458
4459 switch (GET_CODE (x))
4460 {
4461 case DEBUG_INSN:
4462 case INSN:
4463 case JUMP_INSN:
4464 case CALL_INSN:
4465 case CODE_LABEL:
4466 case BARRIER:
4467 case NOTE:
4468 last = emit_insn_after_1 (x, after, NULL);
4469 break;
4470
4471 #ifdef ENABLE_RTL_CHECKING
4472 case SEQUENCE:
4473 gcc_unreachable ();
4474 break;
4475 #endif
4476
4477 default:
4478 last = make_debug_insn_raw (x);
4479 add_insn_after (last, after, NULL);
4480 break;
4481 }
4482
4483 return last;
4484 }
4485
4486 /* Make an insn of code BARRIER
4487 and output it after the insn AFTER. */
4488
4489 rtx
4490 emit_barrier_after (rtx after)
4491 {
4492 rtx insn = rtx_alloc (BARRIER);
4493
4494 INSN_UID (insn) = cur_insn_uid++;
4495
4496 add_insn_after (insn, after, NULL);
4497 return insn;
4498 }
4499
4500 /* Emit the label LABEL after the insn AFTER. */
4501
4502 rtx
4503 emit_label_after (rtx label, rtx after)
4504 {
4505 /* This can be called twice for the same label
4506 as a result of the confusion that follows a syntax error!
4507 So make it harmless. */
4508 if (INSN_UID (label) == 0)
4509 {
4510 INSN_UID (label) = cur_insn_uid++;
4511 add_insn_after (label, after, NULL);
4512 }
4513
4514 return label;
4515 }
4516
4517 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4518
4519 rtx
4520 emit_note_after (enum insn_note subtype, rtx after)
4521 {
4522 rtx note = rtx_alloc (NOTE);
4523 INSN_UID (note) = cur_insn_uid++;
4524 NOTE_KIND (note) = subtype;
4525 BLOCK_FOR_INSN (note) = NULL;
4526 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4527 add_insn_after (note, after, NULL);
4528 return note;
4529 }
4530 \f
4531 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4532 rtx
4533 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4534 {
4535 rtx last = emit_insn_after_noloc (pattern, after, NULL);
4536
4537 if (pattern == NULL_RTX || !loc)
4538 return last;
4539
4540 after = NEXT_INSN (after);
4541 while (1)
4542 {
4543 if (active_insn_p (after) && !INSN_LOCATOR (after))
4544 INSN_LOCATOR (after) = loc;
4545 if (after == last)
4546 break;
4547 after = NEXT_INSN (after);
4548 }
4549 return last;
4550 }
4551
4552 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4553 rtx
4554 emit_insn_after (rtx pattern, rtx after)
4555 {
4556 rtx prev = after;
4557
4558 while (DEBUG_INSN_P (prev))
4559 prev = PREV_INSN (prev);
4560
4561 if (INSN_P (prev))
4562 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4563 else
4564 return emit_insn_after_noloc (pattern, after, NULL);
4565 }
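
/* For example (illustrative; dest and src stand for previously created
   register rtxes), the following emits a new set right after INSN and,
   when INSN is an ordinary insn, lets the new insn inherit its locator:

     emit_insn_after (gen_rtx_SET (VOIDmode, dest, src), insn);  */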
4566
4567 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4568 rtx
4569 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4570 {
4571 rtx last = emit_jump_insn_after_noloc (pattern, after);
4572
4573 if (pattern == NULL_RTX || !loc)
4574 return last;
4575
4576 after = NEXT_INSN (after);
4577 while (1)
4578 {
4579 if (active_insn_p (after) && !INSN_LOCATOR (after))
4580 INSN_LOCATOR (after) = loc;
4581 if (after == last)
4582 break;
4583 after = NEXT_INSN (after);
4584 }
4585 return last;
4586 }
4587
4588 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4589 rtx
4590 emit_jump_insn_after (rtx pattern, rtx after)
4591 {
4592 rtx prev = after;
4593
4594 while (DEBUG_INSN_P (prev))
4595 prev = PREV_INSN (prev);
4596
4597 if (INSN_P (prev))
4598 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4599 else
4600 return emit_jump_insn_after_noloc (pattern, after);
4601 }
4602
4603 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4604 rtx
4605 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4606 {
4607 rtx last = emit_call_insn_after_noloc (pattern, after);
4608
4609 if (pattern == NULL_RTX || !loc)
4610 return last;
4611
4612 after = NEXT_INSN (after);
4613 while (1)
4614 {
4615 if (active_insn_p (after) && !INSN_LOCATOR (after))
4616 INSN_LOCATOR (after) = loc;
4617 if (after == last)
4618 break;
4619 after = NEXT_INSN (after);
4620 }
4621 return last;
4622 }
4623
4624 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4625 rtx
4626 emit_call_insn_after (rtx pattern, rtx after)
4627 {
4628 rtx prev = after;
4629
4630 while (DEBUG_INSN_P (prev))
4631 prev = PREV_INSN (prev);
4632
4633 if (INSN_P (prev))
4634 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4635 else
4636 return emit_call_insn_after_noloc (pattern, after);
4637 }
4638
4639 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4640 rtx
4641 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4642 {
4643 rtx last = emit_debug_insn_after_noloc (pattern, after);
4644
4645 if (pattern == NULL_RTX || !loc)
4646 return last;
4647
4648 after = NEXT_INSN (after);
4649 while (1)
4650 {
4651 if (active_insn_p (after) && !INSN_LOCATOR (after))
4652 INSN_LOCATOR (after) = loc;
4653 if (after == last)
4654 break;
4655 after = NEXT_INSN (after);
4656 }
4657 return last;
4658 }
4659
4660 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4661 rtx
4662 emit_debug_insn_after (rtx pattern, rtx after)
4663 {
4664 if (INSN_P (after))
4665 return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4666 else
4667 return emit_debug_insn_after_noloc (pattern, after);
4668 }
4669
4670 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4671 rtx
4672 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4673 {
4674 rtx first = PREV_INSN (before);
4675 rtx last = emit_insn_before_noloc (pattern, before, NULL);
4676
4677 if (pattern == NULL_RTX || !loc)
4678 return last;
4679
4680 if (!first)
4681 first = get_insns ();
4682 else
4683 first = NEXT_INSN (first);
4684 while (1)
4685 {
4686 if (active_insn_p (first) && !INSN_LOCATOR (first))
4687 INSN_LOCATOR (first) = loc;
4688 if (first == last)
4689 break;
4690 first = NEXT_INSN (first);
4691 }
4692 return last;
4693 }
4694
4695 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4696 rtx
4697 emit_insn_before (rtx pattern, rtx before)
4698 {
4699 rtx next = before;
4700
4701 while (DEBUG_INSN_P (next))
4702 next = PREV_INSN (next);
4703
4704 if (INSN_P (next))
4705 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4706 else
4707 return emit_insn_before_noloc (pattern, before, NULL);
4708 }
4709
4710 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4711 rtx
4712 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4713 {
4714 rtx first = PREV_INSN (before);
4715 rtx last = emit_jump_insn_before_noloc (pattern, before);
4716
4717 if (pattern == NULL_RTX)
4718 return last;
4719
4720 first = NEXT_INSN (first);
4721 while (1)
4722 {
4723 if (active_insn_p (first) && !INSN_LOCATOR (first))
4724 INSN_LOCATOR (first) = loc;
4725 if (first == last)
4726 break;
4727 first = NEXT_INSN (first);
4728 }
4729 return last;
4730 }
4731
4732 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4733 rtx
4734 emit_jump_insn_before (rtx pattern, rtx before)
4735 {
4736 rtx next = before;
4737
4738 while (DEBUG_INSN_P (next))
4739 next = PREV_INSN (next);
4740
4741 if (INSN_P (next))
4742 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4743 else
4744 return emit_jump_insn_before_noloc (pattern, before);
4745 }
4746
4747 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4748 rtx
4749 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4750 {
4751 rtx first = PREV_INSN (before);
4752 rtx last = emit_call_insn_before_noloc (pattern, before);
4753
4754 if (pattern == NULL_RTX)
4755 return last;
4756
4757 first = NEXT_INSN (first);
4758 while (1)
4759 {
4760 if (active_insn_p (first) && !INSN_LOCATOR (first))
4761 INSN_LOCATOR (first) = loc;
4762 if (first == last)
4763 break;
4764 first = NEXT_INSN (first);
4765 }
4766 return last;
4767 }
4768
4769 /* Like emit_call_insn_before_noloc,
4770 but set INSN_LOCATOR according to BEFORE. */
4771 rtx
4772 emit_call_insn_before (rtx pattern, rtx before)
4773 {
4774 rtx next = before;
4775
4776 while (DEBUG_INSN_P (next))
4777 next = PREV_INSN (next);
4778
4779 if (INSN_P (next))
4780 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4781 else
4782 return emit_call_insn_before_noloc (pattern, before);
4783 }
4784
4785 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4786 rtx
4787 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4788 {
4789 rtx first = PREV_INSN (before);
4790 rtx last = emit_debug_insn_before_noloc (pattern, before);
4791
4792 if (pattern == NULL_RTX)
4793 return last;
4794
4795 first = NEXT_INSN (first);
4796 while (1)
4797 {
4798 if (active_insn_p (first) && !INSN_LOCATOR (first))
4799 INSN_LOCATOR (first) = loc;
4800 if (first == last)
4801 break;
4802 first = NEXT_INSN (first);
4803 }
4804 return last;
4805 }
4806
4807 /* Like emit_debug_insn_before_noloc,
4808 but set INSN_LOCATOR according to BEFORE. */
4809 rtx
4810 emit_debug_insn_before (rtx pattern, rtx before)
4811 {
4812 if (INSN_P (before))
4813 return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4814 else
4815 return emit_debug_insn_before_noloc (pattern, before);
4816 }
4817 \f
4818 /* Take X and emit it at the end of the doubly-linked
4819 INSN list.
4820
4821 Returns the last insn emitted. */
4822
4823 rtx
4824 emit_insn (rtx x)
4825 {
4826 rtx last = last_insn;
4827 rtx insn;
4828
4829 if (x == NULL_RTX)
4830 return last;
4831
4832 switch (GET_CODE (x))
4833 {
4834 case DEBUG_INSN:
4835 case INSN:
4836 case JUMP_INSN:
4837 case CALL_INSN:
4838 case CODE_LABEL:
4839 case BARRIER:
4840 case NOTE:
4841 insn = x;
4842 while (insn)
4843 {
4844 rtx next = NEXT_INSN (insn);
4845 add_insn (insn);
4846 last = insn;
4847 insn = next;
4848 }
4849 break;
4850
4851 #ifdef ENABLE_RTL_CHECKING
4852 case SEQUENCE:
4853 gcc_unreachable ();
4854 break;
4855 #endif
4856
4857 default:
4858 last = make_insn_raw (x);
4859 add_insn (last);
4860 break;
4861 }
4862
4863 return last;
4864 }
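
/* A minimal illustrative use, with dest and src being placeholder registers
   (e.g. obtained from gen_reg_rtx): emit a register copy at the end of the
   chain.  emit_move_insn in expr.c is the usual higher-level way to do this.

     emit_insn (gen_rtx_SET (VOIDmode, dest, src));  */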
4865
4866 /* Make an insn of code DEBUG_INSN with pattern X
4867 and add it to the end of the doubly-linked list. */
4868
4869 rtx
4870 emit_debug_insn (rtx x)
4871 {
4872 rtx last = last_insn;
4873 rtx insn;
4874
4875 if (x == NULL_RTX)
4876 return last;
4877
4878 switch (GET_CODE (x))
4879 {
4880 case DEBUG_INSN:
4881 case INSN:
4882 case JUMP_INSN:
4883 case CALL_INSN:
4884 case CODE_LABEL:
4885 case BARRIER:
4886 case NOTE:
4887 insn = x;
4888 while (insn)
4889 {
4890 rtx next = NEXT_INSN (insn);
4891 add_insn (insn);
4892 last = insn;
4893 insn = next;
4894 }
4895 break;
4896
4897 #ifdef ENABLE_RTL_CHECKING
4898 case SEQUENCE:
4899 gcc_unreachable ();
4900 break;
4901 #endif
4902
4903 default:
4904 last = make_debug_insn_raw (x);
4905 add_insn (last);
4906 break;
4907 }
4908
4909 return last;
4910 }
4911
4912 /* Make an insn of code JUMP_INSN with pattern X
4913 and add it to the end of the doubly-linked list. */
4914
4915 rtx
4916 emit_jump_insn (rtx x)
4917 {
4918 rtx last = NULL_RTX, insn;
4919
4920 switch (GET_CODE (x))
4921 {
4922 case DEBUG_INSN:
4923 case INSN:
4924 case JUMP_INSN:
4925 case CALL_INSN:
4926 case CODE_LABEL:
4927 case BARRIER:
4928 case NOTE:
4929 insn = x;
4930 while (insn)
4931 {
4932 rtx next = NEXT_INSN (insn);
4933 add_insn (insn);
4934 last = insn;
4935 insn = next;
4936 }
4937 break;
4938
4939 #ifdef ENABLE_RTL_CHECKING
4940 case SEQUENCE:
4941 gcc_unreachable ();
4942 break;
4943 #endif
4944
4945 default:
4946 last = make_jump_insn_raw (x);
4947 add_insn (last);
4948 break;
4949 }
4950
4951 return last;
4952 }
4953
4954 /* Make an insn of code CALL_INSN with pattern X
4955 and add it to the end of the doubly-linked list. */
4956
4957 rtx
4958 emit_call_insn (rtx x)
4959 {
4960 rtx insn;
4961
4962 switch (GET_CODE (x))
4963 {
4964 case DEBUG_INSN:
4965 case INSN:
4966 case JUMP_INSN:
4967 case CALL_INSN:
4968 case CODE_LABEL:
4969 case BARRIER:
4970 case NOTE:
4971 insn = emit_insn (x);
4972 break;
4973
4974 #ifdef ENABLE_RTL_CHECKING
4975 case SEQUENCE:
4976 gcc_unreachable ();
4977 break;
4978 #endif
4979
4980 default:
4981 insn = make_call_insn_raw (x);
4982 add_insn (insn);
4983 break;
4984 }
4985
4986 return insn;
4987 }
4988
4989 /* Add the label LABEL to the end of the doubly-linked list. */
4990
4991 rtx
4992 emit_label (rtx label)
4993 {
4994 /* This can be called twice for the same label
4995 as a result of the confusion that follows a syntax error!
4996 So make it harmless. */
4997 if (INSN_UID (label) == 0)
4998 {
4999 INSN_UID (label) = cur_insn_uid++;
5000 add_insn (label);
5001 }
5002 return label;
5003 }
5004
5005 /* Make an insn of code BARRIER
5006 and add it to the end of the doubly-linked list. */
5007
5008 rtx
5009 emit_barrier (void)
5010 {
5011 rtx barrier = rtx_alloc (BARRIER);
5012 INSN_UID (barrier) = cur_insn_uid++;
5013 add_insn (barrier);
5014 return barrier;
5015 }
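
/* Illustrative sketch of the usual idiom for an unconditional jump: emit
   the jump insn and then a barrier, since control never falls through
   (this is roughly what emit_jump in expr.c does; label is a placeholder):

     emit_jump_insn (gen_jump (label));
     emit_barrier ();  */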
5016
5017 /* Emit a copy of note ORIG. */
5018
5019 rtx
5020 emit_note_copy (rtx orig)
5021 {
5022 rtx note;
5023
5024 note = rtx_alloc (NOTE);
5025
5026 INSN_UID (note) = cur_insn_uid++;
5027 NOTE_DATA (note) = NOTE_DATA (orig);
5028 NOTE_KIND (note) = NOTE_KIND (orig);
5029 BLOCK_FOR_INSN (note) = NULL;
5030 add_insn (note);
5031
5032 return note;
5033 }
5034
5035 /* Make an insn of code NOTE and kind KIND
5036 and add it to the end of the doubly-linked list. */
5037
5038 rtx
5039 emit_note (enum insn_note kind)
5040 {
5041 rtx note;
5042
5043 note = rtx_alloc (NOTE);
5044 INSN_UID (note) = cur_insn_uid++;
5045 NOTE_KIND (note) = kind;
5046 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
5047 BLOCK_FOR_INSN (note) = NULL;
5048 add_insn (note);
5049 return note;
5050 }
5051
5052 /* Emit a clobber of lvalue X. */
5053
5054 rtx
5055 emit_clobber (rtx x)
5056 {
5057 /* CONCATs should not appear in the insn stream. */
5058 if (GET_CODE (x) == CONCAT)
5059 {
5060 emit_clobber (XEXP (x, 0));
5061 return emit_clobber (XEXP (x, 1));
5062 }
5063 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5064 }
5065
5066 /* Return a sequence of insns to clobber lvalue X. */
5067
5068 rtx
5069 gen_clobber (rtx x)
5070 {
5071 rtx seq;
5072
5073 start_sequence ();
5074 emit_clobber (x);
5075 seq = get_insns ();
5076 end_sequence ();
5077 return seq;
5078 }
5079
5080 /* Emit a use of rvalue X. */
5081
5082 rtx
5083 emit_use (rtx x)
5084 {
5085 /* CONCATs should not appear in the insn stream. */
5086 if (GET_CODE (x) == CONCAT)
5087 {
5088 emit_use (XEXP (x, 0));
5089 return emit_use (XEXP (x, 1));
5090 }
5091 return emit_insn (gen_rtx_USE (VOIDmode, x));
5092 }
5093
5094 /* Return a sequence of insns to use rvalue X. */
5095
5096 rtx
5097 gen_use (rtx x)
5098 {
5099 rtx seq;
5100
5101 start_sequence ();
5102 emit_use (x);
5103 seq = get_insns ();
5104 end_sequence ();
5105 return seq;
5106 }
5107
5108 /* Cause the next statement to emit a line note even if the line number
5109 has not changed. */
5110
5111 void
5112 force_next_line_note (void)
5113 {
5114 last_location = -1;
5115 }
5116
5117 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5118 note of this type already exists, remove it first. */
5119
5120 rtx
5121 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5122 {
5123 rtx note = find_reg_note (insn, kind, NULL_RTX);
5124
5125 switch (kind)
5126 {
5127 case REG_EQUAL:
5128 case REG_EQUIV:
5129 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5130 has multiple sets (some callers assume single_set
5131 means the insn only has one set, when in fact it
5132 means the insn only has one *useful* set). */
5133 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5134 {
5135 gcc_assert (!note);
5136 return NULL_RTX;
5137 }
5138
5139 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5140 It serves no useful purpose and breaks eliminate_regs. */
5141 if (GET_CODE (datum) == ASM_OPERANDS)
5142 return NULL_RTX;
5143
5144 if (note)
5145 {
5146 XEXP (note, 0) = datum;
5147 df_notes_rescan (insn);
5148 return note;
5149 }
5150 break;
5151
5152 default:
5153 if (note)
5154 {
5155 XEXP (note, 0) = datum;
5156 return note;
5157 }
5158 break;
5159 }
5160
5161 add_reg_note (insn, kind, datum);
5162
5163 switch (kind)
5164 {
5165 case REG_EQUAL:
5166 case REG_EQUIV:
5167 df_notes_rescan (insn);
5168 break;
5169 default:
5170 break;
5171 }
5172
5173 return REG_NOTES (insn);
5174 }
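
/* For instance (illustrative), to record that INSN is known to compute
   the constant 42 so that later passes may exploit the equivalence:

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */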
5175 \f
5176 /* Return an indication of which type of insn should have X as a body.
5177 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5178
5179 static enum rtx_code
5180 classify_insn (rtx x)
5181 {
5182 if (LABEL_P (x))
5183 return CODE_LABEL;
5184 if (GET_CODE (x) == CALL)
5185 return CALL_INSN;
5186 if (GET_CODE (x) == RETURN)
5187 return JUMP_INSN;
5188 if (GET_CODE (x) == SET)
5189 {
5190 if (SET_DEST (x) == pc_rtx)
5191 return JUMP_INSN;
5192 else if (GET_CODE (SET_SRC (x)) == CALL)
5193 return CALL_INSN;
5194 else
5195 return INSN;
5196 }
5197 if (GET_CODE (x) == PARALLEL)
5198 {
5199 int j;
5200 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5201 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5202 return CALL_INSN;
5203 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5204 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5205 return JUMP_INSN;
5206 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5207 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5208 return CALL_INSN;
5209 }
5210 return INSN;
5211 }
5212
5213 /* Emit the rtl pattern X as an appropriate kind of insn.
5214 If X is a label, it is simply added into the insn chain. */
5215
5216 rtx
5217 emit (rtx x)
5218 {
5219 enum rtx_code code = classify_insn (x);
5220
5221 switch (code)
5222 {
5223 case CODE_LABEL:
5224 return emit_label (x);
5225 case INSN:
5226 return emit_insn (x);
5227 case JUMP_INSN:
5228 {
5229 rtx insn = emit_jump_insn (x);
5230 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5231 return emit_barrier ();
5232 return insn;
5233 }
5234 case CALL_INSN:
5235 return emit_call_insn (x);
5236 case DEBUG_INSN:
5237 return emit_debug_insn (x);
5238 default:
5239 gcc_unreachable ();
5240 }
5241 }
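
/* Two illustrative uses of emit (dest, src and label are placeholders):

     emit (gen_rtx_SET (VOIDmode, dest, src));

   is classified as an ordinary INSN, while

     emit (gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_LABEL_REF (VOIDmode, label)));

   is classified as a JUMP_INSN and, being an unconditional jump, is
   followed by a BARRIER. */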
5242 \f
5243 /* Space for free sequence stack entries. */
5244 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5245
5246 /* Begin emitting insns to a sequence. If this sequence will contain
5247 something that might cause the compiler to pop arguments to function
5248 calls (because those pops have previously been deferred; see
5249 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5250 before calling this function. That will ensure that the deferred
5251 pops are not accidentally emitted in the middle of this sequence. */
5252
5253 void
5254 start_sequence (void)
5255 {
5256 struct sequence_stack *tem;
5257
5258 if (free_sequence_stack != NULL)
5259 {
5260 tem = free_sequence_stack;
5261 free_sequence_stack = tem->next;
5262 }
5263 else
5264 tem = GGC_NEW (struct sequence_stack);
5265
5266 tem->next = seq_stack;
5267 tem->first = first_insn;
5268 tem->last = last_insn;
5269
5270 seq_stack = tem;
5271
5272 first_insn = 0;
5273 last_insn = 0;
5274 }
5275
5276 /* Set up the insn chain starting with FIRST as the current sequence,
5277 saving the previously current one. See the documentation for
5278 start_sequence for more information about how to use this function. */
5279
5280 void
5281 push_to_sequence (rtx first)
5282 {
5283 rtx last;
5284
5285 start_sequence ();
5286
5287 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5288
5289 first_insn = first;
5290 last_insn = last;
5291 }
5292
5293 /* Like push_to_sequence, but take the last insn as an argument to avoid
5294 looping through the list. */
5295
5296 void
5297 push_to_sequence2 (rtx first, rtx last)
5298 {
5299 start_sequence ();
5300
5301 first_insn = first;
5302 last_insn = last;
5303 }
5304
5305 /* Set up the outer-level insn chain
5306 as the current sequence, saving the previously current one. */
5307
5308 void
5309 push_topmost_sequence (void)
5310 {
5311 struct sequence_stack *stack, *top = NULL;
5312
5313 start_sequence ();
5314
5315 for (stack = seq_stack; stack; stack = stack->next)
5316 top = stack;
5317
5318 first_insn = top->first;
5319 last_insn = top->last;
5320 }
5321
5322 /* After emitting to the outer-level insn chain, update the outer-level
5323 insn chain, and restore the previous saved state. */
5324
5325 void
5326 pop_topmost_sequence (void)
5327 {
5328 struct sequence_stack *stack, *top = NULL;
5329
5330 for (stack = seq_stack; stack; stack = stack->next)
5331 top = stack;
5332
5333 top->first = first_insn;
5334 top->last = last_insn;
5335
5336 end_sequence ();
5337 }
5338
5339 /* After emitting to a sequence, restore previous saved state.
5340
5341 To get the contents of the sequence just made, you must call
5342 `get_insns' *before* calling here.
5343
5344 If the compiler might have deferred popping arguments while
5345 generating this sequence, and this sequence will not be immediately
5346 inserted into the instruction stream, use do_pending_stack_adjust
5347 before calling get_insns. That will ensure that the deferred
5348 pops are inserted into this sequence, and not into some random
5349 location in the instruction stream. See INHIBIT_DEFER_POP for more
5350 information about deferred popping of arguments. */
5351
5352 void
5353 end_sequence (void)
5354 {
5355 struct sequence_stack *tem = seq_stack;
5356
5357 first_insn = tem->first;
5358 last_insn = tem->last;
5359 seq_stack = tem->next;
5360
5361 memset (tem, 0, sizeof (*tem));
5362 tem->next = free_sequence_stack;
5363 free_sequence_stack = tem;
5364 }
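
/* Illustrative sketch of the whole sequence idiom (dest and src are
   placeholder rtxes).  Note that get_insns must be called before
   end_sequence, as described above:

     rtx seq;
     start_sequence ();
     emit_move_insn (dest, src);
     seq = get_insns ();
     end_sequence ();
     emit_insn (seq);  */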
5365
5366 /* Return 1 if currently emitting into a sequence. */
5367
5368 int
5369 in_sequence_p (void)
5370 {
5371 return seq_stack != 0;
5372 }
5373 \f
5374 /* Put the various virtual registers into REGNO_REG_RTX. */
5375
5376 static void
5377 init_virtual_regs (void)
5378 {
5379 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5380 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5381 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5382 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5383 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5384 }
5385
5386 \f
5387 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5388 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5389 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5390 static int copy_insn_n_scratches;
5391
5392 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5393 copied an ASM_OPERANDS.
5394 In that case, it is the original input-operand vector. */
5395 static rtvec orig_asm_operands_vector;
5396
5397 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5398 copied an ASM_OPERANDS.
5399 In that case, it is the copied input-operand vector. */
5400 static rtvec copy_asm_operands_vector;
5401
5402 /* Likewise for the constraints vector. */
5403 static rtvec orig_asm_constraints_vector;
5404 static rtvec copy_asm_constraints_vector;
5405
5406 /* Recursively create a new copy of an rtx for copy_insn.
5407 This function differs from copy_rtx in that it handles SCRATCHes and
5408 ASM_OPERANDs properly.
5409 Normally, this function is not used directly; use copy_insn as front end.
5410 However, you could first copy an insn pattern with copy_insn and then use
5411 this function afterwards to properly copy any REG_NOTEs containing
5412 SCRATCHes. */
5413
5414 rtx
5415 copy_insn_1 (rtx orig)
5416 {
5417 rtx copy;
5418 int i, j;
5419 RTX_CODE code;
5420 const char *format_ptr;
5421
5422 if (orig == NULL)
5423 return NULL;
5424
5425 code = GET_CODE (orig);
5426
5427 switch (code)
5428 {
5429 case REG:
5430 case CONST_INT:
5431 case CONST_DOUBLE:
5432 case CONST_FIXED:
5433 case CONST_VECTOR:
5434 case SYMBOL_REF:
5435 case CODE_LABEL:
5436 case PC:
5437 case CC0:
5438 return orig;
5439 case CLOBBER:
5440 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5441 return orig;
5442 break;
5443
5444 case SCRATCH:
5445 for (i = 0; i < copy_insn_n_scratches; i++)
5446 if (copy_insn_scratch_in[i] == orig)
5447 return copy_insn_scratch_out[i];
5448 break;
5449
5450 case CONST:
5451 if (shared_const_p (orig))
5452 return orig;
5453 break;
5454
5455 /* A MEM with a constant address is not sharable. The problem is that
5456 the constant address may need to be reloaded. If the mem is shared,
5457 then reloading one copy of this mem will cause all copies to appear
5458 to have been reloaded. */
5459
5460 default:
5461 break;
5462 }
5463
5464 /* Copy the various flags, fields, and other information. We assume
5465 that all fields need copying, and then clear the fields that should
5466 not be copied. That is the sensible default behavior, and forces
5467 us to explicitly document why we are *not* copying a flag. */
5468 copy = shallow_copy_rtx (orig);
5469
5470 /* We do not copy the USED flag, which is used as a mark bit during
5471 walks over the RTL. */
5472 RTX_FLAG (copy, used) = 0;
5473
5474 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5475 if (INSN_P (orig))
5476 {
5477 RTX_FLAG (copy, jump) = 0;
5478 RTX_FLAG (copy, call) = 0;
5479 RTX_FLAG (copy, frame_related) = 0;
5480 }
5481
5482 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5483
5484 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5485 switch (*format_ptr++)
5486 {
5487 case 'e':
5488 if (XEXP (orig, i) != NULL)
5489 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5490 break;
5491
5492 case 'E':
5493 case 'V':
5494 if (XVEC (orig, i) == orig_asm_constraints_vector)
5495 XVEC (copy, i) = copy_asm_constraints_vector;
5496 else if (XVEC (orig, i) == orig_asm_operands_vector)
5497 XVEC (copy, i) = copy_asm_operands_vector;
5498 else if (XVEC (orig, i) != NULL)
5499 {
5500 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5501 for (j = 0; j < XVECLEN (copy, i); j++)
5502 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5503 }
5504 break;
5505
5506 case 't':
5507 case 'w':
5508 case 'i':
5509 case 's':
5510 case 'S':
5511 case 'u':
5512 case '0':
5513 /* These are left unchanged. */
5514 break;
5515
5516 default:
5517 gcc_unreachable ();
5518 }
5519
5520 if (code == SCRATCH)
5521 {
5522 i = copy_insn_n_scratches++;
5523 gcc_assert (i < MAX_RECOG_OPERANDS);
5524 copy_insn_scratch_in[i] = orig;
5525 copy_insn_scratch_out[i] = copy;
5526 }
5527 else if (code == ASM_OPERANDS)
5528 {
5529 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5530 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5531 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5532 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5533 }
5534
5535 return copy;
5536 }
5537
5538 /* Create a new copy of an rtx.
5539 This function differs from copy_rtx in that it handles SCRATCHes and
5540 ASM_OPERANDs properly.
5541 INSN doesn't really have to be a full INSN; it could be just the
5542 pattern. */
5543 rtx
5544 copy_insn (rtx insn)
5545 {
5546 copy_insn_n_scratches = 0;
5547 orig_asm_operands_vector = 0;
5548 orig_asm_constraints_vector = 0;
5549 copy_asm_operands_vector = 0;
5550 copy_asm_constraints_vector = 0;
5551 return copy_insn_1 (insn);
5552 }
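
/* For example, emit_copy_of_insn_after below duplicates an insn's body with

     emit_insn_after (copy_insn (PATTERN (insn)), after);

   which is the typical way this function is used. */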
5553
5554 /* Initialize data structures and variables in this file
5555 before generating rtl for each function. */
5556
5557 void
5558 init_emit (void)
5559 {
5560 first_insn = NULL;
5561 last_insn = NULL;
5562 if (MIN_NONDEBUG_INSN_UID)
5563 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5564 else
5565 cur_insn_uid = 1;
5566 cur_debug_insn_uid = 1;
5567 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5568 last_location = UNKNOWN_LOCATION;
5569 first_label_num = label_num;
5570 seq_stack = NULL;
5571
5572 /* Init the tables that describe all the pseudo regs. */
5573
5574 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5575
5576 crtl->emit.regno_pointer_align
5577 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5578
5579 regno_reg_rtx
5580 = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);
5581
5582 /* Put copies of all the hard registers into regno_reg_rtx. */
5583 memcpy (regno_reg_rtx,
5584 static_regno_reg_rtx,
5585 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5586
5587 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5588 init_virtual_regs ();
5589
5590 /* Indicate that the virtual registers and stack locations are
5591 all pointers. */
5592 REG_POINTER (stack_pointer_rtx) = 1;
5593 REG_POINTER (frame_pointer_rtx) = 1;
5594 REG_POINTER (hard_frame_pointer_rtx) = 1;
5595 REG_POINTER (arg_pointer_rtx) = 1;
5596
5597 REG_POINTER (virtual_incoming_args_rtx) = 1;
5598 REG_POINTER (virtual_stack_vars_rtx) = 1;
5599 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5600 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5601 REG_POINTER (virtual_cfa_rtx) = 1;
5602
5603 #ifdef STACK_BOUNDARY
5604 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5605 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5606 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5607 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5608
5609 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5610 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5611 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5612 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5613 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5614 #endif
5615
5616 #ifdef INIT_EXPANDERS
5617 INIT_EXPANDERS;
5618 #endif
5619 }
5620
5621 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5622
5623 static rtx
5624 gen_const_vector (enum machine_mode mode, int constant)
5625 {
5626 rtx tem;
5627 rtvec v;
5628 int units, i;
5629 enum machine_mode inner;
5630
5631 units = GET_MODE_NUNITS (mode);
5632 inner = GET_MODE_INNER (mode);
5633
5634 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5635
5636 v = rtvec_alloc (units);
5637
5638 /* We need to call this function after we set the scalar const_tiny_rtx
5639 entries. */
5640 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5641
5642 for (i = 0; i < units; ++i)
5643 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5644
5645 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5646 return tem;
5647 }
5648
5649 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5650 when all elements are zero, and the one vector when all elements are one. */
5651 rtx
5652 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5653 {
5654 enum machine_mode inner = GET_MODE_INNER (mode);
5655 int nunits = GET_MODE_NUNITS (mode);
5656 rtx x;
5657 int i;
5658
5659 /* Check to see if all of the elements have the same value. */
5660 x = RTVEC_ELT (v, nunits - 1);
5661 for (i = nunits - 2; i >= 0; i--)
5662 if (RTVEC_ELT (v, i) != x)
5663 break;
5664
5665 /* If the values are all the same, check to see if we can use one of the
5666 standard constant vectors. */
5667 if (i == -1)
5668 {
5669 if (x == CONST0_RTX (inner))
5670 return CONST0_RTX (mode);
5671 else if (x == CONST1_RTX (inner))
5672 return CONST1_RTX (mode);
5673 }
5674
5675 return gen_rtx_raw_CONST_VECTOR (mode, v);
5676 }
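
/* For example (illustrative, assuming the target supports V4SImode), if
   every element of V is const0_rtx then

     gen_rtx_CONST_VECTOR (V4SImode, v)

   simply returns the shared CONST0_RTX (V4SImode) instead of allocating
   a fresh CONST_VECTOR. */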
5677
5678 /* Initialize global register information required by all functions. */
5679
5680 void
5681 init_emit_regs (void)
5682 {
5683 int i;
5684
5685 /* Reset register attributes */
5686 htab_empty (reg_attrs_htab);
5687
5688 /* We need reg_raw_mode, so initialize the modes now. */
5689 init_reg_modes_target ();
5690
5691 /* Assign register numbers to the globally defined register rtx. */
5692 pc_rtx = gen_rtx_PC (VOIDmode);
5693 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5694 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5695 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5696 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5697 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5698 virtual_incoming_args_rtx =
5699 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5700 virtual_stack_vars_rtx =
5701 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5702 virtual_stack_dynamic_rtx =
5703 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5704 virtual_outgoing_args_rtx =
5705 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5706 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5707
5708 /* Initialize RTL for commonly used hard registers. These are
5709 copied into regno_reg_rtx as we begin to compile each function. */
5710 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5711 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5712
5713 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5714 return_address_pointer_rtx
5715 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5716 #endif
5717
5718 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5719 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5720 else
5721 pic_offset_table_rtx = NULL_RTX;
5722 }
5723
5724 /* Create some permanent unique rtl objects shared between all functions. */
5725
5726 void
5727 init_emit_once (void)
5728 {
5729 int i;
5730 enum machine_mode mode;
5731 enum machine_mode double_mode;
5732
5733 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5734 hash tables. */
5735 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5736 const_int_htab_eq, NULL);
5737
5738 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5739 const_double_htab_eq, NULL);
5740
5741 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5742 const_fixed_htab_eq, NULL);
5743
5744 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5745 mem_attrs_htab_eq, NULL);
5746 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5747 reg_attrs_htab_eq, NULL);
5748
5749 /* Compute the word and byte modes. */
5750
5751 byte_mode = VOIDmode;
5752 word_mode = VOIDmode;
5753 double_mode = VOIDmode;
5754
5755 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5756 mode != VOIDmode;
5757 mode = GET_MODE_WIDER_MODE (mode))
5758 {
5759 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5760 && byte_mode == VOIDmode)
5761 byte_mode = mode;
5762
5763 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5764 && word_mode == VOIDmode)
5765 word_mode = mode;
5766 }
5767
5768 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5769 mode != VOIDmode;
5770 mode = GET_MODE_WIDER_MODE (mode))
5771 {
5772 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5773 && double_mode == VOIDmode)
5774 double_mode = mode;
5775 }
5776
5777 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5778
5779 #ifdef INIT_EXPANDERS
5780 /* This is to initialize {init|mark|free}_machine_status before the first
5781 call to push_function_context_to. This is needed by the Chill front
5782 end which calls push_function_context_to before the first call to
5783 init_function_start. */
5784 INIT_EXPANDERS;
5785 #endif
5786
5787 /* Create the unique rtx's for certain rtx codes and operand values. */
5788
5789 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5790 tries to use these variables. */
5791 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5792 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5793 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5794
5795 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5796 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5797 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5798 else
5799 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5800
5801 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5802 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5803 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5804
5805 dconstm1 = dconst1;
5806 dconstm1.sign = 1;
5807
5808 dconsthalf = dconst1;
5809 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5810
5811 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5812 {
5813 const REAL_VALUE_TYPE *const r =
5814 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5815
5816 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5817 mode != VOIDmode;
5818 mode = GET_MODE_WIDER_MODE (mode))
5819 const_tiny_rtx[i][(int) mode] =
5820 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5821
5822 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5823 mode != VOIDmode;
5824 mode = GET_MODE_WIDER_MODE (mode))
5825 const_tiny_rtx[i][(int) mode] =
5826 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5827
5828 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5829
5830 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5831 mode != VOIDmode;
5832 mode = GET_MODE_WIDER_MODE (mode))
5833 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5834
5835 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5836 mode != VOIDmode;
5837 mode = GET_MODE_WIDER_MODE (mode))
5838 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5839 }
5840
5841 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5842 mode != VOIDmode;
5843 mode = GET_MODE_WIDER_MODE (mode))
5844 {
5845 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5846 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5847 }
5848
5849 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5850 mode != VOIDmode;
5851 mode = GET_MODE_WIDER_MODE (mode))
5852 {
5853 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5854 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5855 }
5856
5857 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5858 mode != VOIDmode;
5859 mode = GET_MODE_WIDER_MODE (mode))
5860 {
5861 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5862 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5863 }
5864
5865 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5866 mode != VOIDmode;
5867 mode = GET_MODE_WIDER_MODE (mode))
5868 {
5869 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5870 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5871 }
5872
5873 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5874 mode != VOIDmode;
5875 mode = GET_MODE_WIDER_MODE (mode))
5876 {
5877 FCONST0(mode).data.high = 0;
5878 FCONST0(mode).data.low = 0;
5879 FCONST0(mode).mode = mode;
5880 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5881 FCONST0 (mode), mode);
5882 }
5883
5884 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5885 mode != VOIDmode;
5886 mode = GET_MODE_WIDER_MODE (mode))
5887 {
5888 FCONST0(mode).data.high = 0;
5889 FCONST0(mode).data.low = 0;
5890 FCONST0(mode).mode = mode;
5891 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5892 FCONST0 (mode), mode);
5893 }
5894
5895 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5896 mode != VOIDmode;
5897 mode = GET_MODE_WIDER_MODE (mode))
5898 {
5899 FCONST0(mode).data.high = 0;
5900 FCONST0(mode).data.low = 0;
5901 FCONST0(mode).mode = mode;
5902 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5903 FCONST0 (mode), mode);
5904
5905 /* We store the value 1. */
5906 FCONST1(mode).data.high = 0;
5907 FCONST1(mode).data.low = 0;
5908 FCONST1(mode).mode = mode;
5909 lshift_double (1, 0, GET_MODE_FBIT (mode),
5910 2 * HOST_BITS_PER_WIDE_INT,
5911 &FCONST1(mode).data.low,
5912 &FCONST1(mode).data.high,
5913 SIGNED_FIXED_POINT_MODE_P (mode));
5914 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5915 FCONST1 (mode), mode);
5916 }
5917
5918 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5919 mode != VOIDmode;
5920 mode = GET_MODE_WIDER_MODE (mode))
5921 {
5922 FCONST0(mode).data.high = 0;
5923 FCONST0(mode).data.low = 0;
5924 FCONST0(mode).mode = mode;
5925 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5926 FCONST0 (mode), mode);
5927
5928 /* We store the value 1. */
5929 FCONST1(mode).data.high = 0;
5930 FCONST1(mode).data.low = 0;
5931 FCONST1(mode).mode = mode;
5932 lshift_double (1, 0, GET_MODE_FBIT (mode),
5933 2 * HOST_BITS_PER_WIDE_INT,
5934 &FCONST1(mode).data.low,
5935 &FCONST1(mode).data.high,
5936 SIGNED_FIXED_POINT_MODE_P (mode));
5937 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5938 FCONST1 (mode), mode);
5939 }
5940
5941 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5942 mode != VOIDmode;
5943 mode = GET_MODE_WIDER_MODE (mode))
5944 {
5945 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5946 }
5947
5948 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5949 mode != VOIDmode;
5950 mode = GET_MODE_WIDER_MODE (mode))
5951 {
5952 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5953 }
5954
5955 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5956 mode != VOIDmode;
5957 mode = GET_MODE_WIDER_MODE (mode))
5958 {
5959 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5960 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5961 }
5962
5963 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5964 mode != VOIDmode;
5965 mode = GET_MODE_WIDER_MODE (mode))
5966 {
5967 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5968 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5969 }
5970
5971 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5972 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5973 const_tiny_rtx[0][i] = const0_rtx;
5974
5975 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5976 if (STORE_FLAG_VALUE == 1)
5977 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5978 }
5979 \f
5980 /* Produce an exact duplicate of insn INSN after AFTER.
5981 Take care to update libcall regions if present. */
5982
5983 rtx
5984 emit_copy_of_insn_after (rtx insn, rtx after)
5985 {
5986 rtx new_rtx, link;
5987
5988 switch (GET_CODE (insn))
5989 {
5990 case INSN:
5991 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5992 break;
5993
5994 case JUMP_INSN:
5995 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5996 break;
5997
5998 case DEBUG_INSN:
5999 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6000 break;
6001
6002 case CALL_INSN:
6003 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6004 if (CALL_INSN_FUNCTION_USAGE (insn))
6005 CALL_INSN_FUNCTION_USAGE (new_rtx)
6006 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6007 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6008 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6009 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6010 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6011 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6012 break;
6013
6014 default:
6015 gcc_unreachable ();
6016 }
6017
6018 /* Update LABEL_NUSES. */
6019 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6020
6021 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
6022
6023 /* If the old insn is frame related, then so is the new one. This is
6024 primarily needed for IA-64 unwind info which marks epilogue insns,
6025 which may be duplicated by the basic block reordering code. */
6026 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6027
6028 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6029 will make them. REG_LABEL_TARGETs are created there too, but are
6030 supposed to be sticky, so we copy them. */
6031 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6032 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6033 {
6034 if (GET_CODE (link) == EXPR_LIST)
6035 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6036 copy_insn_1 (XEXP (link, 0)));
6037 else
6038 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
6039 }
6040
6041 INSN_CODE (new_rtx) = INSN_CODE (insn);
6042 return new_rtx;
6043 }
6044
6045 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6046 rtx
6047 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6048 {
6049 if (hard_reg_clobbers[mode][regno])
6050 return hard_reg_clobbers[mode][regno];
6051 else
6052 return (hard_reg_clobbers[mode][regno] =
6053 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6054 }
6055
6056 #include "gt-emit-rtl.h"