1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "toplev.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "hashtab.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "real.h"
53 #include "bitmap.h"
54 #include "basic-block.h"
55 #include "ggc.h"
56 #include "debug.h"
57 #include "langhooks.h"
58
59 /* Commonly used modes. */
60
61 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
62 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
63 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
64 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
65
66
67 /* This is *not* reset after each function. It gives each CODE_LABEL
68 in the entire compilation a unique label number. */
69
70 static GTY(()) int label_num = 1;
71
72 /* Nonzero means do not generate NOTEs for source line numbers. */
73
74 static int no_line_numbers;
75
76 /* Commonly used rtx's, so that we only need space for one copy.
77 These are initialized once for the entire compilation.
78 All of these are unique; no other rtx-object will be equal to any
79 of these. */
80
81 rtx global_rtl[GR_MAX];
82
83 /* Commonly used RTL for hard registers. These objects are not necessarily
84 unique, so we allocate them separately from global_rtl. They are
85 initialized once per compilation unit, then copied into regno_reg_rtx
86 at the beginning of each function. */
87 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
88
89 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
90 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
91 record a copy of const[012]_rtx. */
92
93 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
94
95 rtx const_true_rtx;
96
97 REAL_VALUE_TYPE dconst0;
98 REAL_VALUE_TYPE dconst1;
99 REAL_VALUE_TYPE dconst2;
100 REAL_VALUE_TYPE dconst3;
101 REAL_VALUE_TYPE dconst10;
102 REAL_VALUE_TYPE dconstm1;
103 REAL_VALUE_TYPE dconstm2;
104 REAL_VALUE_TYPE dconsthalf;
105 REAL_VALUE_TYPE dconstthird;
106 REAL_VALUE_TYPE dconstpi;
107 REAL_VALUE_TYPE dconste;
108
109 /* All references to the following fixed hard registers go through
110 these unique rtl objects. On machines where the frame-pointer and
111 arg-pointer are the same register, they use the same unique object.
112
113 After register allocation, other rtl objects which used to be pseudo-regs
114 may be clobbered to refer to the frame-pointer register.
115 But references that were originally to the frame-pointer can be
116 distinguished from the others because they contain frame_pointer_rtx.
117
118 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
119 tricky: until register elimination has taken place hard_frame_pointer_rtx
120 should be used if it is being set, and frame_pointer_rtx otherwise. After
121 register elimination hard_frame_pointer_rtx should always be used.
   122     On machines where the two registers are the same (as they are on most
   123     machines), these rtxs are the same.
124
125 In an inline procedure, the stack and frame pointer rtxs may not be
126 used for anything else. */
127 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
128 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
129 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
130
131 /* This is used to implement __builtin_return_address for some machines.
132 See for instance the MIPS port. */
133 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
134
135 /* We make one copy of (const_int C) where C is in
136 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
137 to save space during the compilation and simplify comparisons of
138 integers. */
139
140 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
141
142 /* A hash table storing CONST_INTs whose absolute value is greater
143 than MAX_SAVED_CONST_INT. */
144
145 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
146 htab_t const_int_htab;
147
148 /* A hash table storing memory attribute structures. */
149 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
150 htab_t mem_attrs_htab;
151
152 /* A hash table storing register attribute structures. */
153 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
154 htab_t reg_attrs_htab;
155
156 /* A hash table storing all CONST_DOUBLEs. */
157 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
158 htab_t const_double_htab;
159
160 #define first_insn (cfun->emit->x_first_insn)
161 #define last_insn (cfun->emit->x_last_insn)
162 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
163 #define last_location (cfun->emit->x_last_location)
164 #define first_label_num (cfun->emit->x_first_label_num)
165
166 static rtx make_jump_insn_raw (rtx);
167 static rtx make_call_insn_raw (rtx);
168 static rtx find_line_note (rtx);
169 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
170 static void unshare_all_decls (tree);
171 static void reset_used_decls (tree);
172 static void mark_label_nuses (rtx);
173 static hashval_t const_int_htab_hash (const void *);
174 static int const_int_htab_eq (const void *, const void *);
175 static hashval_t const_double_htab_hash (const void *);
176 static int const_double_htab_eq (const void *, const void *);
177 static rtx lookup_const_double (rtx);
178 static hashval_t mem_attrs_htab_hash (const void *);
179 static int mem_attrs_htab_eq (const void *, const void *);
180 static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
181 enum machine_mode);
182 static hashval_t reg_attrs_htab_hash (const void *);
183 static int reg_attrs_htab_eq (const void *, const void *);
184 static reg_attrs *get_reg_attrs (tree, int);
185 static tree component_ref_for_mem_expr (tree);
186 static rtx gen_const_vector (enum machine_mode, int);
187 static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
188 static void copy_rtx_if_shared_1 (rtx *orig);
189
   190 /* Probability of the conditional branch currently being processed by try_split.
191 Set to -1 otherwise. */
192 int split_branch_probability = -1;
193 \f
   194 /* Returns a hash code for X (which is really a CONST_INT).  */
195
196 static hashval_t
197 const_int_htab_hash (const void *x)
198 {
199 return (hashval_t) INTVAL ((rtx) x);
200 }
201
202 /* Returns nonzero if the value represented by X (which is really a
203 CONST_INT) is the same as that given by Y (which is really a
204 HOST_WIDE_INT *). */
205
206 static int
207 const_int_htab_eq (const void *x, const void *y)
208 {
209 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
210 }
211
212 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
213 static hashval_t
214 const_double_htab_hash (const void *x)
215 {
216 rtx value = (rtx) x;
217 hashval_t h;
218
219 if (GET_MODE (value) == VOIDmode)
220 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
221 else
222 {
223 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
224 /* MODE is used in the comparison, so it should be in the hash. */
225 h ^= GET_MODE (value);
226 }
227 return h;
228 }
229
   230 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   231    is the same as that represented by Y (also really a CONST_DOUBLE).  */
232 static int
233 const_double_htab_eq (const void *x, const void *y)
234 {
235 rtx a = (rtx)x, b = (rtx)y;
236
237 if (GET_MODE (a) != GET_MODE (b))
238 return 0;
239 if (GET_MODE (a) == VOIDmode)
240 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
241 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
242 else
243 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
244 CONST_DOUBLE_REAL_VALUE (b));
245 }
246
   247 /* Returns a hash code for X (which is really a mem_attrs *).  */
248
249 static hashval_t
250 mem_attrs_htab_hash (const void *x)
251 {
252 mem_attrs *p = (mem_attrs *) x;
253
254 return (p->alias ^ (p->align * 1000)
255 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
256 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
257 ^ (size_t) p->expr);
258 }
259
260 /* Returns nonzero if the value represented by X (which is really a
261 mem_attrs *) is the same as that given by Y (which is also really a
262 mem_attrs *). */
263
264 static int
265 mem_attrs_htab_eq (const void *x, const void *y)
266 {
267 mem_attrs *p = (mem_attrs *) x;
268 mem_attrs *q = (mem_attrs *) y;
269
270 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
271 && p->size == q->size && p->align == q->align);
272 }
273
274 /* Allocate a new mem_attrs structure and insert it into the hash table if
275 one identical to it is not already in the table. We are doing this for
276 MEM of mode MODE. */
277
278 static mem_attrs *
279 get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
280 unsigned int align, enum machine_mode mode)
281 {
282 mem_attrs attrs;
283 void **slot;
284
285 /* If everything is the default, we can just return zero.
286 This must match what the corresponding MEM_* macros return when the
287 field is not present. */
288 if (alias == 0 && expr == 0 && offset == 0
289 && (size == 0
290 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
291 && (STRICT_ALIGNMENT && mode != BLKmode
292 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
293 return 0;
294
295 attrs.alias = alias;
296 attrs.expr = expr;
297 attrs.offset = offset;
298 attrs.size = size;
299 attrs.align = align;
300
301 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
302 if (*slot == 0)
303 {
304 *slot = ggc_alloc (sizeof (mem_attrs));
305 memcpy (*slot, &attrs, sizeof (mem_attrs));
306 }
307
308 return *slot;
309 }
310
   311 /* Returns a hash code for X (which is really a reg_attrs *).  */
312
313 static hashval_t
314 reg_attrs_htab_hash (const void *x)
315 {
316 reg_attrs *p = (reg_attrs *) x;
317
318 return ((p->offset * 1000) ^ (long) p->decl);
319 }
320
321 /* Returns nonzero if the value represented by X (which is really a
322 reg_attrs *) is the same as that given by Y (which is also really a
323 reg_attrs *). */
324
325 static int
326 reg_attrs_htab_eq (const void *x, const void *y)
327 {
328 reg_attrs *p = (reg_attrs *) x;
329 reg_attrs *q = (reg_attrs *) y;
330
331 return (p->decl == q->decl && p->offset == q->offset);
332 }
   333 /* Allocate a new reg_attrs structure and insert it into the hash table if
   334    one identical to it is not already in the table.  The attributes describe
   335    a register holding DECL at offset OFFSET.  */
336
337 static reg_attrs *
338 get_reg_attrs (tree decl, int offset)
339 {
340 reg_attrs attrs;
341 void **slot;
342
343 /* If everything is the default, we can just return zero. */
344 if (decl == 0 && offset == 0)
345 return 0;
346
347 attrs.decl = decl;
348 attrs.offset = offset;
349
350 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
351 if (*slot == 0)
352 {
353 *slot = ggc_alloc (sizeof (reg_attrs));
354 memcpy (*slot, &attrs, sizeof (reg_attrs));
355 }
356
357 return *slot;
358 }
359
360 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
361 don't attempt to share with the various global pieces of rtl (such as
362 frame_pointer_rtx). */
363
364 rtx
365 gen_raw_REG (enum machine_mode mode, int regno)
366 {
367 rtx x = gen_rtx_raw_REG (mode, regno);
368 ORIGINAL_REGNO (x) = regno;
369 return x;
370 }
371
372 /* There are some RTL codes that require special attention; the generation
373 functions do the raw handling. If you add to this list, modify
374 special_rtx in gengenrtl.c as well. */
375
376 rtx
377 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
378 {
379 void **slot;
380
381 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
382 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
383
384 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
385 if (const_true_rtx && arg == STORE_FLAG_VALUE)
386 return const_true_rtx;
387 #endif
388
389 /* Look up the CONST_INT in the hash table. */
390 slot = htab_find_slot_with_hash (const_int_htab, &arg,
391 (hashval_t) arg, INSERT);
392 if (*slot == 0)
393 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
394
395 return (rtx) *slot;
396 }
397
398 rtx
399 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
400 {
401 return GEN_INT (trunc_int_for_mode (c, mode));
402 }
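/* Illustrative sketch (hypothetical caller, not part of this file):
   GEN_INT uses the HOST_WIDE_INT argument as-is, while gen_int_mode first
   truncates it to MODE, so the two can differ for out-of-range values:

     rtx a = GEN_INT (0x1ff);               => (const_int 511)
     rtx b = gen_int_mode (0x1ff, QImode);  => (const_int -1) after truncation

   Either way the result is shared: small values come straight from
   const_int_rtx[], larger ones from const_int_htab.  */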
403
404 /* CONST_DOUBLEs might be created from pairs of integers, or from
405 REAL_VALUE_TYPEs. Also, their length is known only at run time,
406 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
407
408 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
409 hash table. If so, return its counterpart; otherwise add it
410 to the hash table and return it. */
411 static rtx
412 lookup_const_double (rtx real)
413 {
414 void **slot = htab_find_slot (const_double_htab, real, INSERT);
415 if (*slot == 0)
416 *slot = real;
417
418 return (rtx) *slot;
419 }
420
421 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
422 VALUE in mode MODE. */
423 rtx
424 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
425 {
426 rtx real = rtx_alloc (CONST_DOUBLE);
427 PUT_MODE (real, mode);
428
429 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
430
431 return lookup_const_double (real);
432 }
433
434 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
435 of ints: I0 is the low-order word and I1 is the high-order word.
436 Do not use this routine for non-integer modes; convert to
437 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
438
439 rtx
440 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
441 {
442 rtx value;
443 unsigned int i;
444
445 if (mode != VOIDmode)
446 {
447 int width;
448
449 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
450 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
451 /* We can get a 0 for an error mark. */
452 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
453 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
454
455 /* We clear out all bits that don't belong in MODE, unless they and
456 our sign bit are all one. So we get either a reasonable negative
457 value or a reasonable unsigned value for this mode. */
458 width = GET_MODE_BITSIZE (mode);
459 if (width < HOST_BITS_PER_WIDE_INT
460 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
461 != ((HOST_WIDE_INT) (-1) << (width - 1))))
462 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
463 else if (width == HOST_BITS_PER_WIDE_INT
464 && ! (i1 == ~0 && i0 < 0))
465 i1 = 0;
466 else
467 /* We should be able to represent this value as a constant. */
468 gcc_assert (width <= 2 * HOST_BITS_PER_WIDE_INT);
469
470 /* If this would be an entire word for the target, but is not for
471 the host, then sign-extend on the host so that the number will
472 look the same way on the host that it would on the target.
473
474 For example, when building a 64 bit alpha hosted 32 bit sparc
475 targeted compiler, then we want the 32 bit unsigned value -1 to be
476 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
477 The latter confuses the sparc backend. */
478
479 if (width < HOST_BITS_PER_WIDE_INT
480 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
481 i0 |= ((HOST_WIDE_INT) (-1) << width);
482
483 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
484 CONST_INT.
485
486 ??? Strictly speaking, this is wrong if we create a CONST_INT for
487 a large unsigned constant with the size of MODE being
488 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
489 in a wider mode. In that case we will mis-interpret it as a
490 negative number.
491
492 Unfortunately, the only alternative is to make a CONST_DOUBLE for
493 any constant in any mode if it is an unsigned constant larger
494 than the maximum signed integer in an int on the host. However,
495 doing this will break everyone that always expects to see a
496 CONST_INT for SImode and smaller.
497
498 We have always been making CONST_INTs in this case, so nothing
499 new is being broken. */
500
501 if (width <= HOST_BITS_PER_WIDE_INT)
502 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
503 }
504
505 /* If this integer fits in one word, return a CONST_INT. */
506 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
507 return GEN_INT (i0);
508
509 /* We use VOIDmode for integers. */
510 value = rtx_alloc (CONST_DOUBLE);
511 PUT_MODE (value, VOIDmode);
512
513 CONST_DOUBLE_LOW (value) = i0;
514 CONST_DOUBLE_HIGH (value) = i1;
515
516 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
517 XWINT (value, i) = 0;
518
519 return lookup_const_double (value);
520 }
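/* Illustrative worked examples (assuming a 64-bit HOST_WIDE_INT; the exact
   modes are hypothetical and target-dependent):

     immed_double_const (5, 0, DImode)    => (const_int 5)
     immed_double_const (-1, -1, DImode)  => (const_int -1)
     immed_double_const (0, 1, TImode)    => (const_double 0 1) with VOIDmode

   Values that fit in a single HOST_WIDE_INT come back as shared CONST_INTs;
   only genuinely two-word values get a CONST_DOUBLE.  */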
521
522 rtx
523 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
524 {
525 /* In case the MD file explicitly references the frame pointer, have
526 all such references point to the same frame pointer. This is
527 used during frame pointer elimination to distinguish the explicit
528 references to these registers from pseudos that happened to be
529 assigned to them.
530
531 If we have eliminated the frame pointer or arg pointer, we will
532 be using it as a normal register, for example as a spill
533 register. In such cases, we might be accessing it in a mode that
534 is not Pmode and therefore cannot use the pre-allocated rtx.
535
536 Also don't do this when we are making new REGs in reload, since
537 we don't want to get confused with the real pointers. */
538
539 if (mode == Pmode && !reload_in_progress)
540 {
541 if (regno == FRAME_POINTER_REGNUM
542 && (!reload_completed || frame_pointer_needed))
543 return frame_pointer_rtx;
544 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
545 if (regno == HARD_FRAME_POINTER_REGNUM
546 && (!reload_completed || frame_pointer_needed))
547 return hard_frame_pointer_rtx;
548 #endif
549 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
550 if (regno == ARG_POINTER_REGNUM)
551 return arg_pointer_rtx;
552 #endif
553 #ifdef RETURN_ADDRESS_POINTER_REGNUM
554 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
555 return return_address_pointer_rtx;
556 #endif
557 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
558 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
559 return pic_offset_table_rtx;
560 if (regno == STACK_POINTER_REGNUM)
561 return stack_pointer_rtx;
562 }
563
564 #if 0
565 /* If the per-function register table has been set up, try to re-use
566 an existing entry in that table to avoid useless generation of RTL.
567
568 This code is disabled for now until we can fix the various backends
569 which depend on having non-shared hard registers in some cases. Long
570 term we want to re-enable this code as it can significantly cut down
571 on the amount of useless RTL that gets generated.
572
573 We'll also need to fix some code that runs after reload that wants to
574 set ORIGINAL_REGNO. */
575
576 if (cfun
577 && cfun->emit
578 && regno_reg_rtx
579 && regno < FIRST_PSEUDO_REGISTER
580 && reg_raw_mode[regno] == mode)
581 return regno_reg_rtx[regno];
582 #endif
583
584 return gen_raw_REG (mode, regno);
585 }
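/* Illustrative sketch of the sharing above (hypothetical fragment): before
   reload, asking for the frame pointer in Pmode hands back the unique
   global object, so pointer comparison is enough:

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
     gcc_assert (fp == frame_pointer_rtx);

   A request in some other mode, or one made while reload is in progress,
   gets a fresh REG from gen_raw_REG instead.  */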
586
587 rtx
588 gen_rtx_MEM (enum machine_mode mode, rtx addr)
589 {
590 rtx rt = gen_rtx_raw_MEM (mode, addr);
591
592 /* This field is not cleared by the mere allocation of the rtx, so
593 we clear it here. */
594 MEM_ATTRS (rt) = 0;
595
596 return rt;
597 }
598
599 /* Generate a memory referring to non-trapping constant memory. */
600
601 rtx
602 gen_const_mem (enum machine_mode mode, rtx addr)
603 {
604 rtx mem = gen_rtx_MEM (mode, addr);
605 MEM_READONLY_P (mem) = 1;
606 MEM_NOTRAP_P (mem) = 1;
607 return mem;
608 }
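/* Illustrative sketch (the symbol name is hypothetical): a reference to
   read-only, non-trapping memory such as a constant-pool entry might be
   built as

     rtx addr = gen_rtx_SYMBOL_REF (Pmode, ".LC0");
     rtx mem  = gen_const_mem (SImode, addr);

   so that MEM_READONLY_P and MEM_NOTRAP_P let later passes hoist or CSE
   the load freely.  */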
609
610 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
611 this construct would be valid, and false otherwise. */
612
613 bool
614 validate_subreg (enum machine_mode omode, enum machine_mode imode,
615 rtx reg, unsigned int offset)
616 {
617 unsigned int isize = GET_MODE_SIZE (imode);
618 unsigned int osize = GET_MODE_SIZE (omode);
619
620 /* All subregs must be aligned. */
621 if (offset % osize != 0)
622 return false;
623
624 /* The subreg offset cannot be outside the inner object. */
625 if (offset >= isize)
626 return false;
627
628 /* ??? This should not be here. Temporarily continue to allow word_mode
629 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
630 Generally, backends are doing something sketchy but it'll take time to
631 fix them all. */
632 if (omode == word_mode)
633 ;
634 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
635 is the culprit here, and not the backends. */
636 else if (osize >= UNITS_PER_WORD && isize >= osize)
637 ;
638 /* Allow component subregs of complex and vector. Though given the below
639 extraction rules, it's not always clear what that means. */
640 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
641 && GET_MODE_INNER (imode) == omode)
642 ;
643 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
644 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
645 represent this. It's questionable if this ought to be represented at
646 all -- why can't this all be hidden in post-reload splitters that make
   647      arbitrary mode changes to the registers themselves.  */
648 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
649 ;
650 /* Subregs involving floating point modes are not allowed to
651 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
652 (subreg:SI (reg:DF) 0) isn't. */
653 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
654 {
655 if (isize != osize)
656 return false;
657 }
658
659 /* Paradoxical subregs must have offset zero. */
660 if (osize > isize)
661 return offset == 0;
662
663 /* This is a normal subreg. Verify that the offset is representable. */
664
665 /* For hard registers, we already have most of these rules collected in
666 subreg_offset_representable_p. */
667 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
668 {
669 unsigned int regno = REGNO (reg);
670
671 #ifdef CANNOT_CHANGE_MODE_CLASS
672 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
673 && GET_MODE_INNER (imode) == omode)
674 ;
675 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
676 return false;
677 #endif
678
679 return subreg_offset_representable_p (regno, imode, offset, omode);
680 }
681
682 /* For pseudo registers, we want most of the same checks. Namely:
   683    If the register is no larger than a word, the subreg must be the lowpart.
684 If the register is larger than a word, the subreg must be the lowpart
685 of a subword. A subreg does *not* perform arbitrary bit extraction.
686 Given that we've already checked mode/offset alignment, we only have
687 to check subword subregs here. */
688 if (osize < UNITS_PER_WORD)
689 {
690 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
691 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
692 if (offset % UNITS_PER_WORD != low_off)
693 return false;
694 }
695 return true;
696 }
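/* Illustrative sketch of the rules above (hypothetical 32-bit target with
   word_mode == SImode, little endian, X a pseudo):

     validate_subreg (QImode, SImode, x, 0)  => true, lowpart byte
     validate_subreg (QImode, SImode, x, 1)  => false, not the lowpart
     validate_subreg (HImode, DFmode, x, 0)  => false, FP subregs may not
                                                change size
     validate_subreg (DImode, SImode, x, 0)  => true, paradoxical at offset 0  */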
697
698 rtx
699 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
700 {
701 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
702 return gen_rtx_raw_SUBREG (mode, reg, offset);
703 }
704
   705 /* Generate a SUBREG representing the least-significant part of REG if MODE
   706    is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */
707
708 rtx
709 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
710 {
711 enum machine_mode inmode;
712
713 inmode = GET_MODE (reg);
714 if (inmode == VOIDmode)
715 inmode = mode;
716 return gen_rtx_SUBREG (mode, reg,
717 subreg_lowpart_offset (mode, inmode));
718 }
719 \f
720 /* gen_rtvec (n, [rt1, ..., rtn])
721 **
722 ** This routine creates an rtvec and stores within it the
723 ** pointers to rtx's which are its arguments.
724 */
725
726 /*VARARGS1*/
727 rtvec
728 gen_rtvec (int n, ...)
729 {
730 int i, save_n;
731 rtx *vector;
732 va_list p;
733
734 va_start (p, n);
735
736 if (n == 0)
737 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
738
739 vector = alloca (n * sizeof (rtx));
740
741 for (i = 0; i < n; i++)
742 vector[i] = va_arg (p, rtx);
743
744 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
745 save_n = n;
746 va_end (p);
747
748 return gen_rtvec_v (save_n, vector);
749 }
750
751 rtvec
752 gen_rtvec_v (int n, rtx *argp)
753 {
754 int i;
755 rtvec rt_val;
756
757 if (n == 0)
758 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
759
760 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
761
762 for (i = 0; i < n; i++)
763 rt_val->elem[i] = *argp++;
764
765 return rt_val;
766 }
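/* Illustrative sketch (SET0 and SET1 are hypothetical rtx values): a caller
   building a two-element PARALLEL can use either the varargs form

     rtvec v   = gen_rtvec (2, set0, set1);
     rtx   par = gen_rtx_PARALLEL (VOIDmode, v);

   or collect the elements into an array first and call gen_rtvec_v.  */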
767 \f
768 /* Generate a REG rtx for a new pseudo register of mode MODE.
769 This pseudo is assigned the next sequential register number. */
770
771 rtx
772 gen_reg_rtx (enum machine_mode mode)
773 {
774 struct function *f = cfun;
775 rtx val;
776
777 /* Don't let anything called after initial flow analysis create new
778 registers. */
779 gcc_assert (!no_new_pseudos);
780
781 if (generating_concat_p
782 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
783 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
784 {
785 /* For complex modes, don't make a single pseudo.
786 Instead, make a CONCAT of two pseudos.
787 This allows noncontiguous allocation of the real and imaginary parts,
788 which makes much better code. Besides, allocating DCmode
789 pseudos overstrains reload on some machines like the 386. */
790 rtx realpart, imagpart;
791 enum machine_mode partmode = GET_MODE_INNER (mode);
792
793 realpart = gen_reg_rtx (partmode);
794 imagpart = gen_reg_rtx (partmode);
795 return gen_rtx_CONCAT (mode, realpart, imagpart);
796 }
797
   798   /* Make sure regno_pointer_align and regno_reg_rtx are large
799 enough to have an element for this pseudo reg number. */
800
801 if (reg_rtx_no == f->emit->regno_pointer_align_length)
802 {
803 int old_size = f->emit->regno_pointer_align_length;
804 char *new;
805 rtx *new1;
806
807 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
808 memset (new + old_size, 0, old_size);
809 f->emit->regno_pointer_align = (unsigned char *) new;
810
811 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
812 old_size * 2 * sizeof (rtx));
813 memset (new1 + old_size, 0, old_size * sizeof (rtx));
814 regno_reg_rtx = new1;
815
816 f->emit->regno_pointer_align_length = old_size * 2;
817 }
818
819 val = gen_raw_REG (mode, reg_rtx_no);
820 regno_reg_rtx[reg_rtx_no++] = val;
821 return val;
822 }
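/* Illustrative sketch: while generating_concat_p is set (as it is during
   initial RTL expansion), a complex pseudo comes back as a CONCAT of two
   fresh pseudos rather than a single register:

     rtx c = gen_reg_rtx (SCmode);   => (concat:SC (reg:SF) (reg:SF))

   which lets the real and imaginary parts be allocated independently.  */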
823
   824 /* Generate a register with the same attributes as REG, but offset by OFFSET.
   825    Do the big-endian correction if needed.  */
826
827 rtx
828 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
829 {
830 rtx new = gen_rtx_REG (mode, regno);
831 tree decl;
832 HOST_WIDE_INT var_size;
833
834 /* PR middle-end/14084
   835      The problem appears when a variable is stored in a larger register
   836      and is later used in the original mode, in some mode in between,
   837      or when some part of the variable is accessed.
838
839 On little endian machines there is no problem because
840 the REG_OFFSET of the start of the variable is the same when
841 accessed in any mode (it is 0).
842
843 However, this is not true on big endian machines.
844 The offset of the start of the variable is different when accessed
845 in different modes.
846 When we are taking a part of the REG we have to change the OFFSET
847 from offset WRT size of mode of REG to offset WRT size of variable.
848
   849      Without the big-endian correction, the resulting REG_OFFSET
   850      would be larger than the size of the DECL.
851
852 Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:
853
854 REG.mode MODE DECL size old offset new offset description
855 DI SI 4 4 0 int32 in SImode
856 DI SI 1 4 0 char in SImode
857 DI QI 1 7 0 char in QImode
858 DI QI 4 5 1 1st element in QImode
859 of char[4]
860 DI HI 4 6 2 1st element in HImode
861 of int16[2]
862
   863      If the size of DECL is equal to or greater than the size of REG,
864 we can't do this correction because the register holds the
865 whole variable or a part of the variable and thus the REG_OFFSET
866 is already correct. */
867
868 decl = REG_EXPR (reg);
869 if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
870 && decl != NULL
871 && offset > 0
872 && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
873 && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
874 && var_size < GET_MODE_SIZE (GET_MODE (reg))))
875 {
876 int offset_le;
877
878 /* Convert machine endian to little endian WRT size of mode of REG. */
879 if (WORDS_BIG_ENDIAN)
880 offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
881 / UNITS_PER_WORD) * UNITS_PER_WORD;
882 else
883 offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
884
885 if (BYTES_BIG_ENDIAN)
886 offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
887 % UNITS_PER_WORD);
888 else
889 offset_le += offset % UNITS_PER_WORD;
890
891 if (offset_le >= var_size)
892 {
   893 	  /* MODE is wider than the variable, so the new reg will cover
   894 	     the whole variable and the resulting OFFSET should be 0.  */
895 offset = 0;
896 }
897 else
898 {
899 /* Convert little endian to machine endian WRT size of variable. */
900 if (WORDS_BIG_ENDIAN)
901 offset = ((var_size - 1 - offset_le)
902 / UNITS_PER_WORD) * UNITS_PER_WORD;
903 else
904 offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;
905
906 if (BYTES_BIG_ENDIAN)
907 offset += ((var_size - 1 - offset_le)
908 % UNITS_PER_WORD);
909 else
910 offset += offset_le % UNITS_PER_WORD;
911 }
912 }
913
914 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
915 REG_OFFSET (reg) + offset);
916 return new;
917 }
918
   919 /* Set the register attributes of REG from the memory reference MEM.  */
920
921 void
922 set_reg_attrs_from_mem (rtx reg, rtx mem)
923 {
924 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
925 REG_ATTRS (reg)
926 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
927 }
928
929 /* Set the register attributes for registers contained in PARM_RTX.
930 Use needed values from memory attributes of MEM. */
931
932 void
933 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
934 {
935 if (REG_P (parm_rtx))
936 set_reg_attrs_from_mem (parm_rtx, mem);
937 else if (GET_CODE (parm_rtx) == PARALLEL)
938 {
939 /* Check for a NULL entry in the first slot, used to indicate that the
940 parameter goes both on the stack and in registers. */
941 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
942 for (; i < XVECLEN (parm_rtx, 0); i++)
943 {
944 rtx x = XVECEXP (parm_rtx, 0, i);
945 if (REG_P (XEXP (x, 0)))
946 REG_ATTRS (XEXP (x, 0))
947 = get_reg_attrs (MEM_EXPR (mem),
948 INTVAL (XEXP (x, 1)));
949 }
950 }
951 }
952
953 /* Assign the RTX X to declaration T. */
954 void
955 set_decl_rtl (tree t, rtx x)
956 {
957 DECL_CHECK (t)->decl.rtl = x;
958
959 if (!x)
960 return;
   961   /* For a register, we maintain the reverse information too.  */
962 if (REG_P (x))
963 REG_ATTRS (x) = get_reg_attrs (t, 0);
964 else if (GET_CODE (x) == SUBREG)
965 REG_ATTRS (SUBREG_REG (x))
966 = get_reg_attrs (t, -SUBREG_BYTE (x));
967 if (GET_CODE (x) == CONCAT)
968 {
969 if (REG_P (XEXP (x, 0)))
970 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
971 if (REG_P (XEXP (x, 1)))
972 REG_ATTRS (XEXP (x, 1))
973 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
974 }
975 if (GET_CODE (x) == PARALLEL)
976 {
977 int i;
978 for (i = 0; i < XVECLEN (x, 0); i++)
979 {
980 rtx y = XVECEXP (x, 0, i);
981 if (REG_P (XEXP (y, 0)))
982 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
983 }
984 }
985 }
986
987 /* Assign the RTX X to parameter declaration T. */
988 void
989 set_decl_incoming_rtl (tree t, rtx x)
990 {
991 DECL_INCOMING_RTL (t) = x;
992
993 if (!x)
994 return;
   995   /* For a register, we maintain the reverse information too.  */
996 if (REG_P (x))
997 REG_ATTRS (x) = get_reg_attrs (t, 0);
998 else if (GET_CODE (x) == SUBREG)
999 REG_ATTRS (SUBREG_REG (x))
1000 = get_reg_attrs (t, -SUBREG_BYTE (x));
1001 if (GET_CODE (x) == CONCAT)
1002 {
1003 if (REG_P (XEXP (x, 0)))
1004 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1005 if (REG_P (XEXP (x, 1)))
1006 REG_ATTRS (XEXP (x, 1))
1007 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1008 }
1009 if (GET_CODE (x) == PARALLEL)
1010 {
1011 int i, start;
1012
1013 /* Check for a NULL entry, used to indicate that the parameter goes
1014 both on the stack and in registers. */
1015 if (XEXP (XVECEXP (x, 0, 0), 0))
1016 start = 0;
1017 else
1018 start = 1;
1019
1020 for (i = start; i < XVECLEN (x, 0); i++)
1021 {
1022 rtx y = XVECEXP (x, 0, i);
1023 if (REG_P (XEXP (y, 0)))
1024 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1025 }
1026 }
1027 }
1028
1029 /* Identify REG (which may be a CONCAT) as a user register. */
1030
1031 void
1032 mark_user_reg (rtx reg)
1033 {
1034 if (GET_CODE (reg) == CONCAT)
1035 {
1036 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1037 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1038 }
1039 else
1040 {
1041 gcc_assert (REG_P (reg));
1042 REG_USERVAR_P (reg) = 1;
1043 }
1044 }
1045
1046 /* Identify REG as a probable pointer register and show its alignment
1047 as ALIGN, if nonzero. */
1048
1049 void
1050 mark_reg_pointer (rtx reg, int align)
1051 {
1052 if (! REG_POINTER (reg))
1053 {
1054 REG_POINTER (reg) = 1;
1055
1056 if (align)
1057 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1058 }
1059 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
  1060     /* We can no longer be sure just how aligned this pointer is.  */
1061 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1062 }
1063
  1064 /* Return 1 plus the largest pseudo reg number used in the current function.  */
1065
1066 int
1067 max_reg_num (void)
1068 {
1069 return reg_rtx_no;
1070 }
1071
1072 /* Return 1 + the largest label number used so far in the current function. */
1073
1074 int
1075 max_label_num (void)
1076 {
1077 return label_num;
1078 }
1079
1080 /* Return first label number used in this function (if any were used). */
1081
1082 int
1083 get_first_label_num (void)
1084 {
1085 return first_label_num;
1086 }
1087
  1088 /* If the rtx for a label was created during the expansion of a nested
  1089    function, then first_label_num won't include this label number.
  1090    Fix this now so that array indices work later.  */
1091
1092 void
1093 maybe_set_first_label_num (rtx x)
1094 {
1095 if (CODE_LABEL_NUMBER (x) < first_label_num)
1096 first_label_num = CODE_LABEL_NUMBER (x);
1097 }
1098 \f
1099 /* Return a value representing some low-order bits of X, where the number
1100 of low-order bits is given by MODE. Note that no conversion is done
  1101    between floating-point and fixed-point values; rather, the bit
1102 representation is returned.
1103
1104 This function handles the cases in common between gen_lowpart, below,
1105 and two variants in cse.c and combine.c. These are the cases that can
1106 be safely handled at all points in the compilation.
1107
1108 If this is not a case we can handle, return 0. */
1109
1110 rtx
1111 gen_lowpart_common (enum machine_mode mode, rtx x)
1112 {
1113 int msize = GET_MODE_SIZE (mode);
1114 int xsize;
1115 int offset = 0;
1116 enum machine_mode innermode;
1117
1118 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1119 so we have to make one up. Yuk. */
1120 innermode = GET_MODE (x);
1121 if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
1122 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1123 else if (innermode == VOIDmode)
1124 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1125
1126 xsize = GET_MODE_SIZE (innermode);
1127
1128 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1129
1130 if (innermode == mode)
1131 return x;
1132
1133 /* MODE must occupy no more words than the mode of X. */
1134 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1135 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1136 return 0;
1137
1138 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1139 if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
1140 return 0;
1141
1142 offset = subreg_lowpart_offset (mode, innermode);
1143
1144 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1145 && (GET_MODE_CLASS (mode) == MODE_INT
1146 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1147 {
1148 /* If we are getting the low-order part of something that has been
1149 sign- or zero-extended, we can either just use the object being
1150 extended or make a narrower extension. If we want an even smaller
1151 piece than the size of the object being extended, call ourselves
1152 recursively.
1153
1154 This case is used mostly by combine and cse. */
1155
1156 if (GET_MODE (XEXP (x, 0)) == mode)
1157 return XEXP (x, 0);
1158 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1159 return gen_lowpart_common (mode, XEXP (x, 0));
1160 else if (msize < xsize)
1161 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1162 }
1163 else if (GET_CODE (x) == SUBREG || REG_P (x)
1164 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1165 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1166 return simplify_gen_subreg (mode, x, innermode, offset);
1167
1168 /* Otherwise, we can't do this. */
1169 return 0;
1170 }
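/* Illustrative sketch of the common cases above (hypothetical 32-bit-word
   target, R an SImode pseudo):

     gen_lowpart_common (QImode, GEN_INT (0x1234))           => (const_int 52),
                                                                the low byte
     gen_lowpart_common (SImode, (zero_extend:DI (reg:SI)))  => the SImode operand
     gen_lowpart_common (DImode, r)                          => 0, DImode needs
                                                                more words than R has  */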
1171 \f
1172 /* Return the constant real or imaginary part (which has mode MODE)
1173 of a complex value X. The IMAGPART_P argument determines whether
  1174    the real or imaginary component should be returned.  This function
1175 returns NULL_RTX if the component isn't a constant. */
1176
1177 static rtx
1178 gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
1179 {
1180 tree decl, part;
1181
1182 if (MEM_P (x)
1183 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
1184 {
1185 decl = SYMBOL_REF_DECL (XEXP (x, 0));
1186 if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
1187 {
1188 part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
1189 if (TREE_CODE (part) == REAL_CST
1190 || TREE_CODE (part) == INTEGER_CST)
1191 return expand_expr (part, NULL_RTX, mode, 0);
1192 }
1193 }
1194 return NULL_RTX;
1195 }
1196
1197 /* Return the real part (which has mode MODE) of a complex value X.
1198 This always comes at the low address in memory. */
1199
1200 rtx
1201 gen_realpart (enum machine_mode mode, rtx x)
1202 {
1203 rtx part;
1204
1205 /* Handle complex constants. */
1206 part = gen_complex_constant_part (mode, x, 0);
1207 if (part != NULL_RTX)
1208 return part;
1209
1210 if (WORDS_BIG_ENDIAN
1211 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1212 && REG_P (x)
1213 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1214 internal_error
1215 ("can't access real part of complex value in hard register");
1216 else if (WORDS_BIG_ENDIAN)
1217 return gen_highpart (mode, x);
1218 else
1219 return gen_lowpart (mode, x);
1220 }
1221
1222 /* Return the imaginary part (which has mode MODE) of a complex value X.
1223 This always comes at the high address in memory. */
1224
1225 rtx
1226 gen_imagpart (enum machine_mode mode, rtx x)
1227 {
1228 rtx part;
1229
1230 /* Handle complex constants. */
1231 part = gen_complex_constant_part (mode, x, 1);
1232 if (part != NULL_RTX)
1233 return part;
1234
1235 if (WORDS_BIG_ENDIAN)
1236 return gen_lowpart (mode, x);
1237 else if (! WORDS_BIG_ENDIAN
1238 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1239 && REG_P (x)
1240 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1241 internal_error
1242 ("can't access imaginary part of complex value in hard register");
1243 else
1244 return gen_highpart (mode, x);
1245 }
1246 \f
1247 rtx
1248 gen_highpart (enum machine_mode mode, rtx x)
1249 {
1250 unsigned int msize = GET_MODE_SIZE (mode);
1251 rtx result;
1252
1253 /* This case loses if X is a subreg. To catch bugs early,
1254 complain if an invalid MODE is used even in other cases. */
1255 gcc_assert (msize <= UNITS_PER_WORD
1256 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1257
1258 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1259 subreg_highpart_offset (mode, GET_MODE (x)));
1260 gcc_assert (result);
1261
1262 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1263 the target if we have a MEM. gen_highpart must return a valid operand,
1264 emitting code if necessary to do so. */
1265 if (MEM_P (result))
1266 {
1267 result = validize_mem (result);
1268 gcc_assert (result);
1269 }
1270
1271 return result;
1272 }
1273
1274 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1275 be VOIDmode constant. */
1276 rtx
1277 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1278 {
1279 if (GET_MODE (exp) != VOIDmode)
1280 {
1281 gcc_assert (GET_MODE (exp) == innermode);
1282 return gen_highpart (outermode, exp);
1283 }
1284 return simplify_gen_subreg (outermode, exp, innermode,
1285 subreg_highpart_offset (outermode, innermode));
1286 }
1287
1288 /* Return offset in bytes to get OUTERMODE low part
1289 of the value in mode INNERMODE stored in memory in target format. */
1290
1291 unsigned int
1292 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1293 {
1294 unsigned int offset = 0;
1295 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1296
1297 if (difference > 0)
1298 {
1299 if (WORDS_BIG_ENDIAN)
1300 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1301 if (BYTES_BIG_ENDIAN)
1302 offset += difference % UNITS_PER_WORD;
1303 }
1304
1305 return offset;
1306 }
1307
1308 /* Return offset in bytes to get OUTERMODE high part
1309 of the value in mode INNERMODE stored in memory in target format. */
1310 unsigned int
1311 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1312 {
1313 unsigned int offset = 0;
1314 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1315
1316 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1317
1318 if (difference > 0)
1319 {
1320 if (! WORDS_BIG_ENDIAN)
1321 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1322 if (! BYTES_BIG_ENDIAN)
1323 offset += difference % UNITS_PER_WORD;
1324 }
1325
1326 return offset;
1327 }
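/* Illustrative worked example: for a DImode value and an SImode part on a
   target with 4-byte words,

     subreg_lowpart_offset  (SImode, DImode) => 0 little endian, 4 big endian
     subreg_highpart_offset (SImode, DImode) => 4 little endian, 0 big endian

   i.e. the lowpart offset always points at the least significant word as it
   is laid out in target memory.  */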
1328
1329 /* Return 1 iff X, assumed to be a SUBREG,
1330 refers to the least significant part of its containing reg.
1331 If X is not a SUBREG, always return 1 (it is its own low part!). */
1332
1333 int
1334 subreg_lowpart_p (rtx x)
1335 {
1336 if (GET_CODE (x) != SUBREG)
1337 return 1;
1338 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1339 return 0;
1340
1341 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1342 == SUBREG_BYTE (x));
1343 }
1344 \f
1345 /* Return subword OFFSET of operand OP.
1346 The word number, OFFSET, is interpreted as the word number starting
1347 at the low-order address. OFFSET 0 is the low-order word if not
1348 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1349
1350 If we cannot extract the required word, we return zero. Otherwise,
1351 an rtx corresponding to the requested word will be returned.
1352
1353 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1354 reload has completed, a valid address will always be returned. After
1355 reload, if a valid address cannot be returned, we return zero.
1356
1357 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1358 it is the responsibility of the caller.
1359
1360 MODE is the mode of OP in case it is a CONST_INT.
1361
1362 ??? This is still rather broken for some cases. The problem for the
1363 moment is that all callers of this thing provide no 'goal mode' to
1364 tell us to work with. This exists because all callers were written
1365 in a word based SUBREG world.
  1366    Most uses of this function can now be replaced by
  1367    simplify_subreg.
1368 */
1369
1370 rtx
1371 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1372 {
1373 if (mode == VOIDmode)
1374 mode = GET_MODE (op);
1375
1376 gcc_assert (mode != VOIDmode);
1377
1378 /* If OP is narrower than a word, fail. */
1379 if (mode != BLKmode
1380 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1381 return 0;
1382
1383 /* If we want a word outside OP, return zero. */
1384 if (mode != BLKmode
1385 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1386 return const0_rtx;
1387
1388 /* Form a new MEM at the requested address. */
1389 if (MEM_P (op))
1390 {
1391 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1392
1393 if (! validate_address)
1394 return new;
1395
1396 else if (reload_completed)
1397 {
1398 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1399 return 0;
1400 }
1401 else
1402 return replace_equiv_address (new, XEXP (new, 0));
1403 }
1404
1405 /* Rest can be handled by simplify_subreg. */
1406 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1407 }
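/* Illustrative sketch (OP a hypothetical DImode pseudo on a target with
   32-bit words):

     operand_subword (op, 0, 1, DImode)  => word 0 as (subreg:SI (reg:DI) 0)
     operand_subword (op, 1, 1, DImode)  => word 1 as (subreg:SI (reg:DI) 4)
     operand_subword (op, 2, 1, DImode)  => const0_rtx, outside OP

   Word 0 is the low-order word unless WORDS_BIG_ENDIAN; for operands
   narrower than a word the function fails and returns 0.  */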
1408
1409 /* Similar to `operand_subword', but never return 0. If we can't extract
1410 the required subword, put OP into a register and try again. If that fails,
1411 abort. We always validate the address in this case.
1412
1413 MODE is the mode of OP, in case it is CONST_INT. */
1414
1415 rtx
1416 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1417 {
1418 rtx result = operand_subword (op, offset, 1, mode);
1419
1420 if (result)
1421 return result;
1422
1423 if (mode != BLKmode && mode != VOIDmode)
1424 {
  1425       /* If this is a register which cannot be accessed by words, copy it
1426 to a pseudo register. */
1427 if (REG_P (op))
1428 op = copy_to_reg (op);
1429 else
1430 op = force_reg (mode, op);
1431 }
1432
1433 result = operand_subword (op, offset, 1, mode);
1434 gcc_assert (result);
1435
1436 return result;
1437 }
1438 \f
1439 /* Given a compare instruction, swap the operands.
1440 A test instruction is changed into a compare of 0 against the operand. */
1441
1442 void
1443 reverse_comparison (rtx insn)
1444 {
1445 rtx body = PATTERN (insn);
1446 rtx comp;
1447
1448 if (GET_CODE (body) == SET)
1449 comp = SET_SRC (body);
1450 else
1451 comp = SET_SRC (XVECEXP (body, 0, 0));
1452
1453 if (GET_CODE (comp) == COMPARE)
1454 {
1455 rtx op0 = XEXP (comp, 0);
1456 rtx op1 = XEXP (comp, 1);
1457 XEXP (comp, 0) = op1;
1458 XEXP (comp, 1) = op0;
1459 }
1460 else
1461 {
1462 rtx new = gen_rtx_COMPARE (VOIDmode,
1463 CONST0_RTX (GET_MODE (comp)), comp);
1464 if (GET_CODE (body) == SET)
1465 SET_SRC (body) = new;
1466 else
1467 SET_SRC (XVECEXP (body, 0, 0)) = new;
1468 }
1469 }
1470 \f
1471 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
  1472    or (2) a component ref of something variable.  Represent the latter with
1473 a NULL expression. */
1474
1475 static tree
1476 component_ref_for_mem_expr (tree ref)
1477 {
1478 tree inner = TREE_OPERAND (ref, 0);
1479
1480 if (TREE_CODE (inner) == COMPONENT_REF)
1481 inner = component_ref_for_mem_expr (inner);
1482 else
1483 {
1484 /* Now remove any conversions: they don't change what the underlying
1485 object is. Likewise for SAVE_EXPR. */
1486 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1487 || TREE_CODE (inner) == NON_LVALUE_EXPR
1488 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1489 || TREE_CODE (inner) == SAVE_EXPR)
1490 inner = TREE_OPERAND (inner, 0);
1491
1492 if (! DECL_P (inner))
1493 inner = NULL_TREE;
1494 }
1495
1496 if (inner == TREE_OPERAND (ref, 0))
1497 return ref;
1498 else
1499 return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1500 TREE_OPERAND (ref, 1), NULL_TREE);
1501 }
1502
  1503 /* Returns 1 if the two MEM_EXPRs can be considered equal,
  1504    and 0 otherwise.  */
1505
1506 int
1507 mem_expr_equal_p (tree expr1, tree expr2)
1508 {
1509 if (expr1 == expr2)
1510 return 1;
1511
1512 if (! expr1 || ! expr2)
1513 return 0;
1514
1515 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1516 return 0;
1517
1518 if (TREE_CODE (expr1) == COMPONENT_REF)
1519 return
1520 mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1521 TREE_OPERAND (expr2, 0))
1522 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1523 TREE_OPERAND (expr2, 1));
1524
1525 if (INDIRECT_REF_P (expr1))
1526 return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1527 TREE_OPERAND (expr2, 0));
1528
1529 /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1530 have been resolved here. */
1531 gcc_assert (DECL_P (expr1));
1532
1533 /* Decls with different pointers can't be equal. */
1534 return 0;
1535 }
1536
  1537 /* Given REF, a MEM, and T, either the type of REF or the expression
1538 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1539 if we are making a new object of this type. BITPOS is nonzero if
1540 there is an offset outstanding on T that will be applied later. */
1541
1542 void
1543 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1544 HOST_WIDE_INT bitpos)
1545 {
1546 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1547 tree expr = MEM_EXPR (ref);
1548 rtx offset = MEM_OFFSET (ref);
1549 rtx size = MEM_SIZE (ref);
1550 unsigned int align = MEM_ALIGN (ref);
1551 HOST_WIDE_INT apply_bitpos = 0;
1552 tree type;
1553
1554 /* It can happen that type_for_mode was given a mode for which there
  1555      is no language-level type, in which case it returns NULL, which
1556 we can see here. */
1557 if (t == NULL_TREE)
1558 return;
1559
1560 type = TYPE_P (t) ? t : TREE_TYPE (t);
1561 if (type == error_mark_node)
1562 return;
1563
1564 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1565 wrong answer, as it assumes that DECL_RTL already has the right alias
1566 info. Callers should not set DECL_RTL until after the call to
1567 set_mem_attributes. */
1568 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1569
1570 /* Get the alias set from the expression or type (perhaps using a
1571 front-end routine) and use it. */
1572 alias = get_alias_set (t);
1573
1574 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1575 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1576 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1577 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (t);
1578
1579 /* If we are making an object of this type, or if this is a DECL, we know
1580 that it is a scalar if the type is not an aggregate. */
1581 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1582 MEM_SCALAR_P (ref) = 1;
1583
1584 /* We can set the alignment from the type if we are making an object,
1585 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1586 if (objectp || TREE_CODE (t) == INDIRECT_REF
1587 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1588 || TYPE_ALIGN_OK (type))
1589 align = MAX (align, TYPE_ALIGN (type));
1590 else
1591 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1592 {
1593 if (integer_zerop (TREE_OPERAND (t, 1)))
1594 /* We don't know anything about the alignment. */
1595 align = BITS_PER_UNIT;
1596 else
1597 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1598 }
1599
1600 /* If the size is known, we can set that. */
1601 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1602 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1603
1604 /* If T is not a type, we may be able to deduce some more information about
1605 the expression. */
1606 if (! TYPE_P (t))
1607 {
1608 tree base = get_base_address (t);
1609 if (base && DECL_P (base)
1610 && TREE_READONLY (base)
1611 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1612 MEM_READONLY_P (ref) = 1;
1613
1614 if (TREE_THIS_VOLATILE (t))
1615 MEM_VOLATILE_P (ref) = 1;
1616
1617 /* Now remove any conversions: they don't change what the underlying
1618 object is. Likewise for SAVE_EXPR. */
1619 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1620 || TREE_CODE (t) == NON_LVALUE_EXPR
1621 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1622 || TREE_CODE (t) == SAVE_EXPR)
1623 t = TREE_OPERAND (t, 0);
1624
1625 /* If this expression can't be addressed (e.g., it contains a reference
1626 to a non-addressable field), show we don't change its alias set. */
1627 if (! can_address_p (t))
1628 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1629
1630 /* If this is a decl, set the attributes of the MEM from it. */
1631 if (DECL_P (t))
1632 {
1633 expr = t;
1634 offset = const0_rtx;
1635 apply_bitpos = bitpos;
1636 size = (DECL_SIZE_UNIT (t)
1637 && host_integerp (DECL_SIZE_UNIT (t), 1)
1638 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1639 align = DECL_ALIGN (t);
1640 }
1641
1642 /* If this is a constant, we know the alignment. */
1643 else if (CONSTANT_CLASS_P (t))
1644 {
1645 align = TYPE_ALIGN (type);
1646 #ifdef CONSTANT_ALIGNMENT
1647 align = CONSTANT_ALIGNMENT (t, align);
1648 #endif
1649 }
1650
1651 /* If this is a field reference and not a bit-field, record it. */
  1652       /* ??? There is some information that can be gleaned from bit-fields,
1653 such as the word offset in the structure that might be modified.
1654 But skip it for now. */
1655 else if (TREE_CODE (t) == COMPONENT_REF
1656 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1657 {
1658 expr = component_ref_for_mem_expr (t);
1659 offset = const0_rtx;
1660 apply_bitpos = bitpos;
1661 /* ??? Any reason the field size would be different than
1662 the size we got from the type? */
1663 }
1664
1665 /* If this is an array reference, look for an outer field reference. */
1666 else if (TREE_CODE (t) == ARRAY_REF)
1667 {
1668 tree off_tree = size_zero_node;
1669 /* We can't modify t, because we use it at the end of the
1670 function. */
1671 tree t2 = t;
1672
1673 do
1674 {
1675 tree index = TREE_OPERAND (t2, 1);
1676 tree low_bound = array_ref_low_bound (t2);
1677 tree unit_size = array_ref_element_size (t2);
1678
1679 /* We assume all arrays have sizes that are a multiple of a byte.
1680 First subtract the lower bound, if any, in the type of the
1681 index, then convert to sizetype and multiply by the size of
1682 the array element. */
1683 if (! integer_zerop (low_bound))
1684 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
1685 index, low_bound));
1686
1687 off_tree = size_binop (PLUS_EXPR,
1688 size_binop (MULT_EXPR, convert (sizetype,
1689 index),
1690 unit_size),
1691 off_tree);
1692 t2 = TREE_OPERAND (t2, 0);
1693 }
1694 while (TREE_CODE (t2) == ARRAY_REF);
1695
1696 if (DECL_P (t2))
1697 {
1698 expr = t2;
1699 offset = NULL;
1700 if (host_integerp (off_tree, 1))
1701 {
1702 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1703 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1704 align = DECL_ALIGN (t2);
1705 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1706 align = aoff;
1707 offset = GEN_INT (ioff);
1708 apply_bitpos = bitpos;
1709 }
1710 }
1711 else if (TREE_CODE (t2) == COMPONENT_REF)
1712 {
1713 expr = component_ref_for_mem_expr (t2);
1714 if (host_integerp (off_tree, 1))
1715 {
1716 offset = GEN_INT (tree_low_cst (off_tree, 1));
1717 apply_bitpos = bitpos;
1718 }
1719 /* ??? Any reason the field size would be different than
1720 the size we got from the type? */
1721 }
1722 else if (flag_argument_noalias > 1
1723 && (INDIRECT_REF_P (t2))
1724 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1725 {
1726 expr = t2;
1727 offset = NULL;
1728 }
1729 }
1730
1731 /* If this is a Fortran indirect argument reference, record the
1732 parameter decl. */
1733 else if (flag_argument_noalias > 1
1734 && (INDIRECT_REF_P (t))
1735 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1736 {
1737 expr = t;
1738 offset = NULL;
1739 }
1740 }
1741
1742 /* If we modified OFFSET based on T, then subtract the outstanding
1743 bit position offset. Similarly, increase the size of the accessed
1744 object to contain the negative offset. */
1745 if (apply_bitpos)
1746 {
1747 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1748 if (size)
1749 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1750 }
1751
1752 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1753 {
1754 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1755 we're overlapping. */
1756 offset = NULL;
1757 expr = NULL;
1758 }
1759
1760 /* Now set the attributes we computed above. */
1761 MEM_ATTRS (ref)
1762 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1763
1764 /* If this is already known to be a scalar or aggregate, we are done. */
1765 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1766 return;
1767
1768 /* If it is a reference into an aggregate, this is part of an aggregate.
1769 Otherwise we don't know. */
1770 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1771 || TREE_CODE (t) == ARRAY_RANGE_REF
1772 || TREE_CODE (t) == BIT_FIELD_REF)
1773 MEM_IN_STRUCT_P (ref) = 1;
1774 }
1775
1776 void
1777 set_mem_attributes (rtx ref, tree t, int objectp)
1778 {
1779 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1780 }
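/* Illustrative usage sketch (not part of the original source): a caller
   that has just built a MEM for a tree object would typically attach the
   tree's attributes like this, passing a nonzero OBJECTP when the MEM
   refers to the object itself rather than through a pointer:

	rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
	set_mem_attributes (mem, exp, 1);

   Here `exp' and `addr' are placeholders for the caller's tree expression
   and address rtx; they are not defined in this file.  */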
1781
1782 /* Set the expr and offset of MEM from REG's register attributes. */
1783
1784 void
1785 set_mem_attrs_from_reg (rtx mem, rtx reg)
1786 {
1787 MEM_ATTRS (mem)
1788 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1789 GEN_INT (REG_OFFSET (reg)),
1790 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1791 }
1792
1793 /* Set the alias set of MEM to SET. */
1794
1795 void
1796 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1797 {
1798 #ifdef ENABLE_CHECKING
1799 /* If the new and old alias sets don't conflict, something is wrong. */
1800 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1801 #endif
1802
1803 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1804 MEM_SIZE (mem), MEM_ALIGN (mem),
1805 GET_MODE (mem));
1806 }
1807
1808 /* Set the alignment of MEM to ALIGN bits. */
1809
1810 void
1811 set_mem_align (rtx mem, unsigned int align)
1812 {
1813 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1814 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1815 GET_MODE (mem));
1816 }
1817
1818 /* Set the expr for MEM to EXPR. */
1819
1820 void
1821 set_mem_expr (rtx mem, tree expr)
1822 {
1823 MEM_ATTRS (mem)
1824 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1825 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1826 }
1827
1828 /* Set the offset of MEM to OFFSET. */
1829
1830 void
1831 set_mem_offset (rtx mem, rtx offset)
1832 {
1833 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1834 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1835 GET_MODE (mem));
1836 }
1837
1838 /* Set the size of MEM to SIZE. */
1839
1840 void
1841 set_mem_size (rtx mem, rtx size)
1842 {
1843 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1844 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1845 GET_MODE (mem));
1846 }
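/* Illustrative usage sketch (not part of the original source): the setter
   functions above are typically called after a MEM has been copied or its
   address rewritten, to record what is known about the new reference, e.g.

	set_mem_align (mem, GET_MODE_ALIGNMENT (SImode));
	set_mem_size (mem, GEN_INT (GET_MODE_SIZE (SImode)));

   Each call rebuilds the shared MEM_ATTRS structure via get_mem_attrs, so
   the individual fields can be updated independently.  */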
1847 \f
1848 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1849 and its address changed to ADDR. (VOIDmode means don't change the mode.
1850 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1851 returned memory location is required to be valid. The memory
1852 attributes are not changed. */
1853
1854 static rtx
1855 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1856 {
1857 rtx new;
1858
1859 gcc_assert (MEM_P (memref));
1860 if (mode == VOIDmode)
1861 mode = GET_MODE (memref);
1862 if (addr == 0)
1863 addr = XEXP (memref, 0);
1864 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1865 && (!validate || memory_address_p (mode, addr)))
1866 return memref;
1867
1868 if (validate)
1869 {
1870 if (reload_in_progress || reload_completed)
1871 gcc_assert (memory_address_p (mode, addr));
1872 else
1873 addr = memory_address (mode, addr);
1874 }
1875
1876 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1877 return memref;
1878
1879 new = gen_rtx_MEM (mode, addr);
1880 MEM_COPY_ATTRIBUTES (new, memref);
1881 return new;
1882 }
1883
1884 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1885 way we are changing MEMREF, so we only preserve the alias set. */
1886
1887 rtx
1888 change_address (rtx memref, enum machine_mode mode, rtx addr)
1889 {
1890 rtx new = change_address_1 (memref, mode, addr, 1), size;
1891 enum machine_mode mmode = GET_MODE (new);
1892 unsigned int align;
1893
1894 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1895 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1896
1897 /* If there are no changes, just return the original memory reference. */
1898 if (new == memref)
1899 {
1900 if (MEM_ATTRS (memref) == 0
1901 || (MEM_EXPR (memref) == NULL
1902 && MEM_OFFSET (memref) == NULL
1903 && MEM_SIZE (memref) == size
1904 && MEM_ALIGN (memref) == align))
1905 return new;
1906
1907 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1908 MEM_COPY_ATTRIBUTES (new, memref);
1909 }
1910
1911 MEM_ATTRS (new)
1912 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1913
1914 return new;
1915 }
1916
1917 /* Return a memory reference like MEMREF, but with its mode changed
1918 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1919 nonzero, the memory address is forced to be valid.
1920 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1921 and caller is responsible for adjusting MEMREF base register. */
1922
1923 rtx
1924 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1925 int validate, int adjust)
1926 {
1927 rtx addr = XEXP (memref, 0);
1928 rtx new;
1929 rtx memoffset = MEM_OFFSET (memref);
1930 rtx size = 0;
1931 unsigned int memalign = MEM_ALIGN (memref);
1932
1933 /* If there are no changes, just return the original memory reference. */
1934 if (mode == GET_MODE (memref) && !offset
1935 && (!validate || memory_address_p (mode, addr)))
1936 return memref;
1937
1938 /* ??? Prefer to create garbage instead of creating shared rtl.
1939 This may happen even if offset is nonzero -- consider
1940 (plus (plus reg reg) const_int) -- so do this always. */
1941 addr = copy_rtx (addr);
1942
1943 if (adjust)
1944 {
1945 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1946 object, we can merge it into the LO_SUM. */
1947 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1948 && offset >= 0
1949 && (unsigned HOST_WIDE_INT) offset
1950 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1951 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1952 plus_constant (XEXP (addr, 1), offset));
1953 else
1954 addr = plus_constant (addr, offset);
1955 }
1956
1957 new = change_address_1 (memref, mode, addr, validate);
1958
1959 /* Compute the new values of the memory attributes due to this adjustment.
1960 We add the offsets and update the alignment. */
1961 if (memoffset)
1962 memoffset = GEN_INT (offset + INTVAL (memoffset));
1963
1964 /* Compute the new alignment by taking the MIN of the alignment and the
1965 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1966 is zero. */
1967 if (offset != 0)
1968 memalign
1969 = MIN (memalign,
1970 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1971
1972 /* We can compute the size in a number of ways. */
1973 if (GET_MODE (new) != BLKmode)
1974 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1975 else if (MEM_SIZE (memref))
1976 size = plus_constant (MEM_SIZE (memref), -offset);
1977
1978 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1979 memoffset, size, memalign, GET_MODE (new));
1980
1981 /* At some point, we should validate that this offset is within the object,
1982 if all the appropriate values are known. */
1983 return new;
1984 }
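/* Illustrative usage sketch (not part of the original source): callers
   normally reach adjust_address_1 through the adjust_address and
   adjust_address_nv wrappers declared in expr.h (VALIDATE of 1 and 0
   respectively, ADJUST of 1).  For example, splitting a double-word MEM
   into its two word-sized halves on a 32-bit target:

	rtx low  = adjust_address (mem, SImode, 0);
	rtx high = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   The offsets here are byte offsets relative to MEMREF's address.  */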
1985
1986 /* Return a memory reference like MEMREF, but with its mode changed
1987 to MODE and its address changed to ADDR, which is assumed to be
1988 MEMREF offset by OFFSET bytes. If VALIDATE is
1989 nonzero, the memory address is forced to be valid. */
1990
1991 rtx
1992 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1993 HOST_WIDE_INT offset, int validate)
1994 {
1995 memref = change_address_1 (memref, VOIDmode, addr, validate);
1996 return adjust_address_1 (memref, mode, offset, validate, 0);
1997 }
1998
1999 /* Return a memory reference like MEMREF, but whose address is changed by
2000 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2001 known to be in OFFSET (possibly 1). */
2002
2003 rtx
2004 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2005 {
2006 rtx new, addr = XEXP (memref, 0);
2007
2008 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2009
2010 /* At this point we don't know _why_ the address is invalid. It
2011 could have secondary memory references, multiplies or anything.
2012
2013 However, if we did go and rearrange things, we can wind up not
2014 being able to recognize the magic around pic_offset_table_rtx.
2015 This stuff is fragile, and is yet another example of why it is
2016 bad to expose PIC machinery too early. */
2017 if (! memory_address_p (GET_MODE (memref), new)
2018 && GET_CODE (addr) == PLUS
2019 && XEXP (addr, 0) == pic_offset_table_rtx)
2020 {
2021 addr = force_reg (GET_MODE (addr), addr);
2022 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2023 }
2024
2025 update_temp_slot_address (XEXP (memref, 0), new);
2026 new = change_address_1 (memref, VOIDmode, new, 1);
2027
2028 /* If there are no changes, just return the original memory reference. */
2029 if (new == memref)
2030 return new;
2031
2032 /* Update the alignment to reflect the offset. Reset the offset, which
2033 we don't know. */
2034 MEM_ATTRS (new)
2035 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2036 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2037 GET_MODE (new));
2038 return new;
2039 }
2040
2041 /* Return a memory reference like MEMREF, but with its address changed to
2042 ADDR. The caller is asserting that the actual piece of memory pointed
2043 to is the same, just the form of the address is being changed, such as
2044 by putting something into a register. */
2045
2046 rtx
2047 replace_equiv_address (rtx memref, rtx addr)
2048 {
2049 /* change_address_1 copies the memory attribute structure without change
2050 and that's exactly what we want here. */
2051 update_temp_slot_address (XEXP (memref, 0), addr);
2052 return change_address_1 (memref, VOIDmode, addr, 1);
2053 }
2054
2055 /* Likewise, but the reference is not required to be valid. */
2056
2057 rtx
2058 replace_equiv_address_nv (rtx memref, rtx addr)
2059 {
2060 return change_address_1 (memref, VOIDmode, addr, 0);
2061 }
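/* Illustrative usage sketch (not part of the original source): a common
   idiom is to copy a MEM's address into a register without losing the
   memory attributes, e.g.

	mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));

   force_reg comes from explow.c; the attributes survive because
   change_address_1 copies them unchanged.  */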
2062
2063 /* Return a memory reference like MEMREF, but with its mode widened to
2064 MODE and offset by OFFSET. This would be used by targets that e.g.
2065 cannot issue QImode memory operations and have to use SImode memory
2066 operations plus masking logic. */
2067
2068 rtx
2069 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2070 {
2071 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2072 tree expr = MEM_EXPR (new);
2073 rtx memoffset = MEM_OFFSET (new);
2074 unsigned int size = GET_MODE_SIZE (mode);
2075
2076 /* If there are no changes, just return the original memory reference. */
2077 if (new == memref)
2078 return new;
2079
2080 /* If we don't know what offset we were at within the expression, then
2081 we can't know if we've overstepped the bounds. */
2082 if (! memoffset)
2083 expr = NULL_TREE;
2084
2085 while (expr)
2086 {
2087 if (TREE_CODE (expr) == COMPONENT_REF)
2088 {
2089 tree field = TREE_OPERAND (expr, 1);
2090 tree offset = component_ref_field_offset (expr);
2091
2092 if (! DECL_SIZE_UNIT (field))
2093 {
2094 expr = NULL_TREE;
2095 break;
2096 }
2097
2098 /* Is the field at least as large as the access? If so, ok,
2099 otherwise strip back to the containing structure. */
2100 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2101 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2102 && INTVAL (memoffset) >= 0)
2103 break;
2104
2105 if (! host_integerp (offset, 1))
2106 {
2107 expr = NULL_TREE;
2108 break;
2109 }
2110
2111 expr = TREE_OPERAND (expr, 0);
2112 memoffset
2113 = (GEN_INT (INTVAL (memoffset)
2114 + tree_low_cst (offset, 1)
2115 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2116 / BITS_PER_UNIT)));
2117 }
2118 /* Similarly for the decl. */
2119 else if (DECL_P (expr)
2120 && DECL_SIZE_UNIT (expr)
2121 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2122 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2123 && (! memoffset || INTVAL (memoffset) >= 0))
2124 break;
2125 else
2126 {
2127 /* The widened memory access overflows the expression, which means
2128 that it could alias another expression. Zap it. */
2129 expr = NULL_TREE;
2130 break;
2131 }
2132 }
2133
2134 if (! expr)
2135 memoffset = NULL_RTX;
2136
2137 /* The widened memory may alias other stuff, so zap the alias set. */
2138 /* ??? Maybe use get_alias_set on any remaining expression. */
2139
2140 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2141 MEM_ALIGN (new), mode);
2142
2143 return new;
2144 }
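/* Illustrative usage sketch (not part of the original source): a target
   that can only issue word-sized loads might widen a byte reference and
   mask out the byte afterwards, roughly

	rtx wide = widen_memory_access (mem, SImode, -byte_offset);

   where `byte_offset' is a placeholder for the byte's position within the
   containing word; the exact offset and the masking are the caller's
   responsibility.  */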
2145 \f
2146 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2147
2148 rtx
2149 gen_label_rtx (void)
2150 {
2151 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2152 NULL, label_num++, NULL);
2153 }
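/* Illustrative usage sketch (not part of the original source): a label
   created here is typically emitted later with emit_label, and may be the
   target of a jump emitted in between, e.g.

	rtx label = gen_label_rtx ();
	emit_jump (label);
	...
	emit_label (label);

   emit_jump lives in expr.c; emit_label is defined later in this file.  */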
2154 \f
2155 /* For procedure integration. */
2156
2157 /* Install new pointers to the first and last insns in the chain.
2158 Also, set cur_insn_uid to one higher than the last in use.
2159 Used for an inline-procedure after copying the insn chain. */
2160
2161 void
2162 set_new_first_and_last_insn (rtx first, rtx last)
2163 {
2164 rtx insn;
2165
2166 first_insn = first;
2167 last_insn = last;
2168 cur_insn_uid = 0;
2169
2170 for (insn = first; insn; insn = NEXT_INSN (insn))
2171 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2172
2173 cur_insn_uid++;
2174 }
2175 \f
2176 /* Go through all the RTL insn bodies and copy any invalid shared
2177 structure. This routine should only be called once. */
2178
2179 static void
2180 unshare_all_rtl_1 (tree fndecl, rtx insn)
2181 {
2182 tree decl;
2183
2184 /* Make sure that virtual parameters are not shared. */
2185 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2186 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2187
2188 /* Make sure that virtual stack slots are not shared. */
2189 unshare_all_decls (DECL_INITIAL (fndecl));
2190
2191 /* Unshare just about everything else. */
2192 unshare_all_rtl_in_chain (insn);
2193
2194 /* Make sure the addresses of stack slots found outside the insn chain
2195 (such as, in DECL_RTL of a variable) are not shared
2196 with the insn chain.
2197
2198 This special care is necessary when the stack slot MEM does not
2199 actually appear in the insn chain. If it does appear, its address
2200 is unshared from all else at that point. */
2201 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2202 }
2203
2204 /* Go through all the RTL insn bodies and copy any invalid shared
2205 structure, again. This is a fairly expensive thing to do so it
2206 should be done sparingly. */
2207
2208 void
2209 unshare_all_rtl_again (rtx insn)
2210 {
2211 rtx p;
2212 tree decl;
2213
2214 for (p = insn; p; p = NEXT_INSN (p))
2215 if (INSN_P (p))
2216 {
2217 reset_used_flags (PATTERN (p));
2218 reset_used_flags (REG_NOTES (p));
2219 reset_used_flags (LOG_LINKS (p));
2220 }
2221
2222 /* Make sure that virtual stack slots are not shared. */
2223 reset_used_decls (DECL_INITIAL (cfun->decl));
2224
2225 /* Make sure that virtual parameters are not shared. */
2226 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2227 reset_used_flags (DECL_RTL (decl));
2228
2229 reset_used_flags (stack_slot_list);
2230
2231 unshare_all_rtl_1 (cfun->decl, insn);
2232 }
2233
2234 void
2235 unshare_all_rtl (void)
2236 {
2237 unshare_all_rtl_1 (current_function_decl, get_insns ());
2238 }
2239
2240 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2241 Recursively does the same for subexpressions. */
2242
2243 static void
2244 verify_rtx_sharing (rtx orig, rtx insn)
2245 {
2246 rtx x = orig;
2247 int i;
2248 enum rtx_code code;
2249 const char *format_ptr;
2250
2251 if (x == 0)
2252 return;
2253
2254 code = GET_CODE (x);
2255
2256 /* These types may be freely shared. */
2257
2258 switch (code)
2259 {
2260 case REG:
2261 case CONST_INT:
2262 case CONST_DOUBLE:
2263 case CONST_VECTOR:
2264 case SYMBOL_REF:
2265 case LABEL_REF:
2266 case CODE_LABEL:
2267 case PC:
2268 case CC0:
2269 case SCRATCH:
2270 return;
2271 /* SCRATCH must be shared because they represent distinct values. */
2272 case CLOBBER:
2273 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2274 return;
2275 break;
2276
2277 case CONST:
2278 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2279 a LABEL_REF, it isn't sharable. */
2280 if (GET_CODE (XEXP (x, 0)) == PLUS
2281 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2282 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2283 return;
2284 break;
2285
2286 case MEM:
2287 /* A MEM is allowed to be shared if its address is constant. */
2288 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2289 || reload_completed || reload_in_progress)
2290 return;
2291
2292 break;
2293
2294 default:
2295 break;
2296 }
2297
2298 /* This rtx may not be shared. If it has already been seen,
2299 replace it with a copy of itself. */
2300 #ifdef ENABLE_CHECKING
2301 if (RTX_FLAG (x, used))
2302 {
2303 error ("Invalid rtl sharing found in the insn");
2304 debug_rtx (insn);
2305 error ("Shared rtx");
2306 debug_rtx (x);
2307 internal_error ("Internal consistency failure");
2308 }
2309 #endif
2310 gcc_assert (!RTX_FLAG (x, used));
2311
2312 RTX_FLAG (x, used) = 1;
2313
2314 /* Now scan the subexpressions recursively. */
2315
2316 format_ptr = GET_RTX_FORMAT (code);
2317
2318 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2319 {
2320 switch (*format_ptr++)
2321 {
2322 case 'e':
2323 verify_rtx_sharing (XEXP (x, i), insn);
2324 break;
2325
2326 case 'E':
2327 if (XVEC (x, i) != NULL)
2328 {
2329 int j;
2330 int len = XVECLEN (x, i);
2331
2332 for (j = 0; j < len; j++)
2333 {
2334 /* We allow sharing of ASM_OPERANDS inside a single
2335 instruction. */
2336 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2337 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2338 == ASM_OPERANDS))
2339 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2340 else
2341 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2342 }
2343 }
2344 break;
2345 }
2346 }
2347 return;
2348 }
2349
2350 /* Go through all the RTL insn bodies and check that there is no unexpected
2351 sharing in between the subexpressions. */
2352
2353 void
2354 verify_rtl_sharing (void)
2355 {
2356 rtx p;
2357
2358 for (p = get_insns (); p; p = NEXT_INSN (p))
2359 if (INSN_P (p))
2360 {
2361 reset_used_flags (PATTERN (p));
2362 reset_used_flags (REG_NOTES (p));
2363 reset_used_flags (LOG_LINKS (p));
2364 }
2365
2366 for (p = get_insns (); p; p = NEXT_INSN (p))
2367 if (INSN_P (p))
2368 {
2369 verify_rtx_sharing (PATTERN (p), p);
2370 verify_rtx_sharing (REG_NOTES (p), p);
2371 verify_rtx_sharing (LOG_LINKS (p), p);
2372 }
2373 }
2374
2375 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2376 Assumes the mark bits are cleared at entry. */
2377
2378 void
2379 unshare_all_rtl_in_chain (rtx insn)
2380 {
2381 for (; insn; insn = NEXT_INSN (insn))
2382 if (INSN_P (insn))
2383 {
2384 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2385 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2386 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2387 }
2388 }
2389
2390 /* Go through all virtual stack slots of a function and copy any
2391 shared structure. */
2392 static void
2393 unshare_all_decls (tree blk)
2394 {
2395 tree t;
2396
2397 /* Copy shared decls. */
2398 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2399 if (DECL_RTL_SET_P (t))
2400 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2401
2402 /* Now process sub-blocks. */
2403 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2404 unshare_all_decls (t);
2405 }
2406
2407 /* Go through all virtual stack slots of a function and mark them as
2408 not shared. */
2409 static void
2410 reset_used_decls (tree blk)
2411 {
2412 tree t;
2413
2414 /* Mark decls. */
2415 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2416 if (DECL_RTL_SET_P (t))
2417 reset_used_flags (DECL_RTL (t));
2418
2419 /* Now process sub-blocks. */
2420 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2421 reset_used_decls (t);
2422 }
2423
2424 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2425 Recursively does the same for subexpressions. Uses
2426 copy_rtx_if_shared_1 to reduce stack space. */
2427
2428 rtx
2429 copy_rtx_if_shared (rtx orig)
2430 {
2431 copy_rtx_if_shared_1 (&orig);
2432 return orig;
2433 }
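/* Illustrative usage sketch (not part of the original source): callers
   follow a two-pass protocol, clearing the `used' bits first and then
   copying anything seen twice, as unshare_all_rtl_again does above, e.g.

	reset_used_flags (PATTERN (insn));
	...
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   Skipping the reset pass would make every rtx look shared and force
   needless copies.  */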
2434
2435 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2436 use. Recursively does the same for subexpressions. */
2437
2438 static void
2439 copy_rtx_if_shared_1 (rtx *orig1)
2440 {
2441 rtx x;
2442 int i;
2443 enum rtx_code code;
2444 rtx *last_ptr;
2445 const char *format_ptr;
2446 int copied = 0;
2447 int length;
2448
2449 /* Repeat is used to turn tail-recursion into iteration. */
2450 repeat:
2451 x = *orig1;
2452
2453 if (x == 0)
2454 return;
2455
2456 code = GET_CODE (x);
2457
2458 /* These types may be freely shared. */
2459
2460 switch (code)
2461 {
2462 case REG:
2463 case CONST_INT:
2464 case CONST_DOUBLE:
2465 case CONST_VECTOR:
2466 case SYMBOL_REF:
2467 case LABEL_REF:
2468 case CODE_LABEL:
2469 case PC:
2470 case CC0:
2471 case SCRATCH:
2472 /* SCRATCH must be shared because they represent distinct values. */
2473 return;
2474 case CLOBBER:
2475 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2476 return;
2477 break;
2478
2479 case CONST:
2480 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2481 a LABEL_REF, it isn't sharable. */
2482 if (GET_CODE (XEXP (x, 0)) == PLUS
2483 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2484 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2485 return;
2486 break;
2487
2488 case INSN:
2489 case JUMP_INSN:
2490 case CALL_INSN:
2491 case NOTE:
2492 case BARRIER:
2493 /* The chain of insns is not being copied. */
2494 return;
2495
2496 default:
2497 break;
2498 }
2499
2500 /* This rtx may not be shared. If it has already been seen,
2501 replace it with a copy of itself. */
2502
2503 if (RTX_FLAG (x, used))
2504 {
2505 rtx copy;
2506
2507 copy = rtx_alloc (code);
2508 memcpy (copy, x, RTX_SIZE (code));
2509 x = copy;
2510 copied = 1;
2511 }
2512 RTX_FLAG (x, used) = 1;
2513
2514 /* Now scan the subexpressions recursively.
2515 We can store any replaced subexpressions directly into X
2516 since we know X is not shared! Any vectors in X
2517 must be copied if X was copied. */
2518
2519 format_ptr = GET_RTX_FORMAT (code);
2520 length = GET_RTX_LENGTH (code);
2521 last_ptr = NULL;
2522
2523 for (i = 0; i < length; i++)
2524 {
2525 switch (*format_ptr++)
2526 {
2527 case 'e':
2528 if (last_ptr)
2529 copy_rtx_if_shared_1 (last_ptr);
2530 last_ptr = &XEXP (x, i);
2531 break;
2532
2533 case 'E':
2534 if (XVEC (x, i) != NULL)
2535 {
2536 int j;
2537 int len = XVECLEN (x, i);
2538
2539 /* Copy the vector iff we copied the rtx and the length
2540 is nonzero. */
2541 if (copied && len > 0)
2542 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2543
2544 /* Call recursively on all inside the vector. */
2545 for (j = 0; j < len; j++)
2546 {
2547 if (last_ptr)
2548 copy_rtx_if_shared_1 (last_ptr);
2549 last_ptr = &XVECEXP (x, i, j);
2550 }
2551 }
2552 break;
2553 }
2554 }
2555 *orig1 = x;
2556 if (last_ptr)
2557 {
2558 orig1 = last_ptr;
2559 goto repeat;
2560 }
2561 return;
2562 }
2563
2564 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2565 to look for shared sub-parts. */
2566
2567 void
2568 reset_used_flags (rtx x)
2569 {
2570 int i, j;
2571 enum rtx_code code;
2572 const char *format_ptr;
2573 int length;
2574
2575 /* Repeat is used to turn tail-recursion into iteration. */
2576 repeat:
2577 if (x == 0)
2578 return;
2579
2580 code = GET_CODE (x);
2581
2582 /* These types may be freely shared so we needn't do any resetting
2583 for them. */
2584
2585 switch (code)
2586 {
2587 case REG:
2588 case CONST_INT:
2589 case CONST_DOUBLE:
2590 case CONST_VECTOR:
2591 case SYMBOL_REF:
2592 case CODE_LABEL:
2593 case PC:
2594 case CC0:
2595 return;
2596
2597 case INSN:
2598 case JUMP_INSN:
2599 case CALL_INSN:
2600 case NOTE:
2601 case LABEL_REF:
2602 case BARRIER:
2603 /* The chain of insns is not being copied. */
2604 return;
2605
2606 default:
2607 break;
2608 }
2609
2610 RTX_FLAG (x, used) = 0;
2611
2612 format_ptr = GET_RTX_FORMAT (code);
2613 length = GET_RTX_LENGTH (code);
2614
2615 for (i = 0; i < length; i++)
2616 {
2617 switch (*format_ptr++)
2618 {
2619 case 'e':
2620 if (i == length-1)
2621 {
2622 x = XEXP (x, i);
2623 goto repeat;
2624 }
2625 reset_used_flags (XEXP (x, i));
2626 break;
2627
2628 case 'E':
2629 for (j = 0; j < XVECLEN (x, i); j++)
2630 reset_used_flags (XVECEXP (x, i, j));
2631 break;
2632 }
2633 }
2634 }
2635
2636 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2637 to look for shared sub-parts. */
2638
2639 void
2640 set_used_flags (rtx x)
2641 {
2642 int i, j;
2643 enum rtx_code code;
2644 const char *format_ptr;
2645
2646 if (x == 0)
2647 return;
2648
2649 code = GET_CODE (x);
2650
2651 /* These types may be freely shared so we needn't do any resetting
2652 for them. */
2653
2654 switch (code)
2655 {
2656 case REG:
2657 case CONST_INT:
2658 case CONST_DOUBLE:
2659 case CONST_VECTOR:
2660 case SYMBOL_REF:
2661 case CODE_LABEL:
2662 case PC:
2663 case CC0:
2664 return;
2665
2666 case INSN:
2667 case JUMP_INSN:
2668 case CALL_INSN:
2669 case NOTE:
2670 case LABEL_REF:
2671 case BARRIER:
2672 /* The chain of insns is not being copied. */
2673 return;
2674
2675 default:
2676 break;
2677 }
2678
2679 RTX_FLAG (x, used) = 1;
2680
2681 format_ptr = GET_RTX_FORMAT (code);
2682 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2683 {
2684 switch (*format_ptr++)
2685 {
2686 case 'e':
2687 set_used_flags (XEXP (x, i));
2688 break;
2689
2690 case 'E':
2691 for (j = 0; j < XVECLEN (x, i); j++)
2692 set_used_flags (XVECEXP (x, i, j));
2693 break;
2694 }
2695 }
2696 }
2697 \f
2698 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2699 Return X or the rtx for the pseudo reg the value of X was copied into.
2700 OTHER must be valid as a SET_DEST. */
2701
2702 rtx
2703 make_safe_from (rtx x, rtx other)
2704 {
2705 while (1)
2706 switch (GET_CODE (other))
2707 {
2708 case SUBREG:
2709 other = SUBREG_REG (other);
2710 break;
2711 case STRICT_LOW_PART:
2712 case SIGN_EXTEND:
2713 case ZERO_EXTEND:
2714 other = XEXP (other, 0);
2715 break;
2716 default:
2717 goto done;
2718 }
2719 done:
2720 if ((MEM_P (other)
2721 && ! CONSTANT_P (x)
2722 && !REG_P (x)
2723 && GET_CODE (x) != SUBREG)
2724 || (REG_P (other)
2725 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2726 || reg_mentioned_p (other, x))))
2727 {
2728 rtx temp = gen_reg_rtx (GET_MODE (x));
2729 emit_move_insn (temp, x);
2730 return temp;
2731 }
2732 return x;
2733 }
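/* Illustrative usage sketch (not part of the original source): expanders
   use this before emitting code that stores into a target which one of the
   operands might still need, e.g.

	op1 = make_safe_from (op1, target);
	emit_move_insn (target, op0);
	... op1 can still be used safely here ...

   `op0', `op1' and `target' are placeholders for a caller's operands.  */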
2734 \f
2735 /* Emission of insns (adding them to the doubly-linked list). */
2736
2737 /* Return the first insn of the current sequence or current function. */
2738
2739 rtx
2740 get_insns (void)
2741 {
2742 return first_insn;
2743 }
2744
2745 /* Specify a new insn as the first in the chain. */
2746
2747 void
2748 set_first_insn (rtx insn)
2749 {
2750 gcc_assert (!PREV_INSN (insn));
2751 first_insn = insn;
2752 }
2753
2754 /* Return the last insn emitted in current sequence or current function. */
2755
2756 rtx
2757 get_last_insn (void)
2758 {
2759 return last_insn;
2760 }
2761
2762 /* Specify a new insn as the last in the chain. */
2763
2764 void
2765 set_last_insn (rtx insn)
2766 {
2767 gcc_assert (!NEXT_INSN (insn));
2768 last_insn = insn;
2769 }
2770
2771 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2772
2773 rtx
2774 get_last_insn_anywhere (void)
2775 {
2776 struct sequence_stack *stack;
2777 if (last_insn)
2778 return last_insn;
2779 for (stack = seq_stack; stack; stack = stack->next)
2780 if (stack->last != 0)
2781 return stack->last;
2782 return 0;
2783 }
2784
2785 /* Return the first nonnote insn emitted in current sequence or current
2786 function. This routine looks inside SEQUENCEs. */
2787
2788 rtx
2789 get_first_nonnote_insn (void)
2790 {
2791 rtx insn;
2792
2793 for (insn = first_insn; insn && NOTE_P (insn); insn = next_insn (insn));
2794 return insn;
2795 }
2796
2797 /* Return the last nonnote insn emitted in current sequence or current
2798 function. This routine looks inside SEQUENCEs. */
2799
2800 rtx
2801 get_last_nonnote_insn (void)
2802 {
2803 rtx insn;
2804
2805 for (insn = last_insn; insn && NOTE_P (insn); insn = previous_insn (insn));
2806 return insn;
2807 }
2808
2809 /* Return a number larger than any instruction's uid in this function. */
2810
2811 int
2812 get_max_uid (void)
2813 {
2814 return cur_insn_uid;
2815 }
2816
2817 /* Renumber instructions so that no instruction UIDs are wasted. */
2818
2819 void
2820 renumber_insns (FILE *stream)
2821 {
2822 rtx insn;
2823
2824 /* If we're not supposed to renumber instructions, don't. */
2825 if (!flag_renumber_insns)
2826 return;
2827
2828 /* If there aren't that many instructions, then it's not really
2829 worth renumbering them. */
2830 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2831 return;
2832
2833 cur_insn_uid = 1;
2834
2835 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2836 {
2837 if (stream)
2838 fprintf (stream, "Renumbering insn %d to %d\n",
2839 INSN_UID (insn), cur_insn_uid);
2840 INSN_UID (insn) = cur_insn_uid++;
2841 }
2842 }
2843 \f
2844 /* Return the next insn. If it is a SEQUENCE, return the first insn
2845 of the sequence. */
2846
2847 rtx
2848 next_insn (rtx insn)
2849 {
2850 if (insn)
2851 {
2852 insn = NEXT_INSN (insn);
2853 if (insn && NONJUMP_INSN_P (insn)
2854 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2855 insn = XVECEXP (PATTERN (insn), 0, 0);
2856 }
2857
2858 return insn;
2859 }
2860
2861 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2862 of the sequence. */
2863
2864 rtx
2865 previous_insn (rtx insn)
2866 {
2867 if (insn)
2868 {
2869 insn = PREV_INSN (insn);
2870 if (insn && NONJUMP_INSN_P (insn)
2871 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2872 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2873 }
2874
2875 return insn;
2876 }
2877
2878 /* Return the next insn after INSN that is not a NOTE. This routine does not
2879 look inside SEQUENCEs. */
2880
2881 rtx
2882 next_nonnote_insn (rtx insn)
2883 {
2884 while (insn)
2885 {
2886 insn = NEXT_INSN (insn);
2887 if (insn == 0 || !NOTE_P (insn))
2888 break;
2889 }
2890
2891 return insn;
2892 }
2893
2894 /* Return the previous insn before INSN that is not a NOTE. This routine does
2895 not look inside SEQUENCEs. */
2896
2897 rtx
2898 prev_nonnote_insn (rtx insn)
2899 {
2900 while (insn)
2901 {
2902 insn = PREV_INSN (insn);
2903 if (insn == 0 || !NOTE_P (insn))
2904 break;
2905 }
2906
2907 return insn;
2908 }
2909
2910 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2911 or 0, if there is none. This routine does not look inside
2912 SEQUENCEs. */
2913
2914 rtx
2915 next_real_insn (rtx insn)
2916 {
2917 while (insn)
2918 {
2919 insn = NEXT_INSN (insn);
2920 if (insn == 0 || INSN_P (insn))
2921 break;
2922 }
2923
2924 return insn;
2925 }
2926
2927 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2928 or 0, if there is none. This routine does not look inside
2929 SEQUENCEs. */
2930
2931 rtx
2932 prev_real_insn (rtx insn)
2933 {
2934 while (insn)
2935 {
2936 insn = PREV_INSN (insn);
2937 if (insn == 0 || INSN_P (insn))
2938 break;
2939 }
2940
2941 return insn;
2942 }
2943
2944 /* Return the last CALL_INSN in the current list, or 0 if there is none.
2945 This routine does not look inside SEQUENCEs. */
2946
2947 rtx
2948 last_call_insn (void)
2949 {
2950 rtx insn;
2951
2952 for (insn = get_last_insn ();
2953 insn && !CALL_P (insn);
2954 insn = PREV_INSN (insn))
2955 ;
2956
2957 return insn;
2958 }
2959
2960 /* Find the next insn after INSN that really does something. This routine
2961 does not look inside SEQUENCEs. Until reload has completed, this is the
2962 same as next_real_insn. */
2963
2964 int
2965 active_insn_p (rtx insn)
2966 {
2967 return (CALL_P (insn) || JUMP_P (insn)
2968 || (NONJUMP_INSN_P (insn)
2969 && (! reload_completed
2970 || (GET_CODE (PATTERN (insn)) != USE
2971 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2972 }
2973
2974 rtx
2975 next_active_insn (rtx insn)
2976 {
2977 while (insn)
2978 {
2979 insn = NEXT_INSN (insn);
2980 if (insn == 0 || active_insn_p (insn))
2981 break;
2982 }
2983
2984 return insn;
2985 }
2986
2987 /* Find the last insn before INSN that really does something. This routine
2988 does not look inside SEQUENCEs. Until reload has completed, this is the
2989 same as prev_real_insn. */
2990
2991 rtx
2992 prev_active_insn (rtx insn)
2993 {
2994 while (insn)
2995 {
2996 insn = PREV_INSN (insn);
2997 if (insn == 0 || active_insn_p (insn))
2998 break;
2999 }
3000
3001 return insn;
3002 }
3003
3004 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3005
3006 rtx
3007 next_label (rtx insn)
3008 {
3009 while (insn)
3010 {
3011 insn = NEXT_INSN (insn);
3012 if (insn == 0 || LABEL_P (insn))
3013 break;
3014 }
3015
3016 return insn;
3017 }
3018
3019 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3020
3021 rtx
3022 prev_label (rtx insn)
3023 {
3024 while (insn)
3025 {
3026 insn = PREV_INSN (insn);
3027 if (insn == 0 || LABEL_P (insn))
3028 break;
3029 }
3030
3031 return insn;
3032 }
3033
3034 /* Return the last label to mark the same position as LABEL. Return null
3035 if LABEL itself is null. */
3036
3037 rtx
3038 skip_consecutive_labels (rtx label)
3039 {
3040 rtx insn;
3041
3042 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3043 if (LABEL_P (insn))
3044 label = insn;
3045
3046 return label;
3047 }
3048 \f
3049 #ifdef HAVE_cc0
3050 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3051 and REG_CC_USER notes so we can find it. */
3052
3053 void
3054 link_cc0_insns (rtx insn)
3055 {
3056 rtx user = next_nonnote_insn (insn);
3057
3058 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3059 user = XVECEXP (PATTERN (user), 0, 0);
3060
3061 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3062 REG_NOTES (user));
3063 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3064 }
3065
3066 /* Return the next insn that uses CC0 after INSN, which is assumed to
3067 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3068 applied to the result of this function should yield INSN).
3069
3070 Normally, this is simply the next insn. However, if a REG_CC_USER note
3071 is present, it contains the insn that uses CC0.
3072
3073 Return 0 if we can't find the insn. */
3074
3075 rtx
3076 next_cc0_user (rtx insn)
3077 {
3078 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3079
3080 if (note)
3081 return XEXP (note, 0);
3082
3083 insn = next_nonnote_insn (insn);
3084 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3085 insn = XVECEXP (PATTERN (insn), 0, 0);
3086
3087 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3088 return insn;
3089
3090 return 0;
3091 }
3092
3093 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3094 note, it is the previous insn. */
3095
3096 rtx
3097 prev_cc0_setter (rtx insn)
3098 {
3099 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3100
3101 if (note)
3102 return XEXP (note, 0);
3103
3104 insn = prev_nonnote_insn (insn);
3105 gcc_assert (sets_cc0_p (PATTERN (insn)));
3106
3107 return insn;
3108 }
3109 #endif
3110
3111 /* Increment the label uses for all labels present in rtx. */
3112
3113 static void
3114 mark_label_nuses (rtx x)
3115 {
3116 enum rtx_code code;
3117 int i, j;
3118 const char *fmt;
3119
3120 code = GET_CODE (x);
3121 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3122 LABEL_NUSES (XEXP (x, 0))++;
3123
3124 fmt = GET_RTX_FORMAT (code);
3125 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3126 {
3127 if (fmt[i] == 'e')
3128 mark_label_nuses (XEXP (x, i));
3129 else if (fmt[i] == 'E')
3130 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3131 mark_label_nuses (XVECEXP (x, i, j));
3132 }
3133 }
3134
3135 \f
3136 /* Try splitting insns that can be split for better scheduling.
3137 PAT is the pattern which might split.
3138 TRIAL is the insn providing PAT.
3139 LAST is nonzero if we should return the last insn of the sequence produced.
3140
3141 If this routine succeeds in splitting, it returns the first or last
3142 replacement insn depending on the value of LAST. Otherwise, it
3143 returns TRIAL. If the insn to be returned can be split, it will be. */
3144
3145 rtx
3146 try_split (rtx pat, rtx trial, int last)
3147 {
3148 rtx before = PREV_INSN (trial);
3149 rtx after = NEXT_INSN (trial);
3150 int has_barrier = 0;
3151 rtx tem;
3152 rtx note, seq;
3153 int probability;
3154 rtx insn_last, insn;
3155 int njumps = 0;
3156
3157 if (any_condjump_p (trial)
3158 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3159 split_branch_probability = INTVAL (XEXP (note, 0));
3160 probability = split_branch_probability;
3161
3162 seq = split_insns (pat, trial);
3163
3164 split_branch_probability = -1;
3165
3166 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3167 We may need to handle this specially. */
3168 if (after && BARRIER_P (after))
3169 {
3170 has_barrier = 1;
3171 after = NEXT_INSN (after);
3172 }
3173
3174 if (!seq)
3175 return trial;
3176
3177 /* Avoid infinite loop if any insn of the result matches
3178 the original pattern. */
3179 insn_last = seq;
3180 while (1)
3181 {
3182 if (INSN_P (insn_last)
3183 && rtx_equal_p (PATTERN (insn_last), pat))
3184 return trial;
3185 if (!NEXT_INSN (insn_last))
3186 break;
3187 insn_last = NEXT_INSN (insn_last);
3188 }
3189
3190 /* Mark labels. */
3191 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3192 {
3193 if (JUMP_P (insn))
3194 {
3195 mark_jump_label (PATTERN (insn), insn, 0);
3196 njumps++;
3197 if (probability != -1
3198 && any_condjump_p (insn)
3199 && !find_reg_note (insn, REG_BR_PROB, 0))
3200 {
3201 /* We can preserve the REG_BR_PROB notes only if exactly
3202 one jump is created, otherwise the machine description
3203 is responsible for this step using the
3204 split_branch_probability variable. */
3205 gcc_assert (njumps == 1);
3206 REG_NOTES (insn)
3207 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3208 GEN_INT (probability),
3209 REG_NOTES (insn));
3210 }
3211 }
3212 }
3213
3214 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3215 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3216 if (CALL_P (trial))
3217 {
3218 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3219 if (CALL_P (insn))
3220 {
3221 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3222 while (*p)
3223 p = &XEXP (*p, 1);
3224 *p = CALL_INSN_FUNCTION_USAGE (trial);
3225 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3226 }
3227 }
3228
3229 /* Copy notes, particularly those related to the CFG. */
3230 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3231 {
3232 switch (REG_NOTE_KIND (note))
3233 {
3234 case REG_EH_REGION:
3235 insn = insn_last;
3236 while (insn != NULL_RTX)
3237 {
3238 if (CALL_P (insn)
3239 || (flag_non_call_exceptions && INSN_P (insn)
3240 && may_trap_p (PATTERN (insn))))
3241 REG_NOTES (insn)
3242 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3243 XEXP (note, 0),
3244 REG_NOTES (insn));
3245 insn = PREV_INSN (insn);
3246 }
3247 break;
3248
3249 case REG_NORETURN:
3250 case REG_SETJMP:
3251 case REG_ALWAYS_RETURN:
3252 insn = insn_last;
3253 while (insn != NULL_RTX)
3254 {
3255 if (CALL_P (insn))
3256 REG_NOTES (insn)
3257 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3258 XEXP (note, 0),
3259 REG_NOTES (insn));
3260 insn = PREV_INSN (insn);
3261 }
3262 break;
3263
3264 case REG_NON_LOCAL_GOTO:
3265 insn = insn_last;
3266 while (insn != NULL_RTX)
3267 {
3268 if (JUMP_P (insn))
3269 REG_NOTES (insn)
3270 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3271 XEXP (note, 0),
3272 REG_NOTES (insn));
3273 insn = PREV_INSN (insn);
3274 }
3275 break;
3276
3277 default:
3278 break;
3279 }
3280 }
3281
3282 /* If there are LABELS inside the split insns increment the
3283 usage count so we don't delete the label. */
3284 if (NONJUMP_INSN_P (trial))
3285 {
3286 insn = insn_last;
3287 while (insn != NULL_RTX)
3288 {
3289 if (NONJUMP_INSN_P (insn))
3290 mark_label_nuses (PATTERN (insn));
3291
3292 insn = PREV_INSN (insn);
3293 }
3294 }
3295
3296 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3297
3298 delete_insn (trial);
3299 if (has_barrier)
3300 emit_barrier_after (tem);
3301
3302 /* Recursively call try_split for each new insn created; by the
3303 time control returns here that insn will be fully split, so
3304 set LAST and continue from the insn after the one returned.
3305 We can't use next_active_insn here since AFTER may be a note.
3306 Ignore deleted insns, which can occur if not optimizing. */
3307 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3308 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3309 tem = try_split (PATTERN (tem), tem, 1);
3310
3311 /* Return either the first or the last insn, depending on which was
3312 requested. */
3313 return last
3314 ? (after ? PREV_INSN (after) : last_insn)
3315 : NEXT_INSN (before);
3316 }
3317 \f
3318 /* Make and return an INSN rtx, initializing all its slots.
3319 Store PATTERN in the pattern slots. */
3320
3321 rtx
3322 make_insn_raw (rtx pattern)
3323 {
3324 rtx insn;
3325
3326 insn = rtx_alloc (INSN);
3327
3328 INSN_UID (insn) = cur_insn_uid++;
3329 PATTERN (insn) = pattern;
3330 INSN_CODE (insn) = -1;
3331 LOG_LINKS (insn) = NULL;
3332 REG_NOTES (insn) = NULL;
3333 INSN_LOCATOR (insn) = 0;
3334 BLOCK_FOR_INSN (insn) = NULL;
3335
3336 #ifdef ENABLE_RTL_CHECKING
3337 if (insn
3338 && INSN_P (insn)
3339 && (returnjump_p (insn)
3340 || (GET_CODE (insn) == SET
3341 && SET_DEST (insn) == pc_rtx)))
3342 {
3343 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3344 debug_rtx (insn);
3345 }
3346 #endif
3347
3348 return insn;
3349 }
3350
3351 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3352
3353 static rtx
3354 make_jump_insn_raw (rtx pattern)
3355 {
3356 rtx insn;
3357
3358 insn = rtx_alloc (JUMP_INSN);
3359 INSN_UID (insn) = cur_insn_uid++;
3360
3361 PATTERN (insn) = pattern;
3362 INSN_CODE (insn) = -1;
3363 LOG_LINKS (insn) = NULL;
3364 REG_NOTES (insn) = NULL;
3365 JUMP_LABEL (insn) = NULL;
3366 INSN_LOCATOR (insn) = 0;
3367 BLOCK_FOR_INSN (insn) = NULL;
3368
3369 return insn;
3370 }
3371
3372 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3373
3374 static rtx
3375 make_call_insn_raw (rtx pattern)
3376 {
3377 rtx insn;
3378
3379 insn = rtx_alloc (CALL_INSN);
3380 INSN_UID (insn) = cur_insn_uid++;
3381
3382 PATTERN (insn) = pattern;
3383 INSN_CODE (insn) = -1;
3384 LOG_LINKS (insn) = NULL;
3385 REG_NOTES (insn) = NULL;
3386 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3387 INSN_LOCATOR (insn) = 0;
3388 BLOCK_FOR_INSN (insn) = NULL;
3389
3390 return insn;
3391 }
3392 \f
3393 /* Add INSN to the end of the doubly-linked list.
3394 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3395
3396 void
3397 add_insn (rtx insn)
3398 {
3399 PREV_INSN (insn) = last_insn;
3400 NEXT_INSN (insn) = 0;
3401
3402 if (NULL != last_insn)
3403 NEXT_INSN (last_insn) = insn;
3404
3405 if (NULL == first_insn)
3406 first_insn = insn;
3407
3408 last_insn = insn;
3409 }
3410
3411 /* Add INSN into the doubly-linked list after insn AFTER. This and
3412 the next should be the only functions called to insert an insn once
3413 delay slots have been filled since only they know how to update a
3414 SEQUENCE. */
3415
3416 void
3417 add_insn_after (rtx insn, rtx after)
3418 {
3419 rtx next = NEXT_INSN (after);
3420 basic_block bb;
3421
3422 gcc_assert (!optimize || !INSN_DELETED_P (after));
3423
3424 NEXT_INSN (insn) = next;
3425 PREV_INSN (insn) = after;
3426
3427 if (next)
3428 {
3429 PREV_INSN (next) = insn;
3430 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3431 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3432 }
3433 else if (last_insn == after)
3434 last_insn = insn;
3435 else
3436 {
3437 struct sequence_stack *stack = seq_stack;
3438 /* Scan all pending sequences too. */
3439 for (; stack; stack = stack->next)
3440 if (after == stack->last)
3441 {
3442 stack->last = insn;
3443 break;
3444 }
3445
3446 gcc_assert (stack);
3447 }
3448
3449 if (!BARRIER_P (after)
3450 && !BARRIER_P (insn)
3451 && (bb = BLOCK_FOR_INSN (after)))
3452 {
3453 set_block_for_insn (insn, bb);
3454 if (INSN_P (insn))
3455 bb->flags |= BB_DIRTY;
3456 /* Should not happen as first in the BB is always
3457 either NOTE or LABEL. */
3458 if (BB_END (bb) == after
3459 /* Avoid clobbering of structure when creating new BB. */
3460 && !BARRIER_P (insn)
3461 && (!NOTE_P (insn)
3462 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3463 BB_END (bb) = insn;
3464 }
3465
3466 NEXT_INSN (after) = insn;
3467 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3468 {
3469 rtx sequence = PATTERN (after);
3470 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3471 }
3472 }
3473
3474 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3475 the previous should be the only functions called to insert an insn once
3476 delay slots have been filled since only they know how to update a
3477 SEQUENCE. */
3478
3479 void
3480 add_insn_before (rtx insn, rtx before)
3481 {
3482 rtx prev = PREV_INSN (before);
3483 basic_block bb;
3484
3485 gcc_assert (!optimize || !INSN_DELETED_P (before));
3486
3487 PREV_INSN (insn) = prev;
3488 NEXT_INSN (insn) = before;
3489
3490 if (prev)
3491 {
3492 NEXT_INSN (prev) = insn;
3493 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3494 {
3495 rtx sequence = PATTERN (prev);
3496 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3497 }
3498 }
3499 else if (first_insn == before)
3500 first_insn = insn;
3501 else
3502 {
3503 struct sequence_stack *stack = seq_stack;
3504 /* Scan all pending sequences too. */
3505 for (; stack; stack = stack->next)
3506 if (before == stack->first)
3507 {
3508 stack->first = insn;
3509 break;
3510 }
3511
3512 gcc_assert (stack);
3513 }
3514
3515 if (!BARRIER_P (before)
3516 && !BARRIER_P (insn)
3517 && (bb = BLOCK_FOR_INSN (before)))
3518 {
3519 set_block_for_insn (insn, bb);
3520 if (INSN_P (insn))
3521 bb->flags |= BB_DIRTY;
3522 /* Should not happen as first in the BB is always either NOTE or
3523 LABEL. */
3524 gcc_assert (BB_HEAD (bb) != insn
3525 /* Avoid clobbering of structure when creating new BB. */
3526 || BARRIER_P (insn)
3527 || (NOTE_P (insn)
3528 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3529 }
3530
3531 PREV_INSN (before) = insn;
3532 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3533 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3534 }
3535
3536 /* Remove an insn from its doubly-linked list. This function knows how
3537 to handle sequences. */
3538 void
3539 remove_insn (rtx insn)
3540 {
3541 rtx next = NEXT_INSN (insn);
3542 rtx prev = PREV_INSN (insn);
3543 basic_block bb;
3544
3545 if (prev)
3546 {
3547 NEXT_INSN (prev) = next;
3548 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3549 {
3550 rtx sequence = PATTERN (prev);
3551 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3552 }
3553 }
3554 else if (first_insn == insn)
3555 first_insn = next;
3556 else
3557 {
3558 struct sequence_stack *stack = seq_stack;
3559 /* Scan all pending sequences too. */
3560 for (; stack; stack = stack->next)
3561 if (insn == stack->first)
3562 {
3563 stack->first = next;
3564 break;
3565 }
3566
3567 gcc_assert (stack);
3568 }
3569
3570 if (next)
3571 {
3572 PREV_INSN (next) = prev;
3573 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3574 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3575 }
3576 else if (last_insn == insn)
3577 last_insn = prev;
3578 else
3579 {
3580 struct sequence_stack *stack = seq_stack;
3581 /* Scan all pending sequences too. */
3582 for (; stack; stack = stack->next)
3583 if (insn == stack->last)
3584 {
3585 stack->last = prev;
3586 break;
3587 }
3588
3589 gcc_assert (stack);
3590 }
3591 if (!BARRIER_P (insn)
3592 && (bb = BLOCK_FOR_INSN (insn)))
3593 {
3594 if (INSN_P (insn))
3595 bb->flags |= BB_DIRTY;
3596 if (BB_HEAD (bb) == insn)
3597 {
3598 /* Never ever delete the basic block note without deleting the whole
3599 basic block. */
3600 gcc_assert (!NOTE_P (insn));
3601 BB_HEAD (bb) = next;
3602 }
3603 if (BB_END (bb) == insn)
3604 BB_END (bb) = prev;
3605 }
3606 }
3607
3608 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3609
3610 void
3611 add_function_usage_to (rtx call_insn, rtx call_fusage)
3612 {
3613 gcc_assert (call_insn && CALL_P (call_insn));
3614
3615 /* Put the register usage information on the CALL. If there is already
3616 some usage information, put ours at the end. */
3617 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3618 {
3619 rtx link;
3620
3621 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3622 link = XEXP (link, 1))
3623 ;
3624
3625 XEXP (link, 1) = call_fusage;
3626 }
3627 else
3628 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3629 }
3630
3631 /* Delete all insns made since FROM.
3632 FROM becomes the new last instruction. */
3633
3634 void
3635 delete_insns_since (rtx from)
3636 {
3637 if (from == 0)
3638 first_insn = 0;
3639 else
3640 NEXT_INSN (from) = 0;
3641 last_insn = from;
3642 }
3643
3644 /* This function is deprecated; please use sequences instead.
3645
3646 Move a consecutive bunch of insns to a different place in the chain.
3647 The insns to be moved are those between FROM and TO.
3648 They are moved to a new position after the insn AFTER.
3649 AFTER must not be FROM or TO or any insn in between.
3650
3651 This function does not know about SEQUENCEs and hence should not be
3652 called after delay-slot filling has been done. */
3653
3654 void
3655 reorder_insns_nobb (rtx from, rtx to, rtx after)
3656 {
3657 /* Splice this bunch out of where it is now. */
3658 if (PREV_INSN (from))
3659 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3660 if (NEXT_INSN (to))
3661 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3662 if (last_insn == to)
3663 last_insn = PREV_INSN (from);
3664 if (first_insn == from)
3665 first_insn = NEXT_INSN (to);
3666
3667 /* Make the new neighbors point to it and it to them. */
3668 if (NEXT_INSN (after))
3669 PREV_INSN (NEXT_INSN (after)) = to;
3670
3671 NEXT_INSN (to) = NEXT_INSN (after);
3672 PREV_INSN (from) = after;
3673 NEXT_INSN (after) = from;
3674 if (after == last_insn)
3675 last_insn = to;
3676 }
3677
3678 /* Same as function above, but take care to update BB boundaries. */
3679 void
3680 reorder_insns (rtx from, rtx to, rtx after)
3681 {
3682 rtx prev = PREV_INSN (from);
3683 basic_block bb, bb2;
3684
3685 reorder_insns_nobb (from, to, after);
3686
3687 if (!BARRIER_P (after)
3688 && (bb = BLOCK_FOR_INSN (after)))
3689 {
3690 rtx x;
3691 bb->flags |= BB_DIRTY;
3692
3693 if (!BARRIER_P (from)
3694 && (bb2 = BLOCK_FOR_INSN (from)))
3695 {
3696 if (BB_END (bb2) == to)
3697 BB_END (bb2) = prev;
3698 bb2->flags |= BB_DIRTY;
3699 }
3700
3701 if (BB_END (bb) == after)
3702 BB_END (bb) = to;
3703
3704 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3705 if (!BARRIER_P (x))
3706 set_block_for_insn (x, bb);
3707 }
3708 }
3709
3710 /* Return the line note insn preceding INSN. */
3711
3712 static rtx
3713 find_line_note (rtx insn)
3714 {
3715 if (no_line_numbers)
3716 return 0;
3717
3718 for (; insn; insn = PREV_INSN (insn))
3719 if (NOTE_P (insn)
3720 && NOTE_LINE_NUMBER (insn) >= 0)
3721 break;
3722
3723 return insn;
3724 }
3725
3726 /* Remove unnecessary notes from the instruction stream. */
3727
3728 void
3729 remove_unnecessary_notes (void)
3730 {
3731 rtx block_stack = NULL_RTX;
3732 rtx eh_stack = NULL_RTX;
3733 rtx insn;
3734 rtx next;
3735 rtx tmp;
3736
3737 /* We must not remove the first instruction in the function because
3738 the compiler depends on the first instruction being a note. */
3739 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3740 {
3741 /* Remember what's next. */
3742 next = NEXT_INSN (insn);
3743
3744 /* We're only interested in notes. */
3745 if (!NOTE_P (insn))
3746 continue;
3747
3748 switch (NOTE_LINE_NUMBER (insn))
3749 {
3750 case NOTE_INSN_DELETED:
3751 remove_insn (insn);
3752 break;
3753
3754 case NOTE_INSN_EH_REGION_BEG:
3755 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3756 break;
3757
3758 case NOTE_INSN_EH_REGION_END:
3759 /* Too many end notes. */
3760 gcc_assert (eh_stack);
3761 /* Mismatched nesting. */
3762 gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0))
3763 == NOTE_EH_HANDLER (insn));
3764 tmp = eh_stack;
3765 eh_stack = XEXP (eh_stack, 1);
3766 free_INSN_LIST_node (tmp);
3767 break;
3768
3769 case NOTE_INSN_BLOCK_BEG:
3770 /* By now, all notes indicating lexical blocks should have
3771 NOTE_BLOCK filled in. */
3772 gcc_assert (NOTE_BLOCK (insn));
3773 block_stack = alloc_INSN_LIST (insn, block_stack);
3774 break;
3775
3776 case NOTE_INSN_BLOCK_END:
3777 /* Too many end notes. */
3778 gcc_assert (block_stack);
3779 /* Mismatched nesting. */
3780 gcc_assert (NOTE_BLOCK (XEXP (block_stack, 0)) == NOTE_BLOCK (insn));
3781 tmp = block_stack;
3782 block_stack = XEXP (block_stack, 1);
3783 free_INSN_LIST_node (tmp);
3784
3785 /* Scan back to see if there are any non-note instructions
3786 between INSN and the beginning of this block. If not,
3787 then there is no PC range in the generated code that will
3788 actually be in this block, so there's no point in
3789 remembering the existence of the block. */
3790 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3791 {
3792 /* This block contains a real instruction. Note that we
3793 don't include labels; if the only thing in the block
3794 is a label, then there are still no PC values that
3795 lie within the block. */
3796 if (INSN_P (tmp))
3797 break;
3798
3799 /* We're only interested in NOTEs. */
3800 if (!NOTE_P (tmp))
3801 continue;
3802
3803 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3804 {
3805 /* We just verified that this BLOCK matches us with
3806 the block_stack check above. Never delete the
3807 BLOCK for the outermost scope of the function; we
3808 can refer to names from that scope even if the
3809 block notes are messed up. */
3810 if (! is_body_block (NOTE_BLOCK (insn))
3811 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3812 {
3813 remove_insn (tmp);
3814 remove_insn (insn);
3815 }
3816 break;
3817 }
3818 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3819 /* There's a nested block. We need to leave the
3820 current block in place since otherwise the debugger
3821 wouldn't be able to show symbols from our block in
3822 the nested block. */
3823 break;
3824 }
3825 }
3826 }
3827
3828 /* Too many begin notes. */
3829 gcc_assert (!block_stack && !eh_stack);
3830 }
3831
3832 \f
3833 /* Emit insn(s) of given code and pattern
3834 at a specified place within the doubly-linked list.
3835
3836 All of the emit_foo global entry points accept an object
3837 X which is either an insn list or a PATTERN of a single
3838 instruction.
3839
3840 There are thus a few canonical ways to generate code and
3841 emit it at a specific place in the instruction stream. For
3842 example, consider the instruction named SPOT and the fact that
3843 we would like to emit some instructions before SPOT. We might
3844 do it like this:
3845
3846 start_sequence ();
3847 ... emit the new instructions ...
3848 insns_head = get_insns ();
3849 end_sequence ();
3850
3851 emit_insn_before (insns_head, SPOT);
3852
3853 It used to be common to generate SEQUENCE rtl instead, but that
3854    is a relic of the past and no longer occurs.  The reason is that
3855    SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
3856 generated would almost certainly die right after it was created. */
3857
3858 /* Make X be output before the instruction BEFORE. */
3859
3860 rtx
3861 emit_insn_before_noloc (rtx x, rtx before)
3862 {
3863 rtx last = before;
3864 rtx insn;
3865
3866 gcc_assert (before);
3867
3868 if (x == NULL_RTX)
3869 return last;
3870
3871 switch (GET_CODE (x))
3872 {
3873 case INSN:
3874 case JUMP_INSN:
3875 case CALL_INSN:
3876 case CODE_LABEL:
3877 case BARRIER:
3878 case NOTE:
3879 insn = x;
3880 while (insn)
3881 {
3882 rtx next = NEXT_INSN (insn);
3883 add_insn_before (insn, before);
3884 last = insn;
3885 insn = next;
3886 }
3887 break;
3888
3889 #ifdef ENABLE_RTL_CHECKING
3890 case SEQUENCE:
3891 gcc_unreachable ();
3892 break;
3893 #endif
3894
3895 default:
3896 last = make_insn_raw (x);
3897 add_insn_before (last, before);
3898 break;
3899 }
3900
3901 return last;
3902 }
3903
3904 /* Make an instruction with body X and code JUMP_INSN
3905 and output it before the instruction BEFORE. */
3906
3907 rtx
3908 emit_jump_insn_before_noloc (rtx x, rtx before)
3909 {
3910 rtx insn, last = NULL_RTX;
3911
3912 gcc_assert (before);
3913
3914 switch (GET_CODE (x))
3915 {
3916 case INSN:
3917 case JUMP_INSN:
3918 case CALL_INSN:
3919 case CODE_LABEL:
3920 case BARRIER:
3921 case NOTE:
3922 insn = x;
3923 while (insn)
3924 {
3925 rtx next = NEXT_INSN (insn);
3926 add_insn_before (insn, before);
3927 last = insn;
3928 insn = next;
3929 }
3930 break;
3931
3932 #ifdef ENABLE_RTL_CHECKING
3933 case SEQUENCE:
3934 gcc_unreachable ();
3935 break;
3936 #endif
3937
3938 default:
3939 last = make_jump_insn_raw (x);
3940 add_insn_before (last, before);
3941 break;
3942 }
3943
3944 return last;
3945 }
3946
3947 /* Make an instruction with body X and code CALL_INSN
3948 and output it before the instruction BEFORE. */
3949
3950 rtx
3951 emit_call_insn_before_noloc (rtx x, rtx before)
3952 {
3953 rtx last = NULL_RTX, insn;
3954
3955 gcc_assert (before);
3956
3957 switch (GET_CODE (x))
3958 {
3959 case INSN:
3960 case JUMP_INSN:
3961 case CALL_INSN:
3962 case CODE_LABEL:
3963 case BARRIER:
3964 case NOTE:
3965 insn = x;
3966 while (insn)
3967 {
3968 rtx next = NEXT_INSN (insn);
3969 add_insn_before (insn, before);
3970 last = insn;
3971 insn = next;
3972 }
3973 break;
3974
3975 #ifdef ENABLE_RTL_CHECKING
3976 case SEQUENCE:
3977 gcc_unreachable ();
3978 break;
3979 #endif
3980
3981 default:
3982 last = make_call_insn_raw (x);
3983 add_insn_before (last, before);
3984 break;
3985 }
3986
3987 return last;
3988 }
3989
3990 /* Make an insn of code BARRIER
3991 and output it before the insn BEFORE. */
3992
3993 rtx
3994 emit_barrier_before (rtx before)
3995 {
3996 rtx insn = rtx_alloc (BARRIER);
3997
3998 INSN_UID (insn) = cur_insn_uid++;
3999
4000 add_insn_before (insn, before);
4001 return insn;
4002 }
4003
4004 /* Emit the label LABEL before the insn BEFORE. */
4005
4006 rtx
4007 emit_label_before (rtx label, rtx before)
4008 {
4009 /* This can be called twice for the same label as a result of the
4010 confusion that follows a syntax error! So make it harmless. */
4011 if (INSN_UID (label) == 0)
4012 {
4013 INSN_UID (label) = cur_insn_uid++;
4014 add_insn_before (label, before);
4015 }
4016
4017 return label;
4018 }
4019
4020 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4021
4022 rtx
4023 emit_note_before (int subtype, rtx before)
4024 {
4025 rtx note = rtx_alloc (NOTE);
4026 INSN_UID (note) = cur_insn_uid++;
4027 #ifndef USE_MAPPED_LOCATION
4028 NOTE_SOURCE_FILE (note) = 0;
4029 #endif
4030 NOTE_LINE_NUMBER (note) = subtype;
4031 BLOCK_FOR_INSN (note) = NULL;
4032
4033 add_insn_before (note, before);
4034 return note;
4035 }
4036 \f
4037 /* Helper for emit_insn_after, handles lists of instructions
4038 efficiently. */
4039
4040 static rtx emit_insn_after_1 (rtx, rtx);
4041
4042 static rtx
4043 emit_insn_after_1 (rtx first, rtx after)
4044 {
4045 rtx last;
4046 rtx after_after;
4047 basic_block bb;
4048
4049 if (!BARRIER_P (after)
4050 && (bb = BLOCK_FOR_INSN (after)))
4051 {
4052 bb->flags |= BB_DIRTY;
4053 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4054 if (!BARRIER_P (last))
4055 set_block_for_insn (last, bb);
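	/* The loop above runs only while NEXT_INSN (last) is nonzero, so the
	   final insn in the list has not yet been given a block; handle it
	   here.  */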
4056 if (!BARRIER_P (last))
4057 set_block_for_insn (last, bb);
4058 if (BB_END (bb) == after)
4059 BB_END (bb) = last;
4060 }
4061 else
4062 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4063 continue;
4064
4065 after_after = NEXT_INSN (after);
4066
4067 NEXT_INSN (after) = first;
4068 PREV_INSN (first) = after;
4069 NEXT_INSN (last) = after_after;
4070 if (after_after)
4071 PREV_INSN (after_after) = last;
4072
4073 if (after == last_insn)
4074 last_insn = last;
4075 return last;
4076 }
4077
4078 /* Make X be output after the insn AFTER. */
4079
4080 rtx
4081 emit_insn_after_noloc (rtx x, rtx after)
4082 {
4083 rtx last = after;
4084
4085 gcc_assert (after);
4086
4087 if (x == NULL_RTX)
4088 return last;
4089
4090 switch (GET_CODE (x))
4091 {
4092 case INSN:
4093 case JUMP_INSN:
4094 case CALL_INSN:
4095 case CODE_LABEL:
4096 case BARRIER:
4097 case NOTE:
4098 last = emit_insn_after_1 (x, after);
4099 break;
4100
4101 #ifdef ENABLE_RTL_CHECKING
4102 case SEQUENCE:
4103 gcc_unreachable ();
4104 break;
4105 #endif
4106
4107 default:
4108 last = make_insn_raw (x);
4109 add_insn_after (last, after);
4110 break;
4111 }
4112
4113 return last;
4114 }
4115
4116 /* Similar to emit_insn_after, except that line notes are to be inserted so
4117 as to act as if this insn were at FROM. */
4118
4119 void
4120 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4121 {
4122 rtx from_line = find_line_note (from);
4123 rtx after_line = find_line_note (after);
4124 rtx insn = emit_insn_after (x, after);
4125
4126 if (from_line)
4127 emit_note_copy_after (from_line, after);
4128
4129 if (after_line)
4130 emit_note_copy_after (after_line, insn);
4131 }
4132
4133 /* Make an insn of code JUMP_INSN with body X
4134 and output it after the insn AFTER. */
4135
4136 rtx
4137 emit_jump_insn_after_noloc (rtx x, rtx after)
4138 {
4139 rtx last;
4140
4141 gcc_assert (after);
4142
4143 switch (GET_CODE (x))
4144 {
4145 case INSN:
4146 case JUMP_INSN:
4147 case CALL_INSN:
4148 case CODE_LABEL:
4149 case BARRIER:
4150 case NOTE:
4151 last = emit_insn_after_1 (x, after);
4152 break;
4153
4154 #ifdef ENABLE_RTL_CHECKING
4155 case SEQUENCE:
4156 gcc_unreachable ();
4157 break;
4158 #endif
4159
4160 default:
4161 last = make_jump_insn_raw (x);
4162 add_insn_after (last, after);
4163 break;
4164 }
4165
4166 return last;
4167 }
4168
4169 /* Make an instruction with body X and code CALL_INSN
4170 and output it after the instruction AFTER. */
4171
4172 rtx
4173 emit_call_insn_after_noloc (rtx x, rtx after)
4174 {
4175 rtx last;
4176
4177 gcc_assert (after);
4178
4179 switch (GET_CODE (x))
4180 {
4181 case INSN:
4182 case JUMP_INSN:
4183 case CALL_INSN:
4184 case CODE_LABEL:
4185 case BARRIER:
4186 case NOTE:
4187 last = emit_insn_after_1 (x, after);
4188 break;
4189
4190 #ifdef ENABLE_RTL_CHECKING
4191 case SEQUENCE:
4192 gcc_unreachable ();
4193 break;
4194 #endif
4195
4196 default:
4197 last = make_call_insn_raw (x);
4198 add_insn_after (last, after);
4199 break;
4200 }
4201
4202 return last;
4203 }
4204
4205 /* Make an insn of code BARRIER
4206 and output it after the insn AFTER. */
4207
4208 rtx
4209 emit_barrier_after (rtx after)
4210 {
4211 rtx insn = rtx_alloc (BARRIER);
4212
4213 INSN_UID (insn) = cur_insn_uid++;
4214
4215 add_insn_after (insn, after);
4216 return insn;
4217 }
4218
4219 /* Emit the label LABEL after the insn AFTER. */
4220
4221 rtx
4222 emit_label_after (rtx label, rtx after)
4223 {
4224 /* This can be called twice for the same label
4225 as a result of the confusion that follows a syntax error!
4226 So make it harmless. */
4227 if (INSN_UID (label) == 0)
4228 {
4229 INSN_UID (label) = cur_insn_uid++;
4230 add_insn_after (label, after);
4231 }
4232
4233 return label;
4234 }
4235
4236 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4237
4238 rtx
4239 emit_note_after (int subtype, rtx after)
4240 {
4241 rtx note = rtx_alloc (NOTE);
4242 INSN_UID (note) = cur_insn_uid++;
4243 #ifndef USE_MAPPED_LOCATION
4244 NOTE_SOURCE_FILE (note) = 0;
4245 #endif
4246 NOTE_LINE_NUMBER (note) = subtype;
4247 BLOCK_FOR_INSN (note) = NULL;
4248 add_insn_after (note, after);
4249 return note;
4250 }
4251
4252 /* Emit a copy of note ORIG after the insn AFTER. */
4253
4254 rtx
4255 emit_note_copy_after (rtx orig, rtx after)
4256 {
4257 rtx note;
4258
4259 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4260 {
4261 cur_insn_uid++;
4262 return 0;
4263 }
4264
4265 note = rtx_alloc (NOTE);
4266 INSN_UID (note) = cur_insn_uid++;
4267 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4268 NOTE_DATA (note) = NOTE_DATA (orig);
4269 BLOCK_FOR_INSN (note) = NULL;
4270 add_insn_after (note, after);
4271 return note;
4272 }
4273 \f
4274 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4275 rtx
4276 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4277 {
4278 rtx last = emit_insn_after_noloc (pattern, after);
4279
4280 if (pattern == NULL_RTX || !loc)
4281 return last;
4282
4283 after = NEXT_INSN (after);
4284 while (1)
4285 {
4286 if (active_insn_p (after) && !INSN_LOCATOR (after))
4287 INSN_LOCATOR (after) = loc;
4288 if (after == last)
4289 break;
4290 after = NEXT_INSN (after);
4291 }
4292 return last;
4293 }
4294
4295 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4296 rtx
4297 emit_insn_after (rtx pattern, rtx after)
4298 {
4299 if (INSN_P (after))
4300 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4301 else
4302 return emit_insn_after_noloc (pattern, after);
4303 }
4304
4305 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4306 rtx
4307 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4308 {
4309 rtx last = emit_jump_insn_after_noloc (pattern, after);
4310
4311 if (pattern == NULL_RTX || !loc)
4312 return last;
4313
4314 after = NEXT_INSN (after);
4315 while (1)
4316 {
4317 if (active_insn_p (after) && !INSN_LOCATOR (after))
4318 INSN_LOCATOR (after) = loc;
4319 if (after == last)
4320 break;
4321 after = NEXT_INSN (after);
4322 }
4323 return last;
4324 }
4325
4326 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4327 rtx
4328 emit_jump_insn_after (rtx pattern, rtx after)
4329 {
4330 if (INSN_P (after))
4331 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4332 else
4333 return emit_jump_insn_after_noloc (pattern, after);
4334 }
4335
4336 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4337 rtx
4338 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4339 {
4340 rtx last = emit_call_insn_after_noloc (pattern, after);
4341
4342 if (pattern == NULL_RTX || !loc)
4343 return last;
4344
4345 after = NEXT_INSN (after);
4346 while (1)
4347 {
4348 if (active_insn_p (after) && !INSN_LOCATOR (after))
4349 INSN_LOCATOR (after) = loc;
4350 if (after == last)
4351 break;
4352 after = NEXT_INSN (after);
4353 }
4354 return last;
4355 }
4356
4357 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4358 rtx
4359 emit_call_insn_after (rtx pattern, rtx after)
4360 {
4361 if (INSN_P (after))
4362 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4363 else
4364 return emit_call_insn_after_noloc (pattern, after);
4365 }
4366
4367 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4368 rtx
4369 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4370 {
4371 rtx first = PREV_INSN (before);
4372 rtx last = emit_insn_before_noloc (pattern, before);
4373
4374 if (pattern == NULL_RTX || !loc)
4375 return last;
4376
4377 first = NEXT_INSN (first);
4378 while (1)
4379 {
4380 if (active_insn_p (first) && !INSN_LOCATOR (first))
4381 INSN_LOCATOR (first) = loc;
4382 if (first == last)
4383 break;
4384 first = NEXT_INSN (first);
4385 }
4386 return last;
4387 }
4388
4389 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4390 rtx
4391 emit_insn_before (rtx pattern, rtx before)
4392 {
4393 if (INSN_P (before))
4394 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4395 else
4396 return emit_insn_before_noloc (pattern, before);
4397 }
4398
4399 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4400 rtx
4401 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4402 {
4403 rtx first = PREV_INSN (before);
4404 rtx last = emit_jump_insn_before_noloc (pattern, before);
4405
4406 if (pattern == NULL_RTX)
4407 return last;
4408
4409 first = NEXT_INSN (first);
4410 while (1)
4411 {
4412 if (active_insn_p (first) && !INSN_LOCATOR (first))
4413 INSN_LOCATOR (first) = loc;
4414 if (first == last)
4415 break;
4416 first = NEXT_INSN (first);
4417 }
4418 return last;
4419 }
4420
4421 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4422 rtx
4423 emit_jump_insn_before (rtx pattern, rtx before)
4424 {
4425 if (INSN_P (before))
4426 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4427 else
4428 return emit_jump_insn_before_noloc (pattern, before);
4429 }
4430
4431 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4432 rtx
4433 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4434 {
4435 rtx first = PREV_INSN (before);
4436 rtx last = emit_call_insn_before_noloc (pattern, before);
4437
4438 if (pattern == NULL_RTX)
4439 return last;
4440
4441 first = NEXT_INSN (first);
4442 while (1)
4443 {
4444 if (active_insn_p (first) && !INSN_LOCATOR (first))
4445 INSN_LOCATOR (first) = loc;
4446 if (first == last)
4447 break;
4448 first = NEXT_INSN (first);
4449 }
4450 return last;
4451 }
4452
4453 /* Like emit_call_insn_before_noloc,
4454    but set INSN_LOCATOR according to BEFORE.  */
4455 rtx
4456 emit_call_insn_before (rtx pattern, rtx before)
4457 {
4458 if (INSN_P (before))
4459 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4460 else
4461 return emit_call_insn_before_noloc (pattern, before);
4462 }
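
/* Usage sketch (illustrative, not part of the original source): the plain
   entry points above forward to the _setloc variants whenever the reference
   insn is a real insn, so newly emitted code inherits its location.
   PATTERN, INSN and LAST below are hypothetical names.

       last = emit_insn_before (pattern, insn);        inherits INSN_LOCATOR (insn)
       last = emit_insn_before_noloc (pattern, insn);  leaves the locator untouched  */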
4463 \f
4464 /* Take X and emit it at the end of the doubly-linked
4465 INSN list.
4466
4467 Returns the last insn emitted. */
4468
4469 rtx
4470 emit_insn (rtx x)
4471 {
4472 rtx last = last_insn;
4473 rtx insn;
4474
4475 if (x == NULL_RTX)
4476 return last;
4477
4478 switch (GET_CODE (x))
4479 {
4480 case INSN:
4481 case JUMP_INSN:
4482 case CALL_INSN:
4483 case CODE_LABEL:
4484 case BARRIER:
4485 case NOTE:
4486 insn = x;
4487 while (insn)
4488 {
4489 rtx next = NEXT_INSN (insn);
4490 add_insn (insn);
4491 last = insn;
4492 insn = next;
4493 }
4494 break;
4495
4496 #ifdef ENABLE_RTL_CHECKING
4497 case SEQUENCE:
4498 gcc_unreachable ();
4499 break;
4500 #endif
4501
4502 default:
4503 last = make_insn_raw (x);
4504 add_insn (last);
4505 break;
4506 }
4507
4508 return last;
4509 }
4510
4511 /* Make an insn of code JUMP_INSN with pattern X
4512 and add it to the end of the doubly-linked list. */
4513
4514 rtx
4515 emit_jump_insn (rtx x)
4516 {
4517 rtx last = NULL_RTX, insn;
4518
4519 switch (GET_CODE (x))
4520 {
4521 case INSN:
4522 case JUMP_INSN:
4523 case CALL_INSN:
4524 case CODE_LABEL:
4525 case BARRIER:
4526 case NOTE:
4527 insn = x;
4528 while (insn)
4529 {
4530 rtx next = NEXT_INSN (insn);
4531 add_insn (insn);
4532 last = insn;
4533 insn = next;
4534 }
4535 break;
4536
4537 #ifdef ENABLE_RTL_CHECKING
4538 case SEQUENCE:
4539 gcc_unreachable ();
4540 break;
4541 #endif
4542
4543 default:
4544 last = make_jump_insn_raw (x);
4545 add_insn (last);
4546 break;
4547 }
4548
4549 return last;
4550 }
4551
4552 /* Make an insn of code CALL_INSN with pattern X
4553 and add it to the end of the doubly-linked list. */
4554
4555 rtx
4556 emit_call_insn (rtx x)
4557 {
4558 rtx insn;
4559
4560 switch (GET_CODE (x))
4561 {
4562 case INSN:
4563 case JUMP_INSN:
4564 case CALL_INSN:
4565 case CODE_LABEL:
4566 case BARRIER:
4567 case NOTE:
4568 insn = emit_insn (x);
4569 break;
4570
4571 #ifdef ENABLE_RTL_CHECKING
4572 case SEQUENCE:
4573 gcc_unreachable ();
4574 break;
4575 #endif
4576
4577 default:
4578 insn = make_call_insn_raw (x);
4579 add_insn (insn);
4580 break;
4581 }
4582
4583 return insn;
4584 }
4585
4586 /* Add the label LABEL to the end of the doubly-linked list. */
4587
4588 rtx
4589 emit_label (rtx label)
4590 {
4591 /* This can be called twice for the same label
4592 as a result of the confusion that follows a syntax error!
4593 So make it harmless. */
4594 if (INSN_UID (label) == 0)
4595 {
4596 INSN_UID (label) = cur_insn_uid++;
4597 add_insn (label);
4598 }
4599 return label;
4600 }
4601
4602 /* Make an insn of code BARRIER
4603 and add it to the end of the doubly-linked list. */
4604
4605 rtx
4606 emit_barrier (void)
4607 {
4608 rtx barrier = rtx_alloc (BARRIER);
4609 INSN_UID (barrier) = cur_insn_uid++;
4610 add_insn (barrier);
4611 return barrier;
4612 }
4613
4614 /* Make a line-numbering NOTE insn for LOCATION and add it to the end
4615    of the doubly-linked list, but only if line numbers are desired for
4616    debugging info and it doesn't match the previous one.  */
4617
4618 rtx
4619 emit_line_note (location_t location)
4620 {
4621 rtx note;
4622
4623 #ifdef USE_MAPPED_LOCATION
4624 if (location == last_location)
4625 return NULL_RTX;
4626 #else
4627 if (location.file && last_location.file
4628 && !strcmp (location.file, last_location.file)
4629 && location.line == last_location.line)
4630 return NULL_RTX;
4631 #endif
4632 last_location = location;
4633
4634 if (no_line_numbers)
4635 {
4636 cur_insn_uid++;
4637 return NULL_RTX;
4638 }
4639
4640 #ifdef USE_MAPPED_LOCATION
4641 note = emit_note ((int) location);
4642 #else
4643 note = emit_note (location.line);
4644 NOTE_SOURCE_FILE (note) = location.file;
4645 #endif
4646
4647 return note;
4648 }
4649
4650 /* Emit a copy of note ORIG. */
4651
4652 rtx
4653 emit_note_copy (rtx orig)
4654 {
4655 rtx note;
4656
4657 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4658 {
4659 cur_insn_uid++;
4660 return NULL_RTX;
4661 }
4662
4663 note = rtx_alloc (NOTE);
4664
4665 INSN_UID (note) = cur_insn_uid++;
4666 NOTE_DATA (note) = NOTE_DATA (orig);
4667 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4668 BLOCK_FOR_INSN (note) = NULL;
4669 add_insn (note);
4670
4671 return note;
4672 }
4673
4674 /* Make an insn of code NOTE with subtype NOTE_NO
4675    and add it to the end of the doubly-linked list.  */
4676
4677 rtx
4678 emit_note (int note_no)
4679 {
4680 rtx note;
4681
4682 note = rtx_alloc (NOTE);
4683 INSN_UID (note) = cur_insn_uid++;
4684 NOTE_LINE_NUMBER (note) = note_no;
4685 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4686 BLOCK_FOR_INSN (note) = NULL;
4687 add_insn (note);
4688 return note;
4689 }
4690
4691 /* Cause the next statement to emit a line note even if the line number
4692    has not changed.  */
4693
4694 void
4695 force_next_line_note (void)
4696 {
4697 #ifdef USE_MAPPED_LOCATION
4698 last_location = -1;
4699 #else
4700 last_location.line = -1;
4701 #endif
4702 }
4703
4704 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4705 note of this type already exists, remove it first. */
4706
4707 rtx
4708 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4709 {
4710 rtx note = find_reg_note (insn, kind, NULL_RTX);
4711
4712 switch (kind)
4713 {
4714 case REG_EQUAL:
4715 case REG_EQUIV:
4716 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4717 has multiple sets (some callers assume single_set
4718 means the insn only has one set, when in fact it
4719          means the insn only has one *useful* set).  */
4720 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4721 {
4722 gcc_assert (!note);
4723 return NULL_RTX;
4724 }
4725
4726 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4727 It serves no useful purpose and breaks eliminate_regs. */
4728 if (GET_CODE (datum) == ASM_OPERANDS)
4729 return NULL_RTX;
4730 break;
4731
4732 default:
4733 break;
4734 }
4735
4736 if (note)
4737 {
4738 XEXP (note, 0) = datum;
4739 return note;
4740 }
4741
4742 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4743 return REG_NOTES (insn);
4744 }
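
/* Usage sketch (illustrative only): a pass that knows INSN computes the same
   value as some rtx VAL can record that fact with

       set_unique_reg_note (insn, REG_EQUAL, val);

   Any existing REG_EQUAL note on INSN is reused rather than duplicated.
   INSN and VAL are hypothetical names.  */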
4745 \f
4746 /* Return an indication of which type of insn should have X as a body.
4747 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4748
4749 static enum rtx_code
4750 classify_insn (rtx x)
4751 {
4752 if (LABEL_P (x))
4753 return CODE_LABEL;
4754 if (GET_CODE (x) == CALL)
4755 return CALL_INSN;
4756 if (GET_CODE (x) == RETURN)
4757 return JUMP_INSN;
4758 if (GET_CODE (x) == SET)
4759 {
4760 if (SET_DEST (x) == pc_rtx)
4761 return JUMP_INSN;
4762 else if (GET_CODE (SET_SRC (x)) == CALL)
4763 return CALL_INSN;
4764 else
4765 return INSN;
4766 }
4767 if (GET_CODE (x) == PARALLEL)
4768 {
4769 int j;
4770 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4771 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4772 return CALL_INSN;
4773 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4774 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4775 return JUMP_INSN;
4776 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4777 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4778 return CALL_INSN;
4779 }
4780 return INSN;
4781 }
4782
4783 /* Emit the rtl pattern X as an appropriate kind of insn.
4784 If X is a label, it is simply added into the insn chain. */
4785
4786 rtx
4787 emit (rtx x)
4788 {
4789 enum rtx_code code = classify_insn (x);
4790
4791 switch (code)
4792 {
4793 case CODE_LABEL:
4794 return emit_label (x);
4795 case INSN:
4796 return emit_insn (x);
4797 case JUMP_INSN:
4798 {
4799 rtx insn = emit_jump_insn (x);
4800 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4801 return emit_barrier ();
4802 return insn;
4803 }
4804 case CALL_INSN:
4805 return emit_call_insn (x);
4806 default:
4807 gcc_unreachable ();
4808 }
4809 }
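
/* Illustrative note (not in the original source): emit picks the insn code
   from the shape of the pattern, for example

       emit (gen_rtx_SET (VOIDmode, pc_rtx, label_ref));   emitted as a JUMP_INSN
       emit (gen_rtx_SET (VOIDmode, reg, src));            emitted as a plain INSN

   where LABEL_REF, REG and SRC stand for previously constructed rtx.  */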
4810 \f
4811 /* Space for free sequence stack entries. */
4812 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4813
4814 /* Begin emitting insns to a sequence. If this sequence will contain
4815 something that might cause the compiler to pop arguments to function
4816 calls (because those pops have previously been deferred; see
4817 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4818 before calling this function. That will ensure that the deferred
4819 pops are not accidentally emitted in the middle of this sequence. */
4820
4821 void
4822 start_sequence (void)
4823 {
4824 struct sequence_stack *tem;
4825
4826 if (free_sequence_stack != NULL)
4827 {
4828 tem = free_sequence_stack;
4829 free_sequence_stack = tem->next;
4830 }
4831 else
4832 tem = ggc_alloc (sizeof (struct sequence_stack));
4833
4834 tem->next = seq_stack;
4835 tem->first = first_insn;
4836 tem->last = last_insn;
4837
4838 seq_stack = tem;
4839
4840 first_insn = 0;
4841 last_insn = 0;
4842 }
4843
4844 /* Set up the insn chain starting with FIRST as the current sequence,
4845 saving the previously current one. See the documentation for
4846 start_sequence for more information about how to use this function. */
4847
4848 void
4849 push_to_sequence (rtx first)
4850 {
4851 rtx last;
4852
4853 start_sequence ();
4854
4855 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4856
4857 first_insn = first;
4858 last_insn = last;
4859 }
4860
4861 /* Set up the outer-level insn chain
4862 as the current sequence, saving the previously current one. */
4863
4864 void
4865 push_topmost_sequence (void)
4866 {
4867 struct sequence_stack *stack, *top = NULL;
4868
4869 start_sequence ();
4870
4871 for (stack = seq_stack; stack; stack = stack->next)
4872 top = stack;
4873
4874 first_insn = top->first;
4875 last_insn = top->last;
4876 }
4877
4878 /* After emitting to the outer-level insn chain, update that outer-level
4879    insn chain, and restore the previously saved state.  */
4880
4881 void
4882 pop_topmost_sequence (void)
4883 {
4884 struct sequence_stack *stack, *top = NULL;
4885
4886 for (stack = seq_stack; stack; stack = stack->next)
4887 top = stack;
4888
4889 top->first = first_insn;
4890 top->last = last_insn;
4891
4892 end_sequence ();
4893 }
4894
4895 /* After emitting to a sequence, restore previous saved state.
4896
4897 To get the contents of the sequence just made, you must call
4898 `get_insns' *before* calling here.
4899
4900 If the compiler might have deferred popping arguments while
4901 generating this sequence, and this sequence will not be immediately
4902 inserted into the instruction stream, use do_pending_stack_adjust
4903 before calling get_insns. That will ensure that the deferred
4904 pops are inserted into this sequence, and not into some random
4905 location in the instruction stream. See INHIBIT_DEFER_POP for more
4906 information about deferred popping of arguments. */
4907
4908 void
4909 end_sequence (void)
4910 {
4911 struct sequence_stack *tem = seq_stack;
4912
4913 first_insn = tem->first;
4914 last_insn = tem->last;
4915 seq_stack = tem->next;
4916
4917 memset (tem, 0, sizeof (*tem));
4918 tem->next = free_sequence_stack;
4919 free_sequence_stack = tem;
4920 }
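
/* Usage sketch (illustrative, mirroring the header comment earlier in this
   file): the canonical way to build a detached insn list is

       start_sequence ();
       emit_insn (gen_rtx_SET (VOIDmode, dest, src));    any number of emits
       seq = get_insns ();
       end_sequence ();
       emit_insn_before (seq, spot);

   DEST, SRC, SEQ and SPOT are hypothetical names.  Call
   do_pending_stack_adjust before start_sequence if deferred argument pops
   might otherwise end up inside the sequence.  */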
4921
4922 /* Return 1 if currently emitting into a sequence. */
4923
4924 int
4925 in_sequence_p (void)
4926 {
4927 return seq_stack != 0;
4928 }
4929 \f
4930 /* Put the various virtual registers into REGNO_REG_RTX. */
4931
4932 void
4933 init_virtual_regs (struct emit_status *es)
4934 {
4935 rtx *ptr = es->x_regno_reg_rtx;
4936 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4937 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4938 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4939 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4940 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4941 }
4942
4943 \f
4944 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4945 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4946 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4947 static int copy_insn_n_scratches;
4948
4949 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4950 copied an ASM_OPERANDS.
4951 In that case, it is the original input-operand vector. */
4952 static rtvec orig_asm_operands_vector;
4953
4954 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4955 copied an ASM_OPERANDS.
4956 In that case, it is the copied input-operand vector. */
4957 static rtvec copy_asm_operands_vector;
4958
4959 /* Likewise for the constraints vector. */
4960 static rtvec orig_asm_constraints_vector;
4961 static rtvec copy_asm_constraints_vector;
4962
4963 /* Recursively create a new copy of an rtx for copy_insn.
4964 This function differs from copy_rtx in that it handles SCRATCHes and
4965 ASM_OPERANDs properly.
4966 Normally, this function is not used directly; use copy_insn as front end.
4967 However, you could first copy an insn pattern with copy_insn and then use
4968 this function afterwards to properly copy any REG_NOTEs containing
4969 SCRATCHes. */
4970
4971 rtx
4972 copy_insn_1 (rtx orig)
4973 {
4974 rtx copy;
4975 int i, j;
4976 RTX_CODE code;
4977 const char *format_ptr;
4978
4979 code = GET_CODE (orig);
4980
4981 switch (code)
4982 {
4983 case REG:
4984 case CONST_INT:
4985 case CONST_DOUBLE:
4986 case CONST_VECTOR:
4987 case SYMBOL_REF:
4988 case CODE_LABEL:
4989 case PC:
4990 case CC0:
4991 return orig;
4992 case CLOBBER:
4993 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4994 return orig;
4995 break;
4996
4997 case SCRATCH:
4998 for (i = 0; i < copy_insn_n_scratches; i++)
4999 if (copy_insn_scratch_in[i] == orig)
5000 return copy_insn_scratch_out[i];
5001 break;
5002
5003 case CONST:
5004 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5005 a LABEL_REF, it isn't sharable. */
5006 if (GET_CODE (XEXP (orig, 0)) == PLUS
5007 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5008 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5009 return orig;
5010 break;
5011
5012 /* A MEM with a constant address is not sharable. The problem is that
5013 the constant address may need to be reloaded. If the mem is shared,
5014 then reloading one copy of this mem will cause all copies to appear
5015 to have been reloaded. */
5016
5017 default:
5018 break;
5019 }
5020
5021 copy = rtx_alloc (code);
5022
5023 /* Copy the various flags, and other information. We assume that
5024 all fields need copying, and then clear the fields that should
5025 not be copied. That is the sensible default behavior, and forces
5026 us to explicitly document why we are *not* copying a flag. */
5027 memcpy (copy, orig, RTX_HDR_SIZE);
5028
5029 /* We do not copy the USED flag, which is used as a mark bit during
5030 walks over the RTL. */
5031 RTX_FLAG (copy, used) = 0;
5032
5033 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5034 if (INSN_P (orig))
5035 {
5036 RTX_FLAG (copy, jump) = 0;
5037 RTX_FLAG (copy, call) = 0;
5038 RTX_FLAG (copy, frame_related) = 0;
5039 }
5040
5041 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5042
5043 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5044 {
5045 copy->u.fld[i] = orig->u.fld[i];
5046 switch (*format_ptr++)
5047 {
5048 case 'e':
5049 if (XEXP (orig, i) != NULL)
5050 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5051 break;
5052
5053 case 'E':
5054 case 'V':
5055 if (XVEC (orig, i) == orig_asm_constraints_vector)
5056 XVEC (copy, i) = copy_asm_constraints_vector;
5057 else if (XVEC (orig, i) == orig_asm_operands_vector)
5058 XVEC (copy, i) = copy_asm_operands_vector;
5059 else if (XVEC (orig, i) != NULL)
5060 {
5061 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5062 for (j = 0; j < XVECLEN (copy, i); j++)
5063 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5064 }
5065 break;
5066
5067 case 't':
5068 case 'w':
5069 case 'i':
5070 case 's':
5071 case 'S':
5072 case 'u':
5073 case '0':
5074 /* These are left unchanged. */
5075 break;
5076
5077 default:
5078 gcc_unreachable ();
5079 }
5080 }
5081
5082 if (code == SCRATCH)
5083 {
5084 i = copy_insn_n_scratches++;
5085 gcc_assert (i < MAX_RECOG_OPERANDS);
5086 copy_insn_scratch_in[i] = orig;
5087 copy_insn_scratch_out[i] = copy;
5088 }
5089 else if (code == ASM_OPERANDS)
5090 {
5091 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5092 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5093 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5094 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5095 }
5096
5097 return copy;
5098 }
5099
5100 /* Create a new copy of an rtx.
5101 This function differs from copy_rtx in that it handles SCRATCHes and
5102 ASM_OPERANDs properly.
5103 INSN doesn't really have to be a full INSN; it could be just the
5104 pattern. */
5105 rtx
5106 copy_insn (rtx insn)
5107 {
5108 copy_insn_n_scratches = 0;
5109 orig_asm_operands_vector = 0;
5110 orig_asm_constraints_vector = 0;
5111 copy_asm_operands_vector = 0;
5112 copy_asm_constraints_vector = 0;
5113 return copy_insn_1 (insn);
5114 }
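
/* Usage sketch (illustrative only): to duplicate both an insn's pattern and
   its notes while keeping SCRATCHes and ASM_OPERANDS consistent, copy the
   pattern with copy_insn first and then run copy_insn_1 over the notes, as
   the comment above copy_insn_1 suggests:

       pat  = copy_insn (PATTERN (insn));
       note = copy_insn_1 (REG_NOTES (insn));

   INSN, PAT and NOTE are hypothetical names; emit_copy_of_insn_after later
   in this file follows the same approach note by note.  */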
5115
5116 /* Initialize data structures and variables in this file
5117 before generating rtl for each function. */
5118
5119 void
5120 init_emit (void)
5121 {
5122 struct function *f = cfun;
5123
5124 f->emit = ggc_alloc (sizeof (struct emit_status));
5125 first_insn = NULL;
5126 last_insn = NULL;
5127 cur_insn_uid = 1;
5128 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5129 last_location = UNKNOWN_LOCATION;
5130 first_label_num = label_num;
5131 seq_stack = NULL;
5132
5133 /* Init the tables that describe all the pseudo regs. */
5134
5135 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5136
5137 f->emit->regno_pointer_align
5138 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5139 * sizeof (unsigned char));
5140
5141 regno_reg_rtx
5142 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5143
5144 /* Put copies of all the hard registers into regno_reg_rtx. */
5145 memcpy (regno_reg_rtx,
5146 static_regno_reg_rtx,
5147 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5148
5149 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5150 init_virtual_regs (f->emit);
5151
5152 /* Indicate that the virtual registers and stack locations are
5153 all pointers. */
5154 REG_POINTER (stack_pointer_rtx) = 1;
5155 REG_POINTER (frame_pointer_rtx) = 1;
5156 REG_POINTER (hard_frame_pointer_rtx) = 1;
5157 REG_POINTER (arg_pointer_rtx) = 1;
5158
5159 REG_POINTER (virtual_incoming_args_rtx) = 1;
5160 REG_POINTER (virtual_stack_vars_rtx) = 1;
5161 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5162 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5163 REG_POINTER (virtual_cfa_rtx) = 1;
5164
5165 #ifdef STACK_BOUNDARY
5166 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5167 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5168 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5169 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5170
5171 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5172 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5173 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5174 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5175 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5176 #endif
5177
5178 #ifdef INIT_EXPANDERS
5179 INIT_EXPANDERS;
5180 #endif
5181 }
5182
5183 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5184
5185 static rtx
5186 gen_const_vector (enum machine_mode mode, int constant)
5187 {
5188 rtx tem;
5189 rtvec v;
5190 int units, i;
5191 enum machine_mode inner;
5192
5193 units = GET_MODE_NUNITS (mode);
5194 inner = GET_MODE_INNER (mode);
5195
5196 v = rtvec_alloc (units);
5197
5198 /* We need to call this function after we set the scalar const_tiny_rtx
5199 entries. */
5200 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5201
5202 for (i = 0; i < units; ++i)
5203 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5204
5205 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5206 return tem;
5207 }
5208
5209 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5210 all elements are zero, and the one vector when all elements are one. */
5211 rtx
5212 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5213 {
5214 enum machine_mode inner = GET_MODE_INNER (mode);
5215 int nunits = GET_MODE_NUNITS (mode);
5216 rtx x;
5217 int i;
5218
5219 /* Check to see if all of the elements have the same value. */
5220 x = RTVEC_ELT (v, nunits - 1);
5221 for (i = nunits - 2; i >= 0; i--)
5222 if (RTVEC_ELT (v, i) != x)
5223 break;
5224
5225 /* If the values are all the same, check to see if we can use one of the
5226 standard constant vectors. */
5227 if (i == -1)
5228 {
5229 if (x == CONST0_RTX (inner))
5230 return CONST0_RTX (mode);
5231 else if (x == CONST1_RTX (inner))
5232 return CONST1_RTX (mode);
5233 }
5234
5235 return gen_rtx_raw_CONST_VECTOR (mode, v);
5236 }
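
/* Illustrative example (not from the original source): building a vector
   whose elements are all the inner-mode zero through this routine yields the
   shared CONST0_RTX for that mode rather than a fresh CONST_VECTOR:

       v = rtvec_alloc (GET_MODE_NUNITS (mode));
       for (i = 0; i < GET_MODE_NUNITS (mode); i++)
	 RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));
       x = gen_rtx_CONST_VECTOR (mode, v);   now x == CONST0_RTX (mode)

   V, I, X and MODE are hypothetical names for an integer vector mode.  */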
5237
5238 /* Create some permanent unique rtl objects shared between all functions.
5239 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5240
5241 void
5242 init_emit_once (int line_numbers)
5243 {
5244 int i;
5245 enum machine_mode mode;
5246 enum machine_mode double_mode;
5247
5248 /* We need reg_raw_mode, so initialize the modes now. */
5249 init_reg_modes_once ();
5250
5251 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5252 tables. */
5253 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5254 const_int_htab_eq, NULL);
5255
5256 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5257 const_double_htab_eq, NULL);
5258
5259 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5260 mem_attrs_htab_eq, NULL);
5261 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5262 reg_attrs_htab_eq, NULL);
5263
5264 no_line_numbers = ! line_numbers;
5265
5266 /* Compute the word and byte modes. */
5267
5268 byte_mode = VOIDmode;
5269 word_mode = VOIDmode;
5270 double_mode = VOIDmode;
5271
5272 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5273 mode = GET_MODE_WIDER_MODE (mode))
5274 {
5275 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5276 && byte_mode == VOIDmode)
5277 byte_mode = mode;
5278
5279 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5280 && word_mode == VOIDmode)
5281 word_mode = mode;
5282 }
5283
5284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5285 mode = GET_MODE_WIDER_MODE (mode))
5286 {
5287 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5288 && double_mode == VOIDmode)
5289 double_mode = mode;
5290 }
5291
5292 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5293
5294 /* Assign register numbers to the globally defined register rtx.
5295 This must be done at runtime because the register number field
5296 is in a union and some compilers can't initialize unions. */
5297
5298 pc_rtx = gen_rtx_PC (VOIDmode);
5299 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5300 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5301 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5302 if (hard_frame_pointer_rtx == 0)
5303 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5304 HARD_FRAME_POINTER_REGNUM);
5305 if (arg_pointer_rtx == 0)
5306 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5307 virtual_incoming_args_rtx =
5308 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5309 virtual_stack_vars_rtx =
5310 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5311 virtual_stack_dynamic_rtx =
5312 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5313 virtual_outgoing_args_rtx =
5314 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5315 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5316
5317 /* Initialize RTL for commonly used hard registers. These are
5318 copied into regno_reg_rtx as we begin to compile each function. */
5319 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5320 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5321
5322 #ifdef INIT_EXPANDERS
5323 /* This is to initialize {init|mark|free}_machine_status before the first
5324 call to push_function_context_to. This is needed by the Chill front
5325 end which calls push_function_context_to before the first call to
5326 init_function_start. */
5327 INIT_EXPANDERS;
5328 #endif
5329
5330 /* Create the unique rtx's for certain rtx codes and operand values. */
5331
5332 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5333 tries to use these variables. */
5334 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5335 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5336 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5337
5338 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5339 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5340 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5341 else
5342 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5343
5344 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5345 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5346 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5347 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5348 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5349 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5350 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5351
5352 dconsthalf = dconst1;
5353 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5354
5355 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5356
5357 /* Initialize mathematical constants for constant folding builtins.
5358      These constants need to be given to at least 160 bits of precision.  */
5359 real_from_string (&dconstpi,
5360 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5361 real_from_string (&dconste,
5362 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5363
5364 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5365 {
5366 REAL_VALUE_TYPE *r =
5367 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5368
5369 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5370 mode = GET_MODE_WIDER_MODE (mode))
5371 const_tiny_rtx[i][(int) mode] =
5372 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5373
5374 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5375
5376 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5377 mode = GET_MODE_WIDER_MODE (mode))
5378 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5379
5380 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5381 mode != VOIDmode;
5382 mode = GET_MODE_WIDER_MODE (mode))
5383 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5384 }
5385
5386 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5387 mode != VOIDmode;
5388 mode = GET_MODE_WIDER_MODE (mode))
5389 {
5390 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5391 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5392 }
5393
5394 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5395 mode != VOIDmode;
5396 mode = GET_MODE_WIDER_MODE (mode))
5397 {
5398 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5399 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5400 }
5401
5402 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5403 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5404 const_tiny_rtx[0][i] = const0_rtx;
5405
5406 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5407 if (STORE_FLAG_VALUE == 1)
5408 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5409
5410 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5411 return_address_pointer_rtx
5412 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5413 #endif
5414
5415 #ifdef STATIC_CHAIN_REGNUM
5416 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5417
5418 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5419 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5420 static_chain_incoming_rtx
5421 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5422 else
5423 #endif
5424 static_chain_incoming_rtx = static_chain_rtx;
5425 #endif
5426
5427 #ifdef STATIC_CHAIN
5428 static_chain_rtx = STATIC_CHAIN;
5429
5430 #ifdef STATIC_CHAIN_INCOMING
5431 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5432 #else
5433 static_chain_incoming_rtx = static_chain_rtx;
5434 #endif
5435 #endif
5436
5437 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5438 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5439 }
5440 \f
5441 /* Produce an exact duplicate of insn INSN after AFTER.
5442    Take care to update any libcall regions if present.  */
5443
5444 rtx
5445 emit_copy_of_insn_after (rtx insn, rtx after)
5446 {
5447 rtx new;
5448 rtx note1, note2, link;
5449
5450 switch (GET_CODE (insn))
5451 {
5452 case INSN:
5453 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5454 break;
5455
5456 case JUMP_INSN:
5457 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5458 break;
5459
5460 case CALL_INSN:
5461 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5462 if (CALL_INSN_FUNCTION_USAGE (insn))
5463 CALL_INSN_FUNCTION_USAGE (new)
5464 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5465 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5466 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5467 break;
5468
5469 default:
5470 gcc_unreachable ();
5471 }
5472
5473 /* Update LABEL_NUSES. */
5474 mark_jump_label (PATTERN (new), new, 0);
5475
5476 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5477
5478 /* If the old insn is frame related, then so is the new one. This is
5479 primarily needed for IA-64 unwind info which marks epilogue insns,
5480 which may be duplicated by the basic block reordering code. */
5481 RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
5482
5483 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5484 make them. */
5485 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5486 if (REG_NOTE_KIND (link) != REG_LABEL)
5487 {
5488 if (GET_CODE (link) == EXPR_LIST)
5489 REG_NOTES (new)
5490 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5491 XEXP (link, 0),
5492 REG_NOTES (new)));
5493 else
5494 REG_NOTES (new)
5495 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5496 XEXP (link, 0),
5497 REG_NOTES (new)));
5498 }
5499
5500 /* Fix the libcall sequences. */
5501 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5502 {
5503 rtx p = new;
5504 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5505 p = PREV_INSN (p);
5506 XEXP (note1, 0) = p;
5507 XEXP (note2, 0) = new;
5508 }
5509 INSN_CODE (new) = INSN_CODE (insn);
5510 return new;
5511 }
5512
5513 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5514 rtx
5515 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5516 {
5517 if (hard_reg_clobbers[mode][regno])
5518 return hard_reg_clobbers[mode][regno];
5519 else
5520 return (hard_reg_clobbers[mode][regno] =
5521 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5522 }
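
/* Usage sketch (illustrative only): repeated requests for the same
   (mode, regno) pair return the single cached CLOBBER rtx, so callers may
   compare results by pointer:

       rtx a = gen_hard_reg_clobber (word_mode, 0);
       rtx b = gen_hard_reg_clobber (word_mode, 0);
       gcc_assert (a == b);

   Hard register number 0 is used purely for illustration.  */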
5523
5524 #include "gt-emit-rtl.h"