/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_last_label_num was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconst3;
REAL_VALUE_TYPE dconst10;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;
REAL_VALUE_TYPE dconstpi;
REAL_VALUE_TYPE dconste;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (the common case), these
   are one and the same object.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;           /* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;  /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;       /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw (rtx);
static rtx make_call_insn_raw (rtx);
static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void unshare_all_decls (tree);
static void reset_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
                                 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector_0 (enum machine_mode);
static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  rtx a = (rtx) x, b = (rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
          ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
          ^ (size_t) p->expr);
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
          && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
               unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
          || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
          ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
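
/* An illustrative sketch (not from the original source): because the
   attribute blocks above are hash-consed, two MEMs with identical
   attributes end up sharing one mem_attrs record, and a MEM whose
   attributes are all defaults simply has MEM_ATTRS == 0.  The exact
   all-default alignment depends on STRICT_ALIGNMENT, so the values
   below are only for illustration.  */
#if 0
static int
example_mem_attrs_shared (tree expr, rtx offset, rtx size)
{
  mem_attrs *a = get_mem_attrs (1, expr, offset, size, 32, SImode);
  mem_attrs *b = get_mem_attrs (1, expr, offset, size, 32, SImode);
  return a == b;        /* Hash-consing makes this nonzero.  */
}
#endif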

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure for decl DECL and offset OFFSET,
   and insert it into the hash table if one identical to it is not
   already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
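
/* An illustrative sketch (not from the original source): because small
   CONST_INTs are pre-allocated and larger ones are hash-consed above,
   pointer equality is sufficient to compare two CONST_INTs.  */
#if 0
static int
example_const_int_sharing (void)
{
  rtx a = GEN_INT (2);
  rtx b = gen_rtx_CONST_INT (VOIDmode, 2);
  return a == b;        /* Always nonzero; both are the shared object.  */
}
#endif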

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
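
/* A worked example (illustrative only): gen_int_mode truncates the
   constant to MODE and sign-extends from MODE's sign bit, so for the
   8-bit QImode the value 0xff comes back as the CONST_INT -1, i.e.
   GEN_INT (trunc_int_for_mode (0xff, QImode)).  */
#if 0
static rtx
example_gen_int_mode (void)
{
  return gen_int_mode (0xff, QImode);   /* Same object as GEN_INT (-1).  */
}
#endif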

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
          /* We can get a 0 for an error mark.  */
          && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
          && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
        abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
         our sign bit are all one.  So we get either a reasonable negative
         value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
          && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
              != ((HOST_WIDE_INT) (-1) << (width - 1))))
        i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
               && ! (i1 == ~0 && i0 < 0))
        i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
        /* We cannot represent this value as a constant.  */
        abort ();

      /* If this would be an entire word for the target, but is not for
         the host, then sign-extend on the host so that the number will
         look the same way on the host that it would on the target.

         For example, when building a 64 bit alpha hosted 32 bit sparc
         targeted compiler, then we want the 32 bit unsigned value -1 to be
         represented as a 64 bit value -1, and not as 0x00000000ffffffff.
         The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
          && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
        i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
         CONST_INT.

         ??? Strictly speaking, this is wrong if we create a CONST_INT for
         a large unsigned constant with the size of MODE being
         HOST_BITS_PER_WIDE_INT and later try to interpret that constant
         in a wider mode.  In that case we will mis-interpret it as a
         negative number.

         Unfortunately, the only alternative is to make a CONST_DOUBLE for
         any constant in any mode if it is an unsigned constant larger
         than the maximum signed integer in an int on the host.  However,
         doing this will break everyone that always expects to see a
         CONST_INT for SImode and smaller.

         We have always been making CONST_INTs in this case, so nothing
         new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
        i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
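
/* A worked example (illustrative, assuming a 64-bit HOST_WIDE_INT):
   for SImode the high word is recomputed from the sign of the low
   word, so both calls below return the shared CONST_INT -1 rather
   than a CONST_DOUBLE.  */
#if 0
static int
example_immed_double_const (void)
{
  rtx a = immed_double_const (-1, 0, SImode);   /* CONST_INT -1 */
  rtx b = immed_double_const (-1, -1, SImode);  /* also CONST_INT -1 */
  return a == b;        /* Nonzero: CONST_INTs are shared.  */
}
#endif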

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
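
/* An illustrative sketch (not from the original source): in Pmode the
   fixed pointer registers come back as the shared global objects, so
   explicit frame-pointer references can be recognized by pointer
   comparison, as the comment above describes.  */
#if 0
static int
example_gen_rtx_REG (void)
{
  rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
  return fp == frame_pointer_rtx;       /* Nonzero before reload.  */
}
#endif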

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
\f
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**          This routine generates an RTX of the size specified by
**      <code>, which is an RTX code.  The RTX structure is initialized
**      from the arguments <element1> through <elementn>, which are
**      interpreted according to the specific RTX type's format.  The
**      special machine mode associated with the rtx (if any) is specified
**      in <mode>.
**
**          gen_rtx can be invoked in a way which resembles the lisp-like
**      rtx it will generate.  For example, the following rtx structure:
**
**            (plus:QI (mem:QI (reg:SI 1))
**                     (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**      ...would be generated by the following C code:
**
**              gen_rtx (PLUS, QImode,
**                  gen_rtx (MEM, QImode,
**                      gen_rtx (REG, SImode, 1)),
**                  gen_rtx (MEM, QImode,
**                      gen_rtx (PLUS, SImode,
**                          gen_rtx (REG, SImode, 2),
**                          gen_rtx (REG, SImode, 3)))),
*/

/*VARARGS2*/
rtx
gen_rtx (enum rtx_code code, enum machine_mode mode, ...)
{
  int i;                /* Array indices...  */
  const char *fmt;      /* Current rtx's format...  */
  rtx rt_val;           /* RTX to return to caller...  */
  va_list p;

  va_start (p, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
        HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
        HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

        rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);        /* Allocate the storage space.  */
      rt_val->mode = mode;              /* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);      /* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
        {
          switch (*fmt++)
            {
            case '0':           /* Field with unknown use.  Zero it.  */
              X0EXP (rt_val, i) = NULL_RTX;
              break;

            case 'i':           /* An integer?  */
              XINT (rt_val, i) = va_arg (p, int);
              break;

            case 'w':           /* A wide integer?  */
              XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
              break;

            case 's':           /* A string?  */
              XSTR (rt_val, i) = va_arg (p, char *);
              break;

            case 'e':           /* An expression?  */
            case 'u':           /* An insn?  Same except when printing.  */
              XEXP (rt_val, i) = va_arg (p, rtx);
              break;

            case 'E':           /* An RTX vector?  */
              XVEC (rt_val, i) = va_arg (p, rtvec);
              break;

            case 'b':           /* A bitmap?  */
              XBITMAP (rt_val, i) = va_arg (p, bitmap);
              break;

            case 't':           /* A tree?  */
              XTREE (rt_val, i) = va_arg (p, tree);
              break;

            default:
              abort ();
            }
        }
      break;
    }

  va_end (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**          This routine creates an rtvec and stores within it the
**      pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  vector = alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);     /* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
                          old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
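
/* An illustrative sketch (not from the original source): with
   generating_concat_p set, a request for a complex-mode pseudo yields
   a CONCAT of two independent part-mode pseudos, e.g. for DCmode
   something like (concat:DC (reg:DF 100) (reg:DF 101)) rather than a
   single (reg:DC 100), so the parts can be allocated separately.  */
#if 0
static rtx
example_complex_pseudo (void)
{
  return gen_reg_rtx (DCmode);
}
#endif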

/* Generate a register with the same attributes as REG,
   but offset by OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
{
  rtx new = gen_rtx_REG (mode, regno);
  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
                                   REG_OFFSET (reg) + offset);
  return new;
}
/* Set the register attributes of REG from the memory reference MEM.  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (GET_CODE (parm_rtx) == REG)
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (GET_CODE (XEXP (x, 0)) == REG)
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For register, we maintain the reverse information too.  */
  if (GET_CODE (x) == REG)
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (rtx x, int check_mode)
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();
#ifdef ENABLE_CHECKING
  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
                                      SUBREG_BYTE (x), mode))
    abort ();
#endif
  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  if (innermode == VOIDmode || innermode == BLKmode)
    abort ();

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
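
/* A worked example (illustrative only): for a constant the low part is
   just the truncated constant, so gen_lowpart_common (QImode,
   GEN_INT (0x1234)) returns GEN_INT (0x34) via simplify_gen_subreg.  */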
\f
/* Return the constant real or imaginary part (which has mode MODE)
   of a complex value X.  The IMAGPART_P argument determines whether
   the real or imaginary component should be returned.  This function
   returns NULL_RTX if the component isn't a constant.  */

static rtx
gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
{
  tree decl, part;

  if (GET_CODE (x) == MEM
      && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
    {
      decl = SYMBOL_REF_DECL (XEXP (x, 0));
      if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
        {
          part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (TREE_CODE (part) == REAL_CST
              || TREE_CODE (part) == INTEGER_CST)
            return expand_expr (part, NULL_RTX, mode, 0);
        }
    }
  return NULL_RTX;
}

/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (enum machine_mode mode, rtx x)
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 0);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (enum machine_mode mode, rtx x)
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 1);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
           && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
           && REG_P (x)
           && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
          < (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (enum machine_mode mode, rtx x)
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
        abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
          && SCALAR_INT_MODE_P (GET_MODE (x))
          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                    GET_MODE_BITSIZE (GET_MODE (x)))
          && ! no_new_pseudos)
        return gen_lowpart (mode, force_reg (GET_MODE (x), x));

      if (WORDS_BIG_ENDIAN)
        offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
                  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
        /* Adjust the address so that the address-after-the-data
           is unchanged.  */
        offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
                   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
        abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
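
/* A worked example (illustrative, assuming 4-byte words): taking the
   SImode parts of an 8-byte DImode value gives
   subreg_lowpart_offset (SImode, DImode) == 0 and
   subreg_highpart_offset (SImode, DImode) == 4 on a little-endian
   target; on a big-endian target the two results are swapped.  */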

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.  Most uses of this function can now be
   replaced by simplify_subreg.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new;

      else if (reload_completed)
        {
          if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
            return 0;
        }
      else
        return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
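
/* An illustrative sketch (not from the original source): fetching the
   two word_mode halves of a DImode operand on a 32-bit target.  Either
   call may return 0 after reload if no valid address can be formed;
   operand_subword_force below never returns 0.  */
#if 0
static rtx
example_operand_subword (rtx op)        /* OP has mode DImode.  */
{
  rtx lo = operand_subword (op, 0, 1, DImode);  /* word at low address */
  rtx hi = operand_subword (op, 1, 1, DImode);  /* word at high address */
  return lo ? lo : hi;
}
#endif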

/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
         to a pseudo register.  */
      if (GET_CODE (op) == REG)
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
\f
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (rtx insn)
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
                                 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
        SET_SRC (body) = new;
      else
        SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
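
/* An illustrative before/after (not from the original source): given a
   test insn whose body is

     (set (cc0) (reg:SI 100))

   reverse_comparison rewrites the body in place as a compare of zero
   against the operand:

     (set (cc0) (compare (const_int 0) (reg:SI 100)))  */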
\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
             || TREE_CODE (inner) == NON_LVALUE_EXPR
             || TREE_CODE (inner) == VIEW_CONVERT_EXPR
             || TREE_CODE (inner) == SAVE_EXPR
             || TREE_CODE (inner) == PLACEHOLDER_EXPR)
        if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
          inner = find_placeholder (inner, &placeholder_ptr);
        else
          inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
        inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
                  TREE_OPERAND (ref, 1));
}

/* Given REF, a MEM, and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL and we
     can see that here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
         && (TYPE_READONLY (type) || TREE_READONLY (t)))
        || (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
             || TREE_CODE (t) == NON_LVALUE_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
         to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
          size = (DECL_SIZE_UNIT (t)
                  && host_integerp (DECL_SIZE_UNIT (t), 1)
                  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
          align = DECL_ALIGN (t);
        }

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
        {
          align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
          align = CONSTANT_ALIGNMENT (t, align);
#endif
        }

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
         such as the word offset in the structure that might be modified.
         But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
               && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
        {
          expr = component_ref_for_mem_expr (t);
          offset = const0_rtx;
          apply_bitpos = bitpos;
          /* ??? Any reason the field size would be different than
             the size we got from the type?  */
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree array = TREE_OPERAND (t2, 0);
              tree domain = TYPE_DOMAIN (TREE_TYPE (array));
              tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
              tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of the
                 array element.  */
              if (low_bound != 0 && ! integer_zerop (low_bound))
                index = fold (build (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound));

              /* If the index has a self-referential type, pass it to a
                 WITH_RECORD_EXPR; if the component size is, pass our
                 component to one.  */
              if (CONTAINS_PLACEHOLDER_P (index))
                index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t2);
              if (CONTAINS_PLACEHOLDER_P (unit_size))
                unit_size = build (WITH_RECORD_EXPR, sizetype,
                                   unit_size, array);

              off_tree
                = fold (build (PLUS_EXPR, sizetype,
                               fold (build (MULT_EXPR, sizetype,
                                            index,
                                            unit_size)),
                               off_tree));
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2))
            {
              expr = t2;
              offset = NULL;
              if (host_integerp (off_tree, 1))
                {
                  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
                  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
                  align = DECL_ALIGN (t2);
                  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
                    align = aoff;
                  offset = GEN_INT (ioff);
                  apply_bitpos = bitpos;
                }
            }
          else if (TREE_CODE (t2) == COMPONENT_REF)
            {
              expr = component_ref_for_mem_expr (t2);
              if (host_integerp (off_tree, 1))
                {
                  offset = GEN_INT (tree_low_cst (off_tree, 1));
                  apply_bitpos = bitpos;
                }
              /* ??? Any reason the field size would be different than
                 the size we got from the type?  */
            }
          else if (flag_argument_noalias > 1
                   && TREE_CODE (t2) == INDIRECT_REF
                   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
            {
              expr = t2;
              offset = NULL;
            }
        }

      /* If this is a Fortran indirect argument reference, record the
         parameter decl.  */
      else if (flag_argument_noalias > 1
               && TREE_CODE (t) == INDIRECT_REF
               && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
        {
          expr = t;
          offset = NULL;
        }
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
        size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
           || TREE_CODE (t) == ARRAY_RANGE_REF
           || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

1759 /* Set the decl for MEM to DECL. */
1760
1761 void
1762 set_mem_attrs_from_reg (rtx mem, rtx reg)
1763 {
1764 MEM_ATTRS (mem)
1765 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1766 GEN_INT (REG_OFFSET (reg)),
1767 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1768 }
1769
1770 /* Set the alias set of MEM to SET. */
1771
1772 void
1773 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1774 {
1775 #ifdef ENABLE_CHECKING
1776 /* If the new and old alias sets don't conflict, something is wrong. */
1777 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1778 abort ();
1779 #endif
1780
1781 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1782 MEM_SIZE (mem), MEM_ALIGN (mem),
1783 GET_MODE (mem));
1784 }
1785
1786 /* Set the alignment of MEM to ALIGN bits. */
1787
1788 void
1789 set_mem_align (rtx mem, unsigned int align)
1790 {
1791 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1792 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1793 GET_MODE (mem));
1794 }
1795
1796 /* Set the expr for MEM to EXPR. */
1797
1798 void
1799 set_mem_expr (rtx mem, tree expr)
1800 {
1801 MEM_ATTRS (mem)
1802 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1803 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1804 }
1805
1806 /* Set the offset of MEM to OFFSET. */
1807
1808 void
1809 set_mem_offset (rtx mem, rtx offset)
1810 {
1811 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1812 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1813 GET_MODE (mem));
1814 }
1815
1816 /* Set the size of MEM to SIZE. */
1817
1818 void
1819 set_mem_size (rtx mem, rtx size)
1820 {
1821 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1822 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1823 GET_MODE (mem));
1824 }
1825 \f
1826 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1827 and its address changed to ADDR. (VOIDmode means don't change the mode.
1828 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1829 returned memory location is required to be valid. The memory
1830 attributes are not changed. */
1831
1832 static rtx
1833 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1834 {
1835 rtx new;
1836
1837 if (GET_CODE (memref) != MEM)
1838 abort ();
1839 if (mode == VOIDmode)
1840 mode = GET_MODE (memref);
1841 if (addr == 0)
1842 addr = XEXP (memref, 0);
1843 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1844 && (!validate || memory_address_p (mode, addr)))
1845 return memref;
1846
1847 if (validate)
1848 {
1849 if (reload_in_progress || reload_completed)
1850 {
1851 if (! memory_address_p (mode, addr))
1852 abort ();
1853 }
1854 else
1855 addr = memory_address (mode, addr);
1856 }
1857
1858 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1859 return memref;
1860
1861 new = gen_rtx_MEM (mode, addr);
1862 MEM_COPY_ATTRIBUTES (new, memref);
1863 return new;
1864 }
1865
1866 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1867 way we are changing MEMREF, so we only preserve the alias set. */
1868
1869 rtx
1870 change_address (rtx memref, enum machine_mode mode, rtx addr)
1871 {
1872 rtx new = change_address_1 (memref, mode, addr, 1);
1873 enum machine_mode mmode = GET_MODE (new);
1874
1875 /* If there are no changes, just return the original memory reference. */
1876 if (new == memref)
1877 return new;
1878
1879 MEM_ATTRS (new)
1880 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
1881 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
1882 (mmode == BLKmode ? BITS_PER_UNIT
1883 : GET_MODE_ALIGNMENT (mmode)),
1884 mmode);
1885
1886 return new;
1887 }
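/* For illustration, a minimal sketch (not part of the original sources):
   view an SImode stack slot as a byte reference, keeping the address
   (NULL_RTX means "don't change it") and letting the attributes be
   recomputed from the new mode as shown above:

	rtx slot = gen_rtx_MEM (SImode, virtual_stack_vars_rtx);
	rtx byte = change_address (slot, QImode, NULL_RTX);

   Only the alias set of SLOT is preserved; size and alignment are
   derived from QImode. */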
1888
1889 /* Return a memory reference like MEMREF, but with its mode changed
1890 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1891 nonzero, the memory address is forced to be valid.
1892 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1893 and the caller is responsible for adjusting the MEMREF base register. */
1894
1895 rtx
1896 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1897 int validate, int adjust)
1898 {
1899 rtx addr = XEXP (memref, 0);
1900 rtx new;
1901 rtx memoffset = MEM_OFFSET (memref);
1902 rtx size = 0;
1903 unsigned int memalign = MEM_ALIGN (memref);
1904
1905 /* If there are no changes, just return the original memory reference. */
1906 if (mode == GET_MODE (memref) && !offset
1907 && (!validate || memory_address_p (mode, addr)))
1908 return memref;
1909
1910 /* ??? Prefer to create garbage instead of creating shared rtl.
1911 This may happen even if offset is nonzero -- consider
1912 (plus (plus reg reg) const_int) -- so do this always. */
1913 addr = copy_rtx (addr);
1914
1915 if (adjust)
1916 {
1917 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1918 object, we can merge it into the LO_SUM. */
1919 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1920 && offset >= 0
1921 && (unsigned HOST_WIDE_INT) offset
1922 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1923 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1924 plus_constant (XEXP (addr, 1), offset));
1925 else
1926 addr = plus_constant (addr, offset);
1927 }
1928
1929 new = change_address_1 (memref, mode, addr, validate);
1930
1931 /* Compute the new values of the memory attributes due to this adjustment.
1932 We add the offsets and update the alignment. */
1933 if (memoffset)
1934 memoffset = GEN_INT (offset + INTVAL (memoffset));
1935
1936 /* Compute the new alignment by taking the MIN of the alignment and the
1937 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1938 is zero. */
1939 if (offset != 0)
1940 memalign
1941 = MIN (memalign,
1942 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1943
1944 /* We can compute the size in a number of ways. */
1945 if (GET_MODE (new) != BLKmode)
1946 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1947 else if (MEM_SIZE (memref))
1948 size = plus_constant (MEM_SIZE (memref), -offset);
1949
1950 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1951 memoffset, size, memalign, GET_MODE (new));
1952
1953 /* At some point, we should validate that this offset is within the object,
1954 if all the appropriate values are known. */
1955 return new;
1956 }
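/* A sketch of typical use: callers normally go through the adjust_address
   and adjust_address_nv macros, which supply VALIDATE and ADJUST.
   Splitting a hypothetical DImode reference DIMEM into word halves
   would look like:

	rtx lo = adjust_address (dimem, SImode, 0);
	rtx hi = adjust_address (dimem, SImode, GET_MODE_SIZE (SImode));

   Each half has its offset folded into MEM_OFFSET and its alignment
   reduced as computed above. */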
1957
1958 /* Return a memory reference like MEMREF, but with its mode changed
1959 to MODE and its address changed to ADDR, which is assumed to be
1960 the address of MEMREF offset by OFFSET bytes.  If VALIDATE is
1961 nonzero, the memory address is forced to be valid. */
1962
1963 rtx
1964 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1965 HOST_WIDE_INT offset, int validate)
1966 {
1967 memref = change_address_1 (memref, VOIDmode, addr, validate);
1968 return adjust_address_1 (memref, mode, offset, validate, 0);
1969 }
1970
1971 /* Return a memory reference like MEMREF, but whose address is changed by
1972 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1973 known to be in OFFSET (possibly 1). */
1974
1975 rtx
1976 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1977 {
1978 rtx new, addr = XEXP (memref, 0);
1979
1980 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1981
1982 /* At this point we don't know _why_ the address is invalid. It
1983 could have secondary memory references, multiplies or anything.
1984
1985 However, if we did go and rearrange things, we can wind up not
1986 being able to recognize the magic around pic_offset_table_rtx.
1987 This stuff is fragile, and is yet another example of why it is
1988 bad to expose PIC machinery too early. */
1989 if (! memory_address_p (GET_MODE (memref), new)
1990 && GET_CODE (addr) == PLUS
1991 && XEXP (addr, 0) == pic_offset_table_rtx)
1992 {
1993 addr = force_reg (GET_MODE (addr), addr);
1994 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1995 }
1996
1997 update_temp_slot_address (XEXP (memref, 0), new);
1998 new = change_address_1 (memref, VOIDmode, new, 1);
1999
2000 /* If there are no changes, just return the original memory reference. */
2001 if (new == memref)
2002 return new;
2003
2004 /* Update the alignment to reflect the offset. Reset the offset, which
2005 we don't know. */
2006 MEM_ATTRS (new)
2007 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2008 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2009 GET_MODE (new));
2010 return new;
2011 }
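/* A sketch (BASE and IDX are hypothetical): address a word-sized array
   element at a variable byte offset known to be a multiple of 4, so
   passing POW2 == 4 lets the result keep 32-bit alignment:

	rtx elt = offset_address (base, idx, 4);  */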
2012
2013 /* Return a memory reference like MEMREF, but with its address changed to
2014 ADDR. The caller is asserting that the actual piece of memory pointed
2015 to is the same, just the form of the address is being changed, such as
2016 by putting something into a register. */
2017
2018 rtx
2019 replace_equiv_address (rtx memref, rtx addr)
2020 {
2021 /* change_address_1 copies the memory attribute structure without change
2022 and that's exactly what we want here. */
2023 update_temp_slot_address (XEXP (memref, 0), addr);
2024 return change_address_1 (memref, VOIDmode, addr, 1);
2025 }
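/* A common use, sketched (MEM is a hypothetical memory reference): keep
   the same location but simplify the form of the address by forcing it
   into a register:

	rtx reg_addr = force_reg (Pmode, XEXP (mem, 0));
	mem = replace_equiv_address (mem, reg_addr);  */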
2026
2027 /* Likewise, but the reference is not required to be valid. */
2028
2029 rtx
2030 replace_equiv_address_nv (rtx memref, rtx addr)
2031 {
2032 return change_address_1 (memref, VOIDmode, addr, 0);
2033 }
2034
2035 /* Return a memory reference like MEMREF, but with its mode widened to
2036 MODE and offset by OFFSET. This would be used by targets that e.g.
2037 cannot issue QImode memory operations and have to use SImode memory
2038 operations plus masking logic. */
2039
2040 rtx
2041 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2042 {
2043 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2044 tree expr = MEM_EXPR (new);
2045 rtx memoffset = MEM_OFFSET (new);
2046 unsigned int size = GET_MODE_SIZE (mode);
2047
2048 /* If there are no changes, just return the original memory reference. */
2049 if (new == memref)
2050 return new;
2051
2052 /* If we don't know what offset we were at within the expression, then
2053 we can't know if we've overstepped the bounds. */
2054 if (! memoffset)
2055 expr = NULL_TREE;
2056
2057 while (expr)
2058 {
2059 if (TREE_CODE (expr) == COMPONENT_REF)
2060 {
2061 tree field = TREE_OPERAND (expr, 1);
2062
2063 if (! DECL_SIZE_UNIT (field))
2064 {
2065 expr = NULL_TREE;
2066 break;
2067 }
2068
2069 /* Is the field at least as large as the access? If so, ok,
2070 otherwise strip back to the containing structure. */
2071 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2072 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2073 && INTVAL (memoffset) >= 0)
2074 break;
2075
2076 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2077 {
2078 expr = NULL_TREE;
2079 break;
2080 }
2081
2082 expr = TREE_OPERAND (expr, 0);
2083 memoffset = (GEN_INT (INTVAL (memoffset)
2084 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2085 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2086 / BITS_PER_UNIT)));
2087 }
2088 /* Similarly for the decl. */
2089 else if (DECL_P (expr)
2090 && DECL_SIZE_UNIT (expr)
2091 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2092 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2093 && (! memoffset || INTVAL (memoffset) >= 0))
2094 break;
2095 else
2096 {
2097 /* The widened memory access overflows the expression, which means
2098 that it could alias another expression. Zap it. */
2099 expr = NULL_TREE;
2100 break;
2101 }
2102 }
2103
2104 if (! expr)
2105 memoffset = NULL_RTX;
2106
2107 /* The widened memory may alias other stuff, so zap the alias set. */
2108 /* ??? Maybe use get_alias_set on any remaining expression. */
2109
2110 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2111 MEM_ALIGN (new), mode);
2112
2113 return new;
2114 }
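/* A sketch for a target lacking byte loads (MEM0 is a hypothetical
   word-aligned QImode reference):

	rtx wide = widen_memory_access (mem0, SImode, 0);

   The caller then masks out the byte it wants; the alias set of the
   result is cleared above because the extra bytes may belong to
   neighboring objects. */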
2115 \f
2116 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2117
2118 rtx
2119 gen_label_rtx (void)
2120 {
2121 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2122 NULL, label_num++, NULL);
2123 }
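/* The usual pattern, sketched: create the label up front, reference it
   from jumps, then place it with emit_label (defined later in this
   file) once its position in the insn stream is reached:

	rtx label = gen_label_rtx ();
	...
	emit_label (label);  */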
2124 \f
2125 /* For procedure integration. */
2126
2127 /* Install new pointers to the first and last insns in the chain.
2128 Also, set cur_insn_uid to one higher than the last in use.
2129 Used for an inline-procedure after copying the insn chain. */
2130
2131 void
2132 set_new_first_and_last_insn (rtx first, rtx last)
2133 {
2134 rtx insn;
2135
2136 first_insn = first;
2137 last_insn = last;
2138 cur_insn_uid = 0;
2139
2140 for (insn = first; insn; insn = NEXT_INSN (insn))
2141 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2142
2143 cur_insn_uid++;
2144 }
2145
2146 /* Set the last label number found in the current function.
2147 This is used when belatedly compiling an inline function. */
2148
2149 void
2150 set_new_last_label_num (int last)
2151 {
2152 base_label_num = label_num;
2153 last_label_num = last;
2154 }
2155 \f
2156 /* Restore all variables describing the current status from the structure *P.
2157 This is used after a nested function. */
2158
2159 void
2160 restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
2161 {
2162 last_label_num = 0;
2163 }
2164 \f
2165 /* Go through all the RTL insn bodies and copy any invalid shared
2166 structure. This routine should only be called once. */
2167
2168 void
2169 unshare_all_rtl (tree fndecl, rtx insn)
2170 {
2171 tree decl;
2172
2173 /* Make sure that virtual parameters are not shared. */
2174 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2175 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2176
2177 /* Make sure that virtual stack slots are not shared. */
2178 unshare_all_decls (DECL_INITIAL (fndecl));
2179
2180 /* Unshare just about everything else. */
2181 unshare_all_rtl_in_chain (insn);
2182
2183 /* Make sure the addresses of stack slots found outside the insn chain
2184 (such as, in DECL_RTL of a variable) are not shared
2185 with the insn chain.
2186
2187 This special care is necessary when the stack slot MEM does not
2188 actually appear in the insn chain. If it does appear, its address
2189 is unshared from all else at that point. */
2190 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2191 }
2192
2193 /* Go through all the RTL insn bodies and copy any invalid shared
2194 structure, again. This is a fairly expensive thing to do so it
2195 should be done sparingly. */
2196
2197 void
2198 unshare_all_rtl_again (rtx insn)
2199 {
2200 rtx p;
2201 tree decl;
2202
2203 for (p = insn; p; p = NEXT_INSN (p))
2204 if (INSN_P (p))
2205 {
2206 reset_used_flags (PATTERN (p));
2207 reset_used_flags (REG_NOTES (p));
2208 reset_used_flags (LOG_LINKS (p));
2209 }
2210
2211 /* Make sure that virtual stack slots are not shared. */
2212 reset_used_decls (DECL_INITIAL (cfun->decl));
2213
2214 /* Make sure that virtual parameters are not shared. */
2215 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2216 reset_used_flags (DECL_RTL (decl));
2217
2218 reset_used_flags (stack_slot_list);
2219
2220 unshare_all_rtl (cfun->decl, insn);
2221 }
2222
2223 /* Check that ORIG is not marked when it should not be, and mark ORIG
2224 as in use.  Recursively do the same for subexpressions. */
2225
2226 static void
2227 verify_rtx_sharing (rtx orig, rtx insn)
2228 {
2229 rtx x = orig;
2230 int i;
2231 enum rtx_code code;
2232 const char *format_ptr;
2233
2234 if (x == 0)
2235 return;
2236
2237 code = GET_CODE (x);
2238
2239 /* These types may be freely shared. */
2240
2241 switch (code)
2242 {
2243 case REG:
2244 case QUEUED:
2245 case CONST_INT:
2246 case CONST_DOUBLE:
2247 case CONST_VECTOR:
2248 case SYMBOL_REF:
2249 case LABEL_REF:
2250 case CODE_LABEL:
2251 case PC:
2252 case CC0:
2253 case SCRATCH:
2254 /* Each SCRATCH represents a distinct value, so it must stay shared. */
2255 return;
2256 case CLOBBER:
2257 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2258 return;
2259 break;
2260
2261 case CONST:
2262 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2263 a LABEL_REF, it isn't sharable. */
2264 if (GET_CODE (XEXP (x, 0)) == PLUS
2265 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2266 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2267 return;
2268 break;
2269
2270 case MEM:
2271 /* A MEM is allowed to be shared if its address is constant. */
2272 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2273 || reload_completed || reload_in_progress)
2274 return;
2275
2276 break;
2277
2278 default:
2279 break;
2280 }
2281
2282 /* This rtx may not be shared.  If it has already been seen,
2283 report the invalid sharing. */
2284
2285 if (RTX_FLAG (x, used))
2286 {
2287 error ("Invalid rtl sharing found in the insn");
2288 debug_rtx (insn);
2289 error ("Shared rtx");
2290 debug_rtx (x);
2291 abort ();
2292 }
2293 RTX_FLAG (x, used) = 1;
2294
2295 /* Now scan the subexpressions recursively. */
2296
2297 format_ptr = GET_RTX_FORMAT (code);
2298
2299 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2300 {
2301 switch (*format_ptr++)
2302 {
2303 case 'e':
2304 verify_rtx_sharing (XEXP (x, i), insn);
2305 break;
2306
2307 case 'E':
2308 if (XVEC (x, i) != NULL)
2309 {
2310 int j;
2311 int len = XVECLEN (x, i);
2312
2313 for (j = 0; j < len; j++)
2314 {
2315 /* We allow sharing of ASM_OPERANDS inside a single instruction. */
2316 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2317 && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
2318 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2319 else
2320 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2321 }
2322 }
2323 break;
2324 }
2325 }
2326 return;
2327 }
2328
2329 /* Go through all the RTL insn bodies and check that there is no unexpected
2330 sharing between the subexpressions. */
2331
2332 void
2333 verify_rtl_sharing (void)
2334 {
2335 rtx p;
2336
2337 for (p = get_insns (); p; p = NEXT_INSN (p))
2338 if (INSN_P (p))
2339 {
2340 reset_used_flags (PATTERN (p));
2341 reset_used_flags (REG_NOTES (p));
2342 reset_used_flags (LOG_LINKS (p));
2343 }
2344
2345 for (p = get_insns (); p; p = NEXT_INSN (p))
2346 if (INSN_P (p))
2347 {
2348 verify_rtx_sharing (PATTERN (p), p);
2349 verify_rtx_sharing (REG_NOTES (p), p);
2350 verify_rtx_sharing (LOG_LINKS (p), p);
2351 }
2352 }
2353
2354 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2355 Assumes the mark bits are cleared at entry. */
2356
2357 void
2358 unshare_all_rtl_in_chain (rtx insn)
2359 {
2360 for (; insn; insn = NEXT_INSN (insn))
2361 if (INSN_P (insn))
2362 {
2363 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2364 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2365 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2366 }
2367 }
2368
2369 /* Go through all virtual stack slots of a function and copy any
2370 shared structure. */
2371 static void
2372 unshare_all_decls (tree blk)
2373 {
2374 tree t;
2375
2376 /* Copy shared decls. */
2377 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2378 if (DECL_RTL_SET_P (t))
2379 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2380
2381 /* Now process sub-blocks. */
2382 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2383 unshare_all_decls (t);
2384 }
2385
2386 /* Go through all virtual stack slots of a function and mark them as
2387 not shared. */
2388 static void
2389 reset_used_decls (tree blk)
2390 {
2391 tree t;
2392
2393 /* Mark decls. */
2394 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2395 if (DECL_RTL_SET_P (t))
2396 reset_used_flags (DECL_RTL (t));
2397
2398 /* Now process sub-blocks. */
2399 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2400 reset_used_decls (t);
2401 }
2402
2403 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2404 placed in the result directly, rather than being copied. MAY_SHARE is
2405 either a MEM or an EXPR_LIST of MEMs. */
2406
2407 rtx
2408 copy_most_rtx (rtx orig, rtx may_share)
2409 {
2410 rtx copy;
2411 int i, j;
2412 RTX_CODE code;
2413 const char *format_ptr;
2414
2415 if (orig == may_share
2416 || (GET_CODE (may_share) == EXPR_LIST
2417 && in_expr_list_p (may_share, orig)))
2418 return orig;
2419
2420 code = GET_CODE (orig);
2421
2422 switch (code)
2423 {
2424 case REG:
2425 case QUEUED:
2426 case CONST_INT:
2427 case CONST_DOUBLE:
2428 case CONST_VECTOR:
2429 case SYMBOL_REF:
2430 case CODE_LABEL:
2431 case PC:
2432 case CC0:
2433 return orig;
2434 default:
2435 break;
2436 }
2437
2438 copy = rtx_alloc (code);
2439 PUT_MODE (copy, GET_MODE (orig));
2440 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2441 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2442 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2443 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2444 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2445
2446 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2447
2448 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2449 {
2450 switch (*format_ptr++)
2451 {
2452 case 'e':
2453 XEXP (copy, i) = XEXP (orig, i);
2454 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2455 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2456 break;
2457
2458 case 'u':
2459 XEXP (copy, i) = XEXP (orig, i);
2460 break;
2461
2462 case 'E':
2463 case 'V':
2464 XVEC (copy, i) = XVEC (orig, i);
2465 if (XVEC (orig, i) != NULL)
2466 {
2467 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2468 for (j = 0; j < XVECLEN (copy, i); j++)
2469 XVECEXP (copy, i, j)
2470 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2471 }
2472 break;
2473
2474 case 'w':
2475 XWINT (copy, i) = XWINT (orig, i);
2476 break;
2477
2478 case 'n':
2479 case 'i':
2480 XINT (copy, i) = XINT (orig, i);
2481 break;
2482
2483 case 't':
2484 XTREE (copy, i) = XTREE (orig, i);
2485 break;
2486
2487 case 's':
2488 case 'S':
2489 XSTR (copy, i) = XSTR (orig, i);
2490 break;
2491
2492 case '0':
2493 X0ANY (copy, i) = X0ANY (orig, i);
2494 break;
2495
2496 default:
2497 abort ();
2498 }
2499 }
2500 return copy;
2501 }
2502
2503 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2504 Recursively does the same for subexpressions. Uses
2505 copy_rtx_if_shared_1 to reduce stack space. */
2506
2507 rtx
2508 copy_rtx_if_shared (rtx orig)
2509 {
2510 copy_rtx_if_shared_1 (&orig);
2511 return orig;
2512 }
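/* Unsharing is a two-phase mark-and-copy walk: the USED bits must be
   cleared before copying (see unshare_all_rtl_again above).  Unsharing
   one pattern in isolation is sketched as:

	reset_used_flags (PATTERN (insn));
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */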
2513
2514 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2515 use. Recursively does the same for subexpressions. */
2516
2517 static void
2518 copy_rtx_if_shared_1 (rtx *orig1)
2519 {
2520 rtx x;
2521 int i;
2522 enum rtx_code code;
2523 rtx *last_ptr;
2524 const char *format_ptr;
2525 int copied = 0;
2526 int length;
2527
2528 /* Repeat is used to turn tail-recursion into iteration. */
2529 repeat:
2530 x = *orig1;
2531
2532 if (x == 0)
2533 return;
2534
2535 code = GET_CODE (x);
2536
2537 /* These types may be freely shared. */
2538
2539 switch (code)
2540 {
2541 case REG:
2542 case QUEUED:
2543 case CONST_INT:
2544 case CONST_DOUBLE:
2545 case CONST_VECTOR:
2546 case SYMBOL_REF:
2547 case LABEL_REF:
2548 case CODE_LABEL:
2549 case PC:
2550 case CC0:
2551 case SCRATCH:
2552 /* Each SCRATCH represents a distinct value, so it must stay shared. */
2553 return;
2554 case CLOBBER:
2555 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2556 return;
2557 break;
2558
2559 case CONST:
2560 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2561 a LABEL_REF, it isn't sharable. */
2562 if (GET_CODE (XEXP (x, 0)) == PLUS
2563 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2564 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2565 return;
2566 break;
2567
2568 case INSN:
2569 case JUMP_INSN:
2570 case CALL_INSN:
2571 case NOTE:
2572 case BARRIER:
2573 /* The chain of insns is not being copied. */
2574 return;
2575
2576 default:
2577 break;
2578 }
2579
2580 /* This rtx may not be shared. If it has already been seen,
2581 replace it with a copy of itself. */
2582
2583 if (RTX_FLAG (x, used))
2584 {
2585 rtx copy;
2586
2587 copy = rtx_alloc (code);
2588 memcpy (copy, x, RTX_SIZE (code));
2589 x = copy;
2590 copied = 1;
2591 }
2592 RTX_FLAG (x, used) = 1;
2593
2594 /* Now scan the subexpressions recursively.
2595 We can store any replaced subexpressions directly into X
2596 since we know X is not shared! Any vectors in X
2597 must be copied if X was copied. */
2598
2599 format_ptr = GET_RTX_FORMAT (code);
2600 length = GET_RTX_LENGTH (code);
2601 last_ptr = NULL;
2602
2603 for (i = 0; i < length; i++)
2604 {
2605 switch (*format_ptr++)
2606 {
2607 case 'e':
2608 if (last_ptr)
2609 copy_rtx_if_shared_1 (last_ptr);
2610 last_ptr = &XEXP (x, i);
2611 break;
2612
2613 case 'E':
2614 if (XVEC (x, i) != NULL)
2615 {
2616 int j;
2617 int len = XVECLEN (x, i);
2618
2619 /* Copy the vector iff we copied the rtx and the length
2620 is nonzero. */
2621 if (copied && len > 0)
2622 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2623
2624 /* Call recursively on all inside the vector. */
2625 for (j = 0; j < len; j++)
2626 {
2627 if (last_ptr)
2628 copy_rtx_if_shared_1 (last_ptr);
2629 last_ptr = &XVECEXP (x, i, j);
2630 }
2631 }
2632 break;
2633 }
2634 }
2635 *orig1 = x;
2636 if (last_ptr)
2637 {
2638 orig1 = last_ptr;
2639 goto repeat;
2640 }
2641 return;
2642 }
2643
2644 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2645 to look for shared sub-parts. */
2646
2647 void
2648 reset_used_flags (rtx x)
2649 {
2650 int i, j;
2651 enum rtx_code code;
2652 const char *format_ptr;
2653 int length;
2654
2655 /* Repeat is used to turn tail-recursion into iteration. */
2656 repeat:
2657 if (x == 0)
2658 return;
2659
2660 code = GET_CODE (x);
2661
2662 /* These types may be freely shared so we needn't do any resetting
2663 for them. */
2664
2665 switch (code)
2666 {
2667 case REG:
2668 case QUEUED:
2669 case CONST_INT:
2670 case CONST_DOUBLE:
2671 case CONST_VECTOR:
2672 case SYMBOL_REF:
2673 case CODE_LABEL:
2674 case PC:
2675 case CC0:
2676 return;
2677
2678 case INSN:
2679 case JUMP_INSN:
2680 case CALL_INSN:
2681 case NOTE:
2682 case LABEL_REF:
2683 case BARRIER:
2684 /* The chain of insns is not being copied. */
2685 return;
2686
2687 default:
2688 break;
2689 }
2690
2691 RTX_FLAG (x, used) = 0;
2692
2693 format_ptr = GET_RTX_FORMAT (code);
2694 length = GET_RTX_LENGTH (code);
2695
2696 for (i = 0; i < length; i++)
2697 {
2698 switch (*format_ptr++)
2699 {
2700 case 'e':
2701 if (i == length-1)
2702 {
2703 x = XEXP (x, i);
2704 goto repeat;
2705 }
2706 reset_used_flags (XEXP (x, i));
2707 break;
2708
2709 case 'E':
2710 for (j = 0; j < XVECLEN (x, i); j++)
2711 reset_used_flags (XVECEXP (x, i, j));
2712 break;
2713 }
2714 }
2715 }
2716
2717 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2718 to look for shared sub-parts. */
2719
2720 void
2721 set_used_flags (rtx x)
2722 {
2723 int i, j;
2724 enum rtx_code code;
2725 const char *format_ptr;
2726
2727 if (x == 0)
2728 return;
2729
2730 code = GET_CODE (x);
2731
2732 /* These types may be freely shared so we needn't do any resetting
2733 for them. */
2734
2735 switch (code)
2736 {
2737 case REG:
2738 case QUEUED:
2739 case CONST_INT:
2740 case CONST_DOUBLE:
2741 case CONST_VECTOR:
2742 case SYMBOL_REF:
2743 case CODE_LABEL:
2744 case PC:
2745 case CC0:
2746 return;
2747
2748 case INSN:
2749 case JUMP_INSN:
2750 case CALL_INSN:
2751 case NOTE:
2752 case LABEL_REF:
2753 case BARRIER:
2754 /* The chain of insns is not being copied. */
2755 return;
2756
2757 default:
2758 break;
2759 }
2760
2761 RTX_FLAG (x, used) = 1;
2762
2763 format_ptr = GET_RTX_FORMAT (code);
2764 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2765 {
2766 switch (*format_ptr++)
2767 {
2768 case 'e':
2769 set_used_flags (XEXP (x, i));
2770 break;
2771
2772 case 'E':
2773 for (j = 0; j < XVECLEN (x, i); j++)
2774 set_used_flags (XVECEXP (x, i, j));
2775 break;
2776 }
2777 }
2778 }
2779 \f
2780 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2781 Return X or the rtx for the pseudo reg the value of X was copied into.
2782 OTHER must be valid as a SET_DEST. */
2783
2784 rtx
2785 make_safe_from (rtx x, rtx other)
2786 {
2787 while (1)
2788 switch (GET_CODE (other))
2789 {
2790 case SUBREG:
2791 other = SUBREG_REG (other);
2792 break;
2793 case STRICT_LOW_PART:
2794 case SIGN_EXTEND:
2795 case ZERO_EXTEND:
2796 other = XEXP (other, 0);
2797 break;
2798 default:
2799 goto done;
2800 }
2801 done:
2802 if ((GET_CODE (other) == MEM
2803 && ! CONSTANT_P (x)
2804 && GET_CODE (x) != REG
2805 && GET_CODE (x) != SUBREG)
2806 || (GET_CODE (other) == REG
2807 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2808 || reg_mentioned_p (other, x))))
2809 {
2810 rtx temp = gen_reg_rtx (GET_MODE (x));
2811 emit_move_insn (temp, x);
2812 return temp;
2813 }
2814 return x;
2815 }
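/* A sketch (OP0 and TARGET are hypothetical): an expander about to
   clobber TARGET early in a multi-insn sequence protects a source
   operand first:

	op0 = make_safe_from (op0, target);

   If OP0 and TARGET could interfere, OP0 now lives in a fresh pseudo
   and later stores into TARGET cannot change it. */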
2816 \f
2817 /* Emission of insns (adding them to the doubly-linked list). */
2818
2819 /* Return the first insn of the current sequence or current function. */
2820
2821 rtx
2822 get_insns (void)
2823 {
2824 return first_insn;
2825 }
2826
2827 /* Specify a new insn as the first in the chain. */
2828
2829 void
2830 set_first_insn (rtx insn)
2831 {
2832 if (PREV_INSN (insn) != 0)
2833 abort ();
2834 first_insn = insn;
2835 }
2836
2837 /* Return the last insn emitted in current sequence or current function. */
2838
2839 rtx
2840 get_last_insn (void)
2841 {
2842 return last_insn;
2843 }
2844
2845 /* Specify a new insn as the last in the chain. */
2846
2847 void
2848 set_last_insn (rtx insn)
2849 {
2850 if (NEXT_INSN (insn) != 0)
2851 abort ();
2852 last_insn = insn;
2853 }
2854
2855 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2856
2857 rtx
2858 get_last_insn_anywhere (void)
2859 {
2860 struct sequence_stack *stack;
2861 if (last_insn)
2862 return last_insn;
2863 for (stack = seq_stack; stack; stack = stack->next)
2864 if (stack->last != 0)
2865 return stack->last;
2866 return 0;
2867 }
2868
2869 /* Return the first nonnote insn emitted in current sequence or current
2870 function. This routine looks inside SEQUENCEs. */
2871
2872 rtx
2873 get_first_nonnote_insn (void)
2874 {
2875 rtx insn = first_insn;
2876
2877 /* Unlike next_nonnote_insn, consider FIRST_INSN itself. */
2878 while (insn && GET_CODE (insn) == NOTE)
2879 insn = next_insn (insn);
2880
2881 return insn;
2885 }
2886
2887 /* Return the last nonnote insn emitted in current sequence or current
2888 function. This routine looks inside SEQUENCEs. */
2889
2890 rtx
2891 get_last_nonnote_insn (void)
2892 {
2893 rtx insn = last_insn;
2894
2895 /* Unlike prev_nonnote_insn, consider LAST_INSN itself. */
2896 while (insn && GET_CODE (insn) == NOTE)
2897 insn = previous_insn (insn);
2898
2899 return insn;
2903 }
2904
2905 /* Return a number larger than any instruction's uid in this function. */
2906
2907 int
2908 get_max_uid (void)
2909 {
2910 return cur_insn_uid;
2911 }
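/* Typical use, sketched: size a table indexed by INSN_UID, leaving room
   for every uid currently in use (the array name is arbitrary):

	int *uid_data = xcalloc (get_max_uid (), sizeof (int));  */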
2912
2913 /* Renumber instructions so that no instruction UIDs are wasted. */
2914
2915 void
2916 renumber_insns (FILE *stream)
2917 {
2918 rtx insn;
2919
2920 /* If we're not supposed to renumber instructions, don't. */
2921 if (!flag_renumber_insns)
2922 return;
2923
2924 /* If there aren't that many instructions, then it's not really
2925 worth renumbering them. */
2926 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2927 return;
2928
2929 cur_insn_uid = 1;
2930
2931 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2932 {
2933 if (stream)
2934 fprintf (stream, "Renumbering insn %d to %d\n",
2935 INSN_UID (insn), cur_insn_uid);
2936 INSN_UID (insn) = cur_insn_uid++;
2937 }
2938 }
2939 \f
2940 /* Return the next insn. If it is a SEQUENCE, return the first insn
2941 of the sequence. */
2942
2943 rtx
2944 next_insn (rtx insn)
2945 {
2946 if (insn)
2947 {
2948 insn = NEXT_INSN (insn);
2949 if (insn && GET_CODE (insn) == INSN
2950 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2951 insn = XVECEXP (PATTERN (insn), 0, 0);
2952 }
2953
2954 return insn;
2955 }
2956
2957 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2958 of the sequence. */
2959
2960 rtx
2961 previous_insn (rtx insn)
2962 {
2963 if (insn)
2964 {
2965 insn = PREV_INSN (insn);
2966 if (insn && GET_CODE (insn) == INSN
2967 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2968 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2969 }
2970
2971 return insn;
2972 }
2973
2974 /* Return the next insn after INSN that is not a NOTE. This routine does not
2975 look inside SEQUENCEs. */
2976
2977 rtx
2978 next_nonnote_insn (rtx insn)
2979 {
2980 while (insn)
2981 {
2982 insn = NEXT_INSN (insn);
2983 if (insn == 0 || GET_CODE (insn) != NOTE)
2984 break;
2985 }
2986
2987 return insn;
2988 }
2989
2990 /* Return the previous insn before INSN that is not a NOTE. This routine does
2991 not look inside SEQUENCEs. */
2992
2993 rtx
2994 prev_nonnote_insn (rtx insn)
2995 {
2996 while (insn)
2997 {
2998 insn = PREV_INSN (insn);
2999 if (insn == 0 || GET_CODE (insn) != NOTE)
3000 break;
3001 }
3002
3003 return insn;
3004 }
3005
3006 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3007 or 0, if there is none. This routine does not look inside
3008 SEQUENCEs. */
3009
3010 rtx
3011 next_real_insn (rtx insn)
3012 {
3013 while (insn)
3014 {
3015 insn = NEXT_INSN (insn);
3016 if (insn == 0 || GET_CODE (insn) == INSN
3017 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3018 break;
3019 }
3020
3021 return insn;
3022 }
3023
3024 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3025 or 0, if there is none. This routine does not look inside
3026 SEQUENCEs. */
3027
3028 rtx
3029 prev_real_insn (rtx insn)
3030 {
3031 while (insn)
3032 {
3033 insn = PREV_INSN (insn);
3034 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3035 || GET_CODE (insn) == JUMP_INSN)
3036 break;
3037 }
3038
3039 return insn;
3040 }
3041
3042 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3043 This routine does not look inside SEQUENCEs. */
3044
3045 rtx
3046 last_call_insn (void)
3047 {
3048 rtx insn;
3049
3050 for (insn = get_last_insn ();
3051 insn && GET_CODE (insn) != CALL_INSN;
3052 insn = PREV_INSN (insn))
3053 ;
3054
3055 return insn;
3056 }
3057
3058 /* Return nonzero if INSN really does something: a CALL_INSN or
3059 JUMP_INSN, or an INSN whose pattern is not a bare USE or CLOBBER
3060 once reload has completed. */
3061
3062 int
3063 active_insn_p (rtx insn)
3064 {
3065 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3066 || (GET_CODE (insn) == INSN
3067 && (! reload_completed
3068 || (GET_CODE (PATTERN (insn)) != USE
3069 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3070 }
3071
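/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn. */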
3072 rtx
3073 next_active_insn (rtx insn)
3074 {
3075 while (insn)
3076 {
3077 insn = NEXT_INSN (insn);
3078 if (insn == 0 || active_insn_p (insn))
3079 break;
3080 }
3081
3082 return insn;
3083 }
3084
3085 /* Find the last insn before INSN that really does something. This routine
3086 does not look inside SEQUENCEs. Until reload has completed, this is the
3087 same as prev_real_insn. */
3088
3089 rtx
3090 prev_active_insn (rtx insn)
3091 {
3092 while (insn)
3093 {
3094 insn = PREV_INSN (insn);
3095 if (insn == 0 || active_insn_p (insn))
3096 break;
3097 }
3098
3099 return insn;
3100 }
3101
3102 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3103
3104 rtx
3105 next_label (rtx insn)
3106 {
3107 while (insn)
3108 {
3109 insn = NEXT_INSN (insn);
3110 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3111 break;
3112 }
3113
3114 return insn;
3115 }
3116
3117 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3118
3119 rtx
3120 prev_label (rtx insn)
3121 {
3122 while (insn)
3123 {
3124 insn = PREV_INSN (insn);
3125 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3126 break;
3127 }
3128
3129 return insn;
3130 }
3131 \f
3132 #ifdef HAVE_cc0
3133 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3134 and REG_CC_USER notes so we can find it. */
3135
3136 void
3137 link_cc0_insns (rtx insn)
3138 {
3139 rtx user = next_nonnote_insn (insn);
3140
3141 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3142 user = XVECEXP (PATTERN (user), 0, 0);
3143
3144 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3145 REG_NOTES (user));
3146 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3147 }
3148
3149 /* Return the next insn that uses CC0 after INSN, which is assumed to
3150 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3151 applied to the result of this function should yield INSN).
3152
3153 Normally, this is simply the next insn. However, if a REG_CC_USER note
3154 is present, it contains the insn that uses CC0.
3155
3156 Return 0 if we can't find the insn. */
3157
3158 rtx
3159 next_cc0_user (rtx insn)
3160 {
3161 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3162
3163 if (note)
3164 return XEXP (note, 0);
3165
3166 insn = next_nonnote_insn (insn);
3167 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3168 insn = XVECEXP (PATTERN (insn), 0, 0);
3169
3170 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3171 return insn;
3172
3173 return 0;
3174 }
3175
3176 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3177 note, it is the previous insn. */
3178
3179 rtx
3180 prev_cc0_setter (rtx insn)
3181 {
3182 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3183
3184 if (note)
3185 return XEXP (note, 0);
3186
3187 insn = prev_nonnote_insn (insn);
3188 if (! sets_cc0_p (PATTERN (insn)))
3189 abort ();
3190
3191 return insn;
3192 }
3193 #endif
3194
3195 /* Increment the label uses for all labels present in X. */
3196
3197 static void
3198 mark_label_nuses (rtx x)
3199 {
3200 enum rtx_code code;
3201 int i, j;
3202 const char *fmt;
3203
3204 code = GET_CODE (x);
3205 if (code == LABEL_REF)
3206 LABEL_NUSES (XEXP (x, 0))++;
3207
3208 fmt = GET_RTX_FORMAT (code);
3209 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3210 {
3211 if (fmt[i] == 'e')
3212 mark_label_nuses (XEXP (x, i));
3213 else if (fmt[i] == 'E')
3214 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3215 mark_label_nuses (XVECEXP (x, i, j));
3216 }
3217 }
3218
3219 \f
3220 /* Try splitting insns that can be split for better scheduling.
3221 PAT is the pattern which might split.
3222 TRIAL is the insn providing PAT.
3223 LAST is nonzero if we should return the last insn of the sequence produced.
3224
3225 If this routine succeeds in splitting, it returns the first or last
3226 replacement insn depending on the value of LAST. Otherwise, it
3227 returns TRIAL. If the insn to be returned can be split, it will be. */
3228
3229 rtx
3230 try_split (rtx pat, rtx trial, int last)
3231 {
3232 rtx before = PREV_INSN (trial);
3233 rtx after = NEXT_INSN (trial);
3234 int has_barrier = 0;
3235 rtx tem;
3236 rtx note, seq;
3237 int probability;
3238 rtx insn_last, insn;
3239 int njumps = 0;
3240
3241 if (any_condjump_p (trial)
3242 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3243 split_branch_probability = INTVAL (XEXP (note, 0));
3244 probability = split_branch_probability;
3245
3246 seq = split_insns (pat, trial);
3247
3248 split_branch_probability = -1;
3249
3250 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3251 We may need to handle this specially. */
3252 if (after && GET_CODE (after) == BARRIER)
3253 {
3254 has_barrier = 1;
3255 after = NEXT_INSN (after);
3256 }
3257
3258 if (!seq)
3259 return trial;
3260
3261 /* Avoid infinite loop if any insn of the result matches
3262 the original pattern. */
3263 insn_last = seq;
3264 while (1)
3265 {
3266 if (INSN_P (insn_last)
3267 && rtx_equal_p (PATTERN (insn_last), pat))
3268 return trial;
3269 if (!NEXT_INSN (insn_last))
3270 break;
3271 insn_last = NEXT_INSN (insn_last);
3272 }
3273
3274 /* Mark labels. */
3275 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3276 {
3277 if (GET_CODE (insn) == JUMP_INSN)
3278 {
3279 mark_jump_label (PATTERN (insn), insn, 0);
3280 njumps++;
3281 if (probability != -1
3282 && any_condjump_p (insn)
3283 && !find_reg_note (insn, REG_BR_PROB, 0))
3284 {
3285 /* We can preserve the REG_BR_PROB notes only if exactly
3286 one jump is created; otherwise the machine description
3287 is responsible for this step, using the
3288 split_branch_probability variable. */
3289 if (njumps != 1)
3290 abort ();
3291 REG_NOTES (insn)
3292 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3293 GEN_INT (probability),
3294 REG_NOTES (insn));
3295 }
3296 }
3297 }
3298
3299 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3300 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3301 if (GET_CODE (trial) == CALL_INSN)
3302 {
3303 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3304 if (GET_CODE (insn) == CALL_INSN)
3305 {
3306 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3307 while (*p)
3308 p = &XEXP (*p, 1);
3309 *p = CALL_INSN_FUNCTION_USAGE (trial);
3310 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3311 }
3312 }
3313
3314 /* Copy notes, particularly those related to the CFG. */
3315 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3316 {
3317 switch (REG_NOTE_KIND (note))
3318 {
3319 case REG_EH_REGION:
3320 insn = insn_last;
3321 while (insn != NULL_RTX)
3322 {
3323 if (GET_CODE (insn) == CALL_INSN
3324 || (flag_non_call_exceptions
3325 && may_trap_p (PATTERN (insn))))
3326 REG_NOTES (insn)
3327 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3328 XEXP (note, 0),
3329 REG_NOTES (insn));
3330 insn = PREV_INSN (insn);
3331 }
3332 break;
3333
3334 case REG_NORETURN:
3335 case REG_SETJMP:
3336 case REG_ALWAYS_RETURN:
3337 insn = insn_last;
3338 while (insn != NULL_RTX)
3339 {
3340 if (GET_CODE (insn) == CALL_INSN)
3341 REG_NOTES (insn)
3342 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3343 XEXP (note, 0),
3344 REG_NOTES (insn));
3345 insn = PREV_INSN (insn);
3346 }
3347 break;
3348
3349 case REG_NON_LOCAL_GOTO:
3350 insn = insn_last;
3351 while (insn != NULL_RTX)
3352 {
3353 if (GET_CODE (insn) == JUMP_INSN)
3354 REG_NOTES (insn)
3355 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3356 XEXP (note, 0),
3357 REG_NOTES (insn));
3358 insn = PREV_INSN (insn);
3359 }
3360 break;
3361
3362 default:
3363 break;
3364 }
3365 }
3366
3367 /* If there are LABELS inside the split insns, increment the
3368 usage counts so we don't delete the labels. */
3369 if (GET_CODE (trial) == INSN)
3370 {
3371 insn = insn_last;
3372 while (insn != NULL_RTX)
3373 {
3374 if (GET_CODE (insn) == INSN)
3375 mark_label_nuses (PATTERN (insn));
3376
3377 insn = PREV_INSN (insn);
3378 }
3379 }
3380
3381 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3382
3383 delete_insn (trial);
3384 if (has_barrier)
3385 emit_barrier_after (tem);
3386
3387 /* Recursively call try_split for each new insn created; by the
3388 time control returns here that insn will be fully split, so
3389 set LAST and continue from the insn after the one returned.
3390 We can't use next_active_insn here since AFTER may be a note.
3391 Ignore deleted insns, which can occur if not optimizing. */
3392 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3393 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3394 tem = try_split (PATTERN (tem), tem, 1);
3395
3396 /* Return either the first or the last insn, depending on which was
3397 requested. */
3398 return last
3399 ? (after ? PREV_INSN (after) : last_insn)
3400 : NEXT_INSN (before);
3401 }
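/* A sketch of the usual driver loop (in the manner of split_all_insns
   in recog.c): offer each insn its own pattern and resume scanning from
   the insn returned:

	if (INSN_P (insn))
	  insn = try_split (PATTERN (insn), insn, 1);  */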
3402 \f
3403 /* Make and return an INSN rtx, initializing all its slots.
3404 Store PATTERN in the pattern slots. */
3405
3406 rtx
3407 make_insn_raw (rtx pattern)
3408 {
3409 rtx insn;
3410
3411 insn = rtx_alloc (INSN);
3412
3413 INSN_UID (insn) = cur_insn_uid++;
3414 PATTERN (insn) = pattern;
3415 INSN_CODE (insn) = -1;
3416 LOG_LINKS (insn) = NULL;
3417 REG_NOTES (insn) = NULL;
3418 INSN_LOCATOR (insn) = 0;
3419 BLOCK_FOR_INSN (insn) = NULL;
3420
3421 #ifdef ENABLE_RTL_CHECKING
3422 if (insn
3423 && INSN_P (insn)
3424 && (returnjump_p (insn)
3425 || (GET_CODE (insn) == SET
3426 && SET_DEST (insn) == pc_rtx)))
3427 {
3428 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3429 debug_rtx (insn);
3430 }
3431 #endif
3432
3433 return insn;
3434 }
3435
3436 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3437
3438 static rtx
3439 make_jump_insn_raw (rtx pattern)
3440 {
3441 rtx insn;
3442
3443 insn = rtx_alloc (JUMP_INSN);
3444 INSN_UID (insn) = cur_insn_uid++;
3445
3446 PATTERN (insn) = pattern;
3447 INSN_CODE (insn) = -1;
3448 LOG_LINKS (insn) = NULL;
3449 REG_NOTES (insn) = NULL;
3450 JUMP_LABEL (insn) = NULL;
3451 INSN_LOCATOR (insn) = 0;
3452 BLOCK_FOR_INSN (insn) = NULL;
3453
3454 return insn;
3455 }
3456
3457 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3458
3459 static rtx
3460 make_call_insn_raw (rtx pattern)
3461 {
3462 rtx insn;
3463
3464 insn = rtx_alloc (CALL_INSN);
3465 INSN_UID (insn) = cur_insn_uid++;
3466
3467 PATTERN (insn) = pattern;
3468 INSN_CODE (insn) = -1;
3469 LOG_LINKS (insn) = NULL;
3470 REG_NOTES (insn) = NULL;
3471 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3472 INSN_LOCATOR (insn) = 0;
3473 BLOCK_FOR_INSN (insn) = NULL;
3474
3475 return insn;
3476 }
3477 \f
3478 /* Add INSN to the end of the doubly-linked list.
3479 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3480
3481 void
3482 add_insn (rtx insn)
3483 {
3484 PREV_INSN (insn) = last_insn;
3485 NEXT_INSN (insn) = 0;
3486
3487 if (NULL != last_insn)
3488 NEXT_INSN (last_insn) = insn;
3489
3490 if (NULL == first_insn)
3491 first_insn = insn;
3492
3493 last_insn = insn;
3494 }
3495
3496 /* Add INSN into the doubly-linked list after insn AFTER. This and
3497 the next should be the only functions called to insert an insn once
3498 delay slots have been filled since only they know how to update a
3499 SEQUENCE. */
3500
3501 void
3502 add_insn_after (rtx insn, rtx after)
3503 {
3504 rtx next = NEXT_INSN (after);
3505 basic_block bb;
3506
3507 if (optimize && INSN_DELETED_P (after))
3508 abort ();
3509
3510 NEXT_INSN (insn) = next;
3511 PREV_INSN (insn) = after;
3512
3513 if (next)
3514 {
3515 PREV_INSN (next) = insn;
3516 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3517 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3518 }
3519 else if (last_insn == after)
3520 last_insn = insn;
3521 else
3522 {
3523 struct sequence_stack *stack = seq_stack;
3524 /* Scan all pending sequences too. */
3525 for (; stack; stack = stack->next)
3526 if (after == stack->last)
3527 {
3528 stack->last = insn;
3529 break;
3530 }
3531
3532 if (stack == 0)
3533 abort ();
3534 }
3535
3536 if (GET_CODE (after) != BARRIER
3537 && GET_CODE (insn) != BARRIER
3538 && (bb = BLOCK_FOR_INSN (after)))
3539 {
3540 set_block_for_insn (insn, bb);
3541 if (INSN_P (insn))
3542 bb->flags |= BB_DIRTY;
3543 /* This should not happen, since the first insn in a BB is
3544 always either a NOTE or a LABEL. */
3545 if (BB_END (bb) == after
3546 /* Avoid clobbering of structure when creating new BB. */
3547 && GET_CODE (insn) != BARRIER
3548 && (GET_CODE (insn) != NOTE
3549 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3550 BB_END (bb) = insn;
3551 }
3552
3553 NEXT_INSN (after) = insn;
3554 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3555 {
3556 rtx sequence = PATTERN (after);
3557 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3558 }
3559 }
3560
3561 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3562 the previous should be the only functions called to insert an insn once
3563 delay slots have been filled since only they know how to update a
3564 SEQUENCE. */
3565
3566 void
3567 add_insn_before (rtx insn, rtx before)
3568 {
3569 rtx prev = PREV_INSN (before);
3570 basic_block bb;
3571
3572 if (optimize && INSN_DELETED_P (before))
3573 abort ();
3574
3575 PREV_INSN (insn) = prev;
3576 NEXT_INSN (insn) = before;
3577
3578 if (prev)
3579 {
3580 NEXT_INSN (prev) = insn;
3581 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3582 {
3583 rtx sequence = PATTERN (prev);
3584 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3585 }
3586 }
3587 else if (first_insn == before)
3588 first_insn = insn;
3589 else
3590 {
3591 struct sequence_stack *stack = seq_stack;
3592 /* Scan all pending sequences too. */
3593 for (; stack; stack = stack->next)
3594 if (before == stack->first)
3595 {
3596 stack->first = insn;
3597 break;
3598 }
3599
3600 if (stack == 0)
3601 abort ();
3602 }
3603
3604 if (GET_CODE (before) != BARRIER
3605 && GET_CODE (insn) != BARRIER
3606 && (bb = BLOCK_FOR_INSN (before)))
3607 {
3608 set_block_for_insn (insn, bb);
3609 if (INSN_P (insn))
3610 bb->flags |= BB_DIRTY;
3611 /* This should not happen, since the first insn in a BB is
3612 always either a NOTE or a LABEL. */
3613 if (BB_HEAD (bb) == insn
3614 /* Avoid clobbering of structure when creating new BB. */
3615 && GET_CODE (insn) != BARRIER
3616 && (GET_CODE (insn) != NOTE
3617 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3618 abort ();
3619 }
3620
3621 PREV_INSN (before) = insn;
3622 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3623 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3624 }
3625
3626 /* Remove an insn from its doubly-linked list. This function knows how
3627 to handle sequences. */
3628 void
3629 remove_insn (rtx insn)
3630 {
3631 rtx next = NEXT_INSN (insn);
3632 rtx prev = PREV_INSN (insn);
3633 basic_block bb;
3634
3635 if (prev)
3636 {
3637 NEXT_INSN (prev) = next;
3638 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3639 {
3640 rtx sequence = PATTERN (prev);
3641 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3642 }
3643 }
3644 else if (first_insn == insn)
3645 first_insn = next;
3646 else
3647 {
3648 struct sequence_stack *stack = seq_stack;
3649 /* Scan all pending sequences too. */
3650 for (; stack; stack = stack->next)
3651 if (insn == stack->first)
3652 {
3653 stack->first = next;
3654 break;
3655 }
3656
3657 if (stack == 0)
3658 abort ();
3659 }
3660
3661 if (next)
3662 {
3663 PREV_INSN (next) = prev;
3664 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3665 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3666 }
3667 else if (last_insn == insn)
3668 last_insn = prev;
3669 else
3670 {
3671 struct sequence_stack *stack = seq_stack;
3672 /* Scan all pending sequences too. */
3673 for (; stack; stack = stack->next)
3674 if (insn == stack->last)
3675 {
3676 stack->last = prev;
3677 break;
3678 }
3679
3680 if (stack == 0)
3681 abort ();
3682 }
3683 if (GET_CODE (insn) != BARRIER
3684 && (bb = BLOCK_FOR_INSN (insn)))
3685 {
3686 if (INSN_P (insn))
3687 bb->flags |= BB_DIRTY;
3688 if (BB_HEAD (bb) == insn)
3689 {
3690 /* Never ever delete the basic block note without deleting whole
3691 basic block. */
3692 if (GET_CODE (insn) == NOTE)
3693 abort ();
3694 BB_HEAD (bb) = next;
3695 }
3696 if (BB_END (bb) == insn)
3697 BB_END (bb) = prev;
3698 }
3699 }
3700
3701 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3702
3703 void
3704 add_function_usage_to (rtx call_insn, rtx call_fusage)
3705 {
3706 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3707 abort ();
3708
3709 /* Put the register usage information on the CALL. If there is already
3710 some usage information, put ours at the end. */
3711 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3712 {
3713 rtx link;
3714
3715 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3716 link = XEXP (link, 1))
3717 ;
3718
3719 XEXP (link, 1) = call_fusage;
3720 }
3721 else
3722 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3723 }
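/* A sketch of a typical caller (REG and CALL_INSN are hypothetical):
   wrap a register in a USE and attach it, much as the use_reg family
   in expr.c builds CALL_FUSAGE lists:

	rtx u = gen_rtx_EXPR_LIST (VOIDmode,
				   gen_rtx_USE (VOIDmode, reg), NULL_RTX);
	add_function_usage_to (call_insn, u);  */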
3724
3725 /* Delete all insns made since FROM.
3726 FROM becomes the new last instruction. */
3727
3728 void
3729 delete_insns_since (rtx from)
3730 {
3731 if (from == 0)
3732 first_insn = 0;
3733 else
3734 NEXT_INSN (from) = 0;
3735 last_insn = from;
3736 }
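/* The classic tentative-emission idiom, sketched: remember the last
   insn, try an expansion, and roll back on failure
   (expand_something is a hypothetical expander):

	rtx last = get_last_insn ();
	rtx result = expand_something ();
	if (result == 0)
	  delete_insns_since (last);  */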
3737
3738 /* This function is deprecated, please use sequences instead.
3739
3740 Move a consecutive bunch of insns to a different place in the chain.
3741 The insns to be moved are those between FROM and TO.
3742 They are moved to a new position after the insn AFTER.
3743 AFTER must not be FROM or TO or any insn in between.
3744
3745 This function does not know about SEQUENCEs and hence should not be
3746 called after delay-slot filling has been done. */
3747
3748 void
3749 reorder_insns_nobb (rtx from, rtx to, rtx after)
3750 {
3751 /* Splice this bunch out of where it is now. */
3752 if (PREV_INSN (from))
3753 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3754 if (NEXT_INSN (to))
3755 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3756 if (last_insn == to)
3757 last_insn = PREV_INSN (from);
3758 if (first_insn == from)
3759 first_insn = NEXT_INSN (to);
3760
3761 /* Make the new neighbors point to it and it to them. */
3762 if (NEXT_INSN (after))
3763 PREV_INSN (NEXT_INSN (after)) = to;
3764
3765 NEXT_INSN (to) = NEXT_INSN (after);
3766 PREV_INSN (from) = after;
3767 NEXT_INSN (after) = from;
3768 if (after == last_insn)
3769 last_insn = to;
3770 }
3771
3772 /* Same as function above, but take care to update BB boundaries. */
3773 void
3774 reorder_insns (rtx from, rtx to, rtx after)
3775 {
3776 rtx prev = PREV_INSN (from);
3777 basic_block bb, bb2;
3778
3779 reorder_insns_nobb (from, to, after);
3780
3781 if (GET_CODE (after) != BARRIER
3782 && (bb = BLOCK_FOR_INSN (after)))
3783 {
3784 rtx x;
3785 bb->flags |= BB_DIRTY;
3786
3787 if (GET_CODE (from) != BARRIER
3788 && (bb2 = BLOCK_FOR_INSN (from)))
3789 {
3790 if (BB_END (bb2) == to)
3791 BB_END (bb2) = prev;
3792 bb2->flags |= BB_DIRTY;
3793 }
3794
3795 if (BB_END (bb) == after)
3796 BB_END (bb) = to;
3797
3798 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3799 set_block_for_insn (x, bb);
3800 }
3801 }
3802
3803 /* Return the line note insn preceding INSN. */
3804
3805 static rtx
3806 find_line_note (rtx insn)
3807 {
3808 if (no_line_numbers)
3809 return 0;
3810
3811 for (; insn; insn = PREV_INSN (insn))
3812 if (GET_CODE (insn) == NOTE
3813 && NOTE_LINE_NUMBER (insn) >= 0)
3814 break;
3815
3816 return insn;
3817 }
3818
3819 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3820 of the moved insns when debugging. This may insert a note between AFTER
3821 and FROM, and another one after TO. */
3822
3823 void
3824 reorder_insns_with_line_notes (rtx from, rtx to, rtx after)
3825 {
3826 rtx from_line = find_line_note (from);
3827 rtx after_line = find_line_note (after);
3828
3829 reorder_insns (from, to, after);
3830
3831 if (from_line == after_line)
3832 return;
3833
3834 if (from_line)
3835 emit_note_copy_after (from_line, after);
3836 if (after_line)
3837 emit_note_copy_after (after_line, to);
3838 }
3839
3840 /* Remove unnecessary notes from the instruction stream. */
3841
3842 void
3843 remove_unnecessary_notes (void)
3844 {
3845 rtx block_stack = NULL_RTX;
3846 rtx eh_stack = NULL_RTX;
3847 rtx insn;
3848 rtx next;
3849 rtx tmp;
3850
3851 /* We must not remove the first instruction in the function because
3852 the compiler depends on the first instruction being a note. */
3853 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3854 {
3855 /* Remember what's next. */
3856 next = NEXT_INSN (insn);
3857
3858 /* We're only interested in notes. */
3859 if (GET_CODE (insn) != NOTE)
3860 continue;
3861
3862 switch (NOTE_LINE_NUMBER (insn))
3863 {
3864 case NOTE_INSN_DELETED:
3865 case NOTE_INSN_LOOP_END_TOP_COND:
3866 remove_insn (insn);
3867 break;
3868
3869 case NOTE_INSN_EH_REGION_BEG:
3870 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3871 break;
3872
3873 case NOTE_INSN_EH_REGION_END:
3874 /* Too many end notes. */
3875 if (eh_stack == NULL_RTX)
3876 abort ();
3877 /* Mismatched nesting. */
3878 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3879 abort ();
3880 tmp = eh_stack;
3881 eh_stack = XEXP (eh_stack, 1);
3882 free_INSN_LIST_node (tmp);
3883 break;
3884
3885 case NOTE_INSN_BLOCK_BEG:
3886 /* By now, all notes indicating lexical blocks should have
3887 NOTE_BLOCK filled in. */
3888 if (NOTE_BLOCK (insn) == NULL_TREE)
3889 abort ();
3890 block_stack = alloc_INSN_LIST (insn, block_stack);
3891 break;
3892
3893 case NOTE_INSN_BLOCK_END:
3894 /* Too many end notes. */
3895 if (block_stack == NULL_RTX)
3896 abort ();
3897 /* Mismatched nesting. */
3898 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3899 abort ();
3900 tmp = block_stack;
3901 block_stack = XEXP (block_stack, 1);
3902 free_INSN_LIST_node (tmp);
3903
3904 /* Scan back to see if there are any non-note instructions
3905 between INSN and the beginning of this block. If not,
3906 then there is no PC range in the generated code that will
3907 actually be in this block, so there's no point in
3908 remembering the existence of the block. */
3909 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3910 {
3911 /* This block contains a real instruction. Note that we
3912 don't include labels; if the only thing in the block
3913 is a label, then there are still no PC values that
3914 lie within the block. */
3915 if (INSN_P (tmp))
3916 break;
3917
3918 /* We're only interested in NOTEs. */
3919 if (GET_CODE (tmp) != NOTE)
3920 continue;
3921
3922 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3923 {
3924 /* We just verified that this BLOCK matches us with
3925 the block_stack check above. Never delete the
3926 BLOCK for the outermost scope of the function; we
3927 can refer to names from that scope even if the
3928 block notes are messed up. */
3929 if (! is_body_block (NOTE_BLOCK (insn))
3930 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3931 {
3932 remove_insn (tmp);
3933 remove_insn (insn);
3934 }
3935 break;
3936 }
3937 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3938 /* There's a nested block. We need to leave the
3939 current block in place since otherwise the debugger
3940 wouldn't be able to show symbols from our block in
3941 the nested block. */
3942 break;
3943 }
3944 }
3945 }
3946
3947 /* Too many begin notes. */
3948 if (block_stack || eh_stack)
3949 abort ();
3950 }
3951
3952 \f
3953 /* Emit insn(s) of given code and pattern
3954 at a specified place within the doubly-linked list.
3955
3956 All of the emit_foo global entry points accept an object
3957 X which is either an insn list or a PATTERN of a single
3958 instruction.
3959
3960 There are thus a few canonical ways to generate code and
3961 emit it at a specific place in the instruction stream. For
3962 example, consider the instruction named SPOT and the fact that
3963 we would like to emit some instructions before SPOT. We might
3964 do it like this:
3965
3966 start_sequence ();
3967 ... emit the new instructions ...
3968 insns_head = get_insns ();
3969 end_sequence ();
3970
3971 emit_insn_before (insns_head, SPOT);
3972
3973 It used to be common to generate SEQUENCE rtl instead, but that
3974 is a relic of the past which no longer occurs. The reason is that
3975 SEQUENCE rtl badly fragments RTL memory, since the SEQUENCE
3976 generated would almost certainly die right after it was created. */
3977
3978 /* Make X be output before the instruction BEFORE. */
3979
3980 rtx
3981 emit_insn_before (rtx x, rtx before)
3982 {
3983 rtx last = before;
3984 rtx insn;
3985
3986 #ifdef ENABLE_RTL_CHECKING
3987 if (before == NULL_RTX)
3988 abort ();
3989 #endif
3990
3991 if (x == NULL_RTX)
3992 return last;
3993
3994 switch (GET_CODE (x))
3995 {
3996 case INSN:
3997 case JUMP_INSN:
3998 case CALL_INSN:
3999 case CODE_LABEL:
4000 case BARRIER:
4001 case NOTE:
4002 insn = x;
4003 while (insn)
4004 {
4005 rtx next = NEXT_INSN (insn);
4006 add_insn_before (insn, before);
4007 last = insn;
4008 insn = next;
4009 }
4010 break;
4011
4012 #ifdef ENABLE_RTL_CHECKING
4013 case SEQUENCE:
4014 abort ();
4015 break;
4016 #endif
4017
4018 default:
4019 last = make_insn_raw (x);
4020 add_insn_before (last, before);
4021 break;
4022 }
4023
4024 return last;
4025 }
4026
4027 /* Make an instruction with body X and code JUMP_INSN
4028 and output it before the instruction BEFORE. */
4029
4030 rtx
4031 emit_jump_insn_before (rtx x, rtx before)
4032 {
4033 rtx insn, last = NULL_RTX;
4034
4035 #ifdef ENABLE_RTL_CHECKING
4036 if (before == NULL_RTX)
4037 abort ();
4038 #endif
4039
4040 switch (GET_CODE (x))
4041 {
4042 case INSN:
4043 case JUMP_INSN:
4044 case CALL_INSN:
4045 case CODE_LABEL:
4046 case BARRIER:
4047 case NOTE:
4048 insn = x;
4049 while (insn)
4050 {
4051 rtx next = NEXT_INSN (insn);
4052 add_insn_before (insn, before);
4053 last = insn;
4054 insn = next;
4055 }
4056 break;
4057
4058 #ifdef ENABLE_RTL_CHECKING
4059 case SEQUENCE:
4060 abort ();
4061 break;
4062 #endif
4063
4064 default:
4065 last = make_jump_insn_raw (x);
4066 add_insn_before (last, before);
4067 break;
4068 }
4069
4070 return last;
4071 }
4072
4073 /* Make an instruction with body X and code CALL_INSN
4074 and output it before the instruction BEFORE. */
4075
4076 rtx
4077 emit_call_insn_before (rtx x, rtx before)
4078 {
4079 rtx last = NULL_RTX, insn;
4080
4081 #ifdef ENABLE_RTL_CHECKING
4082 if (before == NULL_RTX)
4083 abort ();
4084 #endif
4085
4086 switch (GET_CODE (x))
4087 {
4088 case INSN:
4089 case JUMP_INSN:
4090 case CALL_INSN:
4091 case CODE_LABEL:
4092 case BARRIER:
4093 case NOTE:
4094 insn = x;
4095 while (insn)
4096 {
4097 rtx next = NEXT_INSN (insn);
4098 add_insn_before (insn, before);
4099 last = insn;
4100 insn = next;
4101 }
4102 break;
4103
4104 #ifdef ENABLE_RTL_CHECKING
4105 case SEQUENCE:
4106 abort ();
4107 break;
4108 #endif
4109
4110 default:
4111 last = make_call_insn_raw (x);
4112 add_insn_before (last, before);
4113 break;
4114 }
4115
4116 return last;
4117 }
4118
4119 /* Make an insn of code BARRIER
4120 and output it before the insn BEFORE. */
4121
4122 rtx
4123 emit_barrier_before (rtx before)
4124 {
4125 rtx insn = rtx_alloc (BARRIER);
4126
4127 INSN_UID (insn) = cur_insn_uid++;
4128
4129 add_insn_before (insn, before);
4130 return insn;
4131 }
4132
4133 /* Emit the label LABEL before the insn BEFORE. */
4134
4135 rtx
4136 emit_label_before (rtx label, rtx before)
4137 {
4138 /* This can be called twice for the same label as a result of the
4139 confusion that follows a syntax error! So make it harmless. */
4140 if (INSN_UID (label) == 0)
4141 {
4142 INSN_UID (label) = cur_insn_uid++;
4143 add_insn_before (label, before);
4144 }
4145
4146 return label;
4147 }
4148
4149 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4150
4151 rtx
4152 emit_note_before (int subtype, rtx before)
4153 {
4154 rtx note = rtx_alloc (NOTE);
4155 INSN_UID (note) = cur_insn_uid++;
4156 NOTE_SOURCE_FILE (note) = 0;
4157 NOTE_LINE_NUMBER (note) = subtype;
4158 BLOCK_FOR_INSN (note) = NULL;
4159
4160 add_insn_before (note, before);
4161 return note;
4162 }
4163 \f
4164 /* Helper for emit_insn_after; handles lists of instructions
4165 efficiently. */
4166
4167 static rtx emit_insn_after_1 (rtx, rtx);
4168
4169 static rtx
4170 emit_insn_after_1 (rtx first, rtx after)
4171 {
4172 rtx last;
4173 rtx after_after;
4174 basic_block bb;
4175
4176 if (GET_CODE (after) != BARRIER
4177 && (bb = BLOCK_FOR_INSN (after)))
4178 {
4179 bb->flags |= BB_DIRTY;
4180 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4181 if (GET_CODE (last) != BARRIER)
4182 set_block_for_insn (last, bb);
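/* The loop above stops at the final insn, whose NEXT_INSN is null,
without executing its body for that insn; claim the final insn for
BB here.  */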
4183 if (GET_CODE (last) != BARRIER)
4184 set_block_for_insn (last, bb);
4185 if (BB_END (bb) == after)
4186 BB_END (bb) = last;
4187 }
4188 else
4189 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4190 continue;
4191
4192 after_after = NEXT_INSN (after);
4193
4194 NEXT_INSN (after) = first;
4195 PREV_INSN (first) = after;
4196 NEXT_INSN (last) = after_after;
4197 if (after_after)
4198 PREV_INSN (after_after) = last;
4199
4200 if (after == last_insn)
4201 last_insn = last;
4202 return last;
4203 }
4204
4205 /* Make X be output after the insn AFTER. */
4206
4207 rtx
4208 emit_insn_after (rtx x, rtx after)
4209 {
4210 rtx last = after;
4211
4212 #ifdef ENABLE_RTL_CHECKING
4213 if (after == NULL_RTX)
4214 abort ();
4215 #endif
4216
4217 if (x == NULL_RTX)
4218 return last;
4219
4220 switch (GET_CODE (x))
4221 {
4222 case INSN:
4223 case JUMP_INSN:
4224 case CALL_INSN:
4225 case CODE_LABEL:
4226 case BARRIER:
4227 case NOTE:
4228 last = emit_insn_after_1 (x, after);
4229 break;
4230
4231 #ifdef ENABLE_RTL_CHECKING
4232 case SEQUENCE:
4233 abort ();
4234 break;
4235 #endif
4236
4237 default:
4238 last = make_insn_raw (x);
4239 add_insn_after (last, after);
4240 break;
4241 }
4242
4243 return last;
4244 }
4245
4246 /* Similar to emit_insn_after, except that line notes are to be inserted so
4247 as to act as if this insn were at FROM. */
4248
4249 void
4250 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4251 {
4252 rtx from_line = find_line_note (from);
4253 rtx after_line = find_line_note (after);
4254 rtx insn = emit_insn_after (x, after);
4255
4256 if (from_line)
4257 emit_note_copy_after (from_line, after);
4258
4259 if (after_line)
4260 emit_note_copy_after (after_line, insn);
4261 }
4262
4263 /* Make an insn of code JUMP_INSN with body X
4264 and output it after the insn AFTER. */
4265
4266 rtx
4267 emit_jump_insn_after (rtx x, rtx after)
4268 {
4269 rtx last;
4270
4271 #ifdef ENABLE_RTL_CHECKING
4272 if (after == NULL_RTX)
4273 abort ();
4274 #endif
4275
4276 switch (GET_CODE (x))
4277 {
4278 case INSN:
4279 case JUMP_INSN:
4280 case CALL_INSN:
4281 case CODE_LABEL:
4282 case BARRIER:
4283 case NOTE:
4284 last = emit_insn_after_1 (x, after);
4285 break;
4286
4287 #ifdef ENABLE_RTL_CHECKING
4288 case SEQUENCE:
4289 abort ();
4290 break;
4291 #endif
4292
4293 default:
4294 last = make_jump_insn_raw (x);
4295 add_insn_after (last, after);
4296 break;
4297 }
4298
4299 return last;
4300 }
4301
4302 /* Make an instruction with body X and code CALL_INSN
4303 and output it after the instruction AFTER. */
4304
4305 rtx
4306 emit_call_insn_after (rtx x, rtx after)
4307 {
4308 rtx last;
4309
4310 #ifdef ENABLE_RTL_CHECKING
4311 if (after == NULL_RTX)
4312 abort ();
4313 #endif
4314
4315 switch (GET_CODE (x))
4316 {
4317 case INSN:
4318 case JUMP_INSN:
4319 case CALL_INSN:
4320 case CODE_LABEL:
4321 case BARRIER:
4322 case NOTE:
4323 last = emit_insn_after_1 (x, after);
4324 break;
4325
4326 #ifdef ENABLE_RTL_CHECKING
4327 case SEQUENCE:
4328 abort ();
4329 break;
4330 #endif
4331
4332 default:
4333 last = make_call_insn_raw (x);
4334 add_insn_after (last, after);
4335 break;
4336 }
4337
4338 return last;
4339 }
4340
4341 /* Make an insn of code BARRIER
4342 and output it after the insn AFTER. */
4343
4344 rtx
4345 emit_barrier_after (rtx after)
4346 {
4347 rtx insn = rtx_alloc (BARRIER);
4348
4349 INSN_UID (insn) = cur_insn_uid++;
4350
4351 add_insn_after (insn, after);
4352 return insn;
4353 }
4354
4355 /* Emit the label LABEL after the insn AFTER. */
4356
4357 rtx
4358 emit_label_after (rtx label, rtx after)
4359 {
4360 /* This can be called twice for the same label
4361 as a result of the confusion that follows a syntax error!
4362 So make it harmless. */
4363 if (INSN_UID (label) == 0)
4364 {
4365 INSN_UID (label) = cur_insn_uid++;
4366 add_insn_after (label, after);
4367 }
4368
4369 return label;
4370 }
4371
4372 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4373
4374 rtx
4375 emit_note_after (int subtype, rtx after)
4376 {
4377 rtx note = rtx_alloc (NOTE);
4378 INSN_UID (note) = cur_insn_uid++;
4379 NOTE_SOURCE_FILE (note) = 0;
4380 NOTE_LINE_NUMBER (note) = subtype;
4381 BLOCK_FOR_INSN (note) = NULL;
4382 add_insn_after (note, after);
4383 return note;
4384 }
4385
4386 /* Emit a copy of note ORIG after the insn AFTER. */
4387
4388 rtx
4389 emit_note_copy_after (rtx orig, rtx after)
4390 {
4391 rtx note;
4392
4393 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4394 {
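/* Consume a uid anyway, so that uid assignment does not depend on
whether line notes are actually emitted.  */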
4395 cur_insn_uid++;
4396 return 0;
4397 }
4398
4399 note = rtx_alloc (NOTE);
4400 INSN_UID (note) = cur_insn_uid++;
4401 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4402 NOTE_DATA (note) = NOTE_DATA (orig);
4403 BLOCK_FOR_INSN (note) = NULL;
4404 add_insn_after (note, after);
4405 return note;
4406 }
4407 \f
4408 /* Like emit_insn_after, but set INSN_LOCATOR according to LOC. */
4409 rtx
4410 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4411 {
4412 rtx last = emit_insn_after (pattern, after);
4413
4414 if (pattern == NULL_RTX)
4415 return last;
4416
4417 after = NEXT_INSN (after);
4418 while (1)
4419 {
4420 if (active_insn_p (after))
4421 INSN_LOCATOR (after) = loc;
4422 if (after == last)
4423 break;
4424 after = NEXT_INSN (after);
4425 }
4426 return last;
4427 }
4428
4429 /* Like emit_jump_insn_after, but set INSN_LOCATOR according to LOC. */
4430 rtx
4431 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4432 {
4433 rtx last = emit_jump_insn_after (pattern, after);
4434
4435 if (pattern == NULL_RTX)
4436 return last;
4437
4438 after = NEXT_INSN (after);
4439 while (1)
4440 {
4441 if (active_insn_p (after))
4442 INSN_LOCATOR (after) = loc;
4443 if (after == last)
4444 break;
4445 after = NEXT_INSN (after);
4446 }
4447 return last;
4448 }
4449
4450 /* Like emit_call_insn_after, but set INSN_LOCATOR according to LOC. */
4451 rtx
4452 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4453 {
4454 rtx last = emit_call_insn_after (pattern, after);
4455
4456 if (pattern == NULL_RTX)
4457 return last;
4458
4459 after = NEXT_INSN (after);
4460 while (1)
4461 {
4462 if (active_insn_p (after))
4463 INSN_LOCATOR (after) = loc;
4464 if (after == last)
4465 break;
4466 after = NEXT_INSN (after);
4467 }
4468 return last;
4469 }
4470
4471 /* Like emit_insn_before, but set INSN_LOCATOR according to LOC. */
4472 rtx
4473 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4474 {
4475 rtx first = PREV_INSN (before);
4476 rtx last = emit_insn_before (pattern, before);
4477
4478 if (pattern == NULL_RTX)
4479 return last;
4480
4481 first = NEXT_INSN (first);
4482 while (1)
4483 {
4484 if (active_insn_p (first))
4485 INSN_LOCATOR (first) = loc;
4486 if (first == last)
4487 break;
4488 first = NEXT_INSN (first);
4489 }
4490 return last;
4491 }
4492 \f
4493 /* Take X and emit it at the end of the doubly-linked
4494 INSN list.
4495
4496 Returns the last insn emitted. */
4497
4498 rtx
4499 emit_insn (rtx x)
4500 {
4501 rtx last = last_insn;
4502 rtx insn;
4503
4504 if (x == NULL_RTX)
4505 return last;
4506
4507 switch (GET_CODE (x))
4508 {
4509 case INSN:
4510 case JUMP_INSN:
4511 case CALL_INSN:
4512 case CODE_LABEL:
4513 case BARRIER:
4514 case NOTE:
4515 insn = x;
4516 while (insn)
4517 {
4518 rtx next = NEXT_INSN (insn);
4519 add_insn (insn);
4520 last = insn;
4521 insn = next;
4522 }
4523 break;
4524
4525 #ifdef ENABLE_RTL_CHECKING
4526 case SEQUENCE:
4527 abort ();
4528 break;
4529 #endif
4530
4531 default:
4532 last = make_insn_raw (x);
4533 add_insn (last);
4534 break;
4535 }
4536
4537 return last;
4538 }
4539
4540 /* Make an insn of code JUMP_INSN with pattern X
4541 and add it to the end of the doubly-linked list. */
4542
4543 rtx
4544 emit_jump_insn (rtx x)
4545 {
4546 rtx last = NULL_RTX, insn;
4547
4548 switch (GET_CODE (x))
4549 {
4550 case INSN:
4551 case JUMP_INSN:
4552 case CALL_INSN:
4553 case CODE_LABEL:
4554 case BARRIER:
4555 case NOTE:
4556 insn = x;
4557 while (insn)
4558 {
4559 rtx next = NEXT_INSN (insn);
4560 add_insn (insn);
4561 last = insn;
4562 insn = next;
4563 }
4564 break;
4565
4566 #ifdef ENABLE_RTL_CHECKING
4567 case SEQUENCE:
4568 abort ();
4569 break;
4570 #endif
4571
4572 default:
4573 last = make_jump_insn_raw (x);
4574 add_insn (last);
4575 break;
4576 }
4577
4578 return last;
4579 }
4580
4581 /* Make an insn of code CALL_INSN with pattern X
4582 and add it to the end of the doubly-linked list. */
4583
4584 rtx
4585 emit_call_insn (rtx x)
4586 {
4587 rtx insn;
4588
4589 switch (GET_CODE (x))
4590 {
4591 case INSN:
4592 case JUMP_INSN:
4593 case CALL_INSN:
4594 case CODE_LABEL:
4595 case BARRIER:
4596 case NOTE:
4597 insn = emit_insn (x);
4598 break;
4599
4600 #ifdef ENABLE_RTL_CHECKING
4601 case SEQUENCE:
4602 abort ();
4603 break;
4604 #endif
4605
4606 default:
4607 insn = make_call_insn_raw (x);
4608 add_insn (insn);
4609 break;
4610 }
4611
4612 return insn;
4613 }
4614
4615 /* Add the label LABEL to the end of the doubly-linked list. */
4616
4617 rtx
4618 emit_label (rtx label)
4619 {
4620 /* This can be called twice for the same label
4621 as a result of the confusion that follows a syntax error!
4622 So make it harmless. */
4623 if (INSN_UID (label) == 0)
4624 {
4625 INSN_UID (label) = cur_insn_uid++;
4626 add_insn (label);
4627 }
4628 return label;
4629 }
4630
4631 /* Make an insn of code BARRIER
4632 and add it to the end of the doubly-linked list. */
4633
4634 rtx
4635 emit_barrier (void)
4636 {
4637 rtx barrier = rtx_alloc (BARRIER);
4638 INSN_UID (barrier) = cur_insn_uid++;
4639 add_insn (barrier);
4640 return barrier;
4641 }
4642
4643 /* Make a line-number NOTE insn for LOCATION and add it to the end
4644 of the doubly-linked list, but only if line numbers are desired for
4645 debugging info and LOCATION doesn't match the previous one. */
4646
4647 rtx
4648 emit_line_note (location_t location)
4649 {
4650 rtx note;
4651
4652 set_file_and_line_for_stmt (location);
4653
4654 if (location.file && last_location.file
4655 && !strcmp (location.file, last_location.file)
4656 && location.line == last_location.line)
4657 return NULL_RTX;
4658 last_location = location;
4659
4660 if (no_line_numbers)
4661 {
4662 cur_insn_uid++;
4663 return NULL_RTX;
4664 }
4665
4666 note = emit_note (location.line);
4667 NOTE_SOURCE_FILE (note) = location.file;
4668
4669 return note;
4670 }
4671
4672 /* Emit a copy of note ORIG. */
4673
4674 rtx
4675 emit_note_copy (rtx orig)
4676 {
4677 rtx note;
4678
4679 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4680 {
4681 cur_insn_uid++;
4682 return NULL_RTX;
4683 }
4684
4685 note = rtx_alloc (NOTE);
4686
4687 INSN_UID (note) = cur_insn_uid++;
4688 NOTE_DATA (note) = NOTE_DATA (orig);
4689 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4690 BLOCK_FOR_INSN (note) = NULL;
4691 add_insn (note);
4692
4693 return note;
4694 }
4695
4696 /* Make an insn of code NOTE with subtype NOTE_NO
4697 and add it to the end of the doubly-linked list. */
4698
4699 rtx
4700 emit_note (int note_no)
4701 {
4702 rtx note;
4703
4704 note = rtx_alloc (NOTE);
4705 INSN_UID (note) = cur_insn_uid++;
4706 NOTE_LINE_NUMBER (note) = note_no;
4707 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4708 BLOCK_FOR_INSN (note) = NULL;
4709 add_insn (note);
4710 return note;
4711 }
4712
4713 /* Cause next statement to emit a line note even if the line number
4714 has not changed. */
4715
4716 void
4717 force_next_line_note (void)
4718 {
4719 last_location.line = -1;
4720 }
4721
4722 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4723 note of this type already exists, its datum is replaced. */
4724
4725 rtx
4726 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4727 {
4728 rtx note = find_reg_note (insn, kind, NULL_RTX);
4729
4730 switch (kind)
4731 {
4732 case REG_EQUAL:
4733 case REG_EQUIV:
4734 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4735 has multiple sets (some callers assume single_set
4736 means the insn only has one set, when in fact it
4737 means the insn only has one *useful* set). */
4738 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4739 {
4740 if (note)
4741 abort ();
4742 return NULL_RTX;
4743 }
4744
4745 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4746 It serves no useful purpose and breaks eliminate_regs. */
4747 if (GET_CODE (datum) == ASM_OPERANDS)
4748 return NULL_RTX;
4749 break;
4750
4751 default:
4752 break;
4753 }
4754
4755 if (note)
4756 {
4757 XEXP (note, 0) = datum;
4758 return note;
4759 }
4760
4761 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4762 return REG_NOTES (insn);
4763 }
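
/* A usage sketch (INSN and SRC are hypothetical, not names from this
file): after expanding a multi-insn operation whose net effect is a
simple assignment, a caller might record the equivalence on the final
insn so later passes can simplify:

set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));

If INSN already carries a REG_EQUAL note, its datum is replaced rather
than a second note being added.  */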
4764 \f
4765 /* Return an indication of which type of insn should have X as a body.
4766 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4767
4768 enum rtx_code
4769 classify_insn (rtx x)
4770 {
4771 if (GET_CODE (x) == CODE_LABEL)
4772 return CODE_LABEL;
4773 if (GET_CODE (x) == CALL)
4774 return CALL_INSN;
4775 if (GET_CODE (x) == RETURN)
4776 return JUMP_INSN;
4777 if (GET_CODE (x) == SET)
4778 {
4779 if (SET_DEST (x) == pc_rtx)
4780 return JUMP_INSN;
4781 else if (GET_CODE (SET_SRC (x)) == CALL)
4782 return CALL_INSN;
4783 else
4784 return INSN;
4785 }
4786 if (GET_CODE (x) == PARALLEL)
4787 {
4788 int j;
4789 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4790 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4791 return CALL_INSN;
4792 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4793 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4794 return JUMP_INSN;
4795 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4796 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4797 return CALL_INSN;
4798 }
4799 return INSN;
4800 }
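
/* For example, (set (pc) (label_ref ...)) classifies as JUMP_INSN,
(call ...) or (set (reg ...) (call ...)) as CALL_INSN, and a plain
(set (reg ...) (reg ...)) as INSN; a PARALLEL is scanned for any of
these patterns.  */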
4801
4802 /* Emit the rtl pattern X as an appropriate kind of insn.
4803 If X is a label, it is simply added into the insn chain. */
4804
4805 rtx
4806 emit (rtx x)
4807 {
4808 enum rtx_code code = classify_insn (x);
4809
4810 if (code == CODE_LABEL)
4811 return emit_label (x);
4812 else if (code == INSN)
4813 return emit_insn (x);
4814 else if (code == JUMP_INSN)
4815 {
4816 rtx insn = emit_jump_insn (x);
4817 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4818 return emit_barrier ();
4819 return insn;
4820 }
4821 else if (code == CALL_INSN)
4822 return emit_call_insn (x);
4823 else
4824 abort ();
4825 }
4826 \f
4827 /* Space for free sequence stack entries. */
4828 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4829
4830 /* Begin emitting insns to a sequence which can be packaged in an
4831 RTL_EXPR. If this sequence will contain something that might cause
4832 the compiler to pop arguments to function calls (because those
4833 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4834 details), use do_pending_stack_adjust before calling this function.
4835 That will ensure that the deferred pops are not accidentally
4836 emitted in the middle of this sequence. */
4837
4838 void
4839 start_sequence (void)
4840 {
4841 struct sequence_stack *tem;
4842
4843 if (free_sequence_stack != NULL)
4844 {
4845 tem = free_sequence_stack;
4846 free_sequence_stack = tem->next;
4847 }
4848 else
4849 tem = ggc_alloc (sizeof (struct sequence_stack));
4850
4851 tem->next = seq_stack;
4852 tem->first = first_insn;
4853 tem->last = last_insn;
4854 tem->sequence_rtl_expr = seq_rtl_expr;
4855
4856 seq_stack = tem;
4857
4858 first_insn = 0;
4859 last_insn = 0;
4860 }
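
/* A minimal usage sketch (DEST, SRC and SPOT are hypothetical rtx
operands of suitable modes, not names from this file):

rtx seq;
start_sequence ();
emit_move_insn (dest, src);
seq = get_insns ();
end_sequence ();
emit_insn_before (seq, spot);

The new insns collect on a private chain; get_insns must be called
before end_sequence to retrieve them.  */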
4861
4862 /* Similarly, but indicate that this sequence will be placed in T, an
4863 RTL_EXPR. See the documentation for start_sequence for more
4864 information about how to use this function. */
4865
4866 void
4867 start_sequence_for_rtl_expr (tree t)
4868 {
4869 start_sequence ();
4870
4871 seq_rtl_expr = t;
4872 }
4873
4874 /* Set up the insn chain starting with FIRST as the current sequence,
4875 saving the previously current one. See the documentation for
4876 start_sequence for more information about how to use this function. */
4877
4878 void
4879 push_to_sequence (rtx first)
4880 {
4881 rtx last;
4882
4883 start_sequence ();
4884
4885 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4886
4887 first_insn = first;
4888 last_insn = last;
4889 }
4890
4891 /* Set up the insn chain starting in FIRST and ending in LAST as the current sequence. */
4892
4893 void
4894 push_to_full_sequence (rtx first, rtx last)
4895 {
4896 start_sequence ();
4897 first_insn = first;
4898 last_insn = last;
4899 /* We really should have the end of the insn chain here. */
4900 if (last && NEXT_INSN (last))
4901 abort ();
4902 }
4903
4904 /* Set up the outer-level insn chain
4905 as the current sequence, saving the previously current one. */
4906
4907 void
4908 push_topmost_sequence (void)
4909 {
4910 struct sequence_stack *stack, *top = NULL;
4911
4912 start_sequence ();
4913
4914 for (stack = seq_stack; stack; stack = stack->next)
4915 top = stack;
4916
4917 first_insn = top->first;
4918 last_insn = top->last;
4919 seq_rtl_expr = top->sequence_rtl_expr;
4920 }
4921
4922 /* After emitting to the outer-level insn chain, update the outer-level
4923 insn chain, and restore the previous saved state. */
4924
4925 void
4926 pop_topmost_sequence (void)
4927 {
4928 struct sequence_stack *stack, *top = NULL;
4929
4930 for (stack = seq_stack; stack; stack = stack->next)
4931 top = stack;
4932
4933 top->first = first_insn;
4934 top->last = last_insn;
4935 /* ??? Why don't we save seq_rtl_expr here? */
4936
4937 end_sequence ();
4938 }
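
/* The two functions above are always used as a bracket, e.g. to emit
PAT at the end of the function's outermost chain while a nested
sequence is in progress (a sketch; PAT is hypothetical):

push_topmost_sequence ();
emit_insn (pat);
pop_topmost_sequence ();
*/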
4939
4940 /* After emitting to a sequence, restore previous saved state.
4941
4942 To get the contents of the sequence just made, you must call
4943 `get_insns' *before* calling here.
4944
4945 If the compiler might have deferred popping arguments while
4946 generating this sequence, and this sequence will not be immediately
4947 inserted into the instruction stream, use do_pending_stack_adjust
4948 before calling get_insns. That will ensure that the deferred
4949 pops are inserted into this sequence, and not into some random
4950 location in the instruction stream. See INHIBIT_DEFER_POP for more
4951 information about deferred popping of arguments. */
4952
4953 void
4954 end_sequence (void)
4955 {
4956 struct sequence_stack *tem = seq_stack;
4957
4958 first_insn = tem->first;
4959 last_insn = tem->last;
4960 seq_rtl_expr = tem->sequence_rtl_expr;
4961 seq_stack = tem->next;
4962
4963 memset (tem, 0, sizeof (*tem));
4964 tem->next = free_sequence_stack;
4965 free_sequence_stack = tem;
4966 }
4967
4968 /* This works like end_sequence, but records the old sequence in FIRST
4969 and LAST. */
4970
4971 void
4972 end_full_sequence (rtx *first, rtx *last)
4973 {
4974 *first = first_insn;
4975 *last = last_insn;
4976 end_sequence ();
4977 }
4978
4979 /* Return 1 if currently emitting into a sequence. */
4980
4981 int
4982 in_sequence_p (void)
4983 {
4984 return seq_stack != 0;
4985 }
4986 \f
4987 /* Put the various virtual registers into REGNO_REG_RTX. */
4988
4989 void
4990 init_virtual_regs (struct emit_status *es)
4991 {
4992 rtx *ptr = es->x_regno_reg_rtx;
4993 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4994 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4995 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4996 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4997 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4998 }
4999
5000 \f
5001 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5002 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5003 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5004 static int copy_insn_n_scratches;
5005
5006 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5007 copied an ASM_OPERANDS.
5008 In that case, it is the original input-operand vector. */
5009 static rtvec orig_asm_operands_vector;
5010
5011 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5012 copied an ASM_OPERANDS.
5013 In that case, it is the copied input-operand vector. */
5014 static rtvec copy_asm_operands_vector;
5015
5016 /* Likewise for the constraints vector. */
5017 static rtvec orig_asm_constraints_vector;
5018 static rtvec copy_asm_constraints_vector;
5019
5020 /* Recursively create a new copy of an rtx for copy_insn.
5021 This function differs from copy_rtx in that it handles SCRATCHes and
5022 ASM_OPERANDs properly.
5023 Normally, this function is not used directly; use copy_insn as the front end.
5024 However, you could first copy an insn pattern with copy_insn and then use
5025 this function afterwards to properly copy any REG_NOTEs containing
5026 SCRATCHes. */
5027
5028 rtx
5029 copy_insn_1 (rtx orig)
5030 {
5031 rtx copy;
5032 int i, j;
5033 RTX_CODE code;
5034 const char *format_ptr;
5035
5036 code = GET_CODE (orig);
5037
5038 switch (code)
5039 {
5040 case REG:
5041 case QUEUED:
5042 case CONST_INT:
5043 case CONST_DOUBLE:
5044 case CONST_VECTOR:
5045 case SYMBOL_REF:
5046 case CODE_LABEL:
5047 case PC:
5048 case CC0:
5049 case ADDRESSOF:
5050 return orig;
5051 case CLOBBER:
5052 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5053 return orig;
5054 break;
5055
5056 case SCRATCH:
5057 for (i = 0; i < copy_insn_n_scratches; i++)
5058 if (copy_insn_scratch_in[i] == orig)
5059 return copy_insn_scratch_out[i];
5060 break;
5061
5062 case CONST:
5063 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5064 a LABEL_REF, it isn't sharable. */
5065 if (GET_CODE (XEXP (orig, 0)) == PLUS
5066 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5067 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5068 return orig;
5069 break;
5070
5071 /* A MEM with a constant address is not sharable. The problem is that
5072 the constant address may need to be reloaded. If the mem is shared,
5073 then reloading one copy of this mem will cause all copies to appear
5074 to have been reloaded. */
5075
5076 default:
5077 break;
5078 }
5079
5080 copy = rtx_alloc (code);
5081
5082 /* Copy the various flags, and other information. We assume that
5083 all fields need copying, and then clear the fields that should
5084 not be copied. That is the sensible default behavior, and forces
5085 us to explicitly document why we are *not* copying a flag. */
5086 memcpy (copy, orig, RTX_HDR_SIZE);
5087
5088 /* We do not copy the USED flag, which is used as a mark bit during
5089 walks over the RTL. */
5090 RTX_FLAG (copy, used) = 0;
5091
5092 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5093 if (GET_RTX_CLASS (code) == 'i')
5094 {
5095 RTX_FLAG (copy, jump) = 0;
5096 RTX_FLAG (copy, call) = 0;
5097 RTX_FLAG (copy, frame_related) = 0;
5098 }
5099
5100 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5101
5102 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5103 {
5104 copy->u.fld[i] = orig->u.fld[i];
5105 switch (*format_ptr++)
5106 {
5107 case 'e':
5108 if (XEXP (orig, i) != NULL)
5109 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5110 break;
5111
5112 case 'E':
5113 case 'V':
5114 if (XVEC (orig, i) == orig_asm_constraints_vector)
5115 XVEC (copy, i) = copy_asm_constraints_vector;
5116 else if (XVEC (orig, i) == orig_asm_operands_vector)
5117 XVEC (copy, i) = copy_asm_operands_vector;
5118 else if (XVEC (orig, i) != NULL)
5119 {
5120 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5121 for (j = 0; j < XVECLEN (copy, i); j++)
5122 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5123 }
5124 break;
5125
5126 case 't':
5127 case 'w':
5128 case 'i':
5129 case 's':
5130 case 'S':
5131 case 'u':
5132 case '0':
5133 /* These are left unchanged. */
5134 break;
5135
5136 default:
5137 abort ();
5138 }
5139 }
5140
5141 if (code == SCRATCH)
5142 {
5143 i = copy_insn_n_scratches++;
5144 if (i >= MAX_RECOG_OPERANDS)
5145 abort ();
5146 copy_insn_scratch_in[i] = orig;
5147 copy_insn_scratch_out[i] = copy;
5148 }
5149 else if (code == ASM_OPERANDS)
5150 {
5151 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5152 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5153 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5154 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5155 }
5156
5157 return copy;
5158 }
5159
5160 /* Create a new copy of an rtx.
5161 This function differs from copy_rtx in that it handles SCRATCHes and
5162 ASM_OPERANDs properly.
5163 INSN doesn't really have to be a full INSN; it could be just the
5164 pattern. */
5165 rtx
5166 copy_insn (rtx insn)
5167 {
5168 copy_insn_n_scratches = 0;
5169 orig_asm_operands_vector = 0;
5170 orig_asm_constraints_vector = 0;
5171 copy_asm_operands_vector = 0;
5172 copy_asm_constraints_vector = 0;
5173 return copy_insn_1 (insn);
5174 }
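
/* A sketch of the two-step idiom described for copy_insn_1 above
(NEW_INSN is hypothetical):

PATTERN (new_insn) = copy_insn (PATTERN (insn));
REG_NOTES (new_insn) = copy_insn_1 (REG_NOTES (insn));

copy_insn resets the SCRATCH map before copying while copy_insn_1 does
not, so SCRATCHes referenced from the notes map to the same copies
used in the new pattern.  */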
5175
5176 /* Initialize data structures and variables in this file
5177 before generating rtl for each function. */
5178
5179 void
5180 init_emit (void)
5181 {
5182 struct function *f = cfun;
5183
5184 f->emit = ggc_alloc (sizeof (struct emit_status));
5185 first_insn = NULL;
5186 last_insn = NULL;
5187 seq_rtl_expr = NULL;
5188 cur_insn_uid = 1;
5189 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5190 last_location.line = 0;
5191 last_location.file = 0;
5192 first_label_num = label_num;
5193 last_label_num = 0;
5194 seq_stack = NULL;
5195
5196 /* Init the tables that describe all the pseudo regs. */
5197
5198 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5199
5200 f->emit->regno_pointer_align
5201 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5202 * sizeof (unsigned char));
5203
5204 regno_reg_rtx
5205 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5206
5207 /* Put copies of all the hard registers into regno_reg_rtx. */
5208 memcpy (regno_reg_rtx,
5209 static_regno_reg_rtx,
5210 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5211
5212 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5213 init_virtual_regs (f->emit);
5214
5215 /* Indicate that the virtual registers and stack locations are
5216 all pointers. */
5217 REG_POINTER (stack_pointer_rtx) = 1;
5218 REG_POINTER (frame_pointer_rtx) = 1;
5219 REG_POINTER (hard_frame_pointer_rtx) = 1;
5220 REG_POINTER (arg_pointer_rtx) = 1;
5221
5222 REG_POINTER (virtual_incoming_args_rtx) = 1;
5223 REG_POINTER (virtual_stack_vars_rtx) = 1;
5224 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5225 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5226 REG_POINTER (virtual_cfa_rtx) = 1;
5227
5228 #ifdef STACK_BOUNDARY
5229 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5230 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5231 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5232 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5233
5234 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5235 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5236 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5237 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5238 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5239 #endif
5240
5241 #ifdef INIT_EXPANDERS
5242 INIT_EXPANDERS;
5243 #endif
5244 }
5245
5246 /* Generate a constant vector of mode MODE with all elements zero. */
5247
5248 static rtx
5249 gen_const_vector_0 (enum machine_mode mode)
5250 {
5251 rtx tem;
5252 rtvec v;
5253 int units, i;
5254 enum machine_mode inner;
5255
5256 units = GET_MODE_NUNITS (mode);
5257 inner = GET_MODE_INNER (mode);
5258
5259 v = rtvec_alloc (units);
5260
5261 /* This must be called only after CONST0_RTX has been set for the inner mode. */
5262 if (!CONST0_RTX (inner))
5263 abort ();
5264
5265 for (i = 0; i < units; ++i)
5266 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5267
5268 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5269 return tem;
5270 }
5271
5272 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared zero
5273 vector when all elements are zero. */
5274 rtx
5275 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5276 {
5277 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5278 int i;
5279
5280 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5281 if (RTVEC_ELT (v, i) != inner_zero)
5282 return gen_rtx_raw_CONST_VECTOR (mode, v);
5283 return CONST0_RTX (mode);
5284 }
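
/* For instance, a V4SImode rtvec whose four elements are all const0_rtx
comes back as the shared CONST0_RTX (V4SImode) rather than as a fresh
CONST_VECTOR.  */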
5285
5286 /* Create some permanent unique rtl objects shared between all functions.
5287 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5288
5289 void
5290 init_emit_once (int line_numbers)
5291 {
5292 int i;
5293 enum machine_mode mode;
5294 enum machine_mode double_mode;
5295
5296 /* We need reg_raw_mode, so initialize the modes now. */
5297 init_reg_modes_once ();
5298
5299 /* Initialize the CONST_INT, CONST_DOUBLE, and memory and register
5300 attribute hash tables. */
5301 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5302 const_int_htab_eq, NULL);
5303
5304 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5305 const_double_htab_eq, NULL);
5306
5307 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5308 mem_attrs_htab_eq, NULL);
5309 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5310 reg_attrs_htab_eq, NULL);
5311
5312 no_line_numbers = ! line_numbers;
5313
5314 /* Compute the byte, word and double modes. */
5315
5316 byte_mode = VOIDmode;
5317 word_mode = VOIDmode;
5318 double_mode = VOIDmode;
5319
5320 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5321 mode = GET_MODE_WIDER_MODE (mode))
5322 {
5323 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5324 && byte_mode == VOIDmode)
5325 byte_mode = mode;
5326
5327 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5328 && word_mode == VOIDmode)
5329 word_mode = mode;
5330 }
5331
5332 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5333 mode = GET_MODE_WIDER_MODE (mode))
5334 {
5335 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5336 && double_mode == VOIDmode)
5337 double_mode = mode;
5338 }
5339
5340 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5341
5342 /* Assign register numbers to the globally defined register rtx.
5343 This must be done at runtime because the register number field
5344 is in a union and some compilers can't initialize unions. */
5345
5346 pc_rtx = gen_rtx (PC, VOIDmode);
5347 cc0_rtx = gen_rtx (CC0, VOIDmode);
5348 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5349 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5350 if (hard_frame_pointer_rtx == 0)
5351 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5352 HARD_FRAME_POINTER_REGNUM);
5353 if (arg_pointer_rtx == 0)
5354 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5355 virtual_incoming_args_rtx =
5356 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5357 virtual_stack_vars_rtx =
5358 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5359 virtual_stack_dynamic_rtx =
5360 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5361 virtual_outgoing_args_rtx =
5362 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5363 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5364
5365 /* Initialize RTL for commonly used hard registers. These are
5366 copied into regno_reg_rtx as we begin to compile each function. */
5367 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5368 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5369
5370 #ifdef INIT_EXPANDERS
5371 /* This is to initialize {init|mark|free}_machine_status before the first
5372 call to push_function_context_to. This is needed by the Chill front
5373 end which calls push_function_context_to before the first call to
5374 init_function_start. */
5375 INIT_EXPANDERS;
5376 #endif
5377
5378 /* Create the unique rtx's for certain rtx codes and operand values. */
5379
5380 /* Don't use gen_rtx here, since in this case it would
5381 try to use these very variables. */
5382 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5383 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5384 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5385
5386 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5387 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5388 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5389 else
5390 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5391
5392 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5393 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5394 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5395 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5396 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5397 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5398 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5399
5400 dconsthalf = dconst1;
5401 dconsthalf.exp--;
5402
5403 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5404
5405 /* Initialize mathematical constants for constant folding builtins.
5406 These constants need to be given to at least 160 bits of precision. */
5407 real_from_string (&dconstpi,
5408 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5409 real_from_string (&dconste,
5410 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5411
5412 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5413 {
5414 REAL_VALUE_TYPE *r =
5415 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5416
5417 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5418 mode = GET_MODE_WIDER_MODE (mode))
5419 const_tiny_rtx[i][(int) mode] =
5420 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5421
5422 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5423
5424 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5425 mode = GET_MODE_WIDER_MODE (mode))
5426 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5427
5428 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5429 mode != VOIDmode;
5430 mode = GET_MODE_WIDER_MODE (mode))
5431 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5432 }
5433
5434 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5435 mode != VOIDmode;
5436 mode = GET_MODE_WIDER_MODE (mode))
5437 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5438
5439 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5440 mode != VOIDmode;
5441 mode = GET_MODE_WIDER_MODE (mode))
5442 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5443
5444 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5445 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5446 const_tiny_rtx[0][i] = const0_rtx;
5447
5448 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5449 if (STORE_FLAG_VALUE == 1)
5450 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5451
5452 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5453 return_address_pointer_rtx
5454 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5455 #endif
5456
5457 #ifdef STATIC_CHAIN_REGNUM
5458 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5459
5460 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5461 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5462 static_chain_incoming_rtx
5463 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5464 else
5465 #endif
5466 static_chain_incoming_rtx = static_chain_rtx;
5467 #endif
5468
5469 #ifdef STATIC_CHAIN
5470 static_chain_rtx = STATIC_CHAIN;
5471
5472 #ifdef STATIC_CHAIN_INCOMING
5473 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5474 #else
5475 static_chain_incoming_rtx = static_chain_rtx;
5476 #endif
5477 #endif
5478
5479 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5480 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5481 }
5482 \f
5483 /* Query and clear/restore no_line_numbers. This is used by the
5484 switch / case handling in stmt.c to give proper line numbers in
5485 warnings about unreachable code. */
5486
5487 int
5488 force_line_numbers (void)
5489 {
5490 int old = no_line_numbers;
5491
5492 no_line_numbers = 0;
5493 if (old)
5494 force_next_line_note ();
5495 return old;
5496 }
5497
5498 void
5499 restore_line_number_status (int old_value)
5500 {
5501 no_line_numbers = old_value;
5502 }
5503
5504 /* Produce an exact duplicate of insn INSN after AFTER,
5505 taking care to update any libcall regions if present. */
5506
5507 rtx
5508 emit_copy_of_insn_after (rtx insn, rtx after)
5509 {
5510 rtx new;
5511 rtx note1, note2, link;
5512
5513 switch (GET_CODE (insn))
5514 {
5515 case INSN:
5516 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5517 break;
5518
5519 case JUMP_INSN:
5520 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5521 break;
5522
5523 case CALL_INSN:
5524 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5525 if (CALL_INSN_FUNCTION_USAGE (insn))
5526 CALL_INSN_FUNCTION_USAGE (new)
5527 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5528 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5529 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5530 break;
5531
5532 default:
5533 abort ();
5534 }
5535
5536 /* Update LABEL_NUSES. */
5537 mark_jump_label (PATTERN (new), new, 0);
5538
5539 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5540
5541 /* Copy all REG_NOTES except REG_LABEL, since mark_jump_label will
5542 recreate them. */
5543 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5544 if (REG_NOTE_KIND (link) != REG_LABEL)
5545 {
5546 if (GET_CODE (link) == EXPR_LIST)
5547 REG_NOTES (new)
5548 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5549 XEXP (link, 0),
5550 REG_NOTES (new)));
5551 else
5552 REG_NOTES (new)
5553 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5554 XEXP (link, 0),
5555 REG_NOTES (new)));
5556 }
5557
5558 /* Fix the libcall sequences: point the copy's REG_RETVAL note at the first insn of the region, and that insn's REG_LIBCALL note back at the copy. */
5559 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5560 {
5561 rtx p = new;
5562 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5563 p = PREV_INSN (p);
5564 XEXP (note1, 0) = p;
5565 XEXP (note2, 0) = new;
5566 }
5567 INSN_CODE (new) = INSN_CODE (insn);
5568 return new;
5569 }
5570
5571 static GTY((deletable(""))) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
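
/* Return a CLOBBER of hard register REGNO in MODE, allocating it on
first use and returning the cached copy thereafter.  */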
5572 rtx
5573 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5574 {
5575 if (hard_reg_clobbers[mode][regno])
5576 return hard_reg_clobbers[mode][regno];
5577 else
5578 return (hard_reg_clobbers[mode][regno] =
5579 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5580 }
5581
5582 #include "gt-emit-rtl.h"