1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "toplev.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "hashtab.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "real.h"
53 #include "bitmap.h"
54 #include "basic-block.h"
55 #include "ggc.h"
56 #include "debug.h"
57 #include "langhooks.h"
58
59 /* Commonly used modes. */
60
61 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
62 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
63 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
64 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
65
66
67 /* This is *not* reset after each function. It gives each CODE_LABEL
68 in the entire compilation a unique label number. */
69
70 static GTY(()) int label_num = 1;
71
72 /* Nonzero means do not generate NOTEs for source line numbers. */
73
74 static int no_line_numbers;
75
76 /* Commonly used rtx's, so that we only need space for one copy.
77 These are initialized once for the entire compilation.
78 All of these are unique; no other rtx-object will be equal to any
79 of these. */
80
81 rtx global_rtl[GR_MAX];
82
83 /* Commonly used RTL for hard registers. These objects are not necessarily
84 unique, so we allocate them separately from global_rtl. They are
85 initialized once per compilation unit, then copied into regno_reg_rtx
86 at the beginning of each function. */
87 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
88
89 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
90 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
91 record a copy of const[012]_rtx. */
92
93 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
94
95 rtx const_true_rtx;
96
97 REAL_VALUE_TYPE dconst0;
98 REAL_VALUE_TYPE dconst1;
99 REAL_VALUE_TYPE dconst2;
100 REAL_VALUE_TYPE dconst3;
101 REAL_VALUE_TYPE dconst10;
102 REAL_VALUE_TYPE dconstm1;
103 REAL_VALUE_TYPE dconstm2;
104 REAL_VALUE_TYPE dconsthalf;
105 REAL_VALUE_TYPE dconstthird;
106 REAL_VALUE_TYPE dconstpi;
107 REAL_VALUE_TYPE dconste;
108
109 /* All references to the following fixed hard registers go through
110 these unique rtl objects. On machines where the frame-pointer and
111 arg-pointer are the same register, they use the same unique object.
112
113 After register allocation, other rtl objects which used to be pseudo-regs
114 may be clobbered to refer to the frame-pointer register.
115 But references that were originally to the frame-pointer can be
116 distinguished from the others because they contain frame_pointer_rtx.
117
118 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
119 tricky: until register elimination has taken place hard_frame_pointer_rtx
120 should be used if it is being set, and frame_pointer_rtx otherwise. After
121 register elimination hard_frame_pointer_rtx should always be used.
122 On machines where the two registers are the same (which is most of them),
123 these are the same rtx.
124
125 In an inline procedure, the stack and frame pointer rtxs may not be
126 used for anything else. */
127 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
128 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
129 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
130
131 /* This is used to implement __builtin_return_address for some machines.
132 See for instance the MIPS port. */
133 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
134
135 /* We make one copy of (const_int C) where C is in
136 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
137 to save space during the compilation and simplify comparisons of
138 integers. */
139
140 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
141
142 /* A hash table storing CONST_INTs whose absolute value is greater
143 than MAX_SAVED_CONST_INT. */
144
145 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
146 htab_t const_int_htab;
147
148 /* A hash table storing memory attribute structures. */
149 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
150 htab_t mem_attrs_htab;
151
152 /* A hash table storing register attribute structures. */
153 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
154 htab_t reg_attrs_htab;
155
156 /* A hash table storing all CONST_DOUBLEs. */
157 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
158 htab_t const_double_htab;
159
160 #define first_insn (cfun->emit->x_first_insn)
161 #define last_insn (cfun->emit->x_last_insn)
162 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
163 #define last_location (cfun->emit->x_last_location)
164 #define first_label_num (cfun->emit->x_first_label_num)
165
166 static rtx make_jump_insn_raw (rtx);
167 static rtx make_call_insn_raw (rtx);
168 static rtx find_line_note (rtx);
169 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
170 static void unshare_all_decls (tree);
171 static void reset_used_decls (tree);
172 static void mark_label_nuses (rtx);
173 static hashval_t const_int_htab_hash (const void *);
174 static int const_int_htab_eq (const void *, const void *);
175 static hashval_t const_double_htab_hash (const void *);
176 static int const_double_htab_eq (const void *, const void *);
177 static rtx lookup_const_double (rtx);
178 static hashval_t mem_attrs_htab_hash (const void *);
179 static int mem_attrs_htab_eq (const void *, const void *);
180 static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
181 enum machine_mode);
182 static hashval_t reg_attrs_htab_hash (const void *);
183 static int reg_attrs_htab_eq (const void *, const void *);
184 static reg_attrs *get_reg_attrs (tree, int);
185 static tree component_ref_for_mem_expr (tree);
186 static rtx gen_const_vector (enum machine_mode, int);
187 static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
188 static void copy_rtx_if_shared_1 (rtx *orig);
189
190 /* Probability of the conditional branch currently processed by try_split.
191 Set to -1 otherwise. */
192 int split_branch_probability = -1;
193 \f
194 /* Returns a hash code for X (which is really a CONST_INT). */
195
196 static hashval_t
197 const_int_htab_hash (const void *x)
198 {
199 return (hashval_t) INTVAL ((rtx) x);
200 }
201
202 /* Returns nonzero if the value represented by X (which is really a
203 CONST_INT) is the same as that given by Y (which is really a
204 HOST_WIDE_INT *). */
205
206 static int
207 const_int_htab_eq (const void *x, const void *y)
208 {
209 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
210 }
211
212 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
213 static hashval_t
214 const_double_htab_hash (const void *x)
215 {
216 rtx value = (rtx) x;
217 hashval_t h;
218
219 if (GET_MODE (value) == VOIDmode)
220 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
221 else
222 {
223 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
224 /* MODE is used in the comparison, so it should be in the hash. */
225 h ^= GET_MODE (value);
226 }
227 return h;
228 }
229
230 /* Returns nonzero if the value represented by X (which is really a CONST_DOUBLE)
231 is the same as that represented by Y (which is also really a CONST_DOUBLE). */
232 static int
233 const_double_htab_eq (const void *x, const void *y)
234 {
235 rtx a = (rtx)x, b = (rtx)y;
236
237 if (GET_MODE (a) != GET_MODE (b))
238 return 0;
239 if (GET_MODE (a) == VOIDmode)
240 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
241 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
242 else
243 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
244 CONST_DOUBLE_REAL_VALUE (b));
245 }
246
247 /* Returns a hash code for X (which is really a mem_attrs *). */
248
249 static hashval_t
250 mem_attrs_htab_hash (const void *x)
251 {
252 mem_attrs *p = (mem_attrs *) x;
253
254 return (p->alias ^ (p->align * 1000)
255 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
256 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
257 ^ (size_t) p->expr);
258 }
259
260 /* Returns nonzero if the value represented by X (which is really a
261 mem_attrs *) is the same as that given by Y (which is also really a
262 mem_attrs *). */
263
264 static int
265 mem_attrs_htab_eq (const void *x, const void *y)
266 {
267 mem_attrs *p = (mem_attrs *) x;
268 mem_attrs *q = (mem_attrs *) y;
269
270 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
271 && p->size == q->size && p->align == q->align);
272 }
273
274 /* Allocate a new mem_attrs structure and insert it into the hash table if
275 one identical to it is not already in the table. We are doing this for
276 MEM of mode MODE. */
277
278 static mem_attrs *
279 get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
280 unsigned int align, enum machine_mode mode)
281 {
282 mem_attrs attrs;
283 void **slot;
284
285 /* If everything is the default, we can just return zero.
286 This must match what the corresponding MEM_* macros return when the
287 field is not present. */
288 if (alias == 0 && expr == 0 && offset == 0
289 && (size == 0
290 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
291 && (STRICT_ALIGNMENT && mode != BLKmode
292 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
293 return 0;
294
295 attrs.alias = alias;
296 attrs.expr = expr;
297 attrs.offset = offset;
298 attrs.size = size;
299 attrs.align = align;
300
301 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
302 if (*slot == 0)
303 {
304 *slot = ggc_alloc (sizeof (mem_attrs));
305 memcpy (*slot, &attrs, sizeof (mem_attrs));
306 }
307
308 return *slot;
309 }
310
311 /* Returns a hash code for X (which is really a reg_attrs *). */
312
313 static hashval_t
314 reg_attrs_htab_hash (const void *x)
315 {
316 reg_attrs *p = (reg_attrs *) x;
317
318 return ((p->offset * 1000) ^ (long) p->decl);
319 }
320
321 /* Returns nonzero if the value represented by X (which is really a
322 reg_attrs *) is the same as that given by Y (which is also really a
323 reg_attrs *). */
324
325 static int
326 reg_attrs_htab_eq (const void *x, const void *y)
327 {
328 reg_attrs *p = (reg_attrs *) x;
329 reg_attrs *q = (reg_attrs *) y;
330
331 return (p->decl == q->decl && p->offset == q->offset);
332 }
333 /* Allocate a new reg_attrs structure for decl DECL and offset OFFSET, and
334 insert it into the hash table if one identical to it is not already in
335 the table. */
336
337 static reg_attrs *
338 get_reg_attrs (tree decl, int offset)
339 {
340 reg_attrs attrs;
341 void **slot;
342
343 /* If everything is the default, we can just return zero. */
344 if (decl == 0 && offset == 0)
345 return 0;
346
347 attrs.decl = decl;
348 attrs.offset = offset;
349
350 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
351 if (*slot == 0)
352 {
353 *slot = ggc_alloc (sizeof (reg_attrs));
354 memcpy (*slot, &attrs, sizeof (reg_attrs));
355 }
356
357 return *slot;
358 }
359
360 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
361 don't attempt to share with the various global pieces of rtl (such as
362 frame_pointer_rtx). */
363
364 rtx
365 gen_raw_REG (enum machine_mode mode, int regno)
366 {
367 rtx x = gen_rtx_raw_REG (mode, regno);
368 ORIGINAL_REGNO (x) = regno;
369 return x;
370 }
371
372 /* There are some RTL codes that require special attention; the generation
373 functions do the raw handling. If you add to this list, modify
374 special_rtx in gengenrtl.c as well. */
375
376 rtx
377 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
378 {
379 void **slot;
380
381 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
382 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
383
384 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
385 if (const_true_rtx && arg == STORE_FLAG_VALUE)
386 return const_true_rtx;
387 #endif
388
389 /* Look up the CONST_INT in the hash table. */
390 slot = htab_find_slot_with_hash (const_int_htab, &arg,
391 (hashval_t) arg, INSERT);
392 if (*slot == 0)
393 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
394
395 return (rtx) *slot;
396 }
397
398 rtx
399 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
400 {
401 return GEN_INT (trunc_int_for_mode (c, mode));
402 }
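
/* Illustration (a minimal sketch, disabled like the other #if 0 blocks in
   this file): how gen_int_mode differs from a bare GEN_INT when the host
   value does not fit the requested mode.  Assumes the usual 8-bit QImode
   and the truncate-and-sign-extend behavior of trunc_int_for_mode.  */
#if 0
static void
example_gen_int_mode (void)
{
  rtx raw, fit;

  raw = GEN_INT (0x1ff);               /* Host value kept as-is.  */
  fit = gen_int_mode (0x1ff, QImode);  /* Truncated to QImode first.  */

  gcc_assert (INTVAL (raw) == 0x1ff);
  gcc_assert (INTVAL (fit) == -1);     /* 0x1ff narrowed to 8 bits is -1.  */
}
#endif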
403
404 /* CONST_DOUBLEs might be created from pairs of integers, or from
405 REAL_VALUE_TYPEs. Also, their length is known only at run time,
406 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
407
408 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
409 hash table. If so, return its counterpart; otherwise add it
410 to the hash table and return it. */
411 static rtx
412 lookup_const_double (rtx real)
413 {
414 void **slot = htab_find_slot (const_double_htab, real, INSERT);
415 if (*slot == 0)
416 *slot = real;
417
418 return (rtx) *slot;
419 }
420
421 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
422 VALUE in mode MODE. */
423 rtx
424 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
425 {
426 rtx real = rtx_alloc (CONST_DOUBLE);
427 PUT_MODE (real, mode);
428
429 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
430
431 return lookup_const_double (real);
432 }
433
434 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
435 of ints: I0 is the low-order word and I1 is the high-order word.
436 Do not use this routine for non-integer modes; convert to
437 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
438
439 rtx
440 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
441 {
442 rtx value;
443 unsigned int i;
444
445 if (mode != VOIDmode)
446 {
447 int width;
448
449 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
450 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
451 /* We can get a 0 for an error mark. */
452 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
453 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
454
455 /* We clear out all bits that don't belong in MODE, unless they and
456 our sign bit are all one. So we get either a reasonable negative
457 value or a reasonable unsigned value for this mode. */
458 width = GET_MODE_BITSIZE (mode);
459 if (width < HOST_BITS_PER_WIDE_INT
460 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
461 != ((HOST_WIDE_INT) (-1) << (width - 1))))
462 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
463 else if (width == HOST_BITS_PER_WIDE_INT
464 && ! (i1 == ~0 && i0 < 0))
465 i1 = 0;
466 else
467 /* We should be able to represent this value as a constant. */
468 gcc_assert (width <= 2 * HOST_BITS_PER_WIDE_INT);
469
470 /* If this would be an entire word for the target, but is not for
471 the host, then sign-extend on the host so that the number will
472 look the same way on the host that it would on the target.
473
474 For example, when building a 64 bit alpha hosted 32 bit sparc
475 targeted compiler, then we want the 32 bit unsigned value -1 to be
476 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
477 The latter confuses the sparc backend. */
478
479 if (width < HOST_BITS_PER_WIDE_INT
480 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
481 i0 |= ((HOST_WIDE_INT) (-1) << width);
482
483 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
484 CONST_INT.
485
486 ??? Strictly speaking, this is wrong if we create a CONST_INT for
487 a large unsigned constant with the size of MODE being
488 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
489 in a wider mode. In that case we will mis-interpret it as a
490 negative number.
491
492 Unfortunately, the only alternative is to make a CONST_DOUBLE for
493 any constant in any mode if it is an unsigned constant larger
494 than the maximum signed integer in an int on the host. However,
495 doing this will break everyone that always expects to see a
496 CONST_INT for SImode and smaller.
497
498 We have always been making CONST_INTs in this case, so nothing
499 new is being broken. */
500
501 if (width <= HOST_BITS_PER_WIDE_INT)
502 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
503 }
504
505 /* If this integer fits in one word, return a CONST_INT. */
506 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
507 return GEN_INT (i0);
508
509 /* We use VOIDmode for integers. */
510 value = rtx_alloc (CONST_DOUBLE);
511 PUT_MODE (value, VOIDmode);
512
513 CONST_DOUBLE_LOW (value) = i0;
514 CONST_DOUBLE_HIGH (value) = i1;
515
516 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
517 XWINT (value, i) = 0;
518
519 return lookup_const_double (value);
520 }
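
/* A sketch of the rule above: any pair whose high word is just the sign
   extension of the low word collapses to a CONST_INT; only genuinely
   two-word values become VOIDmode CONST_DOUBLEs, and how wide "one word"
   is depends on HOST_BITS_PER_WIDE_INT.  */
#if 0
static void
example_immed_double_const (void)
{
  rtx small, wide;

  small = immed_double_const (42, 0, DImode);
  gcc_assert (GET_CODE (small) == CONST_INT && INTVAL (small) == 42);

  /* 5 is not the sign extension of 3, so when DImode is wider than a
     HOST_WIDE_INT this needs both words of a CONST_DOUBLE.  */
  wide = immed_double_const (3, 5, DImode);
  if (GET_MODE_BITSIZE (DImode) > HOST_BITS_PER_WIDE_INT)
    gcc_assert (GET_CODE (wide) == CONST_DOUBLE
                && CONST_DOUBLE_LOW (wide) == 3
                && CONST_DOUBLE_HIGH (wide) == 5);
}
#endif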
521
522 rtx
523 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
524 {
525 /* In case the MD file explicitly references the frame pointer, have
526 all such references point to the same frame pointer. This is
527 used during frame pointer elimination to distinguish the explicit
528 references to these registers from pseudos that happened to be
529 assigned to them.
530
531 If we have eliminated the frame pointer or arg pointer, we will
532 be using it as a normal register, for example as a spill
533 register. In such cases, we might be accessing it in a mode that
534 is not Pmode and therefore cannot use the pre-allocated rtx.
535
536 Also don't do this when we are making new REGs in reload, since
537 we don't want to get confused with the real pointers. */
538
539 if (mode == Pmode && !reload_in_progress)
540 {
541 if (regno == FRAME_POINTER_REGNUM
542 && (!reload_completed || frame_pointer_needed))
543 return frame_pointer_rtx;
544 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
545 if (regno == HARD_FRAME_POINTER_REGNUM
546 && (!reload_completed || frame_pointer_needed))
547 return hard_frame_pointer_rtx;
548 #endif
549 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
550 if (regno == ARG_POINTER_REGNUM)
551 return arg_pointer_rtx;
552 #endif
553 #ifdef RETURN_ADDRESS_POINTER_REGNUM
554 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
555 return return_address_pointer_rtx;
556 #endif
557 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
558 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
559 return pic_offset_table_rtx;
560 if (regno == STACK_POINTER_REGNUM)
561 return stack_pointer_rtx;
562 }
563
564 #if 0
565 /* If the per-function register table has been set up, try to re-use
566 an existing entry in that table to avoid useless generation of RTL.
567
568 This code is disabled for now until we can fix the various backends
569 which depend on having non-shared hard registers in some cases. Long
570 term we want to re-enable this code as it can significantly cut down
571 on the amount of useless RTL that gets generated.
572
573 We'll also need to fix some code that runs after reload that wants to
574 set ORIGINAL_REGNO. */
575
576 if (cfun
577 && cfun->emit
578 && regno_reg_rtx
579 && regno < FIRST_PSEUDO_REGISTER
580 && reg_raw_mode[regno] == mode)
581 return regno_reg_rtx[regno];
582 #endif
583
584 return gen_raw_REG (mode, regno);
585 }
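
/* Sketch of the sharing behavior above (outside of reload, and assuming
   Pmode is wider than QImode): asking for the stack pointer in Pmode hands
   back the unique global rtx, while a request in another mode falls
   through to a fresh gen_raw_REG.  */
#if 0
static void
example_gen_rtx_REG (void)
{
  rtx sp, sp_byte;

  sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  gcc_assert (sp == stack_pointer_rtx);

  sp_byte = gen_rtx_REG (QImode, STACK_POINTER_REGNUM);
  gcc_assert (REG_P (sp_byte) && sp_byte != stack_pointer_rtx);
}
#endif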
586
587 rtx
588 gen_rtx_MEM (enum machine_mode mode, rtx addr)
589 {
590 rtx rt = gen_rtx_raw_MEM (mode, addr);
591
592 /* This field is not cleared by the mere allocation of the rtx, so
593 we clear it here. */
594 MEM_ATTRS (rt) = 0;
595
596 return rt;
597 }
598
599 /* Generate a MEM referring to non-trapping constant memory. */
600
601 rtx
602 gen_const_mem (enum machine_mode mode, rtx addr)
603 {
604 rtx mem = gen_rtx_MEM (mode, addr);
605 MEM_READONLY_P (mem) = 1;
606 MEM_NOTRAP_P (mem) = 1;
607 return mem;
608 }
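
/* Sketch of a typical gen_const_mem use: a load from memory known to be
   constant and non-trapping, such as a constant-pool reference.  The
   SYMBOL_REF name here is made up for the illustration.  */
#if 0
static rtx
example_gen_const_mem (void)
{
  rtx addr = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
  rtx mem = gen_const_mem (SImode, addr);

  gcc_assert (MEM_P (mem) && MEM_READONLY_P (mem) && MEM_NOTRAP_P (mem));
  return mem;
}
#endif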
609
610 rtx
611 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
612 {
613 /* This is the most common failure type.
614 Catch it early so we can see who does it. */
615 gcc_assert (!(offset % GET_MODE_SIZE (mode)));
616
617 /* This check isn't usable right now because combine will
618 throw arbitrary crap like a CALL into a SUBREG in
619 gen_lowpart_for_combine so we must just eat it. */
620 #if 0
621 /* Check for this too. */
622 gcc_assert (offset < GET_MODE_SIZE (GET_MODE (reg)));
623 #endif
624 return gen_rtx_raw_SUBREG (mode, reg, offset);
625 }
626
627 /* Generate a SUBREG representing the least-significant part of REG if MODE
628 is smaller than the mode of REG; otherwise generate a paradoxical SUBREG. */
629
630 rtx
631 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
632 {
633 enum machine_mode inmode;
634
635 inmode = GET_MODE (reg);
636 if (inmode == VOIDmode)
637 inmode = mode;
638 return gen_rtx_SUBREG (mode, reg,
639 subreg_lowpart_offset (mode, inmode));
640 }
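
/* Sketch: the low SImode part of a DImode pseudo (assuming new pseudos may
   still be created).  The SUBREG_BYTE is whatever subreg_lowpart_offset
   computes further down in this file: 0 on little-endian targets,
   typically 4 on big-endian ones.  */
#if 0
static void
example_gen_lowpart_SUBREG (void)
{
  rtx di_reg = gen_reg_rtx (DImode);
  rtx lo = gen_lowpart_SUBREG (SImode, di_reg);

  gcc_assert (GET_CODE (lo) == SUBREG
              && SUBREG_REG (lo) == di_reg
              && SUBREG_BYTE (lo) == subreg_lowpart_offset (SImode, DImode));
}
#endif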
641 \f
642 /* gen_rtvec (n, [rt1, ..., rtn])
643 **
644 ** This routine creates an rtvec and stores within it the
645 ** pointers to rtx's which are its arguments.
646 */
647
648 /*VARARGS1*/
649 rtvec
650 gen_rtvec (int n, ...)
651 {
652 int i, save_n;
653 rtx *vector;
654 va_list p;
655
656 va_start (p, n);
657
658 if (n == 0)
659 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
660
661 vector = alloca (n * sizeof (rtx));
662
663 for (i = 0; i < n; i++)
664 vector[i] = va_arg (p, rtx);
665
666 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
667 save_n = n;
668 va_end (p);
669
670 return gen_rtvec_v (save_n, vector);
671 }
672
673 rtvec
674 gen_rtvec_v (int n, rtx *argp)
675 {
676 int i;
677 rtvec rt_val;
678
679 if (n == 0)
680 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
681
682 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
683
684 for (i = 0; i < n; i++)
685 rt_val->elem[i] = *argp++;
686
687 return rt_val;
688 }
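
/* Sketch: the usual way these two routines are used, collecting a fixed
   number of side effects into the vector of a PARALLEL.  The operands are
   placeholders supplied by a hypothetical caller.  */
#if 0
static rtx
example_gen_rtvec (rtx dest0, rtx src0, rtx dest1, rtx src1)
{
  rtvec v = gen_rtvec (2,
                       gen_rtx_SET (VOIDmode, dest0, src0),
                       gen_rtx_SET (VOIDmode, dest1, src1));

  gcc_assert (GET_NUM_ELEM (v) == 2);
  return gen_rtx_PARALLEL (VOIDmode, v);
}
#endif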
689 \f
690 /* Generate a REG rtx for a new pseudo register of mode MODE.
691 This pseudo is assigned the next sequential register number. */
692
693 rtx
694 gen_reg_rtx (enum machine_mode mode)
695 {
696 struct function *f = cfun;
697 rtx val;
698
699 /* Don't let anything called after initial flow analysis create new
700 registers. */
701 gcc_assert (!no_new_pseudos);
702
703 if (generating_concat_p
704 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
705 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
706 {
707 /* For complex modes, don't make a single pseudo.
708 Instead, make a CONCAT of two pseudos.
709 This allows noncontiguous allocation of the real and imaginary parts,
710 which makes much better code. Besides, allocating DCmode
711 pseudos overstrains reload on some machines like the 386. */
712 rtx realpart, imagpart;
713 enum machine_mode partmode = GET_MODE_INNER (mode);
714
715 realpart = gen_reg_rtx (partmode);
716 imagpart = gen_reg_rtx (partmode);
717 return gen_rtx_CONCAT (mode, realpart, imagpart);
718 }
719
720 /* Make sure regno_pointer_align and regno_reg_rtx are large
721 enough to have an element for this pseudo reg number. */
722
723 if (reg_rtx_no == f->emit->regno_pointer_align_length)
724 {
725 int old_size = f->emit->regno_pointer_align_length;
726 char *new;
727 rtx *new1;
728
729 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
730 memset (new + old_size, 0, old_size);
731 f->emit->regno_pointer_align = (unsigned char *) new;
732
733 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
734 old_size * 2 * sizeof (rtx));
735 memset (new1 + old_size, 0, old_size * sizeof (rtx));
736 regno_reg_rtx = new1;
737
738 f->emit->regno_pointer_align_length = old_size * 2;
739 }
740
741 val = gen_raw_REG (mode, reg_rtx_no);
742 regno_reg_rtx[reg_rtx_no++] = val;
743 return val;
744 }
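
/* Sketch of the CONCAT case above (assuming new pseudos may still be
   created): a complex-mode pseudo is really two independent pseudos for
   the real and imaginary parts, while a scalar mode gets a single REG.  */
#if 0
static void
example_gen_reg_rtx (void)
{
  rtx scalar = gen_reg_rtx (SImode);
  rtx cplx = gen_reg_rtx (SCmode);

  gcc_assert (REG_P (scalar));
  if (generating_concat_p)
    gcc_assert (GET_CODE (cplx) == CONCAT
                && REG_P (XEXP (cplx, 0))
                && REG_P (XEXP (cplx, 1)));
}
#endif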
745
746 /* Generate a register with the same attributes as REG, but offset by OFFSET.
747 Do the big endian correction if needed. */
748
749 rtx
750 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
751 {
752 rtx new = gen_rtx_REG (mode, regno);
753 tree decl;
754 HOST_WIDE_INT var_size;
755
756 /* PR middle-end/14084
757 The problem appears when a variable is stored in a larger register
758 and later it is used in the original mode or some mode in between
759 or some part of variable is accessed.
760
761 On little endian machines there is no problem because
762 the REG_OFFSET of the start of the variable is the same when
763 accessed in any mode (it is 0).
764
765 However, this is not true on big endian machines.
766 The offset of the start of the variable is different when accessed
767 in different modes.
768 When we are taking a part of the REG we have to change the OFFSET
769 from offset WRT size of mode of REG to offset WRT size of variable.
770
771 If we would not do the big endian correction the resulting REG_OFFSET
772 would be larger than the size of the DECL.
773
774 Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:
775
776 REG.mode MODE DECL size old offset new offset description
777 DI SI 4 4 0 int32 in SImode
778 DI SI 1 4 0 char in SImode
779 DI QI 1 7 0 char in QImode
780 DI QI 4 5 1 1st element in QImode
781 of char[4]
782 DI HI 4 6 2 1st element in HImode
783 of int16[2]
784
785 If the size of DECL is equal or greater than the size of REG
786 we can't do this correction because the register holds the
787 whole variable or a part of the variable and thus the REG_OFFSET
788 is already correct. */
789
790 decl = REG_EXPR (reg);
791 if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
792 && decl != NULL
793 && offset > 0
794 && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
795 && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
796 && var_size < GET_MODE_SIZE (GET_MODE (reg))))
797 {
798 int offset_le;
799
800 /* Convert machine endian to little endian WRT size of mode of REG. */
801 if (WORDS_BIG_ENDIAN)
802 offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
803 / UNITS_PER_WORD) * UNITS_PER_WORD;
804 else
805 offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
806
807 if (BYTES_BIG_ENDIAN)
808 offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
809 % UNITS_PER_WORD);
810 else
811 offset_le += offset % UNITS_PER_WORD;
812
813 if (offset_le >= var_size)
814 {
815 /* MODE is wider than the variable so the new reg will cover
816 the whole variable so the resulting OFFSET should be 0. */
817 offset = 0;
818 }
819 else
820 {
821 /* Convert little endian to machine endian WRT size of variable. */
822 if (WORDS_BIG_ENDIAN)
823 offset = ((var_size - 1 - offset_le)
824 / UNITS_PER_WORD) * UNITS_PER_WORD;
825 else
826 offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;
827
828 if (BYTES_BIG_ENDIAN)
829 offset += ((var_size - 1 - offset_le)
830 % UNITS_PER_WORD);
831 else
832 offset += offset_le % UNITS_PER_WORD;
833 }
834 }
835
836 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
837 REG_OFFSET (reg) + offset);
838 return new;
839 }
840
841 /* Set the register attributes of REG from the attributes of MEM. */
842
843 void
844 set_reg_attrs_from_mem (rtx reg, rtx mem)
845 {
846 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
847 REG_ATTRS (reg)
848 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
849 }
850
851 /* Set the register attributes for registers contained in PARM_RTX.
852 Use needed values from memory attributes of MEM. */
853
854 void
855 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
856 {
857 if (REG_P (parm_rtx))
858 set_reg_attrs_from_mem (parm_rtx, mem);
859 else if (GET_CODE (parm_rtx) == PARALLEL)
860 {
861 /* Check for a NULL entry in the first slot, used to indicate that the
862 parameter goes both on the stack and in registers. */
863 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
864 for (; i < XVECLEN (parm_rtx, 0); i++)
865 {
866 rtx x = XVECEXP (parm_rtx, 0, i);
867 if (REG_P (XEXP (x, 0)))
868 REG_ATTRS (XEXP (x, 0))
869 = get_reg_attrs (MEM_EXPR (mem),
870 INTVAL (XEXP (x, 1)));
871 }
872 }
873 }
874
875 /* Assign the RTX X to declaration T. */
876 void
877 set_decl_rtl (tree t, rtx x)
878 {
879 DECL_CHECK (t)->decl.rtl = x;
880
881 if (!x)
882 return;
883 /* For register, we maintain the reverse information too. */
884 if (REG_P (x))
885 REG_ATTRS (x) = get_reg_attrs (t, 0);
886 else if (GET_CODE (x) == SUBREG)
887 REG_ATTRS (SUBREG_REG (x))
888 = get_reg_attrs (t, -SUBREG_BYTE (x));
889 if (GET_CODE (x) == CONCAT)
890 {
891 if (REG_P (XEXP (x, 0)))
892 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
893 if (REG_P (XEXP (x, 1)))
894 REG_ATTRS (XEXP (x, 1))
895 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
896 }
897 if (GET_CODE (x) == PARALLEL)
898 {
899 int i;
900 for (i = 0; i < XVECLEN (x, 0); i++)
901 {
902 rtx y = XVECEXP (x, 0, i);
903 if (REG_P (XEXP (y, 0)))
904 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
905 }
906 }
907 }
908
909 /* Assign the RTX X to parameter declaration T. */
910 void
911 set_decl_incoming_rtl (tree t, rtx x)
912 {
913 DECL_INCOMING_RTL (t) = x;
914
915 if (!x)
916 return;
917 /* For register, we maintain the reverse information too. */
918 if (REG_P (x))
919 REG_ATTRS (x) = get_reg_attrs (t, 0);
920 else if (GET_CODE (x) == SUBREG)
921 REG_ATTRS (SUBREG_REG (x))
922 = get_reg_attrs (t, -SUBREG_BYTE (x));
923 if (GET_CODE (x) == CONCAT)
924 {
925 if (REG_P (XEXP (x, 0)))
926 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
927 if (REG_P (XEXP (x, 1)))
928 REG_ATTRS (XEXP (x, 1))
929 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
930 }
931 if (GET_CODE (x) == PARALLEL)
932 {
933 int i, start;
934
935 /* Check for a NULL entry, used to indicate that the parameter goes
936 both on the stack and in registers. */
937 if (XEXP (XVECEXP (x, 0, 0), 0))
938 start = 0;
939 else
940 start = 1;
941
942 for (i = start; i < XVECLEN (x, 0); i++)
943 {
944 rtx y = XVECEXP (x, 0, i);
945 if (REG_P (XEXP (y, 0)))
946 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
947 }
948 }
949 }
950
951 /* Identify REG (which may be a CONCAT) as a user register. */
952
953 void
954 mark_user_reg (rtx reg)
955 {
956 if (GET_CODE (reg) == CONCAT)
957 {
958 REG_USERVAR_P (XEXP (reg, 0)) = 1;
959 REG_USERVAR_P (XEXP (reg, 1)) = 1;
960 }
961 else
962 {
963 gcc_assert (REG_P (reg));
964 REG_USERVAR_P (reg) = 1;
965 }
966 }
967
968 /* Identify REG as a probable pointer register and show its alignment
969 as ALIGN, if nonzero. */
970
971 void
972 mark_reg_pointer (rtx reg, int align)
973 {
974 if (! REG_POINTER (reg))
975 {
976 REG_POINTER (reg) = 1;
977
978 if (align)
979 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
980 }
981 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
982 /* We can no longer be sure just how aligned this pointer is. */
983 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
984 }
985
986 /* Return 1 plus largest pseudo reg number used in the current function. */
987
988 int
989 max_reg_num (void)
990 {
991 return reg_rtx_no;
992 }
993
994 /* Return 1 + the largest label number used so far in the current function. */
995
996 int
997 max_label_num (void)
998 {
999 return label_num;
1000 }
1001
1002 /* Return first label number used in this function (if any were used). */
1003
1004 int
1005 get_first_label_num (void)
1006 {
1007 return first_label_num;
1008 }
1009
1010 /* If the rtx for label was created during the expansion of a nested
1011 function, then first_label_num won't include this label number.
1012 Fix this now so that array indices work later. */
1013
1014 void
1015 maybe_set_first_label_num (rtx x)
1016 {
1017 if (CODE_LABEL_NUMBER (x) < first_label_num)
1018 first_label_num = CODE_LABEL_NUMBER (x);
1019 }
1020 \f
1021 /* Return a value representing some low-order bits of X, where the number
1022 of low-order bits is given by MODE. Note that no conversion is done
1023 between floating-point and fixed-point values, rather, the bit
1024 representation is returned.
1025
1026 This function handles the cases in common between gen_lowpart, below,
1027 and two variants in cse.c and combine.c. These are the cases that can
1028 be safely handled at all points in the compilation.
1029
1030 If this is not a case we can handle, return 0. */
1031
1032 rtx
1033 gen_lowpart_common (enum machine_mode mode, rtx x)
1034 {
1035 int msize = GET_MODE_SIZE (mode);
1036 int xsize;
1037 int offset = 0;
1038 enum machine_mode innermode;
1039
1040 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1041 so we have to make one up. Yuk. */
1042 innermode = GET_MODE (x);
1043 if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
1044 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1045 else if (innermode == VOIDmode)
1046 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1047
1048 xsize = GET_MODE_SIZE (innermode);
1049
1050 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1051
1052 if (innermode == mode)
1053 return x;
1054
1055 /* MODE must occupy no more words than the mode of X. */
1056 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1057 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1058 return 0;
1059
1060 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1061 if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
1062 return 0;
1063
1064 offset = subreg_lowpart_offset (mode, innermode);
1065
1066 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1067 && (GET_MODE_CLASS (mode) == MODE_INT
1068 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1069 {
1070 /* If we are getting the low-order part of something that has been
1071 sign- or zero-extended, we can either just use the object being
1072 extended or make a narrower extension. If we want an even smaller
1073 piece than the size of the object being extended, call ourselves
1074 recursively.
1075
1076 This case is used mostly by combine and cse. */
1077
1078 if (GET_MODE (XEXP (x, 0)) == mode)
1079 return XEXP (x, 0);
1080 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1081 return gen_lowpart_common (mode, XEXP (x, 0));
1082 else if (msize < xsize)
1083 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1084 }
1085 else if (GET_CODE (x) == SUBREG || REG_P (x)
1086 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1087 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1088 return simplify_gen_subreg (mode, x, innermode, offset);
1089
1090 /* Otherwise, we can't do this. */
1091 return 0;
1092 }
1093 \f
1094 /* Return the constant real or imaginary part (which has mode MODE)
1095 of a complex value X. The IMAGPART_P argument determines whether
1096 the real or imaginary component should be returned. This function
1097 returns NULL_RTX if the component isn't a constant. */
1098
1099 static rtx
1100 gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
1101 {
1102 tree decl, part;
1103
1104 if (MEM_P (x)
1105 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
1106 {
1107 decl = SYMBOL_REF_DECL (XEXP (x, 0));
1108 if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
1109 {
1110 part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
1111 if (TREE_CODE (part) == REAL_CST
1112 || TREE_CODE (part) == INTEGER_CST)
1113 return expand_expr (part, NULL_RTX, mode, 0);
1114 }
1115 }
1116 return NULL_RTX;
1117 }
1118
1119 /* Return the real part (which has mode MODE) of a complex value X.
1120 This always comes at the low address in memory. */
1121
1122 rtx
1123 gen_realpart (enum machine_mode mode, rtx x)
1124 {
1125 rtx part;
1126
1127 /* Handle complex constants. */
1128 part = gen_complex_constant_part (mode, x, 0);
1129 if (part != NULL_RTX)
1130 return part;
1131
1132 if (WORDS_BIG_ENDIAN
1133 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1134 && REG_P (x)
1135 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1136 internal_error
1137 ("can't access real part of complex value in hard register");
1138 else if (WORDS_BIG_ENDIAN)
1139 return gen_highpart (mode, x);
1140 else
1141 return gen_lowpart (mode, x);
1142 }
1143
1144 /* Return the imaginary part (which has mode MODE) of a complex value X.
1145 This always comes at the high address in memory. */
1146
1147 rtx
1148 gen_imagpart (enum machine_mode mode, rtx x)
1149 {
1150 rtx part;
1151
1152 /* Handle complex constants. */
1153 part = gen_complex_constant_part (mode, x, 1);
1154 if (part != NULL_RTX)
1155 return part;
1156
1157 if (WORDS_BIG_ENDIAN)
1158 return gen_lowpart (mode, x);
1159 else if (! WORDS_BIG_ENDIAN
1160 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1161 && REG_P (x)
1162 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1163 internal_error
1164 ("can't access imaginary part of complex value in hard register");
1165 else
1166 return gen_highpart (mode, x);
1167 }
1168 \f
1169 rtx
1170 gen_highpart (enum machine_mode mode, rtx x)
1171 {
1172 unsigned int msize = GET_MODE_SIZE (mode);
1173 rtx result;
1174
1175 /* This case loses if X is a subreg. To catch bugs early,
1176 complain if an invalid MODE is used even in other cases. */
1177 gcc_assert (msize <= UNITS_PER_WORD
1178 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1179
1180 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1181 subreg_highpart_offset (mode, GET_MODE (x)));
1182 gcc_assert (result);
1183
1184 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1185 the target if we have a MEM. gen_highpart must return a valid operand,
1186 emitting code if necessary to do so. */
1187 if (MEM_P (result))
1188 {
1189 result = validize_mem (result);
1190 gcc_assert (result);
1191 }
1192
1193 return result;
1194 }
1195
1196 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1197 can be a VOIDmode constant. */
1198 rtx
1199 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1200 {
1201 if (GET_MODE (exp) != VOIDmode)
1202 {
1203 gcc_assert (GET_MODE (exp) == innermode);
1204 return gen_highpart (outermode, exp);
1205 }
1206 return simplify_gen_subreg (outermode, exp, innermode,
1207 subreg_highpart_offset (outermode, innermode));
1208 }
1209
1210 /* Return the offset in bytes needed to access the OUTERMODE low part
1211 of a value of mode INNERMODE stored in memory in target format. */
1212
1213 unsigned int
1214 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1215 {
1216 unsigned int offset = 0;
1217 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1218
1219 if (difference > 0)
1220 {
1221 if (WORDS_BIG_ENDIAN)
1222 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1223 if (BYTES_BIG_ENDIAN)
1224 offset += difference % UNITS_PER_WORD;
1225 }
1226
1227 return offset;
1228 }
1229
1230 /* Return the offset in bytes needed to access the OUTERMODE high part
1231 of a value of mode INNERMODE stored in memory in target format. */
1232 unsigned int
1233 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1234 {
1235 unsigned int offset = 0;
1236 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1237
1238 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1239
1240 if (difference > 0)
1241 {
1242 if (! WORDS_BIG_ENDIAN)
1243 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1244 if (! BYTES_BIG_ENDIAN)
1245 offset += difference % UNITS_PER_WORD;
1246 }
1247
1248 return offset;
1249 }
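
/* Worked example of the two offset functions for an SImode part of a
   DImode value (a difference of 4 bytes): on a fully little-endian target
   the low part sits at byte 0 and the high part at byte 4; on a fully
   big-endian target the two are swapped.  Mixed-endian targets differ.  */
#if 0
static void
example_subreg_offsets (void)
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);

  if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
    gcc_assert (lo == 0 && hi == 4);
  else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
    gcc_assert (lo == 4 && hi == 0);
}
#endif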
1250
1251 /* Return 1 iff X, assumed to be a SUBREG,
1252 refers to the least significant part of its containing reg.
1253 If X is not a SUBREG, always return 1 (it is its own low part!). */
1254
1255 int
1256 subreg_lowpart_p (rtx x)
1257 {
1258 if (GET_CODE (x) != SUBREG)
1259 return 1;
1260 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1261 return 0;
1262
1263 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1264 == SUBREG_BYTE (x));
1265 }
1266 \f
1267 /* Return subword OFFSET of operand OP.
1268 The word number, OFFSET, is interpreted as the word number starting
1269 at the low-order address. OFFSET 0 is the low-order word if not
1270 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1271
1272 If we cannot extract the required word, we return zero. Otherwise,
1273 an rtx corresponding to the requested word will be returned.
1274
1275 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1276 reload has completed, a valid address will always be returned. After
1277 reload, if a valid address cannot be returned, we return zero.
1278
1279 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1280 it is the responsibility of the caller.
1281
1282 MODE is the mode of OP in case it is a CONST_INT.
1283
1284 ??? This is still rather broken for some cases. The problem for the
1285 moment is that all callers of this thing provide no 'goal mode' to
1286 tell us to work with. This exists because all callers were written
1287 in a word based SUBREG world.
1288 Now use of this function can be deprecated by simplify_subreg in most
1289 cases.
1290 */
1291
1292 rtx
1293 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1294 {
1295 if (mode == VOIDmode)
1296 mode = GET_MODE (op);
1297
1298 gcc_assert (mode != VOIDmode);
1299
1300 /* If OP is narrower than a word, fail. */
1301 if (mode != BLKmode
1302 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1303 return 0;
1304
1305 /* If we want a word outside OP, return zero. */
1306 if (mode != BLKmode
1307 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1308 return const0_rtx;
1309
1310 /* Form a new MEM at the requested address. */
1311 if (MEM_P (op))
1312 {
1313 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1314
1315 if (! validate_address)
1316 return new;
1317
1318 else if (reload_completed)
1319 {
1320 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1321 return 0;
1322 }
1323 else
1324 return replace_equiv_address (new, XEXP (new, 0));
1325 }
1326
1327 /* Rest can be handled by simplify_subreg. */
1328 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1329 }
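
/* Sketch, for DI_REG a DImode pseudo: word 0 can always be extracted, and
   a word index wholly outside the operand just yields const0_rtx, as the
   code above shows.  */
#if 0
static void
example_operand_subword (rtx di_reg)
{
  rtx word0 = operand_subword (di_reg, 0, 1, DImode);

  gcc_assert (word0 != 0);
  gcc_assert (operand_subword (di_reg, 4, 1, DImode) == const0_rtx);
}
#endif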
1330
1331 /* Similar to `operand_subword', but never return 0. If we can't extract
1332 the required subword, put OP into a register and try again. If that fails,
1333 abort. We always validate the address in this case.
1334
1335 MODE is the mode of OP, in case it is CONST_INT. */
1336
1337 rtx
1338 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1339 {
1340 rtx result = operand_subword (op, offset, 1, mode);
1341
1342 if (result)
1343 return result;
1344
1345 if (mode != BLKmode && mode != VOIDmode)
1346 {
1347 /* If this is a register which can not be accessed by words, copy it
1348 to a pseudo register. */
1349 if (REG_P (op))
1350 op = copy_to_reg (op);
1351 else
1352 op = force_reg (mode, op);
1353 }
1354
1355 result = operand_subword (op, offset, 1, mode);
1356 gcc_assert (result);
1357
1358 return result;
1359 }
1360 \f
1361 /* Given a compare instruction, swap the operands.
1362 A test instruction is changed into a compare of 0 against the operand. */
1363
1364 void
1365 reverse_comparison (rtx insn)
1366 {
1367 rtx body = PATTERN (insn);
1368 rtx comp;
1369
1370 if (GET_CODE (body) == SET)
1371 comp = SET_SRC (body);
1372 else
1373 comp = SET_SRC (XVECEXP (body, 0, 0));
1374
1375 if (GET_CODE (comp) == COMPARE)
1376 {
1377 rtx op0 = XEXP (comp, 0);
1378 rtx op1 = XEXP (comp, 1);
1379 XEXP (comp, 0) = op1;
1380 XEXP (comp, 1) = op0;
1381 }
1382 else
1383 {
1384 rtx new = gen_rtx_COMPARE (VOIDmode,
1385 CONST0_RTX (GET_MODE (comp)), comp);
1386 if (GET_CODE (body) == SET)
1387 SET_SRC (body) = new;
1388 else
1389 SET_SRC (XVECEXP (body, 0, 0)) = new;
1390 }
1391 }
1392 \f
1393 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1394 or (2) a component ref of something variable. Represent the latter with
1395 a NULL expression. */
1396
1397 static tree
1398 component_ref_for_mem_expr (tree ref)
1399 {
1400 tree inner = TREE_OPERAND (ref, 0);
1401
1402 if (TREE_CODE (inner) == COMPONENT_REF)
1403 inner = component_ref_for_mem_expr (inner);
1404 else
1405 {
1406 /* Now remove any conversions: they don't change what the underlying
1407 object is. Likewise for SAVE_EXPR. */
1408 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1409 || TREE_CODE (inner) == NON_LVALUE_EXPR
1410 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1411 || TREE_CODE (inner) == SAVE_EXPR)
1412 inner = TREE_OPERAND (inner, 0);
1413
1414 if (! DECL_P (inner))
1415 inner = NULL_TREE;
1416 }
1417
1418 if (inner == TREE_OPERAND (ref, 0))
1419 return ref;
1420 else
1421 return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1422 TREE_OPERAND (ref, 1), NULL_TREE);
1423 }
1424
1425 /* Returns 1 if the two MEM_EXPRs can be considered equal
1426 and 0 otherwise. */
1427
1428 int
1429 mem_expr_equal_p (tree expr1, tree expr2)
1430 {
1431 if (expr1 == expr2)
1432 return 1;
1433
1434 if (! expr1 || ! expr2)
1435 return 0;
1436
1437 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1438 return 0;
1439
1440 if (TREE_CODE (expr1) == COMPONENT_REF)
1441 return
1442 mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1443 TREE_OPERAND (expr2, 0))
1444 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1445 TREE_OPERAND (expr2, 1));
1446
1447 if (INDIRECT_REF_P (expr1))
1448 return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1449 TREE_OPERAND (expr2, 0));
1450
1451 /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1452 have been resolved here. */
1453 gcc_assert (DECL_P (expr1));
1454
1455 /* Decls with different pointers can't be equal. */
1456 return 0;
1457 }
1458
1459 /* Given REF, a MEM, and T, either the type of REF or the expression
1460 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1461 if we are making a new object of this type. BITPOS is nonzero if
1462 there is an offset outstanding on T that will be applied later. */
1463
1464 void
1465 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1466 HOST_WIDE_INT bitpos)
1467 {
1468 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1469 tree expr = MEM_EXPR (ref);
1470 rtx offset = MEM_OFFSET (ref);
1471 rtx size = MEM_SIZE (ref);
1472 unsigned int align = MEM_ALIGN (ref);
1473 HOST_WIDE_INT apply_bitpos = 0;
1474 tree type;
1475
1476 /* It can happen that type_for_mode was given a mode for which there
1477 is no language-level type, in which case it returns NULL, which
1478 we can see here. */
1479 if (t == NULL_TREE)
1480 return;
1481
1482 type = TYPE_P (t) ? t : TREE_TYPE (t);
1483 if (type == error_mark_node)
1484 return;
1485
1486 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1487 wrong answer, as it assumes that DECL_RTL already has the right alias
1488 info. Callers should not set DECL_RTL until after the call to
1489 set_mem_attributes. */
1490 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1491
1492 /* Get the alias set from the expression or type (perhaps using a
1493 front-end routine) and use it. */
1494 alias = get_alias_set (t);
1495
1496 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1497 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1498 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1499 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (t);
1500
1501 /* If we are making an object of this type, or if this is a DECL, we know
1502 that it is a scalar if the type is not an aggregate. */
1503 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1504 MEM_SCALAR_P (ref) = 1;
1505
1506 /* We can set the alignment from the type if we are making an object,
1507 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1508 if (objectp || TREE_CODE (t) == INDIRECT_REF
1509 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1510 || TYPE_ALIGN_OK (type))
1511 align = MAX (align, TYPE_ALIGN (type));
1512 else
1513 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1514 {
1515 if (integer_zerop (TREE_OPERAND (t, 1)))
1516 /* We don't know anything about the alignment. */
1517 align = BITS_PER_UNIT;
1518 else
1519 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1520 }
1521
1522 /* If the size is known, we can set that. */
1523 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1524 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1525
1526 /* If T is not a type, we may be able to deduce some more information about
1527 the expression. */
1528 if (! TYPE_P (t))
1529 {
1530 tree base = get_base_address (t);
1531 if (base && DECL_P (base)
1532 && TREE_READONLY (base)
1533 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1534 MEM_READONLY_P (ref) = 1;
1535
1536 if (TREE_THIS_VOLATILE (t))
1537 MEM_VOLATILE_P (ref) = 1;
1538
1539 /* Now remove any conversions: they don't change what the underlying
1540 object is. Likewise for SAVE_EXPR. */
1541 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1542 || TREE_CODE (t) == NON_LVALUE_EXPR
1543 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1544 || TREE_CODE (t) == SAVE_EXPR)
1545 t = TREE_OPERAND (t, 0);
1546
1547 /* If this expression can't be addressed (e.g., it contains a reference
1548 to a non-addressable field), show we don't change its alias set. */
1549 if (! can_address_p (t))
1550 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1551
1552 /* If this is a decl, set the attributes of the MEM from it. */
1553 if (DECL_P (t))
1554 {
1555 expr = t;
1556 offset = const0_rtx;
1557 apply_bitpos = bitpos;
1558 size = (DECL_SIZE_UNIT (t)
1559 && host_integerp (DECL_SIZE_UNIT (t), 1)
1560 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1561 align = DECL_ALIGN (t);
1562 }
1563
1564 /* If this is a constant, we know the alignment. */
1565 else if (CONSTANT_CLASS_P (t))
1566 {
1567 align = TYPE_ALIGN (type);
1568 #ifdef CONSTANT_ALIGNMENT
1569 align = CONSTANT_ALIGNMENT (t, align);
1570 #endif
1571 }
1572
1573 /* If this is a field reference and not a bit-field, record it. */
1574 /* ??? There is some information that can be gleaned from bit-fields,
1575 such as the word offset in the structure that might be modified.
1576 But skip it for now. */
1577 else if (TREE_CODE (t) == COMPONENT_REF
1578 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1579 {
1580 expr = component_ref_for_mem_expr (t);
1581 offset = const0_rtx;
1582 apply_bitpos = bitpos;
1583 /* ??? Any reason the field size would be different than
1584 the size we got from the type? */
1585 }
1586
1587 /* If this is an array reference, look for an outer field reference. */
1588 else if (TREE_CODE (t) == ARRAY_REF)
1589 {
1590 tree off_tree = size_zero_node;
1591 /* We can't modify t, because we use it at the end of the
1592 function. */
1593 tree t2 = t;
1594
1595 do
1596 {
1597 tree index = TREE_OPERAND (t2, 1);
1598 tree low_bound = array_ref_low_bound (t2);
1599 tree unit_size = array_ref_element_size (t2);
1600
1601 /* We assume all arrays have sizes that are a multiple of a byte.
1602 First subtract the lower bound, if any, in the type of the
1603 index, then convert to sizetype and multiply by the size of
1604 the array element. */
1605 if (! integer_zerop (low_bound))
1606 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
1607 index, low_bound));
1608
1609 off_tree = size_binop (PLUS_EXPR,
1610 size_binop (MULT_EXPR, convert (sizetype,
1611 index),
1612 unit_size),
1613 off_tree);
1614 t2 = TREE_OPERAND (t2, 0);
1615 }
1616 while (TREE_CODE (t2) == ARRAY_REF);
1617
1618 if (DECL_P (t2))
1619 {
1620 expr = t2;
1621 offset = NULL;
1622 if (host_integerp (off_tree, 1))
1623 {
1624 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1625 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1626 align = DECL_ALIGN (t2);
1627 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1628 align = aoff;
1629 offset = GEN_INT (ioff);
1630 apply_bitpos = bitpos;
1631 }
1632 }
1633 else if (TREE_CODE (t2) == COMPONENT_REF)
1634 {
1635 expr = component_ref_for_mem_expr (t2);
1636 if (host_integerp (off_tree, 1))
1637 {
1638 offset = GEN_INT (tree_low_cst (off_tree, 1));
1639 apply_bitpos = bitpos;
1640 }
1641 /* ??? Any reason the field size would be different than
1642 the size we got from the type? */
1643 }
1644 else if (flag_argument_noalias > 1
1645 && (INDIRECT_REF_P (t2))
1646 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1647 {
1648 expr = t2;
1649 offset = NULL;
1650 }
1651 }
1652
1653 /* If this is a Fortran indirect argument reference, record the
1654 parameter decl. */
1655 else if (flag_argument_noalias > 1
1656 && (INDIRECT_REF_P (t))
1657 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1658 {
1659 expr = t;
1660 offset = NULL;
1661 }
1662 }
1663
1664 /* If we modified OFFSET based on T, then subtract the outstanding
1665 bit position offset. Similarly, increase the size of the accessed
1666 object to contain the negative offset. */
1667 if (apply_bitpos)
1668 {
1669 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1670 if (size)
1671 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1672 }
1673
1674 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1675 {
1676 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1677 we're overlapping. */
1678 offset = NULL;
1679 expr = NULL;
1680 }
1681
1682 /* Now set the attributes we computed above. */
1683 MEM_ATTRS (ref)
1684 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1685
1686 /* If this is already known to be a scalar or aggregate, we are done. */
1687 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1688 return;
1689
1690 /* If it is a reference into an aggregate, this is part of an aggregate.
1691 Otherwise we don't know. */
1692 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1693 || TREE_CODE (t) == ARRAY_RANGE_REF
1694 || TREE_CODE (t) == BIT_FIELD_REF)
1695 MEM_IN_STRUCT_P (ref) = 1;
1696 }
1697
1698 void
1699 set_mem_attributes (rtx ref, tree t, int objectp)
1700 {
1701 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1702 }
1703
1704 /* Set the memory attributes of MEM from register REG. */
1705
1706 void
1707 set_mem_attrs_from_reg (rtx mem, rtx reg)
1708 {
1709 MEM_ATTRS (mem)
1710 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1711 GEN_INT (REG_OFFSET (reg)),
1712 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1713 }
1714
1715 /* Set the alias set of MEM to SET. */
1716
1717 void
1718 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1719 {
1720 #ifdef ENABLE_CHECKING
1721 /* If the new and old alias sets don't conflict, something is wrong. */
1722 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1723 #endif
1724
1725 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1726 MEM_SIZE (mem), MEM_ALIGN (mem),
1727 GET_MODE (mem));
1728 }
1729
1730 /* Set the alignment of MEM to ALIGN bits. */
1731
1732 void
1733 set_mem_align (rtx mem, unsigned int align)
1734 {
1735 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1736 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1737 GET_MODE (mem));
1738 }
1739
1740 /* Set the expr for MEM to EXPR. */
1741
1742 void
1743 set_mem_expr (rtx mem, tree expr)
1744 {
1745 MEM_ATTRS (mem)
1746 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1747 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1748 }
1749
1750 /* Set the offset of MEM to OFFSET. */
1751
1752 void
1753 set_mem_offset (rtx mem, rtx offset)
1754 {
1755 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1756 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1757 GET_MODE (mem));
1758 }
1759
1760 /* Set the size of MEM to SIZE. */
1761
1762 void
1763 set_mem_size (rtx mem, rtx size)
1764 {
1765 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1766 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1767 GET_MODE (mem));
1768 }
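
/* A minimal usage sketch, not part of this file: after building a MEM by
   hand, the setters above can stamp its attributes one at a time instead
   of calling get_mem_attrs directly.  The mode and constants below are
   assumptions chosen only for the example.

	rtx addr = gen_reg_rtx (Pmode);
	rtx mem  = gen_rtx_MEM (SImode, addr);

	set_mem_align (mem, 32);
	set_mem_size (mem, GEN_INT (GET_MODE_SIZE (SImode)));
	set_mem_offset (mem, const0_rtx);

   Each helper rebuilds MEM_ATTRS via get_mem_attrs, so the calls may be
   made in any order.  */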
1769 \f
1770 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1771 and its address changed to ADDR. (VOIDmode means don't change the mode.
1772 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1773 returned memory location is required to be valid. The memory
1774 attributes are not changed. */
1775
1776 static rtx
1777 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1778 {
1779 rtx new;
1780
1781 gcc_assert (MEM_P (memref));
1782 if (mode == VOIDmode)
1783 mode = GET_MODE (memref);
1784 if (addr == 0)
1785 addr = XEXP (memref, 0);
1786 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1787 && (!validate || memory_address_p (mode, addr)))
1788 return memref;
1789
1790 if (validate)
1791 {
1792 if (reload_in_progress || reload_completed)
1793 gcc_assert (memory_address_p (mode, addr));
1794 else
1795 addr = memory_address (mode, addr);
1796 }
1797
1798 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1799 return memref;
1800
1801 new = gen_rtx_MEM (mode, addr);
1802 MEM_COPY_ATTRIBUTES (new, memref);
1803 return new;
1804 }
1805
1806 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1807 way we are changing MEMREF, so we only preserve the alias set. */
1808
1809 rtx
1810 change_address (rtx memref, enum machine_mode mode, rtx addr)
1811 {
1812 rtx new = change_address_1 (memref, mode, addr, 1), size;
1813 enum machine_mode mmode = GET_MODE (new);
1814 unsigned int align;
1815
1816 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1817 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1818
1819 /* If there are no changes, just return the original memory reference. */
1820 if (new == memref)
1821 {
1822 if (MEM_ATTRS (memref) == 0
1823 || (MEM_EXPR (memref) == NULL
1824 && MEM_OFFSET (memref) == NULL
1825 && MEM_SIZE (memref) == size
1826 && MEM_ALIGN (memref) == align))
1827 return new;
1828
1829 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1830 MEM_COPY_ATTRIBUTES (new, memref);
1831 }
1832
1833 MEM_ATTRS (new)
1834 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1835
1836 return new;
1837 }
1838
1839 /* Return a memory reference like MEMREF, but with its mode changed
1840 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1841 nonzero, the memory address is forced to be valid.
1842 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1843 and caller is responsible for adjusting MEMREF base register. */
1844
1845 rtx
1846 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1847 int validate, int adjust)
1848 {
1849 rtx addr = XEXP (memref, 0);
1850 rtx new;
1851 rtx memoffset = MEM_OFFSET (memref);
1852 rtx size = 0;
1853 unsigned int memalign = MEM_ALIGN (memref);
1854
1855 /* If there are no changes, just return the original memory reference. */
1856 if (mode == GET_MODE (memref) && !offset
1857 && (!validate || memory_address_p (mode, addr)))
1858 return memref;
1859
1860 /* ??? Prefer to create garbage instead of creating shared rtl.
1861 This may happen even if offset is nonzero -- consider
1862 (plus (plus reg reg) const_int) -- so do this always. */
1863 addr = copy_rtx (addr);
1864
1865 if (adjust)
1866 {
1867 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1868 object, we can merge it into the LO_SUM. */
1869 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1870 && offset >= 0
1871 && (unsigned HOST_WIDE_INT) offset
1872 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1873 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1874 plus_constant (XEXP (addr, 1), offset));
1875 else
1876 addr = plus_constant (addr, offset);
1877 }
1878
1879 new = change_address_1 (memref, mode, addr, validate);
1880
1881 /* Compute the new values of the memory attributes due to this adjustment.
1882 We add the offsets and update the alignment. */
1883 if (memoffset)
1884 memoffset = GEN_INT (offset + INTVAL (memoffset));
1885
1886 /* Compute the new alignment by taking the MIN of the alignment and the
1887 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1888      is zero.  */
1889 if (offset != 0)
1890 memalign
1891 = MIN (memalign,
1892 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1893
1894 /* We can compute the size in a number of ways. */
1895 if (GET_MODE (new) != BLKmode)
1896 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1897 else if (MEM_SIZE (memref))
1898 size = plus_constant (MEM_SIZE (memref), -offset);
1899
1900 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1901 memoffset, size, memalign, GET_MODE (new));
1902
1903 /* At some point, we should validate that this offset is within the object,
1904 if all the appropriate values are known. */
1905 return new;
1906 }
1907
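/* Illustrative sketch, not part of this file: this routine is normally
   reached through the adjust_address (validating) and adjust_address_nv
   (non-validating) macros in expr.h, both of which pass ADJUST as 1.
   Picking the word at byte offset 4 out of a DImode stack slot might look
   like this (the slot and modes are assumptions for the example):

	rtx slot = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
	rtx word = adjust_address (slot, SImode, GET_MODE_SIZE (SImode));

   The returned MEM keeps the alias set and MEM_EXPR of SLOT while its
   offset, size and alignment are updated as described above.  */
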
1908 /* Return a memory reference like MEMREF, but with its mode changed
1909 to MODE and its address changed to ADDR, which is assumed to be
1910    MEMREF offset by OFFSET bytes.  If VALIDATE is
1911 nonzero, the memory address is forced to be valid. */
1912
1913 rtx
1914 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1915 HOST_WIDE_INT offset, int validate)
1916 {
1917 memref = change_address_1 (memref, VOIDmode, addr, validate);
1918 return adjust_address_1 (memref, mode, offset, validate, 0);
1919 }
1920
1921 /* Return a memory reference like MEMREF, but whose address is changed by
1922 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1923 known to be in OFFSET (possibly 1). */
1924
1925 rtx
1926 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1927 {
1928 rtx new, addr = XEXP (memref, 0);
1929
1930 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1931
1932 /* At this point we don't know _why_ the address is invalid. It
1933 could have secondary memory references, multiplies or anything.
1934
1935 However, if we did go and rearrange things, we can wind up not
1936 being able to recognize the magic around pic_offset_table_rtx.
1937 This stuff is fragile, and is yet another example of why it is
1938 bad to expose PIC machinery too early. */
1939 if (! memory_address_p (GET_MODE (memref), new)
1940 && GET_CODE (addr) == PLUS
1941 && XEXP (addr, 0) == pic_offset_table_rtx)
1942 {
1943 addr = force_reg (GET_MODE (addr), addr);
1944 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1945 }
1946
1947 update_temp_slot_address (XEXP (memref, 0), new);
1948 new = change_address_1 (memref, VOIDmode, new, 1);
1949
1950 /* If there are no changes, just return the original memory reference. */
1951 if (new == memref)
1952 return new;
1953
1954 /* Update the alignment to reflect the offset. Reset the offset, which
1955 we don't know. */
1956 MEM_ATTRS (new)
1957 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1958 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1959 GET_MODE (new));
1960 return new;
1961 }
1962
1963 /* Return a memory reference like MEMREF, but with its address changed to
1964 ADDR. The caller is asserting that the actual piece of memory pointed
1965 to is the same, just the form of the address is being changed, such as
1966 by putting something into a register. */
1967
1968 rtx
1969 replace_equiv_address (rtx memref, rtx addr)
1970 {
1971 /* change_address_1 copies the memory attribute structure without change
1972 and that's exactly what we want here. */
1973 update_temp_slot_address (XEXP (memref, 0), addr);
1974 return change_address_1 (memref, VOIDmode, addr, 1);
1975 }
1976
1977 /* Likewise, but the reference is not required to be valid. */
1978
1979 rtx
1980 replace_equiv_address_nv (rtx memref, rtx addr)
1981 {
1982 return change_address_1 (memref, VOIDmode, addr, 0);
1983 }
1984
1985 /* Return a memory reference like MEMREF, but with its mode widened to
1986 MODE and offset by OFFSET. This would be used by targets that e.g.
1987 cannot issue QImode memory operations and have to use SImode memory
1988 operations plus masking logic. */
1989
1990 rtx
1991 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
1992 {
1993 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1994 tree expr = MEM_EXPR (new);
1995 rtx memoffset = MEM_OFFSET (new);
1996 unsigned int size = GET_MODE_SIZE (mode);
1997
1998 /* If there are no changes, just return the original memory reference. */
1999 if (new == memref)
2000 return new;
2001
2002 /* If we don't know what offset we were at within the expression, then
2003 we can't know if we've overstepped the bounds. */
2004 if (! memoffset)
2005 expr = NULL_TREE;
2006
2007 while (expr)
2008 {
2009 if (TREE_CODE (expr) == COMPONENT_REF)
2010 {
2011 tree field = TREE_OPERAND (expr, 1);
2012 tree offset = component_ref_field_offset (expr);
2013
2014 if (! DECL_SIZE_UNIT (field))
2015 {
2016 expr = NULL_TREE;
2017 break;
2018 }
2019
2020 /* Is the field at least as large as the access? If so, ok,
2021 otherwise strip back to the containing structure. */
2022 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2023 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2024 && INTVAL (memoffset) >= 0)
2025 break;
2026
2027 if (! host_integerp (offset, 1))
2028 {
2029 expr = NULL_TREE;
2030 break;
2031 }
2032
2033 expr = TREE_OPERAND (expr, 0);
2034 memoffset
2035 = (GEN_INT (INTVAL (memoffset)
2036 + tree_low_cst (offset, 1)
2037 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2038 / BITS_PER_UNIT)));
2039 }
2040 /* Similarly for the decl. */
2041 else if (DECL_P (expr)
2042 && DECL_SIZE_UNIT (expr)
2043 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2044 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2045 && (! memoffset || INTVAL (memoffset) >= 0))
2046 break;
2047 else
2048 {
2049 /* The widened memory access overflows the expression, which means
2050 that it could alias another expression. Zap it. */
2051 expr = NULL_TREE;
2052 break;
2053 }
2054 }
2055
2056 if (! expr)
2057 memoffset = NULL_RTX;
2058
2059 /* The widened memory may alias other stuff, so zap the alias set. */
2060 /* ??? Maybe use get_alias_set on any remaining expression. */
2061
2062 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2063 MEM_ALIGN (new), mode);
2064
2065 return new;
2066 }
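
/* Illustrative sketch, not part of this file: a port that cannot issue
   QImode loads might widen a byte reference to a full word and then
   isolate the byte.  Roughly (BYTE_MEM is assumed to come from the
   caller; the masking step depends on byte order and is elided):

	rtx wide_mem = widen_memory_access (byte_mem, SImode, 0);
	rtx tmp      = gen_reg_rtx (SImode);

	emit_move_insn (tmp, wide_mem);
	... mask or shift TMP to extract the byte ...

   As noted above, the widened MEM loses its alias set, and MEM_EXPR and
   MEM_OFFSET survive only if the wider access still fits within the
   original object.  */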
2067 \f
2068 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2069
2070 rtx
2071 gen_label_rtx (void)
2072 {
2073 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2074 NULL, label_num++, NULL);
2075 }
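
/* Illustrative sketch, not part of this file: a fresh label is created
   here, branched to from wherever is convenient, and finally placed in
   the stream with emit_label.  For instance (the surrounding code is
   assumed):

	rtx done_label = gen_label_rtx ();

	... emit a conditional branch to done_label ...
	... emit the code that may be skipped ...

	emit_label (done_label);

   The label's use count (LABEL_NUSES) is maintained by the jump
   machinery and by mark_label_nuses below, not by gen_label_rtx.  */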
2076 \f
2077 /* For procedure integration. */
2078
2079 /* Install new pointers to the first and last insns in the chain.
2080 Also, set cur_insn_uid to one higher than the last in use.
2081 Used for an inline-procedure after copying the insn chain. */
2082
2083 void
2084 set_new_first_and_last_insn (rtx first, rtx last)
2085 {
2086 rtx insn;
2087
2088 first_insn = first;
2089 last_insn = last;
2090 cur_insn_uid = 0;
2091
2092 for (insn = first; insn; insn = NEXT_INSN (insn))
2093 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2094
2095 cur_insn_uid++;
2096 }
2097 \f
2098 /* Go through all the RTL insn bodies and copy any invalid shared
2099 structure. This routine should only be called once. */
2100
2101 static void
2102 unshare_all_rtl_1 (tree fndecl, rtx insn)
2103 {
2104 tree decl;
2105
2106 /* Make sure that virtual parameters are not shared. */
2107 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2108 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2109
2110 /* Make sure that virtual stack slots are not shared. */
2111 unshare_all_decls (DECL_INITIAL (fndecl));
2112
2113 /* Unshare just about everything else. */
2114 unshare_all_rtl_in_chain (insn);
2115
2116 /* Make sure the addresses of stack slots found outside the insn chain
2117 (such as, in DECL_RTL of a variable) are not shared
2118 with the insn chain.
2119
2120 This special care is necessary when the stack slot MEM does not
2121 actually appear in the insn chain. If it does appear, its address
2122 is unshared from all else at that point. */
2123 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2124 }
2125
2126 /* Go through all the RTL insn bodies and copy any invalid shared
2127 structure, again. This is a fairly expensive thing to do so it
2128 should be done sparingly. */
2129
2130 void
2131 unshare_all_rtl_again (rtx insn)
2132 {
2133 rtx p;
2134 tree decl;
2135
2136 for (p = insn; p; p = NEXT_INSN (p))
2137 if (INSN_P (p))
2138 {
2139 reset_used_flags (PATTERN (p));
2140 reset_used_flags (REG_NOTES (p));
2141 reset_used_flags (LOG_LINKS (p));
2142 }
2143
2144 /* Make sure that virtual stack slots are not shared. */
2145 reset_used_decls (DECL_INITIAL (cfun->decl));
2146
2147 /* Make sure that virtual parameters are not shared. */
2148 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2149 reset_used_flags (DECL_RTL (decl));
2150
2151 reset_used_flags (stack_slot_list);
2152
2153 unshare_all_rtl_1 (cfun->decl, insn);
2154 }
2155
2156 void
2157 unshare_all_rtl (void)
2158 {
2159 unshare_all_rtl_1 (current_function_decl, get_insns ());
2160 }
2161
2162 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2163 Recursively does the same for subexpressions. */
2164
2165 static void
2166 verify_rtx_sharing (rtx orig, rtx insn)
2167 {
2168 rtx x = orig;
2169 int i;
2170 enum rtx_code code;
2171 const char *format_ptr;
2172
2173 if (x == 0)
2174 return;
2175
2176 code = GET_CODE (x);
2177
2178 /* These types may be freely shared. */
2179
2180 switch (code)
2181 {
2182 case REG:
2183 case CONST_INT:
2184 case CONST_DOUBLE:
2185 case CONST_VECTOR:
2186 case SYMBOL_REF:
2187 case LABEL_REF:
2188 case CODE_LABEL:
2189 case PC:
2190 case CC0:
2191 case SCRATCH:
2192 return;
2193     /* SCRATCHes must be shared because they represent distinct values.  */
2194 case CLOBBER:
2195 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2196 return;
2197 break;
2198
2199 case CONST:
2200 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2201 a LABEL_REF, it isn't sharable. */
2202 if (GET_CODE (XEXP (x, 0)) == PLUS
2203 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2204 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2205 return;
2206 break;
2207
2208 case MEM:
2209 /* A MEM is allowed to be shared if its address is constant. */
2210 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2211 || reload_completed || reload_in_progress)
2212 return;
2213
2214 break;
2215
2216 default:
2217 break;
2218 }
2219
2220   /* This rtx may not be shared.  If it has already been seen,
2221      report the invalid sharing.  */
2222 #ifdef ENABLE_CHECKING
2223 if (RTX_FLAG (x, used))
2224 {
2225 error ("Invalid rtl sharing found in the insn");
2226 debug_rtx (insn);
2227 error ("Shared rtx");
2228 debug_rtx (x);
2229 internal_error ("Internal consistency failure");
2230 }
2231 #endif
2232 gcc_assert (!RTX_FLAG (x, used));
2233
2234 RTX_FLAG (x, used) = 1;
2235
2236 /* Now scan the subexpressions recursively. */
2237
2238 format_ptr = GET_RTX_FORMAT (code);
2239
2240 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2241 {
2242 switch (*format_ptr++)
2243 {
2244 case 'e':
2245 verify_rtx_sharing (XEXP (x, i), insn);
2246 break;
2247
2248 case 'E':
2249 if (XVEC (x, i) != NULL)
2250 {
2251 int j;
2252 int len = XVECLEN (x, i);
2253
2254 for (j = 0; j < len; j++)
2255 {
2256 		 /* We allow sharing of ASM_OPERANDS inside a single
2257 instruction. */
2258 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2259 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2260 == ASM_OPERANDS))
2261 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2262 else
2263 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2264 }
2265 }
2266 break;
2267 }
2268 }
2269 return;
2270 }
2271
2272 /* Go through all the RTL insn bodies and check that there is no unexpected
2273 sharing in between the subexpressions. */
2274
2275 void
2276 verify_rtl_sharing (void)
2277 {
2278 rtx p;
2279
2280 for (p = get_insns (); p; p = NEXT_INSN (p))
2281 if (INSN_P (p))
2282 {
2283 reset_used_flags (PATTERN (p));
2284 reset_used_flags (REG_NOTES (p));
2285 reset_used_flags (LOG_LINKS (p));
2286 }
2287
2288 for (p = get_insns (); p; p = NEXT_INSN (p))
2289 if (INSN_P (p))
2290 {
2291 verify_rtx_sharing (PATTERN (p), p);
2292 verify_rtx_sharing (REG_NOTES (p), p);
2293 verify_rtx_sharing (LOG_LINKS (p), p);
2294 }
2295 }
2296
2297 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2298 Assumes the mark bits are cleared at entry. */
2299
2300 void
2301 unshare_all_rtl_in_chain (rtx insn)
2302 {
2303 for (; insn; insn = NEXT_INSN (insn))
2304 if (INSN_P (insn))
2305 {
2306 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2307 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2308 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2309 }
2310 }
2311
2312 /* Go through all virtual stack slots of a function and copy any
2313 shared structure. */
2314 static void
2315 unshare_all_decls (tree blk)
2316 {
2317 tree t;
2318
2319 /* Copy shared decls. */
2320 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2321 if (DECL_RTL_SET_P (t))
2322 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2323
2324 /* Now process sub-blocks. */
2325 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2326 unshare_all_decls (t);
2327 }
2328
2329 /* Go through all virtual stack slots of a function and mark them as
2330 not shared. */
2331 static void
2332 reset_used_decls (tree blk)
2333 {
2334 tree t;
2335
2336 /* Mark decls. */
2337 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2338 if (DECL_RTL_SET_P (t))
2339 reset_used_flags (DECL_RTL (t));
2340
2341 /* Now process sub-blocks. */
2342 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2343 reset_used_decls (t);
2344 }
2345
2346 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2347 Recursively does the same for subexpressions. Uses
2348 copy_rtx_if_shared_1 to reduce stack space. */
2349
2350 rtx
2351 copy_rtx_if_shared (rtx orig)
2352 {
2353 copy_rtx_if_shared_1 (&orig);
2354 return orig;
2355 }
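
/* Illustrative sketch, not part of this file: callers normally clear the
   used bits first and then unshare, as unshare_all_rtl_again above does
   for whole insn chains.  For a single pattern that might look like:

	reset_used_flags (PATTERN (insn));
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   Anything already marked as used when copy_rtx_if_shared reaches it is
   copied, so skipping the reset_used_flags pass would make the routine
   copy far more than necessary.  */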
2356
2357 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2358 use. Recursively does the same for subexpressions. */
2359
2360 static void
2361 copy_rtx_if_shared_1 (rtx *orig1)
2362 {
2363 rtx x;
2364 int i;
2365 enum rtx_code code;
2366 rtx *last_ptr;
2367 const char *format_ptr;
2368 int copied = 0;
2369 int length;
2370
2371 /* Repeat is used to turn tail-recursion into iteration. */
2372 repeat:
2373 x = *orig1;
2374
2375 if (x == 0)
2376 return;
2377
2378 code = GET_CODE (x);
2379
2380 /* These types may be freely shared. */
2381
2382 switch (code)
2383 {
2384 case REG:
2385 case CONST_INT:
2386 case CONST_DOUBLE:
2387 case CONST_VECTOR:
2388 case SYMBOL_REF:
2389 case LABEL_REF:
2390 case CODE_LABEL:
2391 case PC:
2392 case CC0:
2393 case SCRATCH:
2394       /* SCRATCHes must be shared because they represent distinct values.  */
2395 return;
2396 case CLOBBER:
2397 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2398 return;
2399 break;
2400
2401 case CONST:
2402 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2403 a LABEL_REF, it isn't sharable. */
2404 if (GET_CODE (XEXP (x, 0)) == PLUS
2405 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2406 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2407 return;
2408 break;
2409
2410 case INSN:
2411 case JUMP_INSN:
2412 case CALL_INSN:
2413 case NOTE:
2414 case BARRIER:
2415 /* The chain of insns is not being copied. */
2416 return;
2417
2418 default:
2419 break;
2420 }
2421
2422 /* This rtx may not be shared. If it has already been seen,
2423 replace it with a copy of itself. */
2424
2425 if (RTX_FLAG (x, used))
2426 {
2427 rtx copy;
2428
2429 copy = rtx_alloc (code);
2430 memcpy (copy, x, RTX_SIZE (code));
2431 x = copy;
2432 copied = 1;
2433 }
2434 RTX_FLAG (x, used) = 1;
2435
2436 /* Now scan the subexpressions recursively.
2437 We can store any replaced subexpressions directly into X
2438 since we know X is not shared! Any vectors in X
2439 must be copied if X was copied. */
2440
2441 format_ptr = GET_RTX_FORMAT (code);
2442 length = GET_RTX_LENGTH (code);
2443 last_ptr = NULL;
2444
2445 for (i = 0; i < length; i++)
2446 {
2447 switch (*format_ptr++)
2448 {
2449 case 'e':
2450 if (last_ptr)
2451 copy_rtx_if_shared_1 (last_ptr);
2452 last_ptr = &XEXP (x, i);
2453 break;
2454
2455 case 'E':
2456 if (XVEC (x, i) != NULL)
2457 {
2458 int j;
2459 int len = XVECLEN (x, i);
2460
2461 /* Copy the vector iff I copied the rtx and the length
2462 is nonzero. */
2463 if (copied && len > 0)
2464 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2465
2466 /* Call recursively on all inside the vector. */
2467 for (j = 0; j < len; j++)
2468 {
2469 if (last_ptr)
2470 copy_rtx_if_shared_1 (last_ptr);
2471 last_ptr = &XVECEXP (x, i, j);
2472 }
2473 }
2474 break;
2475 }
2476 }
2477 *orig1 = x;
2478 if (last_ptr)
2479 {
2480 orig1 = last_ptr;
2481 goto repeat;
2482 }
2483 return;
2484 }
2485
2486 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2487 to look for shared sub-parts. */
2488
2489 void
2490 reset_used_flags (rtx x)
2491 {
2492 int i, j;
2493 enum rtx_code code;
2494 const char *format_ptr;
2495 int length;
2496
2497 /* Repeat is used to turn tail-recursion into iteration. */
2498 repeat:
2499 if (x == 0)
2500 return;
2501
2502 code = GET_CODE (x);
2503
2504 /* These types may be freely shared so we needn't do any resetting
2505 for them. */
2506
2507 switch (code)
2508 {
2509 case REG:
2510 case CONST_INT:
2511 case CONST_DOUBLE:
2512 case CONST_VECTOR:
2513 case SYMBOL_REF:
2514 case CODE_LABEL:
2515 case PC:
2516 case CC0:
2517 return;
2518
2519 case INSN:
2520 case JUMP_INSN:
2521 case CALL_INSN:
2522 case NOTE:
2523 case LABEL_REF:
2524 case BARRIER:
2525 /* The chain of insns is not being copied. */
2526 return;
2527
2528 default:
2529 break;
2530 }
2531
2532 RTX_FLAG (x, used) = 0;
2533
2534 format_ptr = GET_RTX_FORMAT (code);
2535 length = GET_RTX_LENGTH (code);
2536
2537 for (i = 0; i < length; i++)
2538 {
2539 switch (*format_ptr++)
2540 {
2541 case 'e':
2542 if (i == length-1)
2543 {
2544 x = XEXP (x, i);
2545 goto repeat;
2546 }
2547 reset_used_flags (XEXP (x, i));
2548 break;
2549
2550 case 'E':
2551 for (j = 0; j < XVECLEN (x, i); j++)
2552 reset_used_flags (XVECEXP (x, i, j));
2553 break;
2554 }
2555 }
2556 }
2557
2558 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2559 to look for shared sub-parts. */
2560
2561 void
2562 set_used_flags (rtx x)
2563 {
2564 int i, j;
2565 enum rtx_code code;
2566 const char *format_ptr;
2567
2568 if (x == 0)
2569 return;
2570
2571 code = GET_CODE (x);
2572
2573 /* These types may be freely shared so we needn't do any resetting
2574 for them. */
2575
2576 switch (code)
2577 {
2578 case REG:
2579 case CONST_INT:
2580 case CONST_DOUBLE:
2581 case CONST_VECTOR:
2582 case SYMBOL_REF:
2583 case CODE_LABEL:
2584 case PC:
2585 case CC0:
2586 return;
2587
2588 case INSN:
2589 case JUMP_INSN:
2590 case CALL_INSN:
2591 case NOTE:
2592 case LABEL_REF:
2593 case BARRIER:
2594 /* The chain of insns is not being copied. */
2595 return;
2596
2597 default:
2598 break;
2599 }
2600
2601 RTX_FLAG (x, used) = 1;
2602
2603 format_ptr = GET_RTX_FORMAT (code);
2604 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2605 {
2606 switch (*format_ptr++)
2607 {
2608 case 'e':
2609 set_used_flags (XEXP (x, i));
2610 break;
2611
2612 case 'E':
2613 for (j = 0; j < XVECLEN (x, i); j++)
2614 set_used_flags (XVECEXP (x, i, j));
2615 break;
2616 }
2617 }
2618 }
2619 \f
2620 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2621 Return X or the rtx for the pseudo reg the value of X was copied into.
2622 OTHER must be valid as a SET_DEST. */
2623
2624 rtx
2625 make_safe_from (rtx x, rtx other)
2626 {
2627 while (1)
2628 switch (GET_CODE (other))
2629 {
2630 case SUBREG:
2631 other = SUBREG_REG (other);
2632 break;
2633 case STRICT_LOW_PART:
2634 case SIGN_EXTEND:
2635 case ZERO_EXTEND:
2636 other = XEXP (other, 0);
2637 break;
2638 default:
2639 goto done;
2640 }
2641 done:
2642 if ((MEM_P (other)
2643 && ! CONSTANT_P (x)
2644 && !REG_P (x)
2645 && GET_CODE (x) != SUBREG)
2646 || (REG_P (other)
2647 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2648 || reg_mentioned_p (other, x))))
2649 {
2650 rtx temp = gen_reg_rtx (GET_MODE (x));
2651 emit_move_insn (temp, x);
2652 return temp;
2653 }
2654 return x;
2655 }
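
/* Illustrative sketch, not part of this file: expanders typically call
   make_safe_from just before storing into a location that one of the
   remaining operands might overlap.  Assuming OP0, OP1 and TARGET come
   from such an expander:

	op1 = make_safe_from (op1, target);
	emit_move_insn (target, op0);
	... OP1 is still usable here even though TARGET has changed ...

   If OP1 overlapped TARGET, it was first copied into a fresh pseudo.  */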
2656 \f
2657 /* Emission of insns (adding them to the doubly-linked list). */
2658
2659 /* Return the first insn of the current sequence or current function. */
2660
2661 rtx
2662 get_insns (void)
2663 {
2664 return first_insn;
2665 }
2666
2667 /* Specify a new insn as the first in the chain. */
2668
2669 void
2670 set_first_insn (rtx insn)
2671 {
2672 gcc_assert (!PREV_INSN (insn));
2673 first_insn = insn;
2674 }
2675
2676 /* Return the last insn emitted in current sequence or current function. */
2677
2678 rtx
2679 get_last_insn (void)
2680 {
2681 return last_insn;
2682 }
2683
2684 /* Specify a new insn as the last in the chain. */
2685
2686 void
2687 set_last_insn (rtx insn)
2688 {
2689 gcc_assert (!NEXT_INSN (insn));
2690 last_insn = insn;
2691 }
2692
2693 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2694
2695 rtx
2696 get_last_insn_anywhere (void)
2697 {
2698 struct sequence_stack *stack;
2699 if (last_insn)
2700 return last_insn;
2701 for (stack = seq_stack; stack; stack = stack->next)
2702 if (stack->last != 0)
2703 return stack->last;
2704 return 0;
2705 }
2706
2707 /* Return the first nonnote insn emitted in current sequence or current
2708 function. This routine looks inside SEQUENCEs. */
2709
2710 rtx
2711 get_first_nonnote_insn (void)
2712 {
2713 rtx insn;
2714
2715 for (insn = first_insn; insn && NOTE_P (insn); insn = next_insn (insn));
2716 return insn;
2717 }
2718
2719 /* Return the last nonnote insn emitted in current sequence or current
2720 function. This routine looks inside SEQUENCEs. */
2721
2722 rtx
2723 get_last_nonnote_insn (void)
2724 {
2725 rtx insn;
2726
2727 for (insn = last_insn; insn && NOTE_P (insn); insn = previous_insn (insn));
2728 return insn;
2729 }
2730
2731 /* Return a number larger than any instruction's uid in this function. */
2732
2733 int
2734 get_max_uid (void)
2735 {
2736 return cur_insn_uid;
2737 }
2738
2739 /* Renumber instructions so that no instruction UIDs are wasted. */
2740
2741 void
2742 renumber_insns (FILE *stream)
2743 {
2744 rtx insn;
2745
2746 /* If we're not supposed to renumber instructions, don't. */
2747 if (!flag_renumber_insns)
2748 return;
2749
2750 /* If there aren't that many instructions, then it's not really
2751 worth renumbering them. */
2752 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2753 return;
2754
2755 cur_insn_uid = 1;
2756
2757 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2758 {
2759 if (stream)
2760 fprintf (stream, "Renumbering insn %d to %d\n",
2761 INSN_UID (insn), cur_insn_uid);
2762 INSN_UID (insn) = cur_insn_uid++;
2763 }
2764 }
2765 \f
2766 /* Return the next insn. If it is a SEQUENCE, return the first insn
2767 of the sequence. */
2768
2769 rtx
2770 next_insn (rtx insn)
2771 {
2772 if (insn)
2773 {
2774 insn = NEXT_INSN (insn);
2775 if (insn && NONJUMP_INSN_P (insn)
2776 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2777 insn = XVECEXP (PATTERN (insn), 0, 0);
2778 }
2779
2780 return insn;
2781 }
2782
2783 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2784 of the sequence. */
2785
2786 rtx
2787 previous_insn (rtx insn)
2788 {
2789 if (insn)
2790 {
2791 insn = PREV_INSN (insn);
2792 if (insn && NONJUMP_INSN_P (insn)
2793 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2794 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2795 }
2796
2797 return insn;
2798 }
2799
2800 /* Return the next insn after INSN that is not a NOTE. This routine does not
2801 look inside SEQUENCEs. */
2802
2803 rtx
2804 next_nonnote_insn (rtx insn)
2805 {
2806 while (insn)
2807 {
2808 insn = NEXT_INSN (insn);
2809 if (insn == 0 || !NOTE_P (insn))
2810 break;
2811 }
2812
2813 return insn;
2814 }
2815
2816 /* Return the previous insn before INSN that is not a NOTE. This routine does
2817 not look inside SEQUENCEs. */
2818
2819 rtx
2820 prev_nonnote_insn (rtx insn)
2821 {
2822 while (insn)
2823 {
2824 insn = PREV_INSN (insn);
2825 if (insn == 0 || !NOTE_P (insn))
2826 break;
2827 }
2828
2829 return insn;
2830 }
2831
2832 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2833 or 0, if there is none. This routine does not look inside
2834 SEQUENCEs. */
2835
2836 rtx
2837 next_real_insn (rtx insn)
2838 {
2839 while (insn)
2840 {
2841 insn = NEXT_INSN (insn);
2842 if (insn == 0 || INSN_P (insn))
2843 break;
2844 }
2845
2846 return insn;
2847 }
2848
2849 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2850 or 0, if there is none. This routine does not look inside
2851 SEQUENCEs. */
2852
2853 rtx
2854 prev_real_insn (rtx insn)
2855 {
2856 while (insn)
2857 {
2858 insn = PREV_INSN (insn);
2859 if (insn == 0 || INSN_P (insn))
2860 break;
2861 }
2862
2863 return insn;
2864 }
2865
2866 /* Return the last CALL_INSN in the current list, or 0 if there is none.
2867 This routine does not look inside SEQUENCEs. */
2868
2869 rtx
2870 last_call_insn (void)
2871 {
2872 rtx insn;
2873
2874 for (insn = get_last_insn ();
2875 insn && !CALL_P (insn);
2876 insn = PREV_INSN (insn))
2877 ;
2878
2879 return insn;
2880 }
2881
2882 /* Return nonzero if INSN really does something.  Used by next_active_insn
2883    below, which finds the next such insn; until reload has completed that is
2884    the same as next_real_insn.  Neither routine looks inside SEQUENCEs.  */
2885
2886 int
2887 active_insn_p (rtx insn)
2888 {
2889 return (CALL_P (insn) || JUMP_P (insn)
2890 || (NONJUMP_INSN_P (insn)
2891 && (! reload_completed
2892 || (GET_CODE (PATTERN (insn)) != USE
2893 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2894 }
2895
2896 rtx
2897 next_active_insn (rtx insn)
2898 {
2899 while (insn)
2900 {
2901 insn = NEXT_INSN (insn);
2902 if (insn == 0 || active_insn_p (insn))
2903 break;
2904 }
2905
2906 return insn;
2907 }
2908
2909 /* Find the last insn before INSN that really does something. This routine
2910 does not look inside SEQUENCEs. Until reload has completed, this is the
2911 same as prev_real_insn. */
2912
2913 rtx
2914 prev_active_insn (rtx insn)
2915 {
2916 while (insn)
2917 {
2918 insn = PREV_INSN (insn);
2919 if (insn == 0 || active_insn_p (insn))
2920 break;
2921 }
2922
2923 return insn;
2924 }
2925
2926 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2927
2928 rtx
2929 next_label (rtx insn)
2930 {
2931 while (insn)
2932 {
2933 insn = NEXT_INSN (insn);
2934 if (insn == 0 || LABEL_P (insn))
2935 break;
2936 }
2937
2938 return insn;
2939 }
2940
2941 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2942
2943 rtx
2944 prev_label (rtx insn)
2945 {
2946 while (insn)
2947 {
2948 insn = PREV_INSN (insn);
2949 if (insn == 0 || LABEL_P (insn))
2950 break;
2951 }
2952
2953 return insn;
2954 }
2955
2956 /* Return the last label to mark the same position as LABEL. Return null
2957 if LABEL itself is null. */
2958
2959 rtx
2960 skip_consecutive_labels (rtx label)
2961 {
2962 rtx insn;
2963
2964 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
2965 if (LABEL_P (insn))
2966 label = insn;
2967
2968 return label;
2969 }
2970 \f
2971 #ifdef HAVE_cc0
2972 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2973 and REG_CC_USER notes so we can find it. */
2974
2975 void
2976 link_cc0_insns (rtx insn)
2977 {
2978 rtx user = next_nonnote_insn (insn);
2979
2980 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
2981 user = XVECEXP (PATTERN (user), 0, 0);
2982
2983 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2984 REG_NOTES (user));
2985 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2986 }
2987
2988 /* Return the next insn that uses CC0 after INSN, which is assumed to
2989 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2990 applied to the result of this function should yield INSN).
2991
2992 Normally, this is simply the next insn. However, if a REG_CC_USER note
2993 is present, it contains the insn that uses CC0.
2994
2995 Return 0 if we can't find the insn. */
2996
2997 rtx
2998 next_cc0_user (rtx insn)
2999 {
3000 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3001
3002 if (note)
3003 return XEXP (note, 0);
3004
3005 insn = next_nonnote_insn (insn);
3006 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3007 insn = XVECEXP (PATTERN (insn), 0, 0);
3008
3009 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3010 return insn;
3011
3012 return 0;
3013 }
3014
3015 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3016 note, it is the previous insn. */
3017
3018 rtx
3019 prev_cc0_setter (rtx insn)
3020 {
3021 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3022
3023 if (note)
3024 return XEXP (note, 0);
3025
3026 insn = prev_nonnote_insn (insn);
3027 gcc_assert (sets_cc0_p (PATTERN (insn)));
3028
3029 return insn;
3030 }
3031 #endif
3032
3033 /* Increment the label uses for all labels present in rtx. */
3034
3035 static void
3036 mark_label_nuses (rtx x)
3037 {
3038 enum rtx_code code;
3039 int i, j;
3040 const char *fmt;
3041
3042 code = GET_CODE (x);
3043 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3044 LABEL_NUSES (XEXP (x, 0))++;
3045
3046 fmt = GET_RTX_FORMAT (code);
3047 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3048 {
3049 if (fmt[i] == 'e')
3050 mark_label_nuses (XEXP (x, i));
3051 else if (fmt[i] == 'E')
3052 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3053 mark_label_nuses (XVECEXP (x, i, j));
3054 }
3055 }
3056
3057 \f
3058 /* Try splitting insns that can be split for better scheduling.
3059 PAT is the pattern which might split.
3060 TRIAL is the insn providing PAT.
3061 LAST is nonzero if we should return the last insn of the sequence produced.
3062
3063 If this routine succeeds in splitting, it returns the first or last
3064 replacement insn depending on the value of LAST. Otherwise, it
3065 returns TRIAL. If the insn to be returned can be split, it will be. */
3066
3067 rtx
3068 try_split (rtx pat, rtx trial, int last)
3069 {
3070 rtx before = PREV_INSN (trial);
3071 rtx after = NEXT_INSN (trial);
3072 int has_barrier = 0;
3073 rtx tem;
3074 rtx note, seq;
3075 int probability;
3076 rtx insn_last, insn;
3077 int njumps = 0;
3078
3079 if (any_condjump_p (trial)
3080 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3081 split_branch_probability = INTVAL (XEXP (note, 0));
3082 probability = split_branch_probability;
3083
3084 seq = split_insns (pat, trial);
3085
3086 split_branch_probability = -1;
3087
3088 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3089 We may need to handle this specially. */
3090 if (after && BARRIER_P (after))
3091 {
3092 has_barrier = 1;
3093 after = NEXT_INSN (after);
3094 }
3095
3096 if (!seq)
3097 return trial;
3098
3099 /* Avoid infinite loop if any insn of the result matches
3100 the original pattern. */
3101 insn_last = seq;
3102 while (1)
3103 {
3104 if (INSN_P (insn_last)
3105 && rtx_equal_p (PATTERN (insn_last), pat))
3106 return trial;
3107 if (!NEXT_INSN (insn_last))
3108 break;
3109 insn_last = NEXT_INSN (insn_last);
3110 }
3111
3112 /* Mark labels. */
3113 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3114 {
3115 if (JUMP_P (insn))
3116 {
3117 mark_jump_label (PATTERN (insn), insn, 0);
3118 njumps++;
3119 if (probability != -1
3120 && any_condjump_p (insn)
3121 && !find_reg_note (insn, REG_BR_PROB, 0))
3122 {
3123 /* We can preserve the REG_BR_PROB notes only if exactly
3124 one jump is created, otherwise the machine description
3125 is responsible for this step using
3126 		     the split_branch_probability variable.  */
3127 gcc_assert (njumps == 1);
3128 REG_NOTES (insn)
3129 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3130 GEN_INT (probability),
3131 REG_NOTES (insn));
3132 }
3133 }
3134 }
3135
3136 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3137 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3138 if (CALL_P (trial))
3139 {
3140 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3141 if (CALL_P (insn))
3142 {
3143 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3144 while (*p)
3145 p = &XEXP (*p, 1);
3146 *p = CALL_INSN_FUNCTION_USAGE (trial);
3147 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3148 }
3149 }
3150
3151 /* Copy notes, particularly those related to the CFG. */
3152 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3153 {
3154 switch (REG_NOTE_KIND (note))
3155 {
3156 case REG_EH_REGION:
3157 insn = insn_last;
3158 while (insn != NULL_RTX)
3159 {
3160 if (CALL_P (insn)
3161 || (flag_non_call_exceptions && INSN_P (insn)
3162 && may_trap_p (PATTERN (insn))))
3163 REG_NOTES (insn)
3164 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3165 XEXP (note, 0),
3166 REG_NOTES (insn));
3167 insn = PREV_INSN (insn);
3168 }
3169 break;
3170
3171 case REG_NORETURN:
3172 case REG_SETJMP:
3173 case REG_ALWAYS_RETURN:
3174 insn = insn_last;
3175 while (insn != NULL_RTX)
3176 {
3177 if (CALL_P (insn))
3178 REG_NOTES (insn)
3179 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3180 XEXP (note, 0),
3181 REG_NOTES (insn));
3182 insn = PREV_INSN (insn);
3183 }
3184 break;
3185
3186 case REG_NON_LOCAL_GOTO:
3187 insn = insn_last;
3188 while (insn != NULL_RTX)
3189 {
3190 if (JUMP_P (insn))
3191 REG_NOTES (insn)
3192 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3193 XEXP (note, 0),
3194 REG_NOTES (insn));
3195 insn = PREV_INSN (insn);
3196 }
3197 break;
3198
3199 default:
3200 break;
3201 }
3202 }
3203
3204   /* If there are LABELS inside the split insns, increment the
3205 usage count so we don't delete the label. */
3206 if (NONJUMP_INSN_P (trial))
3207 {
3208 insn = insn_last;
3209 while (insn != NULL_RTX)
3210 {
3211 if (NONJUMP_INSN_P (insn))
3212 mark_label_nuses (PATTERN (insn));
3213
3214 insn = PREV_INSN (insn);
3215 }
3216 }
3217
3218 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3219
3220 delete_insn (trial);
3221 if (has_barrier)
3222 emit_barrier_after (tem);
3223
3224 /* Recursively call try_split for each new insn created; by the
3225 time control returns here that insn will be fully split, so
3226 set LAST and continue from the insn after the one returned.
3227 We can't use next_active_insn here since AFTER may be a note.
3228      Ignore deleted insns, which can occur if not optimizing.  */
3229 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3230 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3231 tem = try_split (PATTERN (tem), tem, 1);
3232
3233 /* Return either the first or the last insn, depending on which was
3234 requested. */
3235 return last
3236 ? (after ? PREV_INSN (after) : last_insn)
3237 : NEXT_INSN (before);
3238 }
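
/* Illustrative sketch, not part of this file: a splitting pass walks the
   insn chain and hands each insn to try_split, roughly as
   split_all_insns does:

	rtx insn, next;

	for (insn = get_insns (); insn; insn = next)
	  {
	    next = NEXT_INSN (insn);
	    if (INSN_P (insn))
	      try_split (PATTERN (insn), insn, 1);
	  }

   Passing 1 for LAST requests the final insn of any replacement
   sequence; try_split itself recursively splits the insns it emits.  */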
3239 \f
3240 /* Make and return an INSN rtx, initializing all its slots.
3241 Store PATTERN in the pattern slots. */
3242
3243 rtx
3244 make_insn_raw (rtx pattern)
3245 {
3246 rtx insn;
3247
3248 insn = rtx_alloc (INSN);
3249
3250 INSN_UID (insn) = cur_insn_uid++;
3251 PATTERN (insn) = pattern;
3252 INSN_CODE (insn) = -1;
3253 LOG_LINKS (insn) = NULL;
3254 REG_NOTES (insn) = NULL;
3255 INSN_LOCATOR (insn) = 0;
3256 BLOCK_FOR_INSN (insn) = NULL;
3257
3258 #ifdef ENABLE_RTL_CHECKING
3259 if (insn
3260 && INSN_P (insn)
3261 && (returnjump_p (insn)
3262 || (GET_CODE (insn) == SET
3263 && SET_DEST (insn) == pc_rtx)))
3264 {
3265 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3266 debug_rtx (insn);
3267 }
3268 #endif
3269
3270 return insn;
3271 }
3272
3273 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3274
3275 static rtx
3276 make_jump_insn_raw (rtx pattern)
3277 {
3278 rtx insn;
3279
3280 insn = rtx_alloc (JUMP_INSN);
3281 INSN_UID (insn) = cur_insn_uid++;
3282
3283 PATTERN (insn) = pattern;
3284 INSN_CODE (insn) = -1;
3285 LOG_LINKS (insn) = NULL;
3286 REG_NOTES (insn) = NULL;
3287 JUMP_LABEL (insn) = NULL;
3288 INSN_LOCATOR (insn) = 0;
3289 BLOCK_FOR_INSN (insn) = NULL;
3290
3291 return insn;
3292 }
3293
3294 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3295
3296 static rtx
3297 make_call_insn_raw (rtx pattern)
3298 {
3299 rtx insn;
3300
3301 insn = rtx_alloc (CALL_INSN);
3302 INSN_UID (insn) = cur_insn_uid++;
3303
3304 PATTERN (insn) = pattern;
3305 INSN_CODE (insn) = -1;
3306 LOG_LINKS (insn) = NULL;
3307 REG_NOTES (insn) = NULL;
3308 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3309 INSN_LOCATOR (insn) = 0;
3310 BLOCK_FOR_INSN (insn) = NULL;
3311
3312 return insn;
3313 }
3314 \f
3315 /* Add INSN to the end of the doubly-linked list.
3316 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3317
3318 void
3319 add_insn (rtx insn)
3320 {
3321 PREV_INSN (insn) = last_insn;
3322 NEXT_INSN (insn) = 0;
3323
3324 if (NULL != last_insn)
3325 NEXT_INSN (last_insn) = insn;
3326
3327 if (NULL == first_insn)
3328 first_insn = insn;
3329
3330 last_insn = insn;
3331 }
3332
3333 /* Add INSN into the doubly-linked list after insn AFTER. This and
3334 the next should be the only functions called to insert an insn once
3335 delay slots have been filled since only they know how to update a
3336 SEQUENCE. */
3337
3338 void
3339 add_insn_after (rtx insn, rtx after)
3340 {
3341 rtx next = NEXT_INSN (after);
3342 basic_block bb;
3343
3344 gcc_assert (!optimize || !INSN_DELETED_P (after));
3345
3346 NEXT_INSN (insn) = next;
3347 PREV_INSN (insn) = after;
3348
3349 if (next)
3350 {
3351 PREV_INSN (next) = insn;
3352 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3353 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3354 }
3355 else if (last_insn == after)
3356 last_insn = insn;
3357 else
3358 {
3359 struct sequence_stack *stack = seq_stack;
3360 /* Scan all pending sequences too. */
3361 for (; stack; stack = stack->next)
3362 if (after == stack->last)
3363 {
3364 stack->last = insn;
3365 break;
3366 }
3367
3368 gcc_assert (stack);
3369 }
3370
3371 if (!BARRIER_P (after)
3372 && !BARRIER_P (insn)
3373 && (bb = BLOCK_FOR_INSN (after)))
3374 {
3375 set_block_for_insn (insn, bb);
3376 if (INSN_P (insn))
3377 bb->flags |= BB_DIRTY;
3378 /* Should not happen as first in the BB is always
3379 either NOTE or LABEL. */
3380 if (BB_END (bb) == after
3381 /* Avoid clobbering of structure when creating new BB. */
3382 && !BARRIER_P (insn)
3383 && (!NOTE_P (insn)
3384 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3385 BB_END (bb) = insn;
3386 }
3387
3388 NEXT_INSN (after) = insn;
3389 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3390 {
3391 rtx sequence = PATTERN (after);
3392 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3393 }
3394 }
3395
3396 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3397 the previous should be the only functions called to insert an insn once
3398 delay slots have been filled since only they know how to update a
3399 SEQUENCE. */
3400
3401 void
3402 add_insn_before (rtx insn, rtx before)
3403 {
3404 rtx prev = PREV_INSN (before);
3405 basic_block bb;
3406
3407 gcc_assert (!optimize || !INSN_DELETED_P (before));
3408
3409 PREV_INSN (insn) = prev;
3410 NEXT_INSN (insn) = before;
3411
3412 if (prev)
3413 {
3414 NEXT_INSN (prev) = insn;
3415 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3416 {
3417 rtx sequence = PATTERN (prev);
3418 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3419 }
3420 }
3421 else if (first_insn == before)
3422 first_insn = insn;
3423 else
3424 {
3425 struct sequence_stack *stack = seq_stack;
3426 /* Scan all pending sequences too. */
3427 for (; stack; stack = stack->next)
3428 if (before == stack->first)
3429 {
3430 stack->first = insn;
3431 break;
3432 }
3433
3434 gcc_assert (stack);
3435 }
3436
3437 if (!BARRIER_P (before)
3438 && !BARRIER_P (insn)
3439 && (bb = BLOCK_FOR_INSN (before)))
3440 {
3441 set_block_for_insn (insn, bb);
3442 if (INSN_P (insn))
3443 bb->flags |= BB_DIRTY;
3444 /* Should not happen as first in the BB is always either NOTE or
3445 	 LABEL.  */
3446 gcc_assert (BB_HEAD (bb) != insn
3447 /* Avoid clobbering of structure when creating new BB. */
3448 || BARRIER_P (insn)
3449 || (NOTE_P (insn)
3450 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3451 }
3452
3453 PREV_INSN (before) = insn;
3454 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3455 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3456 }
3457
3458 /* Remove an insn from its doubly-linked list. This function knows how
3459 to handle sequences. */
3460 void
3461 remove_insn (rtx insn)
3462 {
3463 rtx next = NEXT_INSN (insn);
3464 rtx prev = PREV_INSN (insn);
3465 basic_block bb;
3466
3467 if (prev)
3468 {
3469 NEXT_INSN (prev) = next;
3470 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3471 {
3472 rtx sequence = PATTERN (prev);
3473 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3474 }
3475 }
3476 else if (first_insn == insn)
3477 first_insn = next;
3478 else
3479 {
3480 struct sequence_stack *stack = seq_stack;
3481 /* Scan all pending sequences too. */
3482 for (; stack; stack = stack->next)
3483 if (insn == stack->first)
3484 {
3485 stack->first = next;
3486 break;
3487 }
3488
3489 gcc_assert (stack);
3490 }
3491
3492 if (next)
3493 {
3494 PREV_INSN (next) = prev;
3495 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3496 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3497 }
3498 else if (last_insn == insn)
3499 last_insn = prev;
3500 else
3501 {
3502 struct sequence_stack *stack = seq_stack;
3503 /* Scan all pending sequences too. */
3504 for (; stack; stack = stack->next)
3505 if (insn == stack->last)
3506 {
3507 stack->last = prev;
3508 break;
3509 }
3510
3511 gcc_assert (stack);
3512 }
3513 if (!BARRIER_P (insn)
3514 && (bb = BLOCK_FOR_INSN (insn)))
3515 {
3516 if (INSN_P (insn))
3517 bb->flags |= BB_DIRTY;
3518 if (BB_HEAD (bb) == insn)
3519 {
3520 /* Never ever delete the basic block note without deleting whole
3521 basic block. */
3522 gcc_assert (!NOTE_P (insn));
3523 BB_HEAD (bb) = next;
3524 }
3525 if (BB_END (bb) == insn)
3526 BB_END (bb) = prev;
3527 }
3528 }
3529
3530 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3531
3532 void
3533 add_function_usage_to (rtx call_insn, rtx call_fusage)
3534 {
3535 gcc_assert (call_insn && CALL_P (call_insn));
3536
3537 /* Put the register usage information on the CALL. If there is already
3538 some usage information, put ours at the end. */
3539 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3540 {
3541 rtx link;
3542
3543 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3544 link = XEXP (link, 1))
3545 ;
3546
3547 XEXP (link, 1) = call_fusage;
3548 }
3549 else
3550 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3551 }
3552
3553 /* Delete all insns made since FROM.
3554 FROM becomes the new last instruction. */
3555
3556 void
3557 delete_insns_since (rtx from)
3558 {
3559 if (from == 0)
3560 first_insn = 0;
3561 else
3562 NEXT_INSN (from) = 0;
3563 last_insn = from;
3564 }
3565
3566 /* This function is deprecated; please use sequences instead.
3567
3568 Move a consecutive bunch of insns to a different place in the chain.
3569 The insns to be moved are those between FROM and TO.
3570 They are moved to a new position after the insn AFTER.
3571 AFTER must not be FROM or TO or any insn in between.
3572
3573 This function does not know about SEQUENCEs and hence should not be
3574 called after delay-slot filling has been done. */
3575
3576 void
3577 reorder_insns_nobb (rtx from, rtx to, rtx after)
3578 {
3579 /* Splice this bunch out of where it is now. */
3580 if (PREV_INSN (from))
3581 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3582 if (NEXT_INSN (to))
3583 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3584 if (last_insn == to)
3585 last_insn = PREV_INSN (from);
3586 if (first_insn == from)
3587 first_insn = NEXT_INSN (to);
3588
3589 /* Make the new neighbors point to it and it to them. */
3590 if (NEXT_INSN (after))
3591 PREV_INSN (NEXT_INSN (after)) = to;
3592
3593 NEXT_INSN (to) = NEXT_INSN (after);
3594 PREV_INSN (from) = after;
3595 NEXT_INSN (after) = from;
3596 if (after == last_insn)
3597 last_insn = to;
3598 }
3599
3600 /* Same as function above, but take care to update BB boundaries. */
3601 void
3602 reorder_insns (rtx from, rtx to, rtx after)
3603 {
3604 rtx prev = PREV_INSN (from);
3605 basic_block bb, bb2;
3606
3607 reorder_insns_nobb (from, to, after);
3608
3609 if (!BARRIER_P (after)
3610 && (bb = BLOCK_FOR_INSN (after)))
3611 {
3612 rtx x;
3613 bb->flags |= BB_DIRTY;
3614
3615 if (!BARRIER_P (from)
3616 && (bb2 = BLOCK_FOR_INSN (from)))
3617 {
3618 if (BB_END (bb2) == to)
3619 BB_END (bb2) = prev;
3620 bb2->flags |= BB_DIRTY;
3621 }
3622
3623 if (BB_END (bb) == after)
3624 BB_END (bb) = to;
3625
3626 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3627 if (!BARRIER_P (x))
3628 set_block_for_insn (x, bb);
3629 }
3630 }
3631
3632 /* Return the line note insn preceding INSN. */
3633
3634 static rtx
3635 find_line_note (rtx insn)
3636 {
3637 if (no_line_numbers)
3638 return 0;
3639
3640 for (; insn; insn = PREV_INSN (insn))
3641 if (NOTE_P (insn)
3642 && NOTE_LINE_NUMBER (insn) >= 0)
3643 break;
3644
3645 return insn;
3646 }
3647
3648 /* Remove unnecessary notes from the instruction stream. */
3649
3650 void
3651 remove_unnecessary_notes (void)
3652 {
3653 rtx block_stack = NULL_RTX;
3654 rtx eh_stack = NULL_RTX;
3655 rtx insn;
3656 rtx next;
3657 rtx tmp;
3658
3659 /* We must not remove the first instruction in the function because
3660 the compiler depends on the first instruction being a note. */
3661 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3662 {
3663 /* Remember what's next. */
3664 next = NEXT_INSN (insn);
3665
3666 /* We're only interested in notes. */
3667 if (!NOTE_P (insn))
3668 continue;
3669
3670 switch (NOTE_LINE_NUMBER (insn))
3671 {
3672 case NOTE_INSN_DELETED:
3673 remove_insn (insn);
3674 break;
3675
3676 case NOTE_INSN_EH_REGION_BEG:
3677 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3678 break;
3679
3680 case NOTE_INSN_EH_REGION_END:
3681 /* Too many end notes. */
3682 gcc_assert (eh_stack);
3683 /* Mismatched nesting. */
3684 gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0))
3685 == NOTE_EH_HANDLER (insn));
3686 tmp = eh_stack;
3687 eh_stack = XEXP (eh_stack, 1);
3688 free_INSN_LIST_node (tmp);
3689 break;
3690
3691 case NOTE_INSN_BLOCK_BEG:
3692 /* By now, all notes indicating lexical blocks should have
3693 NOTE_BLOCK filled in. */
3694 gcc_assert (NOTE_BLOCK (insn));
3695 block_stack = alloc_INSN_LIST (insn, block_stack);
3696 break;
3697
3698 case NOTE_INSN_BLOCK_END:
3699 /* Too many end notes. */
3700 gcc_assert (block_stack);
3701 /* Mismatched nesting. */
3702 gcc_assert (NOTE_BLOCK (XEXP (block_stack, 0)) == NOTE_BLOCK (insn));
3703 tmp = block_stack;
3704 block_stack = XEXP (block_stack, 1);
3705 free_INSN_LIST_node (tmp);
3706
3707 /* Scan back to see if there are any non-note instructions
3708 between INSN and the beginning of this block. If not,
3709 then there is no PC range in the generated code that will
3710 actually be in this block, so there's no point in
3711 remembering the existence of the block. */
3712 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3713 {
3714 /* This block contains a real instruction. Note that we
3715 don't include labels; if the only thing in the block
3716 is a label, then there are still no PC values that
3717 lie within the block. */
3718 if (INSN_P (tmp))
3719 break;
3720
3721 /* We're only interested in NOTEs. */
3722 if (!NOTE_P (tmp))
3723 continue;
3724
3725 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3726 {
3727 /* We just verified that this BLOCK matches us with
3728 the block_stack check above. Never delete the
3729 BLOCK for the outermost scope of the function; we
3730 can refer to names from that scope even if the
3731 block notes are messed up. */
3732 if (! is_body_block (NOTE_BLOCK (insn))
3733 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3734 {
3735 remove_insn (tmp);
3736 remove_insn (insn);
3737 }
3738 break;
3739 }
3740 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3741 /* There's a nested block. We need to leave the
3742 current block in place since otherwise the debugger
3743 wouldn't be able to show symbols from our block in
3744 the nested block. */
3745 break;
3746 }
3747 }
3748 }
3749
3750 /* Too many begin notes. */
3751 gcc_assert (!block_stack && !eh_stack);
3752 }
3753
3754 \f
3755 /* Emit insn(s) of given code and pattern
3756 at a specified place within the doubly-linked list.
3757
3758 All of the emit_foo global entry points accept an object
3759 X which is either an insn list or a PATTERN of a single
3760 instruction.
3761
3762 There are thus a few canonical ways to generate code and
3763 emit it at a specific place in the instruction stream. For
3764 example, consider the instruction named SPOT and the fact that
3765 we would like to emit some instructions before SPOT. We might
3766 do it like this:
3767
3768 start_sequence ();
3769 ... emit the new instructions ...
3770 insns_head = get_insns ();
3771 end_sequence ();
3772
3773 emit_insn_before (insns_head, SPOT);
3774
3775 It used to be common to generate SEQUENCE rtl instead, but that
3776 is a relic of the past which no longer occurs. The reason is that
3777    SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
3778 generated would almost certainly die right after it was created. */
3779
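/* A complementary sketch (illustrative only; REG and SPOT are assumed to
   exist in the caller): because X may also be a bare PATTERN, emitting a
   single new instruction needs no sequence at all:

	emit_insn_before (gen_rtx_SET (VOIDmode, reg, const0_rtx), SPOT);

   The *_noloc routines below wrap such a pattern with make_insn_raw (or
   its JUMP_INSN/CALL_INSN variants) before linking it into the chain.  */
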
3780 /* Make X be output before the instruction BEFORE. */
3781
3782 rtx
3783 emit_insn_before_noloc (rtx x, rtx before)
3784 {
3785 rtx last = before;
3786 rtx insn;
3787
3788 gcc_assert (before);
3789
3790 if (x == NULL_RTX)
3791 return last;
3792
3793 switch (GET_CODE (x))
3794 {
3795 case INSN:
3796 case JUMP_INSN:
3797 case CALL_INSN:
3798 case CODE_LABEL:
3799 case BARRIER:
3800 case NOTE:
3801 insn = x;
3802 while (insn)
3803 {
3804 rtx next = NEXT_INSN (insn);
3805 add_insn_before (insn, before);
3806 last = insn;
3807 insn = next;
3808 }
3809 break;
3810
3811 #ifdef ENABLE_RTL_CHECKING
3812 case SEQUENCE:
3813 gcc_unreachable ();
3814 break;
3815 #endif
3816
3817 default:
3818 last = make_insn_raw (x);
3819 add_insn_before (last, before);
3820 break;
3821 }
3822
3823 return last;
3824 }
3825
3826 /* Make an instruction with body X and code JUMP_INSN
3827 and output it before the instruction BEFORE. */
3828
3829 rtx
3830 emit_jump_insn_before_noloc (rtx x, rtx before)
3831 {
3832 rtx insn, last = NULL_RTX;
3833
3834 gcc_assert (before);
3835
3836 switch (GET_CODE (x))
3837 {
3838 case INSN:
3839 case JUMP_INSN:
3840 case CALL_INSN:
3841 case CODE_LABEL:
3842 case BARRIER:
3843 case NOTE:
3844 insn = x;
3845 while (insn)
3846 {
3847 rtx next = NEXT_INSN (insn);
3848 add_insn_before (insn, before);
3849 last = insn;
3850 insn = next;
3851 }
3852 break;
3853
3854 #ifdef ENABLE_RTL_CHECKING
3855 case SEQUENCE:
3856 gcc_unreachable ();
3857 break;
3858 #endif
3859
3860 default:
3861 last = make_jump_insn_raw (x);
3862 add_insn_before (last, before);
3863 break;
3864 }
3865
3866 return last;
3867 }
3868
3869 /* Make an instruction with body X and code CALL_INSN
3870 and output it before the instruction BEFORE. */
3871
3872 rtx
3873 emit_call_insn_before_noloc (rtx x, rtx before)
3874 {
3875 rtx last = NULL_RTX, insn;
3876
3877 gcc_assert (before);
3878
3879 switch (GET_CODE (x))
3880 {
3881 case INSN:
3882 case JUMP_INSN:
3883 case CALL_INSN:
3884 case CODE_LABEL:
3885 case BARRIER:
3886 case NOTE:
3887 insn = x;
3888 while (insn)
3889 {
3890 rtx next = NEXT_INSN (insn);
3891 add_insn_before (insn, before);
3892 last = insn;
3893 insn = next;
3894 }
3895 break;
3896
3897 #ifdef ENABLE_RTL_CHECKING
3898 case SEQUENCE:
3899 gcc_unreachable ();
3900 break;
3901 #endif
3902
3903 default:
3904 last = make_call_insn_raw (x);
3905 add_insn_before (last, before);
3906 break;
3907 }
3908
3909 return last;
3910 }
3911
3912 /* Make an insn of code BARRIER
3913 and output it before the insn BEFORE. */
3914
3915 rtx
3916 emit_barrier_before (rtx before)
3917 {
3918 rtx insn = rtx_alloc (BARRIER);
3919
3920 INSN_UID (insn) = cur_insn_uid++;
3921
3922 add_insn_before (insn, before);
3923 return insn;
3924 }
3925
3926 /* Emit the label LABEL before the insn BEFORE. */
3927
3928 rtx
3929 emit_label_before (rtx label, rtx before)
3930 {
3931 /* This can be called twice for the same label as a result of the
3932 confusion that follows a syntax error! So make it harmless. */
3933 if (INSN_UID (label) == 0)
3934 {
3935 INSN_UID (label) = cur_insn_uid++;
3936 add_insn_before (label, before);
3937 }
3938
3939 return label;
3940 }
3941
3942 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3943
3944 rtx
3945 emit_note_before (int subtype, rtx before)
3946 {
3947 rtx note = rtx_alloc (NOTE);
3948 INSN_UID (note) = cur_insn_uid++;
3949 #ifndef USE_MAPPED_LOCATION
3950 NOTE_SOURCE_FILE (note) = 0;
3951 #endif
3952 NOTE_LINE_NUMBER (note) = subtype;
3953 BLOCK_FOR_INSN (note) = NULL;
3954
3955 add_insn_before (note, before);
3956 return note;
3957 }
3958 \f
3959 /* Helper for emit_insn_after, handles lists of instructions
3960 efficiently. */
3961
3962 static rtx emit_insn_after_1 (rtx, rtx);
3963
3964 static rtx
3965 emit_insn_after_1 (rtx first, rtx after)
3966 {
3967 rtx last;
3968 rtx after_after;
3969 basic_block bb;
3970
3971 if (!BARRIER_P (after)
3972 && (bb = BLOCK_FOR_INSN (after)))
3973 {
3974 bb->flags |= BB_DIRTY;
3975 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3976 if (!BARRIER_P (last))
3977 set_block_for_insn (last, bb);
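	  /* The loop above assigned a block to every insn except the last
	     one in the list; the check below picks up that final insn.  */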
3978 if (!BARRIER_P (last))
3979 set_block_for_insn (last, bb);
3980 if (BB_END (bb) == after)
3981 BB_END (bb) = last;
3982 }
3983 else
3984 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3985 continue;
3986
3987 after_after = NEXT_INSN (after);
3988
3989 NEXT_INSN (after) = first;
3990 PREV_INSN (first) = after;
3991 NEXT_INSN (last) = after_after;
3992 if (after_after)
3993 PREV_INSN (after_after) = last;
3994
3995 if (after == last_insn)
3996 last_insn = last;
3997 return last;
3998 }
3999
4000 /* Make X be output after the insn AFTER. */
4001
4002 rtx
4003 emit_insn_after_noloc (rtx x, rtx after)
4004 {
4005 rtx last = after;
4006
4007 gcc_assert (after);
4008
4009 if (x == NULL_RTX)
4010 return last;
4011
4012 switch (GET_CODE (x))
4013 {
4014 case INSN:
4015 case JUMP_INSN:
4016 case CALL_INSN:
4017 case CODE_LABEL:
4018 case BARRIER:
4019 case NOTE:
4020 last = emit_insn_after_1 (x, after);
4021 break;
4022
4023 #ifdef ENABLE_RTL_CHECKING
4024 case SEQUENCE:
4025 gcc_unreachable ();
4026 break;
4027 #endif
4028
4029 default:
4030 last = make_insn_raw (x);
4031 add_insn_after (last, after);
4032 break;
4033 }
4034
4035 return last;
4036 }
4037
4038 /* Similar to emit_insn_after, except that line notes are to be inserted so
4039 as to act as if this insn were at FROM. */
4040
4041 void
4042 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4043 {
4044 rtx from_line = find_line_note (from);
4045 rtx after_line = find_line_note (after);
4046 rtx insn = emit_insn_after (x, after);
4047
4048 if (from_line)
4049 emit_note_copy_after (from_line, after);
4050
4051 if (after_line)
4052 emit_note_copy_after (after_line, insn);
4053 }
4054
4055 /* Make an insn of code JUMP_INSN with body X
4056 and output it after the insn AFTER. */
4057
4058 rtx
4059 emit_jump_insn_after_noloc (rtx x, rtx after)
4060 {
4061 rtx last;
4062
4063 gcc_assert (after);
4064
4065 switch (GET_CODE (x))
4066 {
4067 case INSN:
4068 case JUMP_INSN:
4069 case CALL_INSN:
4070 case CODE_LABEL:
4071 case BARRIER:
4072 case NOTE:
4073 last = emit_insn_after_1 (x, after);
4074 break;
4075
4076 #ifdef ENABLE_RTL_CHECKING
4077 case SEQUENCE:
4078 gcc_unreachable ();
4079 break;
4080 #endif
4081
4082 default:
4083 last = make_jump_insn_raw (x);
4084 add_insn_after (last, after);
4085 break;
4086 }
4087
4088 return last;
4089 }
4090
4091 /* Make an instruction with body X and code CALL_INSN
4092 and output it after the instruction AFTER. */
4093
4094 rtx
4095 emit_call_insn_after_noloc (rtx x, rtx after)
4096 {
4097 rtx last;
4098
4099 gcc_assert (after);
4100
4101 switch (GET_CODE (x))
4102 {
4103 case INSN:
4104 case JUMP_INSN:
4105 case CALL_INSN:
4106 case CODE_LABEL:
4107 case BARRIER:
4108 case NOTE:
4109 last = emit_insn_after_1 (x, after);
4110 break;
4111
4112 #ifdef ENABLE_RTL_CHECKING
4113 case SEQUENCE:
4114 gcc_unreachable ();
4115 break;
4116 #endif
4117
4118 default:
4119 last = make_call_insn_raw (x);
4120 add_insn_after (last, after);
4121 break;
4122 }
4123
4124 return last;
4125 }
4126
4127 /* Make an insn of code BARRIER
4128 and output it after the insn AFTER. */
4129
4130 rtx
4131 emit_barrier_after (rtx after)
4132 {
4133 rtx insn = rtx_alloc (BARRIER);
4134
4135 INSN_UID (insn) = cur_insn_uid++;
4136
4137 add_insn_after (insn, after);
4138 return insn;
4139 }
4140
4141 /* Emit the label LABEL after the insn AFTER. */
4142
4143 rtx
4144 emit_label_after (rtx label, rtx after)
4145 {
4146 /* This can be called twice for the same label
4147 as a result of the confusion that follows a syntax error!
4148 So make it harmless. */
4149 if (INSN_UID (label) == 0)
4150 {
4151 INSN_UID (label) = cur_insn_uid++;
4152 add_insn_after (label, after);
4153 }
4154
4155 return label;
4156 }
4157
4158 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4159
4160 rtx
4161 emit_note_after (int subtype, rtx after)
4162 {
4163 rtx note = rtx_alloc (NOTE);
4164 INSN_UID (note) = cur_insn_uid++;
4165 #ifndef USE_MAPPED_LOCATION
4166 NOTE_SOURCE_FILE (note) = 0;
4167 #endif
4168 NOTE_LINE_NUMBER (note) = subtype;
4169 BLOCK_FOR_INSN (note) = NULL;
4170 add_insn_after (note, after);
4171 return note;
4172 }
4173
4174 /* Emit a copy of note ORIG after the insn AFTER. */
4175
4176 rtx
4177 emit_note_copy_after (rtx orig, rtx after)
4178 {
4179 rtx note;
4180
4181 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4182 {
4183 cur_insn_uid++;
4184 return 0;
4185 }
4186
4187 note = rtx_alloc (NOTE);
4188 INSN_UID (note) = cur_insn_uid++;
4189 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4190 NOTE_DATA (note) = NOTE_DATA (orig);
4191 BLOCK_FOR_INSN (note) = NULL;
4192 add_insn_after (note, after);
4193 return note;
4194 }
4195 \f
4196 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4197 rtx
4198 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4199 {
4200 rtx last = emit_insn_after_noloc (pattern, after);
4201
4202 if (pattern == NULL_RTX || !loc)
4203 return last;
4204
4205 after = NEXT_INSN (after);
4206 while (1)
4207 {
4208 if (active_insn_p (after) && !INSN_LOCATOR (after))
4209 INSN_LOCATOR (after) = loc;
4210 if (after == last)
4211 break;
4212 after = NEXT_INSN (after);
4213 }
4214 return last;
4215 }
4216
4217 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4218 rtx
4219 emit_insn_after (rtx pattern, rtx after)
4220 {
4221 if (INSN_P (after))
4222 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4223 else
4224 return emit_insn_after_noloc (pattern, after);
4225 }
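/* A minimal usage sketch for the wrappers above (TARGET_REG, SOURCE_REG
   and AFTER are hypothetical):

	emit_insn_after (gen_move_insn (target_reg, source_reg), after);

   When AFTER satisfies INSN_P, newly added active insns that lack a
   locator inherit AFTER's INSN_LOCATOR; use emit_insn_after_noloc to
   leave locators untouched, or emit_insn_after_setloc to supply one
   explicitly.  */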
4226
4227 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4228 rtx
4229 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4230 {
4231 rtx last = emit_jump_insn_after_noloc (pattern, after);
4232
4233 if (pattern == NULL_RTX || !loc)
4234 return last;
4235
4236 after = NEXT_INSN (after);
4237 while (1)
4238 {
4239 if (active_insn_p (after) && !INSN_LOCATOR (after))
4240 INSN_LOCATOR (after) = loc;
4241 if (after == last)
4242 break;
4243 after = NEXT_INSN (after);
4244 }
4245 return last;
4246 }
4247
4248 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4249 rtx
4250 emit_jump_insn_after (rtx pattern, rtx after)
4251 {
4252 if (INSN_P (after))
4253 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4254 else
4255 return emit_jump_insn_after_noloc (pattern, after);
4256 }
4257
4258 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4259 rtx
4260 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4261 {
4262 rtx last = emit_call_insn_after_noloc (pattern, after);
4263
4264 if (pattern == NULL_RTX || !loc)
4265 return last;
4266
4267 after = NEXT_INSN (after);
4268 while (1)
4269 {
4270 if (active_insn_p (after) && !INSN_LOCATOR (after))
4271 INSN_LOCATOR (after) = loc;
4272 if (after == last)
4273 break;
4274 after = NEXT_INSN (after);
4275 }
4276 return last;
4277 }
4278
4279 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4280 rtx
4281 emit_call_insn_after (rtx pattern, rtx after)
4282 {
4283 if (INSN_P (after))
4284 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4285 else
4286 return emit_call_insn_after_noloc (pattern, after);
4287 }
4288
4289 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4290 rtx
4291 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4292 {
4293 rtx first = PREV_INSN (before);
4294 rtx last = emit_insn_before_noloc (pattern, before);
4295
4296 if (pattern == NULL_RTX || !loc)
4297 return last;
4298
4299 first = NEXT_INSN (first);
4300 while (1)
4301 {
4302 if (active_insn_p (first) && !INSN_LOCATOR (first))
4303 INSN_LOCATOR (first) = loc;
4304 if (first == last)
4305 break;
4306 first = NEXT_INSN (first);
4307 }
4308 return last;
4309 }
4310
4311 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4312 rtx
4313 emit_insn_before (rtx pattern, rtx before)
4314 {
4315 if (INSN_P (before))
4316 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4317 else
4318 return emit_insn_before_noloc (pattern, before);
4319 }
4320
4321 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4322 rtx
4323 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4324 {
4325 rtx first = PREV_INSN (before);
4326 rtx last = emit_jump_insn_before_noloc (pattern, before);
4327
4328 if (pattern == NULL_RTX)
4329 return last;
4330
4331 first = NEXT_INSN (first);
4332 while (1)
4333 {
4334 if (active_insn_p (first) && !INSN_LOCATOR (first))
4335 INSN_LOCATOR (first) = loc;
4336 if (first == last)
4337 break;
4338 first = NEXT_INSN (first);
4339 }
4340 return last;
4341 }
4342
4343 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4344 rtx
4345 emit_jump_insn_before (rtx pattern, rtx before)
4346 {
4347 if (INSN_P (before))
4348 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4349 else
4350 return emit_jump_insn_before_noloc (pattern, before);
4351 }
4352
4353 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4354 rtx
4355 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4356 {
4357 rtx first = PREV_INSN (before);
4358 rtx last = emit_call_insn_before_noloc (pattern, before);
4359
4360 if (pattern == NULL_RTX)
4361 return last;
4362
4363 first = NEXT_INSN (first);
4364 while (1)
4365 {
4366 if (active_insn_p (first) && !INSN_LOCATOR (first))
4367 INSN_LOCATOR (first) = loc;
4368 if (first == last)
4369 break;
4370 first = NEXT_INSN (first);
4371 }
4372 return last;
4373 }
4374
4375 /* Like emit_call_insn_before_noloc,
4376    but set INSN_LOCATOR according to BEFORE. */
4377 rtx
4378 emit_call_insn_before (rtx pattern, rtx before)
4379 {
4380 if (INSN_P (before))
4381 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4382 else
4383 return emit_call_insn_before_noloc (pattern, before);
4384 }
4385 \f
4386 /* Take X and emit it at the end of the doubly-linked
4387 INSN list.
4388
4389 Returns the last insn emitted. */
4390
4391 rtx
4392 emit_insn (rtx x)
4393 {
4394 rtx last = last_insn;
4395 rtx insn;
4396
4397 if (x == NULL_RTX)
4398 return last;
4399
4400 switch (GET_CODE (x))
4401 {
4402 case INSN:
4403 case JUMP_INSN:
4404 case CALL_INSN:
4405 case CODE_LABEL:
4406 case BARRIER:
4407 case NOTE:
4408 insn = x;
4409 while (insn)
4410 {
4411 rtx next = NEXT_INSN (insn);
4412 add_insn (insn);
4413 last = insn;
4414 insn = next;
4415 }
4416 break;
4417
4418 #ifdef ENABLE_RTL_CHECKING
4419 case SEQUENCE:
4420 gcc_unreachable ();
4421 break;
4422 #endif
4423
4424 default:
4425 last = make_insn_raw (x);
4426 add_insn (last);
4427 break;
4428 }
4429
4430 return last;
4431 }
4432
4433 /* Make an insn of code JUMP_INSN with pattern X
4434 and add it to the end of the doubly-linked list. */
4435
4436 rtx
4437 emit_jump_insn (rtx x)
4438 {
4439 rtx last = NULL_RTX, insn;
4440
4441 switch (GET_CODE (x))
4442 {
4443 case INSN:
4444 case JUMP_INSN:
4445 case CALL_INSN:
4446 case CODE_LABEL:
4447 case BARRIER:
4448 case NOTE:
4449 insn = x;
4450 while (insn)
4451 {
4452 rtx next = NEXT_INSN (insn);
4453 add_insn (insn);
4454 last = insn;
4455 insn = next;
4456 }
4457 break;
4458
4459 #ifdef ENABLE_RTL_CHECKING
4460 case SEQUENCE:
4461 gcc_unreachable ();
4462 break;
4463 #endif
4464
4465 default:
4466 last = make_jump_insn_raw (x);
4467 add_insn (last);
4468 break;
4469 }
4470
4471 return last;
4472 }
4473
4474 /* Make an insn of code CALL_INSN with pattern X
4475 and add it to the end of the doubly-linked list. */
4476
4477 rtx
4478 emit_call_insn (rtx x)
4479 {
4480 rtx insn;
4481
4482 switch (GET_CODE (x))
4483 {
4484 case INSN:
4485 case JUMP_INSN:
4486 case CALL_INSN:
4487 case CODE_LABEL:
4488 case BARRIER:
4489 case NOTE:
4490 insn = emit_insn (x);
4491 break;
4492
4493 #ifdef ENABLE_RTL_CHECKING
4494 case SEQUENCE:
4495 gcc_unreachable ();
4496 break;
4497 #endif
4498
4499 default:
4500 insn = make_call_insn_raw (x);
4501 add_insn (insn);
4502 break;
4503 }
4504
4505 return insn;
4506 }
4507
4508 /* Add the label LABEL to the end of the doubly-linked list. */
4509
4510 rtx
4511 emit_label (rtx label)
4512 {
4513 /* This can be called twice for the same label
4514 as a result of the confusion that follows a syntax error!
4515 So make it harmless. */
4516 if (INSN_UID (label) == 0)
4517 {
4518 INSN_UID (label) = cur_insn_uid++;
4519 add_insn (label);
4520 }
4521 return label;
4522 }
4523
4524 /* Make an insn of code BARRIER
4525 and add it to the end of the doubly-linked list. */
4526
4527 rtx
4528 emit_barrier (void)
4529 {
4530 rtx barrier = rtx_alloc (BARRIER);
4531 INSN_UID (barrier) = cur_insn_uid++;
4532 add_insn (barrier);
4533 return barrier;
4534 }
4535
4536 /* Make a line-number NOTE insn for LOCATION and add it to the end
4537    of the doubly-linked list, but only if line numbers are desired for
4538    debugging info and it doesn't match the previous one. */
4539
4540 rtx
4541 emit_line_note (location_t location)
4542 {
4543 rtx note;
4544
4545 #ifdef USE_MAPPED_LOCATION
4546 if (location == last_location)
4547 return NULL_RTX;
4548 #else
4549 if (location.file && last_location.file
4550 && !strcmp (location.file, last_location.file)
4551 && location.line == last_location.line)
4552 return NULL_RTX;
4553 #endif
4554 last_location = location;
4555
4556 if (no_line_numbers)
4557 {
4558 cur_insn_uid++;
4559 return NULL_RTX;
4560 }
4561
4562 #ifdef USE_MAPPED_LOCATION
4563 note = emit_note ((int) location);
4564 #else
4565 note = emit_note (location.line);
4566 NOTE_SOURCE_FILE (note) = location.file;
4567 #endif
4568
4569 return note;
4570 }
4571
4572 /* Emit a copy of note ORIG. */
4573
4574 rtx
4575 emit_note_copy (rtx orig)
4576 {
4577 rtx note;
4578
4579 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4580 {
4581 cur_insn_uid++;
4582 return NULL_RTX;
4583 }
4584
4585 note = rtx_alloc (NOTE);
4586
4587 INSN_UID (note) = cur_insn_uid++;
4588 NOTE_DATA (note) = NOTE_DATA (orig);
4589 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4590 BLOCK_FOR_INSN (note) = NULL;
4591 add_insn (note);
4592
4593 return note;
4594 }
4595
4596 /* Make a NOTE insn of subtype NOTE_NO
4597    and add it to the end of the doubly-linked list. */
4598
4599 rtx
4600 emit_note (int note_no)
4601 {
4602 rtx note;
4603
4604 note = rtx_alloc (NOTE);
4605 INSN_UID (note) = cur_insn_uid++;
4606 NOTE_LINE_NUMBER (note) = note_no;
4607 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4608 BLOCK_FOR_INSN (note) = NULL;
4609 add_insn (note);
4610 return note;
4611 }
4612
4613 /* Cause next statement to emit a line note even if the line number
4614 has not changed. */
4615
4616 void
4617 force_next_line_note (void)
4618 {
4619 #ifdef USE_MAPPED_LOCATION
4620 last_location = -1;
4621 #else
4622 last_location.line = -1;
4623 #endif
4624 }
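/* A minimal sketch of how emit_line_note and force_next_line_note
   interact (LOC is a hypothetical location_t value):

	emit_line_note (loc);	   emits a line-number NOTE for LOC
	emit_line_note (loc);	   returns NULL_RTX; LOC matches last_location
	force_next_line_note ();
	emit_line_note (loc);	   emitted again

   force_next_line_note simply invalidates last_location so the
   duplicate check in emit_line_note cannot match.  */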
4625
4626 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4627 note of this type already exists, remove it first. */
4628
4629 rtx
4630 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4631 {
4632 rtx note = find_reg_note (insn, kind, NULL_RTX);
4633
4634 switch (kind)
4635 {
4636 case REG_EQUAL:
4637 case REG_EQUIV:
4638       /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4639 	 has multiple sets (some callers assume single_set
4640 	 means the insn only has one set, when in fact it
4641 	 means the insn only has one *useful* set).  */
4642 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4643 {
4644 gcc_assert (!note);
4645 return NULL_RTX;
4646 }
4647
4648 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4649 It serves no useful purpose and breaks eliminate_regs. */
4650 if (GET_CODE (datum) == ASM_OPERANDS)
4651 return NULL_RTX;
4652 break;
4653
4654 default:
4655 break;
4656 }
4657
4658 if (note)
4659 {
4660 XEXP (note, 0) = datum;
4661 return note;
4662 }
4663
4664 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4665 return REG_NOTES (insn);
4666 }
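/* For example, a pass that has just proved that the value set by INSN
   always equals some expression VAL (both names hypothetical) could
   record that fact with

	set_unique_reg_note (insn, REG_EQUAL, val);

   replacing any existing REG_EQUAL note instead of accumulating
   duplicates.  */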
4667 \f
4668 /* Return an indication of which type of insn should have X as a body.
4669 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4670
4671 static enum rtx_code
4672 classify_insn (rtx x)
4673 {
4674 if (LABEL_P (x))
4675 return CODE_LABEL;
4676 if (GET_CODE (x) == CALL)
4677 return CALL_INSN;
4678 if (GET_CODE (x) == RETURN)
4679 return JUMP_INSN;
4680 if (GET_CODE (x) == SET)
4681 {
4682 if (SET_DEST (x) == pc_rtx)
4683 return JUMP_INSN;
4684 else if (GET_CODE (SET_SRC (x)) == CALL)
4685 return CALL_INSN;
4686 else
4687 return INSN;
4688 }
4689 if (GET_CODE (x) == PARALLEL)
4690 {
4691 int j;
4692 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4693 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4694 return CALL_INSN;
4695 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4696 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4697 return JUMP_INSN;
4698 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4699 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4700 return CALL_INSN;
4701 }
4702 return INSN;
4703 }
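/* For instance, a plain (set (reg ...) (plus ...)) classifies as INSN,
   a (set (pc) (label_ref ...)) as JUMP_INSN, and a PARALLEL containing
   a CALL, or a SET whose source is a CALL, as CALL_INSN.  */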
4704
4705 /* Emit the rtl pattern X as an appropriate kind of insn.
4706 If X is a label, it is simply added into the insn chain. */
4707
4708 rtx
4709 emit (rtx x)
4710 {
4711 enum rtx_code code = classify_insn (x);
4712
4713 switch (code)
4714 {
4715 case CODE_LABEL:
4716 return emit_label (x);
4717 case INSN:
4718 return emit_insn (x);
4719 case JUMP_INSN:
4720 {
4721 rtx insn = emit_jump_insn (x);
4722 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4723 return emit_barrier ();
4724 return insn;
4725 }
4726 case CALL_INSN:
4727 return emit_call_insn (x);
4728 default:
4729 gcc_unreachable ();
4730 }
4731 }
4732 \f
4733 /* Space for free sequence stack entries. */
4734 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4735
4736 /* Begin emitting insns to a sequence. If this sequence will contain
4737 something that might cause the compiler to pop arguments to function
4738 calls (because those pops have previously been deferred; see
4739 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4740 before calling this function. That will ensure that the deferred
4741 pops are not accidentally emitted in the middle of this sequence. */
4742
4743 void
4744 start_sequence (void)
4745 {
4746 struct sequence_stack *tem;
4747
4748 if (free_sequence_stack != NULL)
4749 {
4750 tem = free_sequence_stack;
4751 free_sequence_stack = tem->next;
4752 }
4753 else
4754 tem = ggc_alloc (sizeof (struct sequence_stack));
4755
4756 tem->next = seq_stack;
4757 tem->first = first_insn;
4758 tem->last = last_insn;
4759
4760 seq_stack = tem;
4761
4762 first_insn = 0;
4763 last_insn = 0;
4764 }
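/* A minimal sketch of the recommended pattern when the new code might
   trigger deferred argument pops (SPOT is a hypothetical insn already
   in the chain):

	do_pending_stack_adjust ();
	start_sequence ();
	... emit the new insns ...
	seq = get_insns ();
	end_sequence ();
	emit_insn_before (seq, SPOT);

   Calling do_pending_stack_adjust first keeps those deferred pops from
   landing in the middle of the new sequence.  */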
4765
4766 /* Set up the insn chain starting with FIRST as the current sequence,
4767 saving the previously current one. See the documentation for
4768 start_sequence for more information about how to use this function. */
4769
4770 void
4771 push_to_sequence (rtx first)
4772 {
4773 rtx last;
4774
4775 start_sequence ();
4776
4777 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4778
4779 first_insn = first;
4780 last_insn = last;
4781 }
4782
4783 /* Set up the outer-level insn chain
4784 as the current sequence, saving the previously current one. */
4785
4786 void
4787 push_topmost_sequence (void)
4788 {
4789 struct sequence_stack *stack, *top = NULL;
4790
4791 start_sequence ();
4792
4793 for (stack = seq_stack; stack; stack = stack->next)
4794 top = stack;
4795
4796 first_insn = top->first;
4797 last_insn = top->last;
4798 }
4799
4800 /* After emitting to the outer-level insn chain, record the new bounds
4801    of that chain, and restore the previously saved sequence state. */
4802
4803 void
4804 pop_topmost_sequence (void)
4805 {
4806 struct sequence_stack *stack, *top = NULL;
4807
4808 for (stack = seq_stack; stack; stack = stack->next)
4809 top = stack;
4810
4811 top->first = first_insn;
4812 top->last = last_insn;
4813
4814 end_sequence ();
4815 }
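/* A typical (hypothetical) use of the pair above: splice an insn into
   the function's outermost chain while a nested sequence is active.

	push_topmost_sequence ();
	emit_insn (gen_rtx_USE (VOIDmode, some_reg));
	pop_topmost_sequence ();

   Emission between the two calls goes to the outer chain; afterwards
   the previously active sequence is current again.  */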
4816
4817 /* After emitting to a sequence, restore previous saved state.
4818
4819 To get the contents of the sequence just made, you must call
4820 `get_insns' *before* calling here.
4821
4822 If the compiler might have deferred popping arguments while
4823 generating this sequence, and this sequence will not be immediately
4824 inserted into the instruction stream, use do_pending_stack_adjust
4825 before calling get_insns. That will ensure that the deferred
4826 pops are inserted into this sequence, and not into some random
4827 location in the instruction stream. See INHIBIT_DEFER_POP for more
4828 information about deferred popping of arguments. */
4829
4830 void
4831 end_sequence (void)
4832 {
4833 struct sequence_stack *tem = seq_stack;
4834
4835 first_insn = tem->first;
4836 last_insn = tem->last;
4837 seq_stack = tem->next;
4838
4839 memset (tem, 0, sizeof (*tem));
4840 tem->next = free_sequence_stack;
4841 free_sequence_stack = tem;
4842 }
4843
4844 /* Return 1 if currently emitting into a sequence. */
4845
4846 int
4847 in_sequence_p (void)
4848 {
4849 return seq_stack != 0;
4850 }
4851 \f
4852 /* Put the various virtual registers into REGNO_REG_RTX. */
4853
4854 void
4855 init_virtual_regs (struct emit_status *es)
4856 {
4857 rtx *ptr = es->x_regno_reg_rtx;
4858 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4859 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4860 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4861 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4862 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4863 }
4864
4865 \f
4866 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4867 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4868 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4869 static int copy_insn_n_scratches;
4870
4871 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4872 copied an ASM_OPERANDS.
4873 In that case, it is the original input-operand vector. */
4874 static rtvec orig_asm_operands_vector;
4875
4876 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4877 copied an ASM_OPERANDS.
4878 In that case, it is the copied input-operand vector. */
4879 static rtvec copy_asm_operands_vector;
4880
4881 /* Likewise for the constraints vector. */
4882 static rtvec orig_asm_constraints_vector;
4883 static rtvec copy_asm_constraints_vector;
4884
4885 /* Recursively create a new copy of an rtx for copy_insn.
4886 This function differs from copy_rtx in that it handles SCRATCHes and
4887 ASM_OPERANDs properly.
4888    Normally, this function is not used directly; use copy_insn as a front end.
4889 However, you could first copy an insn pattern with copy_insn and then use
4890 this function afterwards to properly copy any REG_NOTEs containing
4891 SCRATCHes. */
4892
4893 rtx
4894 copy_insn_1 (rtx orig)
4895 {
4896 rtx copy;
4897 int i, j;
4898 RTX_CODE code;
4899 const char *format_ptr;
4900
4901 code = GET_CODE (orig);
4902
4903 switch (code)
4904 {
4905 case REG:
4906 case CONST_INT:
4907 case CONST_DOUBLE:
4908 case CONST_VECTOR:
4909 case SYMBOL_REF:
4910 case CODE_LABEL:
4911 case PC:
4912 case CC0:
4913 return orig;
4914 case CLOBBER:
4915 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4916 return orig;
4917 break;
4918
4919 case SCRATCH:
4920 for (i = 0; i < copy_insn_n_scratches; i++)
4921 if (copy_insn_scratch_in[i] == orig)
4922 return copy_insn_scratch_out[i];
4923 break;
4924
4925 case CONST:
4926 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4927 a LABEL_REF, it isn't sharable. */
4928 if (GET_CODE (XEXP (orig, 0)) == PLUS
4929 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4930 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4931 return orig;
4932 break;
4933
4934 /* A MEM with a constant address is not sharable. The problem is that
4935 the constant address may need to be reloaded. If the mem is shared,
4936 then reloading one copy of this mem will cause all copies to appear
4937 to have been reloaded. */
4938
4939 default:
4940 break;
4941 }
4942
4943 copy = rtx_alloc (code);
4944
4945 /* Copy the various flags, and other information. We assume that
4946 all fields need copying, and then clear the fields that should
4947 not be copied. That is the sensible default behavior, and forces
4948 us to explicitly document why we are *not* copying a flag. */
4949 memcpy (copy, orig, RTX_HDR_SIZE);
4950
4951 /* We do not copy the USED flag, which is used as a mark bit during
4952 walks over the RTL. */
4953 RTX_FLAG (copy, used) = 0;
4954
4955 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4956 if (INSN_P (orig))
4957 {
4958 RTX_FLAG (copy, jump) = 0;
4959 RTX_FLAG (copy, call) = 0;
4960 RTX_FLAG (copy, frame_related) = 0;
4961 }
4962
4963 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4964
4965 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4966 {
4967 copy->u.fld[i] = orig->u.fld[i];
4968 switch (*format_ptr++)
4969 {
4970 case 'e':
4971 if (XEXP (orig, i) != NULL)
4972 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4973 break;
4974
4975 case 'E':
4976 case 'V':
4977 if (XVEC (orig, i) == orig_asm_constraints_vector)
4978 XVEC (copy, i) = copy_asm_constraints_vector;
4979 else if (XVEC (orig, i) == orig_asm_operands_vector)
4980 XVEC (copy, i) = copy_asm_operands_vector;
4981 else if (XVEC (orig, i) != NULL)
4982 {
4983 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4984 for (j = 0; j < XVECLEN (copy, i); j++)
4985 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4986 }
4987 break;
4988
4989 case 't':
4990 case 'w':
4991 case 'i':
4992 case 's':
4993 case 'S':
4994 case 'u':
4995 case '0':
4996 /* These are left unchanged. */
4997 break;
4998
4999 default:
5000 gcc_unreachable ();
5001 }
5002 }
5003
5004 if (code == SCRATCH)
5005 {
5006 i = copy_insn_n_scratches++;
5007 gcc_assert (i < MAX_RECOG_OPERANDS);
5008 copy_insn_scratch_in[i] = orig;
5009 copy_insn_scratch_out[i] = copy;
5010 }
5011 else if (code == ASM_OPERANDS)
5012 {
5013 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5014 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5015 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5016 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5017 }
5018
5019 return copy;
5020 }
5021
5022 /* Create a new copy of an rtx.
5023 This function differs from copy_rtx in that it handles SCRATCHes and
5024 ASM_OPERANDs properly.
5025 INSN doesn't really have to be a full INSN; it could be just the
5026 pattern. */
5027 rtx
5028 copy_insn (rtx insn)
5029 {
5030 copy_insn_n_scratches = 0;
5031 orig_asm_operands_vector = 0;
5032 orig_asm_constraints_vector = 0;
5033 copy_asm_operands_vector = 0;
5034 copy_asm_constraints_vector = 0;
5035 return copy_insn_1 (insn);
5036 }
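/* Sketch of the two-step use described above copy_insn_1 (INSN and the
   REG_NOTE are hypothetical):

	pat = copy_insn (PATTERN (insn));
	...
	XEXP (note_copy, 0) = copy_insn_1 (XEXP (note, 0));

   The second call reuses the SCRATCH and ASM_OPERANDS mappings recorded
   by the first one, so operands shared between the pattern and the note
   stay shared in the copy.  */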
5037
5038 /* Initialize data structures and variables in this file
5039 before generating rtl for each function. */
5040
5041 void
5042 init_emit (void)
5043 {
5044 struct function *f = cfun;
5045
5046 f->emit = ggc_alloc (sizeof (struct emit_status));
5047 first_insn = NULL;
5048 last_insn = NULL;
5049 cur_insn_uid = 1;
5050 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5051 last_location = UNKNOWN_LOCATION;
5052 first_label_num = label_num;
5053 seq_stack = NULL;
5054
5055 /* Init the tables that describe all the pseudo regs. */
5056
5057 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5058
5059 f->emit->regno_pointer_align
5060 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5061 * sizeof (unsigned char));
5062
5063 regno_reg_rtx
5064 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5065
5066 /* Put copies of all the hard registers into regno_reg_rtx. */
5067 memcpy (regno_reg_rtx,
5068 static_regno_reg_rtx,
5069 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5070
5071 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5072 init_virtual_regs (f->emit);
5073
5074 /* Indicate that the virtual registers and stack locations are
5075 all pointers. */
5076 REG_POINTER (stack_pointer_rtx) = 1;
5077 REG_POINTER (frame_pointer_rtx) = 1;
5078 REG_POINTER (hard_frame_pointer_rtx) = 1;
5079 REG_POINTER (arg_pointer_rtx) = 1;
5080
5081 REG_POINTER (virtual_incoming_args_rtx) = 1;
5082 REG_POINTER (virtual_stack_vars_rtx) = 1;
5083 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5084 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5085 REG_POINTER (virtual_cfa_rtx) = 1;
5086
5087 #ifdef STACK_BOUNDARY
5088 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5089 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5090 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5091 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5092
5093 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5094 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5095 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5096 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5097 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5098 #endif
5099
5100 #ifdef INIT_EXPANDERS
5101 INIT_EXPANDERS;
5102 #endif
5103 }
5104
5105 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5106
5107 static rtx
5108 gen_const_vector (enum machine_mode mode, int constant)
5109 {
5110 rtx tem;
5111 rtvec v;
5112 int units, i;
5113 enum machine_mode inner;
5114
5115 units = GET_MODE_NUNITS (mode);
5116 inner = GET_MODE_INNER (mode);
5117
5118 v = rtvec_alloc (units);
5119
5120 /* We need to call this function after we set the scalar const_tiny_rtx
5121 entries. */
5122 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5123
5124 for (i = 0; i < units; ++i)
5125 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5126
5127 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5128 return tem;
5129 }
5130
5131 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5132    all elements are zero, and the one vector when all elements are one. */
5133 rtx
5134 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5135 {
5136 enum machine_mode inner = GET_MODE_INNER (mode);
5137 int nunits = GET_MODE_NUNITS (mode);
5138 rtx x;
5139 int i;
5140
5141 /* Check to see if all of the elements have the same value. */
5142 x = RTVEC_ELT (v, nunits - 1);
5143 for (i = nunits - 2; i >= 0; i--)
5144 if (RTVEC_ELT (v, i) != x)
5145 break;
5146
5147 /* If the values are all the same, check to see if we can use one of the
5148 standard constant vectors. */
5149 if (i == -1)
5150 {
5151 if (x == CONST0_RTX (inner))
5152 return CONST0_RTX (mode);
5153 else if (x == CONST1_RTX (inner))
5154 return CONST1_RTX (mode);
5155 }
5156
5157 return gen_rtx_raw_CONST_VECTOR (mode, v);
5158 }
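/* For example, a vector whose elements are all const0_rtx folds to the
   shared zero vector (the mode here is a hypothetical example):

	rtvec v = rtvec_alloc (4);
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);	   == CONST0_RTX (V4SImode)

   so pointer comparisons against the shared constant keep working.  */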
5159
5160 /* Create some permanent unique rtl objects shared between all functions.
5161 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5162
5163 void
5164 init_emit_once (int line_numbers)
5165 {
5166 int i;
5167 enum machine_mode mode;
5168 enum machine_mode double_mode;
5169
5170 /* We need reg_raw_mode, so initialize the modes now. */
5171 init_reg_modes_once ();
5172
5173 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5174 tables. */
5175 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5176 const_int_htab_eq, NULL);
5177
5178 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5179 const_double_htab_eq, NULL);
5180
5181 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5182 mem_attrs_htab_eq, NULL);
5183 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5184 reg_attrs_htab_eq, NULL);
5185
5186 no_line_numbers = ! line_numbers;
5187
5188 /* Compute the word and byte modes. */
5189
5190 byte_mode = VOIDmode;
5191 word_mode = VOIDmode;
5192 double_mode = VOIDmode;
5193
5194 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5195 mode = GET_MODE_WIDER_MODE (mode))
5196 {
5197 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5198 && byte_mode == VOIDmode)
5199 byte_mode = mode;
5200
5201 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5202 && word_mode == VOIDmode)
5203 word_mode = mode;
5204 }
5205
5206 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5207 mode = GET_MODE_WIDER_MODE (mode))
5208 {
5209 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5210 && double_mode == VOIDmode)
5211 double_mode = mode;
5212 }
5213
5214 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5215
5216 /* Assign register numbers to the globally defined register rtx.
5217 This must be done at runtime because the register number field
5218 is in a union and some compilers can't initialize unions. */
5219
5220 pc_rtx = gen_rtx_PC (VOIDmode);
5221 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5222 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5223 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5224 if (hard_frame_pointer_rtx == 0)
5225 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5226 HARD_FRAME_POINTER_REGNUM);
5227 if (arg_pointer_rtx == 0)
5228 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5229 virtual_incoming_args_rtx =
5230 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5231 virtual_stack_vars_rtx =
5232 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5233 virtual_stack_dynamic_rtx =
5234 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5235 virtual_outgoing_args_rtx =
5236 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5237 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5238
5239 /* Initialize RTL for commonly used hard registers. These are
5240 copied into regno_reg_rtx as we begin to compile each function. */
5241 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5242 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5243
5244 #ifdef INIT_EXPANDERS
5245 /* This is to initialize {init|mark|free}_machine_status before the first
5246 call to push_function_context_to. This is needed by the Chill front
5247 end which calls push_function_context_to before the first call to
5248 init_function_start. */
5249 INIT_EXPANDERS;
5250 #endif
5251
5252 /* Create the unique rtx's for certain rtx codes and operand values. */
5253
5254 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5255 tries to use these variables. */
5256 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5257 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5258 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5259
5260 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5261 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5262 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5263 else
5264 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5265
5266 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5267 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5268 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5269 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5270 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5271 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5272 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5273
5274 dconsthalf = dconst1;
5275 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5276
5277 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5278
5279 /* Initialize mathematical constants for constant folding builtins.
5280      These constants need to be given to at least 160 bits of precision. */
5281 real_from_string (&dconstpi,
5282 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5283 real_from_string (&dconste,
5284 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5285
5286 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5287 {
5288 REAL_VALUE_TYPE *r =
5289 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5290
5291 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5292 mode = GET_MODE_WIDER_MODE (mode))
5293 const_tiny_rtx[i][(int) mode] =
5294 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5295
5296 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5297
5298 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5299 mode = GET_MODE_WIDER_MODE (mode))
5300 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5301
5302 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5303 mode != VOIDmode;
5304 mode = GET_MODE_WIDER_MODE (mode))
5305 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5306 }
5307
5308 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5309 mode != VOIDmode;
5310 mode = GET_MODE_WIDER_MODE (mode))
5311 {
5312 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5313 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5314 }
5315
5316 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5317 mode != VOIDmode;
5318 mode = GET_MODE_WIDER_MODE (mode))
5319 {
5320 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5321 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5322 }
5323
5324 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5325 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5326 const_tiny_rtx[0][i] = const0_rtx;
5327
5328 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5329 if (STORE_FLAG_VALUE == 1)
5330 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5331
5332 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5333 return_address_pointer_rtx
5334 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5335 #endif
5336
5337 #ifdef STATIC_CHAIN_REGNUM
5338 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5339
5340 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5341 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5342 static_chain_incoming_rtx
5343 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5344 else
5345 #endif
5346 static_chain_incoming_rtx = static_chain_rtx;
5347 #endif
5348
5349 #ifdef STATIC_CHAIN
5350 static_chain_rtx = STATIC_CHAIN;
5351
5352 #ifdef STATIC_CHAIN_INCOMING
5353 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5354 #else
5355 static_chain_incoming_rtx = static_chain_rtx;
5356 #endif
5357 #endif
5358
5359 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5360 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5361 }
5362 \f
5363 /* Produce an exact duplicate of insn INSN after AFTER, taking care to
5364    update any libcall regions if present. */
5365
5366 rtx
5367 emit_copy_of_insn_after (rtx insn, rtx after)
5368 {
5369 rtx new;
5370 rtx note1, note2, link;
5371
5372 switch (GET_CODE (insn))
5373 {
5374 case INSN:
5375 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5376 break;
5377
5378 case JUMP_INSN:
5379 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5380 break;
5381
5382 case CALL_INSN:
5383 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5384 if (CALL_INSN_FUNCTION_USAGE (insn))
5385 CALL_INSN_FUNCTION_USAGE (new)
5386 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5387 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5388 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5389 break;
5390
5391 default:
5392 gcc_unreachable ();
5393 }
5394
5395 /* Update LABEL_NUSES. */
5396 mark_jump_label (PATTERN (new), new, 0);
5397
5398 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5399
5400 /* If the old insn is frame related, then so is the new one. This is
5401 primarily needed for IA-64 unwind info which marks epilogue insns,
5402 which may be duplicated by the basic block reordering code. */
5403 RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
5404
5405 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5406 make them. */
5407 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5408 if (REG_NOTE_KIND (link) != REG_LABEL)
5409 {
5410 if (GET_CODE (link) == EXPR_LIST)
5411 REG_NOTES (new)
5412 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5413 XEXP (link, 0),
5414 REG_NOTES (new)));
5415 else
5416 REG_NOTES (new)
5417 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5418 XEXP (link, 0),
5419 REG_NOTES (new)));
5420 }
5421
5422 /* Fix the libcall sequences. */
5423 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5424 {
5425 rtx p = new;
5426 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5427 p = PREV_INSN (p);
5428 XEXP (note1, 0) = p;
5429 XEXP (note2, 0) = new;
5430 }
5431 INSN_CODE (new) = INSN_CODE (insn);
5432 return new;
5433 }
5434
5435 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5436 rtx
5437 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5438 {
5439 if (hard_reg_clobbers[mode][regno])
5440 return hard_reg_clobbers[mode][regno];
5441 else
5442 return (hard_reg_clobbers[mode][regno] =
5443 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5444 }
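/* Because of the cache above, repeated requests for the same (mode,
   regno) pair return the identical rtx (the register number is a
   hypothetical example):

	rtx a = gen_hard_reg_clobber (word_mode, 3);
	rtx b = gen_hard_reg_clobber (word_mode, 3);

   Both calls yield the same object (a == b), so clobbers of hard
   registers can be compared by pointer.  */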
5445
5446 #include "gt-emit-rtl.h"