1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 /* Middle-to-low level generation of rtx code and insns.
24
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "toplev.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "hashtab.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "real.h"
53 #include "bitmap.h"
54 #include "basic-block.h"
55 #include "ggc.h"
56 #include "debug.h"
57 #include "langhooks.h"
58
59 /* Commonly used modes. */
60
61 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
62 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
63 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
64 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
65
66
67 /* This is *not* reset after each function. It gives each CODE_LABEL
68 in the entire compilation a unique label number. */
69
70 static GTY(()) int label_num = 1;
71
72 /* Highest label number in current function.
73 Zero means use the value of label_num instead.
74 This is nonzero only when belatedly compiling an inline function. */
75
76 static int last_label_num;
77
78 /* Value label_num had when set_new_last_label_num was called.
79 If label_num has not changed since then, last_label_num is valid. */
80
81 static int base_label_num;
82
83 /* Nonzero means do not generate NOTEs for source line numbers. */
84
85 static int no_line_numbers;
86
87 /* Commonly used rtx's, so that we only need space for one copy.
88 These are initialized once for the entire compilation.
89 All of these are unique; no other rtx-object will be equal to any
90 of these. */
91
92 rtx global_rtl[GR_MAX];
93
94 /* Commonly used RTL for hard registers. These objects are not necessarily
95 unique, so we allocate them separately from global_rtl. They are
96 initialized once per compilation unit, then copied into regno_reg_rtx
97 at the beginning of each function. */
98 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
99
100 rtx (*gen_lowpart) (enum machine_mode mode, rtx x) = gen_lowpart_general;
101
102 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
108 rtx const_true_rtx;
109
110 REAL_VALUE_TYPE dconst0;
111 REAL_VALUE_TYPE dconst1;
112 REAL_VALUE_TYPE dconst2;
113 REAL_VALUE_TYPE dconst3;
114 REAL_VALUE_TYPE dconst10;
115 REAL_VALUE_TYPE dconstm1;
116 REAL_VALUE_TYPE dconstm2;
117 REAL_VALUE_TYPE dconsthalf;
118 REAL_VALUE_TYPE dconstthird;
119 REAL_VALUE_TYPE dconstpi;
120 REAL_VALUE_TYPE dconste;
121
122 /* All references to the following fixed hard registers go through
123 these unique rtl objects. On machines where the frame-pointer and
124 arg-pointer are the same register, they use the same unique object.
125
126 After register allocation, other rtl objects which used to be pseudo-regs
127 may be clobbered to refer to the frame-pointer register.
128 But references that were originally to the frame-pointer can be
129 distinguished from the others because they contain frame_pointer_rtx.
130
131 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
132 tricky: until register elimination has taken place hard_frame_pointer_rtx
133 should be used if it is being set, and frame_pointer_rtx otherwise. After
134 register elimination hard_frame_pointer_rtx should always be used.
135 On machines where the two registers are the same (most machines), these
136 are the same rtx object.
137
138 In an inline procedure, the stack and frame pointer rtxs may not be
139 used for anything else. */
140 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
141 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
142 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
143
144 /* This is used to implement __builtin_return_address for some machines.
145 See for instance the MIPS port. */
146 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
147
148 /* We make one copy of (const_int C) where C is in
149 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
150 to save space during the compilation and simplify comparisons of
151 integers. */
152
153 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
154
155 /* A hash table storing CONST_INTs whose absolute value is greater
156 than MAX_SAVED_CONST_INT. */
157
158 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
159 htab_t const_int_htab;
160
161 /* A hash table storing memory attribute structures. */
162 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
163 htab_t mem_attrs_htab;
164
165 /* A hash table storing register attribute structures. */
166 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
167 htab_t reg_attrs_htab;
168
169 /* A hash table storing all CONST_DOUBLEs. */
170 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
171 htab_t const_double_htab;
172
173 #define first_insn (cfun->emit->x_first_insn)
174 #define last_insn (cfun->emit->x_last_insn)
175 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
176 #define last_location (cfun->emit->x_last_location)
177 #define first_label_num (cfun->emit->x_first_label_num)
178
179 static rtx make_jump_insn_raw (rtx);
180 static rtx make_call_insn_raw (rtx);
181 static rtx find_line_note (rtx);
182 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
183 static void unshare_all_decls (tree);
184 static void reset_used_decls (tree);
185 static void mark_label_nuses (rtx);
186 static hashval_t const_int_htab_hash (const void *);
187 static int const_int_htab_eq (const void *, const void *);
188 static hashval_t const_double_htab_hash (const void *);
189 static int const_double_htab_eq (const void *, const void *);
190 static rtx lookup_const_double (rtx);
191 static hashval_t mem_attrs_htab_hash (const void *);
192 static int mem_attrs_htab_eq (const void *, const void *);
193 static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
194 enum machine_mode);
195 static hashval_t reg_attrs_htab_hash (const void *);
196 static int reg_attrs_htab_eq (const void *, const void *);
197 static reg_attrs *get_reg_attrs (tree, int);
198 static tree component_ref_for_mem_expr (tree);
199 static rtx gen_const_vector_0 (enum machine_mode);
200 static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
201 static void copy_rtx_if_shared_1 (rtx *orig);
202
203 /* Probability of the conditional branch currently processed by try_split.
204 Set to -1 otherwise. */
205 int split_branch_probability = -1;
206 \f
207 /* Returns a hash code for X (which is really a CONST_INT).  */
208
209 static hashval_t
210 const_int_htab_hash (const void *x)
211 {
212 return (hashval_t) INTVAL ((rtx) x);
213 }
214
215 /* Returns nonzero if the value represented by X (which is really a
216 CONST_INT) is the same as that given by Y (which is really a
217 HOST_WIDE_INT *). */
218
219 static int
220 const_int_htab_eq (const void *x, const void *y)
221 {
222 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
223 }
224
225 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
226 static hashval_t
227 const_double_htab_hash (const void *x)
228 {
229 rtx value = (rtx) x;
230 hashval_t h;
231
232 if (GET_MODE (value) == VOIDmode)
233 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
234 else
235 {
236 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
237 /* MODE is used in the comparison, so it should be in the hash. */
238 h ^= GET_MODE (value);
239 }
240 return h;
241 }
242
243 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
244    is the same as that represented by Y (really a CONST_DOUBLE).  */
245 static int
246 const_double_htab_eq (const void *x, const void *y)
247 {
248 rtx a = (rtx)x, b = (rtx)y;
249
250 if (GET_MODE (a) != GET_MODE (b))
251 return 0;
252 if (GET_MODE (a) == VOIDmode)
253 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
254 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
255 else
256 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
257 CONST_DOUBLE_REAL_VALUE (b));
258 }
259
260 /* Returns a hash code for X (which is really a mem_attrs *).  */
261
262 static hashval_t
263 mem_attrs_htab_hash (const void *x)
264 {
265 mem_attrs *p = (mem_attrs *) x;
266
267 return (p->alias ^ (p->align * 1000)
268 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
269 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
270 ^ (size_t) p->expr);
271 }
272
273 /* Returns nonzero if the value represented by X (which is really a
274 mem_attrs *) is the same as that given by Y (which is also really a
275 mem_attrs *). */
276
277 static int
278 mem_attrs_htab_eq (const void *x, const void *y)
279 {
280 mem_attrs *p = (mem_attrs *) x;
281 mem_attrs *q = (mem_attrs *) y;
282
283 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
284 && p->size == q->size && p->align == q->align);
285 }
286
287 /* Allocate a new mem_attrs structure and insert it into the hash table if
288    one identical to it is not already in the table.  We are doing this for
289    a MEM of mode MODE.  */
290
291 static mem_attrs *
292 get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
293 unsigned int align, enum machine_mode mode)
294 {
295 mem_attrs attrs;
296 void **slot;
297
298 /* If everything is the default, we can just return zero.
299 This must match what the corresponding MEM_* macros return when the
300 field is not present. */
301 if (alias == 0 && expr == 0 && offset == 0
302 && (size == 0
303 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
304 && (STRICT_ALIGNMENT && mode != BLKmode
305 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
306 return 0;
307
308 attrs.alias = alias;
309 attrs.expr = expr;
310 attrs.offset = offset;
311 attrs.size = size;
312 attrs.align = align;
313
314 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
315 if (*slot == 0)
316 {
317 *slot = ggc_alloc (sizeof (mem_attrs));
318 memcpy (*slot, &attrs, sizeof (mem_attrs));
319 }
320
321 return *slot;
322 }
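/* Illustrative sketch (SET and EXPR are hypothetical): because the
   records are hash-consed, repeated requests for identical attributes
   return the same mem_attrs pointer, and an all-default request
   returns 0, matching what the MEM_* accessors assume when no
   attribute block is present.  */
#if 0
  mem_attrs *a = get_mem_attrs (set, expr, GEN_INT (4), GEN_INT (8),
				32, SImode);
  mem_attrs *b = get_mem_attrs (set, expr, GEN_INT (4), GEN_INT (8),
				32, SImode);
  /* a == b: the second call found the entry inserted by the first.  */
#endif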
323
324 /* Returns a hash code for X (which is really a reg_attrs *).  */
325
326 static hashval_t
327 reg_attrs_htab_hash (const void *x)
328 {
329 reg_attrs *p = (reg_attrs *) x;
330
331 return ((p->offset * 1000) ^ (long) p->decl);
332 }
333
334 /* Returns nonzero if the value represented by X (which is really a
335 reg_attrs *) is the same as that given by Y (which is also really a
336 reg_attrs *). */
337
338 static int
339 reg_attrs_htab_eq (const void *x, const void *y)
340 {
341 reg_attrs *p = (reg_attrs *) x;
342 reg_attrs *q = (reg_attrs *) y;
343
344 return (p->decl == q->decl && p->offset == q->offset);
345 }
346 /* Allocate a new reg_attrs structure and insert it into the hash table if
347    one identical to it is not already in the table.  We are doing this for
348    a REG whose decl is DECL and whose offset is OFFSET.  */
349
350 static reg_attrs *
351 get_reg_attrs (tree decl, int offset)
352 {
353 reg_attrs attrs;
354 void **slot;
355
356 /* If everything is the default, we can just return zero. */
357 if (decl == 0 && offset == 0)
358 return 0;
359
360 attrs.decl = decl;
361 attrs.offset = offset;
362
363 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
364 if (*slot == 0)
365 {
366 *slot = ggc_alloc (sizeof (reg_attrs));
367 memcpy (*slot, &attrs, sizeof (reg_attrs));
368 }
369
370 return *slot;
371 }
372
373 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
374 don't attempt to share with the various global pieces of rtl (such as
375 frame_pointer_rtx). */
376
377 rtx
378 gen_raw_REG (enum machine_mode mode, int regno)
379 {
380 rtx x = gen_rtx_raw_REG (mode, regno);
381 ORIGINAL_REGNO (x) = regno;
382 return x;
383 }
384
385 /* There are some RTL codes that require special attention; the generation
386 functions do the raw handling. If you add to this list, modify
387 special_rtx in gengenrtl.c as well. */
388
389 rtx
390 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
391 {
392 void **slot;
393
394 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
395 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
396
397 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
398 if (const_true_rtx && arg == STORE_FLAG_VALUE)
399 return const_true_rtx;
400 #endif
401
402 /* Look up the CONST_INT in the hash table. */
403 slot = htab_find_slot_with_hash (const_int_htab, &arg,
404 (hashval_t) arg, INSERT);
405 if (*slot == 0)
406 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
407
408 return (rtx) *slot;
409 }
410
411 rtx
412 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
413 {
414 return GEN_INT (trunc_int_for_mode (c, mode));
415 }
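/* A sketch of the sharing and truncation this buys us (illustrative
   values only): small CONST_INTs are pre-allocated, so equal small
   constants are pointer-equal, and gen_int_mode first truncates C the
   way MODE would.  */
#if 0
  rtx a = GEN_INT (0);
  rtx b = gen_rtx_CONST_INT (VOIDmode, 0);
  /* a == b == const0_rtx: all three name one shared object.  */
  rtx c = gen_int_mode (0xff, QImode);
  /* c == constm1_rtx: 0xff sign-extends to -1 in QImode.  */
#endif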
416
417 /* CONST_DOUBLEs might be created from pairs of integers, or from
418 REAL_VALUE_TYPEs. Also, their length is known only at run time,
419 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
420
421 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
422 hash table. If so, return its counterpart; otherwise add it
423 to the hash table and return it. */
424 static rtx
425 lookup_const_double (rtx real)
426 {
427 void **slot = htab_find_slot (const_double_htab, real, INSERT);
428 if (*slot == 0)
429 *slot = real;
430
431 return (rtx) *slot;
432 }
433
434 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
435 VALUE in mode MODE. */
436 rtx
437 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
438 {
439 rtx real = rtx_alloc (CONST_DOUBLE);
440 PUT_MODE (real, mode);
441
442 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
443
444 return lookup_const_double (real);
445 }
446
447 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
448 of ints: I0 is the low-order word and I1 is the high-order word.
449 Do not use this routine for non-integer modes; convert to
450 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
451
452 rtx
453 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
454 {
455 rtx value;
456 unsigned int i;
457
458 if (mode != VOIDmode)
459 {
460 int width;
461 if (GET_MODE_CLASS (mode) != MODE_INT
462 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
463 /* We can get a 0 for an error mark. */
464 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
465 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
466 abort ();
467
468 /* We clear out all bits that don't belong in MODE, unless they and
469 our sign bit are all one. So we get either a reasonable negative
470 value or a reasonable unsigned value for this mode. */
471 width = GET_MODE_BITSIZE (mode);
472 if (width < HOST_BITS_PER_WIDE_INT
473 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
474 != ((HOST_WIDE_INT) (-1) << (width - 1))))
475 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
476 else if (width == HOST_BITS_PER_WIDE_INT
477 && ! (i1 == ~0 && i0 < 0))
478 i1 = 0;
479 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
480 /* We cannot represent this value as a constant. */
481 abort ();
482
483 /* If this would be an entire word for the target, but is not for
484 the host, then sign-extend on the host so that the number will
485 look the same way on the host that it would on the target.
486
487 For example, when building a 64 bit alpha hosted 32 bit sparc
488 targeted compiler, then we want the 32 bit unsigned value -1 to be
489 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
490 The latter confuses the sparc backend. */
491
492 if (width < HOST_BITS_PER_WIDE_INT
493 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
494 i0 |= ((HOST_WIDE_INT) (-1) << width);
495
496 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
497 CONST_INT.
498
499 ??? Strictly speaking, this is wrong if we create a CONST_INT for
500 a large unsigned constant with the size of MODE being
501 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
502 in a wider mode. In that case we will mis-interpret it as a
503 negative number.
504
505 Unfortunately, the only alternative is to make a CONST_DOUBLE for
506 any constant in any mode if it is an unsigned constant larger
507 than the maximum signed integer in an int on the host. However,
508 doing this will break everyone that always expects to see a
509 CONST_INT for SImode and smaller.
510
511 We have always been making CONST_INTs in this case, so nothing
512 new is being broken. */
513
514 if (width <= HOST_BITS_PER_WIDE_INT)
515 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
516 }
517
518 /* If this integer fits in one word, return a CONST_INT. */
519 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
520 return GEN_INT (i0);
521
522 /* We use VOIDmode for integers. */
523 value = rtx_alloc (CONST_DOUBLE);
524 PUT_MODE (value, VOIDmode);
525
526 CONST_DOUBLE_LOW (value) = i0;
527 CONST_DOUBLE_HIGH (value) = i1;
528
529 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
530 XWINT (value, i) = 0;
531
532 return lookup_const_double (value);
533 }
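/* A worked example, assuming a 64-bit HOST_WIDE_INT.  */
#if 0
  /* Fits in one word, so this is just (const_int 5).  */
  rtx a = immed_double_const (5, 0, DImode);
  /* The TImode value 2**64 has I0 == 0 and I1 == 1; it cannot be a
     CONST_INT, so we get a VOIDmode CONST_DOUBLE instead.  */
  rtx b = immed_double_const (0, 1, TImode);
#endif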
534
535 rtx
536 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
537 {
538 /* In case the MD file explicitly references the frame pointer, have
539 all such references point to the same frame pointer. This is
540 used during frame pointer elimination to distinguish the explicit
541 references to these registers from pseudos that happened to be
542 assigned to them.
543
544 If we have eliminated the frame pointer or arg pointer, we will
545 be using it as a normal register, for example as a spill
546 register. In such cases, we might be accessing it in a mode that
547 is not Pmode and therefore cannot use the pre-allocated rtx.
548
549 Also don't do this when we are making new REGs in reload, since
550 we don't want to get confused with the real pointers. */
551
552 if (mode == Pmode && !reload_in_progress)
553 {
554 if (regno == FRAME_POINTER_REGNUM
555 && (!reload_completed || frame_pointer_needed))
556 return frame_pointer_rtx;
557 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
558 if (regno == HARD_FRAME_POINTER_REGNUM
559 && (!reload_completed || frame_pointer_needed))
560 return hard_frame_pointer_rtx;
561 #endif
562 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
563 if (regno == ARG_POINTER_REGNUM)
564 return arg_pointer_rtx;
565 #endif
566 #ifdef RETURN_ADDRESS_POINTER_REGNUM
567 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
568 return return_address_pointer_rtx;
569 #endif
570 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
571 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
572 return pic_offset_table_rtx;
573 if (regno == STACK_POINTER_REGNUM)
574 return stack_pointer_rtx;
575 }
576
577 #if 0
578 /* If the per-function register table has been set up, try to re-use
579 an existing entry in that table to avoid useless generation of RTL.
580
581 This code is disabled for now until we can fix the various backends
582 which depend on having non-shared hard registers in some cases. Long
583 term we want to re-enable this code as it can significantly cut down
584 on the amount of useless RTL that gets generated.
585
586 We'll also need to fix some code that runs after reload that wants to
587 set ORIGINAL_REGNO. */
588
589 if (cfun
590 && cfun->emit
591 && regno_reg_rtx
592 && regno < FIRST_PSEUDO_REGISTER
593 && reg_raw_mode[regno] == mode)
594 return regno_reg_rtx[regno];
595 #endif
596
597 return gen_raw_REG (mode, regno);
598 }
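/* For instance, an explicit Pmode reference to the stack pointer comes
   back as the shared global object (a sketch; assumes we are not in
   reload):  */
#if 0
  rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  /* sp == stack_pointer_rtx, so such references can be compared with
     pointer equality.  */
#endif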
599
600 rtx
601 gen_rtx_MEM (enum machine_mode mode, rtx addr)
602 {
603 rtx rt = gen_rtx_raw_MEM (mode, addr);
604
605 /* This field is not cleared by the mere allocation of the rtx, so
606 we clear it here. */
607 MEM_ATTRS (rt) = 0;
608
609 return rt;
610 }
611
612 rtx
613 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
614 {
615 /* This is the most common failure type.
616 Catch it early so we can see who does it. */
617 if ((offset % GET_MODE_SIZE (mode)) != 0)
618 abort ();
619
620 /* This check isn't usable right now because combine will
621 throw arbitrary crap like a CALL into a SUBREG in
622 gen_lowpart_for_combine so we must just eat it. */
623 #if 0
624 /* Check for this too. */
625 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
626 abort ();
627 #endif
628 return gen_rtx_raw_SUBREG (mode, reg, offset);
629 }
630
631 /* Generate a SUBREG representing the least-significant part of REG if MODE
632    is smaller than the mode of REG; otherwise, generate a paradoxical SUBREG.  */
633
634 rtx
635 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
636 {
637 enum machine_mode inmode;
638
639 inmode = GET_MODE (reg);
640 if (inmode == VOIDmode)
641 inmode = mode;
642 return gen_rtx_SUBREG (mode, reg,
643 subreg_lowpart_offset (mode, inmode));
644 }
645 \f
646 /* gen_rtvec (n, [rt1, ..., rtn])
647 **
648 ** This routine creates an rtvec and stores within it the
649 ** pointers to rtx's which are its arguments.
650 */
651
652 /*VARARGS1*/
653 rtvec
654 gen_rtvec (int n, ...)
655 {
656 int i, save_n;
657 rtx *vector;
658 va_list p;
659
660 va_start (p, n);
661
662 if (n == 0)
663 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
664
665 vector = alloca (n * sizeof (rtx));
666
667 for (i = 0; i < n; i++)
668 vector[i] = va_arg (p, rtx);
669
670 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
671 save_n = n;
672 va_end (p);
673
674 return gen_rtvec_v (save_n, vector);
675 }
676
677 rtvec
678 gen_rtvec_v (int n, rtx *argp)
679 {
680 int i;
681 rtvec rt_val;
682
683 if (n == 0)
684 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
685
686 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
687
688 for (i = 0; i < n; i++)
689 rt_val->elem[i] = *argp++;
690
691 return rt_val;
692 }
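/* Typical use (SET0 and SET1 are hypothetical SET rtxs): collect the
   arguments into a vector and wrap them in a PARALLEL.  */
#if 0
  rtvec v = gen_rtvec (2, set0, set1);
  rtx par = gen_rtx_PARALLEL (VOIDmode, v);
#endif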
693 \f
694 /* Generate a REG rtx for a new pseudo register of mode MODE.
695 This pseudo is assigned the next sequential register number. */
696
697 rtx
698 gen_reg_rtx (enum machine_mode mode)
699 {
700 struct function *f = cfun;
701 rtx val;
702
703 /* Don't let anything called after initial flow analysis create new
704 registers. */
705 if (no_new_pseudos)
706 abort ();
707
708 if (generating_concat_p
709 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
710 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
711 {
712 /* For complex modes, don't make a single pseudo.
713 Instead, make a CONCAT of two pseudos.
714 This allows noncontiguous allocation of the real and imaginary parts,
715 which makes much better code. Besides, allocating DCmode
716 pseudos overstrains reload on some machines like the 386. */
717 rtx realpart, imagpart;
718 enum machine_mode partmode = GET_MODE_INNER (mode);
719
720 realpart = gen_reg_rtx (partmode);
721 imagpart = gen_reg_rtx (partmode);
722 return gen_rtx_CONCAT (mode, realpart, imagpart);
723 }
724
725 /* Make sure regno_pointer_align and regno_reg_rtx are large
726 enough to have an element for this pseudo reg number. */
727
728 if (reg_rtx_no == f->emit->regno_pointer_align_length)
729 {
730 int old_size = f->emit->regno_pointer_align_length;
731 char *new;
732 rtx *new1;
733
734 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
735 memset (new + old_size, 0, old_size);
736 f->emit->regno_pointer_align = (unsigned char *) new;
737
738 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
739 old_size * 2 * sizeof (rtx));
740 memset (new1 + old_size, 0, old_size * sizeof (rtx));
741 regno_reg_rtx = new1;
742
743 f->emit->regno_pointer_align_length = old_size * 2;
744 }
745
746 val = gen_raw_REG (mode, reg_rtx_no);
747 regno_reg_rtx[reg_rtx_no++] = val;
748 return val;
749 }
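/* A sketch of the complex-mode case described above: while
   generating_concat_p is set, a DCmode request yields a CONCAT of two
   DFmode pseudos rather than one DCmode pseudo.  */
#if 0
  rtx c = gen_reg_rtx (DCmode);
  /* c is (concat:DC (reg:DF n) (reg:DF n+1)).  */
#endif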
750
751 /* Generate a register with the same attributes as REG,
752    but offset by OFFSET.  */
753
754 rtx
755 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
756 {
757 rtx new = gen_rtx_REG (mode, regno);
758 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
759 REG_OFFSET (reg) + offset);
760 return new;
761 }
762
763 /* Set the register attributes of REG from the memory attributes of MEM.  */
764
765 void
766 set_reg_attrs_from_mem (rtx reg, rtx mem)
767 {
768 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
769 REG_ATTRS (reg)
770 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
771 }
772
773 /* Set the register attributes for registers contained in PARM_RTX.
774 Use needed values from memory attributes of MEM. */
775
776 void
777 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
778 {
779 if (GET_CODE (parm_rtx) == REG)
780 set_reg_attrs_from_mem (parm_rtx, mem);
781 else if (GET_CODE (parm_rtx) == PARALLEL)
782 {
783 /* Check for a NULL entry in the first slot, used to indicate that the
784 parameter goes both on the stack and in registers. */
785 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
786 for (; i < XVECLEN (parm_rtx, 0); i++)
787 {
788 rtx x = XVECEXP (parm_rtx, 0, i);
789 if (GET_CODE (XEXP (x, 0)) == REG)
790 REG_ATTRS (XEXP (x, 0))
791 = get_reg_attrs (MEM_EXPR (mem),
792 INTVAL (XEXP (x, 1)));
793 }
794 }
795 }
796
797 /* Assign the RTX X to declaration T. */
798 void
799 set_decl_rtl (tree t, rtx x)
800 {
801 DECL_CHECK (t)->decl.rtl = x;
802
803 if (!x)
804 return;
805 /* For registers, we maintain the reverse mapping too.  */
806 if (GET_CODE (x) == REG)
807 REG_ATTRS (x) = get_reg_attrs (t, 0);
808 else if (GET_CODE (x) == SUBREG)
809 REG_ATTRS (SUBREG_REG (x))
810 = get_reg_attrs (t, -SUBREG_BYTE (x));
811 if (GET_CODE (x) == CONCAT)
812 {
813 if (REG_P (XEXP (x, 0)))
814 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
815 if (REG_P (XEXP (x, 1)))
816 REG_ATTRS (XEXP (x, 1))
817 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
818 }
819 if (GET_CODE (x) == PARALLEL)
820 {
821 int i;
822 for (i = 0; i < XVECLEN (x, 0); i++)
823 {
824 rtx y = XVECEXP (x, 0, i);
825 if (REG_P (XEXP (y, 0)))
826 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
827 }
828 }
829 }
830
831 /* Assign the RTX X to parameter declaration T. */
832 void
833 set_decl_incoming_rtl (tree t, rtx x)
834 {
835 DECL_INCOMING_RTL (t) = x;
836
837 if (!x)
838 return;
839 /* For registers, we maintain the reverse mapping too.  */
840 if (GET_CODE (x) == REG)
841 REG_ATTRS (x) = get_reg_attrs (t, 0);
842 else if (GET_CODE (x) == SUBREG)
843 REG_ATTRS (SUBREG_REG (x))
844 = get_reg_attrs (t, -SUBREG_BYTE (x));
845 if (GET_CODE (x) == CONCAT)
846 {
847 if (REG_P (XEXP (x, 0)))
848 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
849 if (REG_P (XEXP (x, 1)))
850 REG_ATTRS (XEXP (x, 1))
851 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
852 }
853 if (GET_CODE (x) == PARALLEL)
854 {
855 int i, start;
856
857 /* Check for a NULL entry, used to indicate that the parameter goes
858 both on the stack and in registers. */
859 if (XEXP (XVECEXP (x, 0, 0), 0))
860 start = 0;
861 else
862 start = 1;
863
864 for (i = start; i < XVECLEN (x, 0); i++)
865 {
866 rtx y = XVECEXP (x, 0, i);
867 if (REG_P (XEXP (y, 0)))
868 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
869 }
870 }
871 }
872
873 /* Identify REG (which may be a CONCAT) as a user register. */
874
875 void
876 mark_user_reg (rtx reg)
877 {
878 if (GET_CODE (reg) == CONCAT)
879 {
880 REG_USERVAR_P (XEXP (reg, 0)) = 1;
881 REG_USERVAR_P (XEXP (reg, 1)) = 1;
882 }
883 else if (GET_CODE (reg) == REG)
884 REG_USERVAR_P (reg) = 1;
885 else
886 abort ();
887 }
888
889 /* Identify REG as a probable pointer register and show its alignment
890 as ALIGN, if nonzero. */
891
892 void
893 mark_reg_pointer (rtx reg, int align)
894 {
895 if (! REG_POINTER (reg))
896 {
897 REG_POINTER (reg) = 1;
898
899 if (align)
900 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
901 }
902 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
903 /* We can no longer be sure just how aligned this pointer is.  */
904 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
905 }
906
907 /* Return 1 plus largest pseudo reg number used in the current function. */
908
909 int
910 max_reg_num (void)
911 {
912 return reg_rtx_no;
913 }
914
915 /* Return 1 + the largest label number used so far in the current function. */
916
917 int
918 max_label_num (void)
919 {
920 if (last_label_num && label_num == base_label_num)
921 return last_label_num;
922 return label_num;
923 }
924
925 /* Return first label number used in this function (if any were used). */
926
927 int
928 get_first_label_num (void)
929 {
930 return first_label_num;
931 }
932 \f
933 /* Return the final regno of X, which is a SUBREG of a hard
934 register. */
935 int
936 subreg_hard_regno (rtx x, int check_mode)
937 {
938 enum machine_mode mode = GET_MODE (x);
939 unsigned int byte_offset, base_regno, final_regno;
940 rtx reg = SUBREG_REG (x);
941
942 /* This is where we attempt to catch illegal subregs
943 created by the compiler. */
944 if (GET_CODE (x) != SUBREG
945 || GET_CODE (reg) != REG)
946 abort ();
947 base_regno = REGNO (reg);
948 if (base_regno >= FIRST_PSEUDO_REGISTER)
949 abort ();
950 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
951 abort ();
952 #ifdef ENABLE_CHECKING
953 if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
954 SUBREG_BYTE (x), mode))
955 abort ();
956 #endif
957 /* Catch non-congruent offsets too. */
958 byte_offset = SUBREG_BYTE (x);
959 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
960 abort ();
961
962 final_regno = subreg_regno (x);
963
964 return final_regno;
965 }
966
967 /* Return a value representing some low-order bits of X, where the number
968 of low-order bits is given by MODE. Note that no conversion is done
969    between floating-point and fixed-point values; rather, the bit
970 representation is returned.
971
972 This function handles the cases in common between gen_lowpart, below,
973 and two variants in cse.c and combine.c. These are the cases that can
974 be safely handled at all points in the compilation.
975
976 If this is not a case we can handle, return 0. */
977
978 rtx
979 gen_lowpart_common (enum machine_mode mode, rtx x)
980 {
981 int msize = GET_MODE_SIZE (mode);
982 int xsize;
983 int offset = 0;
984 enum machine_mode innermode;
985
986 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
987 so we have to make one up. Yuk. */
988 innermode = GET_MODE (x);
989 if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
990 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
991 else if (innermode == VOIDmode)
992 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
993
994 xsize = GET_MODE_SIZE (innermode);
995
996 if (innermode == VOIDmode || innermode == BLKmode)
997 abort ();
998
999 if (innermode == mode)
1000 return x;
1001
1002 /* MODE must occupy no more words than the mode of X. */
1003 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1004 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1005 return 0;
1006
1007 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1008 if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
1009 return 0;
1010
1011 offset = subreg_lowpart_offset (mode, innermode);
1012
1013 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1014 && (GET_MODE_CLASS (mode) == MODE_INT
1015 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1016 {
1017 /* If we are getting the low-order part of something that has been
1018 sign- or zero-extended, we can either just use the object being
1019 extended or make a narrower extension. If we want an even smaller
1020 piece than the size of the object being extended, call ourselves
1021 recursively.
1022
1023 This case is used mostly by combine and cse. */
1024
1025 if (GET_MODE (XEXP (x, 0)) == mode)
1026 return XEXP (x, 0);
1027 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1028 return gen_lowpart_common (mode, XEXP (x, 0));
1029 else if (msize < xsize)
1030 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1031 }
1032 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
1033 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1034 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1035 return simplify_gen_subreg (mode, x, innermode, offset);
1036
1037 /* Otherwise, we can't do this. */
1038 return 0;
1039 }
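/* An illustrative call: taking the low byte of an SImode pseudo goes
   through the SUBREG case above.  */
#if 0
  rtx r = gen_reg_rtx (SImode);
  rtx lo = gen_lowpart_common (QImode, r);
  /* lo is (subreg:QI (reg:SI n) 0) on a little-endian target; the byte
     offset comes from subreg_lowpart_offset, so it would be 3 on a
     big-endian one.  */
#endif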
1040 \f
1041 /* Return the constant real or imaginary part (which has mode MODE)
1042    of a complex value X.  The IMAGPART_P argument determines whether
1043    the real or imaginary component should be returned.  This function
1044 returns NULL_RTX if the component isn't a constant. */
1045
1046 static rtx
1047 gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
1048 {
1049 tree decl, part;
1050
1051 if (GET_CODE (x) == MEM
1052 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
1053 {
1054 decl = SYMBOL_REF_DECL (XEXP (x, 0));
1055 if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
1056 {
1057 part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
1058 if (TREE_CODE (part) == REAL_CST
1059 || TREE_CODE (part) == INTEGER_CST)
1060 return expand_expr (part, NULL_RTX, mode, 0);
1061 }
1062 }
1063 return NULL_RTX;
1064 }
1065
1066 /* Return the real part (which has mode MODE) of a complex value X.
1067 This always comes at the low address in memory. */
1068
1069 rtx
1070 gen_realpart (enum machine_mode mode, rtx x)
1071 {
1072 rtx part;
1073
1074 /* Handle complex constants. */
1075 part = gen_complex_constant_part (mode, x, 0);
1076 if (part != NULL_RTX)
1077 return part;
1078
1079 if (WORDS_BIG_ENDIAN
1080 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1081 && REG_P (x)
1082 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1083 internal_error
1084 ("can't access real part of complex value in hard register");
1085 else if (WORDS_BIG_ENDIAN)
1086 return gen_highpart (mode, x);
1087 else
1088 return gen_lowpart (mode, x);
1089 }
1090
1091 /* Return the imaginary part (which has mode MODE) of a complex value X.
1092 This always comes at the high address in memory. */
1093
1094 rtx
1095 gen_imagpart (enum machine_mode mode, rtx x)
1096 {
1097 rtx part;
1098
1099 /* Handle complex constants. */
1100 part = gen_complex_constant_part (mode, x, 1);
1101 if (part != NULL_RTX)
1102 return part;
1103
1104 if (WORDS_BIG_ENDIAN)
1105 return gen_lowpart (mode, x);
1106 else if (! WORDS_BIG_ENDIAN
1107 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1108 && REG_P (x)
1109 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1110 internal_error
1111 ("can't access imaginary part of complex value in hard register");
1112 else
1113 return gen_highpart (mode, x);
1114 }
1115 \f
1116 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1117 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1118 least-significant part of X.
1119 MODE specifies how big a part of X to return;
1120 it usually should not be larger than a word.
1121 If X is a MEM whose address is a QUEUED, the value may be so also. */
1122
1123 rtx
1124 gen_lowpart_general (enum machine_mode mode, rtx x)
1125 {
1126 rtx result = gen_lowpart_common (mode, x);
1127
1128 if (result)
1129 return result;
1130 else if (GET_CODE (x) == REG)
1131 {
1132 /* Must be a hard reg that's not valid in MODE. */
1133 result = gen_lowpart_common (mode, copy_to_reg (x));
1134 if (result == 0)
1135 abort ();
1136 return result;
1137 }
1138 else if (GET_CODE (x) == MEM)
1139 {
1140 /* The only additional case we can do is MEM. */
1141 int offset = 0;
1142
1143 /* The following exposes the use of "x" to CSE. */
1144 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
1145 && SCALAR_INT_MODE_P (GET_MODE (x))
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1147 GET_MODE_BITSIZE (GET_MODE (x)))
1148 && ! no_new_pseudos)
1149 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1150
1151 if (WORDS_BIG_ENDIAN)
1152 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1153 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1154
1155 if (BYTES_BIG_ENDIAN)
1156 /* Adjust the address so that the address-after-the-data
1157 is unchanged. */
1158 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1159 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1160
1161 return adjust_address (x, mode, offset);
1162 }
1163 else if (GET_CODE (x) == ADDRESSOF)
1164 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1165 else
1166 abort ();
1167 }
1168
1169 /* Like `gen_lowpart', but refer to the most significant part.
1170 This is used to access the imaginary part of a complex number. */
1171
1172 rtx
1173 gen_highpart (enum machine_mode mode, rtx x)
1174 {
1175 unsigned int msize = GET_MODE_SIZE (mode);
1176 rtx result;
1177
1178 /* This case loses if X is a subreg. To catch bugs early,
1179 complain if an invalid MODE is used even in other cases. */
1180 if (msize > UNITS_PER_WORD
1181 && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
1182 abort ();
1183
1184 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1185 subreg_highpart_offset (mode, GET_MODE (x)));
1186
1187 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1188 the target if we have a MEM. gen_highpart must return a valid operand,
1189 emitting code if necessary to do so. */
1190 if (result != NULL_RTX && GET_CODE (result) == MEM)
1191 result = validize_mem (result);
1192
1193 if (!result)
1194 abort ();
1195 return result;
1196 }
1197
1198 /* Like gen_highpart, but accept the mode of EXP in case EXP can
1199    be a VOIDmode constant.  */
1200 rtx
1201 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1202 {
1203 if (GET_MODE (exp) != VOIDmode)
1204 {
1205 if (GET_MODE (exp) != innermode)
1206 abort ();
1207 return gen_highpart (outermode, exp);
1208 }
1209 return simplify_gen_subreg (outermode, exp, innermode,
1210 subreg_highpart_offset (outermode, innermode));
1211 }
1212
1213 /* Return offset in bytes to get OUTERMODE low part
1214 of the value in mode INNERMODE stored in memory in target format. */
1215
1216 unsigned int
1217 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1218 {
1219 unsigned int offset = 0;
1220 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1221
1222 if (difference > 0)
1223 {
1224 if (WORDS_BIG_ENDIAN)
1225 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1226 if (BYTES_BIG_ENDIAN)
1227 offset += difference % UNITS_PER_WORD;
1228 }
1229
1230 return offset;
1231 }
1232
1233 /* Return offset in bytes to get OUTERMODE high part
1234 of the value in mode INNERMODE stored in memory in target format. */
1235 unsigned int
1236 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1237 {
1238 unsigned int offset = 0;
1239 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1240
1241 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1242 abort ();
1243
1244 if (difference > 0)
1245 {
1246 if (! WORDS_BIG_ENDIAN)
1247 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1248 if (! BYTES_BIG_ENDIAN)
1249 offset += difference % UNITS_PER_WORD;
1250 }
1251
1252 return offset;
1253 }
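/* A worked example for both functions: with INNERMODE == DImode,
   OUTERMODE == SImode and 4-byte words, the size difference is 4
   bytes, so the low part is at byte offset 0 on a little-endian
   target and at offset 4 when WORDS_BIG_ENDIAN; the high part is the
   mirror image, offset 4 little-endian and offset 0 big-endian.  */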
1254
1255 /* Return 1 iff X, assumed to be a SUBREG,
1256 refers to the least significant part of its containing reg.
1257 If X is not a SUBREG, always return 1 (it is its own low part!). */
1258
1259 int
1260 subreg_lowpart_p (rtx x)
1261 {
1262 if (GET_CODE (x) != SUBREG)
1263 return 1;
1264 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1265 return 0;
1266
1267 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1268 == SUBREG_BYTE (x));
1269 }
1270 \f
1271 /* Return subword OFFSET of operand OP.
1272 The word number, OFFSET, is interpreted as the word number starting
1273 at the low-order address. OFFSET 0 is the low-order word if not
1274 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1275
1276 If we cannot extract the required word, we return zero. Otherwise,
1277 an rtx corresponding to the requested word will be returned.
1278
1279 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1280 reload has completed, a valid address will always be returned. After
1281 reload, if a valid address cannot be returned, we return zero.
1282
1283 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1284 it is the responsibility of the caller.
1285
1286 MODE is the mode of OP in case it is a CONST_INT.
1287
1288 ??? This is still rather broken for some cases. The problem for the
1289 moment is that all callers of this thing provide no 'goal mode' to
1290 tell us to work with. This exists because all callers were written
1291    in a word-based SUBREG world.
1292 Now use of this function can be deprecated by simplify_subreg in most
1293 cases.
1294 */
1295
1296 rtx
1297 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1298 {
1299 if (mode == VOIDmode)
1300 mode = GET_MODE (op);
1301
1302 if (mode == VOIDmode)
1303 abort ();
1304
1305 /* If OP is narrower than a word, fail. */
1306 if (mode != BLKmode
1307 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1308 return 0;
1309
1310 /* If we want a word outside OP, return zero. */
1311 if (mode != BLKmode
1312 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1313 return const0_rtx;
1314
1315 /* Form a new MEM at the requested address. */
1316 if (GET_CODE (op) == MEM)
1317 {
1318 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1319
1320 if (! validate_address)
1321 return new;
1322
1323 else if (reload_completed)
1324 {
1325 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1326 return 0;
1327 }
1328 else
1329 return replace_equiv_address (new, XEXP (new, 0));
1330 }
1331
1332 /* Rest can be handled by simplify_subreg. */
1333 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1334 }
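/* For example (a sketch, on a 32-bit target where word_mode is
   SImode): word 1 of a DImode pseudo is its high-order word when
   !WORDS_BIG_ENDIAN.  */
#if 0
  rtx r = gen_reg_rtx (DImode);
  rtx w1 = operand_subword (r, 1, 1, DImode);
  /* w1 is (subreg:SI (reg:DI n) 4).  */
#endif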
1335
1336 /* Similar to `operand_subword', but never return 0. If we can't extract
1337 the required subword, put OP into a register and try again. If that fails,
1338 abort. We always validate the address in this case.
1339
1340 MODE is the mode of OP, in case it is CONST_INT. */
1341
1342 rtx
1343 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1344 {
1345 rtx result = operand_subword (op, offset, 1, mode);
1346
1347 if (result)
1348 return result;
1349
1350 if (mode != BLKmode && mode != VOIDmode)
1351 {
1352       /* If this is a register that cannot be accessed by words, copy it
1353 	 to a pseudo register.  */
1354 if (GET_CODE (op) == REG)
1355 op = copy_to_reg (op);
1356 else
1357 op = force_reg (mode, op);
1358 }
1359
1360 result = operand_subword (op, offset, 1, mode);
1361 if (result == 0)
1362 abort ();
1363
1364 return result;
1365 }
1366 \f
1367 /* Given a compare instruction, swap the operands.
1368 A test instruction is changed into a compare of 0 against the operand. */
1369
1370 void
1371 reverse_comparison (rtx insn)
1372 {
1373 rtx body = PATTERN (insn);
1374 rtx comp;
1375
1376 if (GET_CODE (body) == SET)
1377 comp = SET_SRC (body);
1378 else
1379 comp = SET_SRC (XVECEXP (body, 0, 0));
1380
1381 if (GET_CODE (comp) == COMPARE)
1382 {
1383 rtx op0 = XEXP (comp, 0);
1384 rtx op1 = XEXP (comp, 1);
1385 XEXP (comp, 0) = op1;
1386 XEXP (comp, 1) = op0;
1387 }
1388 else
1389 {
1390 rtx new = gen_rtx_COMPARE (VOIDmode,
1391 CONST0_RTX (GET_MODE (comp)), comp);
1392 if (GET_CODE (body) == SET)
1393 SET_SRC (body) = new;
1394 else
1395 SET_SRC (XVECEXP (body, 0, 0)) = new;
1396 }
1397 }
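/* Illustrative patterns: a compare
       (set (reg:CC cc) (compare:CC (reg:SI a) (reg:SI b)))
   becomes
       (set (reg:CC cc) (compare:CC (reg:SI b) (reg:SI a)))
   while a test
       (set (reg:CC cc) (reg:SI a))
   becomes
       (set (reg:CC cc) (compare (const_int 0) (reg:SI a))).  */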
1398 \f
1399 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1400    or (2) a component ref of something variable.  Represent the latter with
1401    a NULL expression.  */
1402
1403 static tree
1404 component_ref_for_mem_expr (tree ref)
1405 {
1406 tree inner = TREE_OPERAND (ref, 0);
1407
1408 if (TREE_CODE (inner) == COMPONENT_REF)
1409 inner = component_ref_for_mem_expr (inner);
1410 else
1411 {
1412 tree placeholder_ptr = 0;
1413
1414 /* Now remove any conversions: they don't change what the underlying
1415 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1416 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1417 || TREE_CODE (inner) == NON_LVALUE_EXPR
1418 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1419 || TREE_CODE (inner) == SAVE_EXPR
1420 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1421 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1422 inner = find_placeholder (inner, &placeholder_ptr);
1423 else
1424 inner = TREE_OPERAND (inner, 0);
1425
1426 if (! DECL_P (inner))
1427 inner = NULL_TREE;
1428 }
1429
1430 if (inner == TREE_OPERAND (ref, 0))
1431 return ref;
1432 else
1433 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1434 TREE_OPERAND (ref, 1));
1435 }
1436
1437 /* Given REF, a MEM, and T, either the type of REF or the expression
1438 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1439 if we are making a new object of this type. BITPOS is nonzero if
1440 there is an offset outstanding on T that will be applied later. */
1441
1442 void
1443 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1444 HOST_WIDE_INT bitpos)
1445 {
1446 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1447 tree expr = MEM_EXPR (ref);
1448 rtx offset = MEM_OFFSET (ref);
1449 rtx size = MEM_SIZE (ref);
1450 unsigned int align = MEM_ALIGN (ref);
1451 HOST_WIDE_INT apply_bitpos = 0;
1452 tree type;
1453
1454   /* It can happen that type_for_mode was given a mode for which there
1455      is no language-level type, in which case it returns NULL; we can
1456      see that here.  */
1457 if (t == NULL_TREE)
1458 return;
1459
1460 type = TYPE_P (t) ? t : TREE_TYPE (t);
1461 if (type == error_mark_node)
1462 return;
1463
1464 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1465 wrong answer, as it assumes that DECL_RTL already has the right alias
1466 info. Callers should not set DECL_RTL until after the call to
1467 set_mem_attributes. */
1468 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1469 abort ();
1470
1471 /* Get the alias set from the expression or type (perhaps using a
1472 front-end routine) and use it. */
1473 alias = get_alias_set (t);
1474
1475 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1476 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1477 RTX_UNCHANGING_P (ref)
1478 |= ((lang_hooks.honor_readonly
1479 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1480 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1481
1482 /* If we are making an object of this type, or if this is a DECL, we know
1483 that it is a scalar if the type is not an aggregate. */
1484 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1485 MEM_SCALAR_P (ref) = 1;
1486
1487 /* We can set the alignment from the type if we are making an object,
1488 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1489 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1490 align = MAX (align, TYPE_ALIGN (type));
1491
1492 /* If the size is known, we can set that. */
1493 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1494 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1495
1496 /* If T is not a type, we may be able to deduce some more information about
1497 the expression. */
1498 if (! TYPE_P (t))
1499 {
1500 maybe_set_unchanging (ref, t);
1501 if (TREE_THIS_VOLATILE (t))
1502 MEM_VOLATILE_P (ref) = 1;
1503
1504 /* Now remove any conversions: they don't change what the underlying
1505 object is. Likewise for SAVE_EXPR. */
1506 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1507 || TREE_CODE (t) == NON_LVALUE_EXPR
1508 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1509 || TREE_CODE (t) == SAVE_EXPR)
1510 t = TREE_OPERAND (t, 0);
1511
1512 /* If this expression can't be addressed (e.g., it contains a reference
1513 to a non-addressable field), show we don't change its alias set. */
1514 if (! can_address_p (t))
1515 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1516
1517 /* If this is a decl, set the attributes of the MEM from it. */
1518 if (DECL_P (t))
1519 {
1520 expr = t;
1521 offset = const0_rtx;
1522 apply_bitpos = bitpos;
1523 size = (DECL_SIZE_UNIT (t)
1524 && host_integerp (DECL_SIZE_UNIT (t), 1)
1525 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1526 align = DECL_ALIGN (t);
1527 }
1528
1529 /* If this is a constant, we know the alignment. */
1530 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1531 {
1532 align = TYPE_ALIGN (type);
1533 #ifdef CONSTANT_ALIGNMENT
1534 align = CONSTANT_ALIGNMENT (t, align);
1535 #endif
1536 }
1537
1538 /* If this is a field reference and not a bit-field, record it. */
1539   /* ??? There is some information that can be gleaned from bit-fields,
1540 such as the word offset in the structure that might be modified.
1541 But skip it for now. */
1542 else if (TREE_CODE (t) == COMPONENT_REF
1543 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1544 {
1545 expr = component_ref_for_mem_expr (t);
1546 offset = const0_rtx;
1547 apply_bitpos = bitpos;
1548 /* ??? Any reason the field size would be different than
1549 the size we got from the type? */
1550 }
1551
1552 /* If this is an array reference, look for an outer field reference. */
1553 else if (TREE_CODE (t) == ARRAY_REF)
1554 {
1555 tree off_tree = size_zero_node;
1556 /* We can't modify t, because we use it at the end of the
1557 function. */
1558 tree t2 = t;
1559
1560 do
1561 {
1562 tree index = TREE_OPERAND (t2, 1);
1563 tree array = TREE_OPERAND (t2, 0);
1564 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1565 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1566 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1567
1568 /* We assume all arrays have sizes that are a multiple of a byte.
1569 First subtract the lower bound, if any, in the type of the
1570 index, then convert to sizetype and multiply by the size of the
1571 array element. */
1572 if (low_bound != 0 && ! integer_zerop (low_bound))
1573 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1574 index, low_bound));
1575
1576 /* If the index has a self-referential type, pass it to a
1577 	     WITH_RECORD_EXPR; if the component size does, pass our
1578 component to one. */
1579 if (CONTAINS_PLACEHOLDER_P (index))
1580 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t2);
1581 if (CONTAINS_PLACEHOLDER_P (unit_size))
1582 unit_size = build (WITH_RECORD_EXPR, sizetype,
1583 unit_size, array);
1584
1585 off_tree
1586 = fold (build (PLUS_EXPR, sizetype,
1587 fold (build (MULT_EXPR, sizetype,
1588 index,
1589 unit_size)),
1590 off_tree));
1591 t2 = TREE_OPERAND (t2, 0);
1592 }
1593 while (TREE_CODE (t2) == ARRAY_REF);
1594
1595 if (DECL_P (t2))
1596 {
1597 expr = t2;
1598 offset = NULL;
1599 if (host_integerp (off_tree, 1))
1600 {
1601 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1602 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1603 align = DECL_ALIGN (t2);
1604 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1605 align = aoff;
1606 offset = GEN_INT (ioff);
1607 apply_bitpos = bitpos;
1608 }
1609 }
1610 else if (TREE_CODE (t2) == COMPONENT_REF)
1611 {
1612 expr = component_ref_for_mem_expr (t2);
1613 if (host_integerp (off_tree, 1))
1614 {
1615 offset = GEN_INT (tree_low_cst (off_tree, 1));
1616 apply_bitpos = bitpos;
1617 }
1618 /* ??? Any reason the field size would be different than
1619 the size we got from the type? */
1620 }
1621 else if (flag_argument_noalias > 1
1622 && TREE_CODE (t2) == INDIRECT_REF
1623 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1624 {
1625 expr = t2;
1626 offset = NULL;
1627 }
1628 }
1629
1630 /* If this is a Fortran indirect argument reference, record the
1631 parameter decl. */
1632 else if (flag_argument_noalias > 1
1633 && TREE_CODE (t) == INDIRECT_REF
1634 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1635 {
1636 expr = t;
1637 offset = NULL;
1638 }
1639 }
1640
1641 /* If we modified OFFSET based on T, then subtract the outstanding
1642 bit position offset. Similarly, increase the size of the accessed
1643 object to contain the negative offset. */
1644 if (apply_bitpos)
1645 {
1646 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1647 if (size)
1648 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1649 }
1650
1651 /* Now set the attributes we computed above. */
1652 MEM_ATTRS (ref)
1653 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1654
1655 /* If this is already known to be a scalar or aggregate, we are done. */
1656 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1657 return;
1658
1659 /* If it is a reference into an aggregate, this is part of an aggregate.
1660 Otherwise we don't know. */
1661 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1662 || TREE_CODE (t) == ARRAY_RANGE_REF
1663 || TREE_CODE (t) == BIT_FIELD_REF)
1664 MEM_IN_STRUCT_P (ref) = 1;
1665 }
1666
1667 void
1668 set_mem_attributes (rtx ref, tree t, int objectp)
1669 {
1670 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1671 }
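/* Typical use (DECL and ADDR are hypothetical): after building a MEM
   for a declaration, hang the declaration's attributes on it.  */
#if 0
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
  set_mem_attributes (mem, decl, 1);
  /* mem now carries decl's alias set, MEM_EXPR, size and alignment,
     with MEM_OFFSET (const_int 0).  */
#endif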
1672
1673 /* Set the memory attributes of MEM from the register attributes of REG.  */
1674
1675 void
1676 set_mem_attrs_from_reg (rtx mem, rtx reg)
1677 {
1678 MEM_ATTRS (mem)
1679 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1680 GEN_INT (REG_OFFSET (reg)),
1681 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1682 }
1683
1684 /* Set the alias set of MEM to SET. */
1685
1686 void
1687 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1688 {
1689 #ifdef ENABLE_CHECKING
1690 /* If the new and old alias sets don't conflict, something is wrong. */
1691 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1692 abort ();
1693 #endif
1694
1695 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1696 MEM_SIZE (mem), MEM_ALIGN (mem),
1697 GET_MODE (mem));
1698 }
1699
1700 /* Set the alignment of MEM to ALIGN bits. */
1701
1702 void
1703 set_mem_align (rtx mem, unsigned int align)
1704 {
1705 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1706 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1707 GET_MODE (mem));
1708 }
1709
1710 /* Set the expr for MEM to EXPR. */
1711
1712 void
1713 set_mem_expr (rtx mem, tree expr)
1714 {
1715 MEM_ATTRS (mem)
1716 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1717 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1718 }
1719
1720 /* Set the offset of MEM to OFFSET. */
1721
1722 void
1723 set_mem_offset (rtx mem, rtx offset)
1724 {
1725 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1726 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1727 GET_MODE (mem));
1728 }
1729
1730 /* Set the size of MEM to SIZE. */
1731
1732 void
1733 set_mem_size (rtx mem, rtx size)
1734 {
1735 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1736 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1737 GET_MODE (mem));
1738 }
1739 \f
1740 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1741 and its address changed to ADDR. (VOIDmode means don't change the mode.
1742 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1743 returned memory location is required to be valid. The memory
1744 attributes are not changed. */
1745
1746 static rtx
1747 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1748 {
1749 rtx new;
1750
1751 if (GET_CODE (memref) != MEM)
1752 abort ();
1753 if (mode == VOIDmode)
1754 mode = GET_MODE (memref);
1755 if (addr == 0)
1756 addr = XEXP (memref, 0);
1757 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1758 && (!validate || memory_address_p (mode, addr)))
1759 return memref;
1760
1761 if (validate)
1762 {
1763 if (reload_in_progress || reload_completed)
1764 {
1765 if (! memory_address_p (mode, addr))
1766 abort ();
1767 }
1768 else
1769 addr = memory_address (mode, addr);
1770 }
1771
1772 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1773 return memref;
1774
1775 new = gen_rtx_MEM (mode, addr);
1776 MEM_COPY_ATTRIBUTES (new, memref);
1777 return new;
1778 }
1779
1780 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1781 way we are changing MEMREF, so we only preserve the alias set. */
1782
1783 rtx
1784 change_address (rtx memref, enum machine_mode mode, rtx addr)
1785 {
1786 rtx new = change_address_1 (memref, mode, addr, 1), size;
1787 enum machine_mode mmode = GET_MODE (new);
1788 unsigned int align;
1789
1790 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1791 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1792
1793 /* If there are no changes, just return the original memory reference. */
1794 if (new == memref)
1795 {
1796 if (MEM_ATTRS (memref) == 0
1797 || (MEM_EXPR (memref) == NULL
1798 && MEM_OFFSET (memref) == NULL
1799 && MEM_SIZE (memref) == size
1800 && MEM_ALIGN (memref) == align))
1801 return new;
1802
1803 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1804 MEM_COPY_ATTRIBUTES (new, memref);
1805 }
1806
1807 MEM_ATTRS (new)
1808 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1809
1810 return new;
1811 }
1812
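/* Illustrative sketch (editorial note, not part of the original source):
   a typical use is retyping a BLKmode reference for word-at-a-time
   access; SRC, ADDR and I are hypothetical locals:

	word = change_address (src, SImode, plus_constant (addr, i));

   Only the alias set survives on the result; expr, offset and size
   are dropped because the caller has not said how the new reference
   relates to SRC.  */
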
1813 /* Return a memory reference like MEMREF, but with its mode changed
1814 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1815 nonzero, the memory address is forced to be valid.
1816 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1817 and the caller is responsible for adjusting MEMREF's base register. */
1818
1819 rtx
1820 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1821 int validate, int adjust)
1822 {
1823 rtx addr = XEXP (memref, 0);
1824 rtx new;
1825 rtx memoffset = MEM_OFFSET (memref);
1826 rtx size = 0;
1827 unsigned int memalign = MEM_ALIGN (memref);
1828
1829 /* If there are no changes, just return the original memory reference. */
1830 if (mode == GET_MODE (memref) && !offset
1831 && (!validate || memory_address_p (mode, addr)))
1832 return memref;
1833
1834 /* ??? Prefer to create garbage instead of creating shared rtl.
1835 This may happen even if offset is nonzero -- consider
1836 (plus (plus reg reg) const_int) -- so do this always. */
1837 addr = copy_rtx (addr);
1838
1839 if (adjust)
1840 {
1841 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1842 object, we can merge it into the LO_SUM. */
1843 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1844 && offset >= 0
1845 && (unsigned HOST_WIDE_INT) offset
1846 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1847 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1848 plus_constant (XEXP (addr, 1), offset));
1849 else
1850 addr = plus_constant (addr, offset);
1851 }
1852
1853 new = change_address_1 (memref, mode, addr, validate);
1854
1855 /* Compute the new values of the memory attributes due to this adjustment.
1856 We add the offsets and update the alignment. */
1857 if (memoffset)
1858 memoffset = GEN_INT (offset + INTVAL (memoffset));
1859
1860 /* Compute the new alignment by taking the MIN of the alignment and the
1861 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1862 is zero. */
1863 if (offset != 0)
1864 memalign
1865 = MIN (memalign,
1866 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1867
1868 /* We can compute the size in a number of ways. */
1869 if (GET_MODE (new) != BLKmode)
1870 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1871 else if (MEM_SIZE (memref))
1872 size = plus_constant (MEM_SIZE (memref), -offset);
1873
1874 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1875 memoffset, size, memalign, GET_MODE (new));
1876
1877 /* At some point, we should validate that this offset is within the object,
1878 if all the appropriate values are known. */
1879 return new;
1880 }
1881
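/* Illustrative sketch (editorial note, not part of the original source):
   callers normally reach adjust_address_1 through the adjust_address
   and adjust_address_nv wrappers in expr.h, e.g. to pick the third
   byte out of a hypothetical SImode reference MEM:

	byte = adjust_address (mem, QImode, 3);

   so that the code above can scale the recorded offset, size and
   alignment to match.  */
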
1882 /* Return a memory reference like MEMREF, but with its mode changed
1883 to MODE and its address changed to ADDR, which is assumed to be
1884 MEMREF offset by OFFSET bytes. If VALIDATE is
1885 nonzero, the memory address is forced to be valid. */
1886
1887 rtx
1888 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1889 HOST_WIDE_INT offset, int validate)
1890 {
1891 memref = change_address_1 (memref, VOIDmode, addr, validate);
1892 return adjust_address_1 (memref, mode, offset, validate, 0);
1893 }
1894
1895 /* Return a memory reference like MEMREF, but whose address is changed by
1896 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1897 known to be in OFFSET (possibly 1). */
1898
1899 rtx
1900 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1901 {
1902 rtx new, addr = XEXP (memref, 0);
1903
1904 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1905
1906 /* At this point we don't know _why_ the address is invalid. It
1907 could have secondary memory references, multiplies or anything.
1908
1909 However, if we did go and rearrange things, we can wind up not
1910 being able to recognize the magic around pic_offset_table_rtx.
1911 This stuff is fragile, and is yet another example of why it is
1912 bad to expose PIC machinery too early. */
1913 if (! memory_address_p (GET_MODE (memref), new)
1914 && GET_CODE (addr) == PLUS
1915 && XEXP (addr, 0) == pic_offset_table_rtx)
1916 {
1917 addr = force_reg (GET_MODE (addr), addr);
1918 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1919 }
1920
1921 update_temp_slot_address (XEXP (memref, 0), new);
1922 new = change_address_1 (memref, VOIDmode, new, 1);
1923
1924 /* If there are no changes, just return the original memory reference. */
1925 if (new == memref)
1926 return new;
1927
1928 /* Update the alignment to reflect the offset. Reset the offset and
1929 size, which we no longer know. */
1930 MEM_ATTRS (new)
1931 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1932 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1933 GET_MODE (new));
1934 return new;
1935 }
1936
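/* Illustrative sketch (editorial note, not part of the original source):
   this is the variable-offset counterpart of adjust_address_1.  For a
   hypothetical element access whose index register IDX is scaled by 4:

	elt = offset_address (mem, gen_rtx_MULT (Pmode, idx, GEN_INT (4)), 4);

   Passing POW2 == 4 tells the code above the offset is a multiple of
   4 bytes, so the recorded alignment degrades only to
   4 * BITS_PER_UNIT.  */
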
1937 /* Return a memory reference like MEMREF, but with its address changed to
1938 ADDR. The caller is asserting that the actual piece of memory pointed
1939 to is the same, just the form of the address is being changed, such as
1940 by putting something into a register. */
1941
1942 rtx
1943 replace_equiv_address (rtx memref, rtx addr)
1944 {
1945 /* change_address_1 copies the memory attribute structure without change
1946 and that's exactly what we want here. */
1947 update_temp_slot_address (XEXP (memref, 0), addr);
1948 return change_address_1 (memref, VOIDmode, addr, 1);
1949 }
1950
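/* Illustrative sketch (editorial note, not part of the original source):
   the classic use is legitimizing a known-equivalent address:

	mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));

   The register provably holds the same address, so every memory
   attribute of MEM remains valid on the result.  */
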
1951 /* Likewise, but the reference is not required to be valid. */
1952
1953 rtx
1954 replace_equiv_address_nv (rtx memref, rtx addr)
1955 {
1956 return change_address_1 (memref, VOIDmode, addr, 0);
1957 }
1958
1959 /* Return a memory reference like MEMREF, but with its mode widened to
1960 MODE and offset by OFFSET. This would be used by targets that e.g.
1961 cannot issue QImode memory operations and have to use SImode memory
1962 operations plus masking logic. */
1963
1964 rtx
1965 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
1966 {
1967 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1968 tree expr = MEM_EXPR (new);
1969 rtx memoffset = MEM_OFFSET (new);
1970 unsigned int size = GET_MODE_SIZE (mode);
1971
1972 /* If there are no changes, just return the original memory reference. */
1973 if (new == memref)
1974 return new;
1975
1976 /* If we don't know what offset we were at within the expression, then
1977 we can't know if we've overstepped the bounds. */
1978 if (! memoffset)
1979 expr = NULL_TREE;
1980
1981 while (expr)
1982 {
1983 if (TREE_CODE (expr) == COMPONENT_REF)
1984 {
1985 tree field = TREE_OPERAND (expr, 1);
1986
1987 if (! DECL_SIZE_UNIT (field))
1988 {
1989 expr = NULL_TREE;
1990 break;
1991 }
1992
1993 /* Is the field at least as large as the access? If so, ok,
1994 otherwise strip back to the containing structure. */
1995 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
1996 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
1997 && INTVAL (memoffset) >= 0)
1998 break;
1999
2000 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2001 {
2002 expr = NULL_TREE;
2003 break;
2004 }
2005
2006 expr = TREE_OPERAND (expr, 0);
2007 memoffset = (GEN_INT (INTVAL (memoffset)
2008 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2009 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2010 / BITS_PER_UNIT)));
2011 }
2012 /* Similarly for the decl. */
2013 else if (DECL_P (expr)
2014 && DECL_SIZE_UNIT (expr)
2015 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2016 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2017 && (! memoffset || INTVAL (memoffset) >= 0))
2018 break;
2019 else
2020 {
2021 /* The widened memory access overflows the expression, which means
2022 that it could alias another expression. Zap it. */
2023 expr = NULL_TREE;
2024 break;
2025 }
2026 }
2027
2028 if (! expr)
2029 memoffset = NULL_RTX;
2030
2031 /* The widened memory may alias other stuff, so zap the alias set. */
2032 /* ??? Maybe use get_alias_set on any remaining expression. */
2033
2034 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2035 MEM_ALIGN (new), mode);
2036
2037 return new;
2038 }
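
/* Illustrative sketch (editorial note, not part of the original source):
   a target lacking byte loads might widen a hypothetical QImode
   reference MEM to a full word and mask afterwards:

	wide = widen_memory_access (mem, SImode, 0);

   The checks above then verify the containing field or decl is large
   enough; if not, expr and offset are zapped so aliasing stays safe.  */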
2039 \f
2040 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2041
2042 rtx
2043 gen_label_rtx (void)
2044 {
2045 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2046 NULL, label_num++, NULL);
2047 }
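
/* Illustrative sketch (editorial note, not part of the original source):
   the usual pattern pairs a fresh label with a branch and its target:

	rtx label = gen_label_rtx ();
	emit_jump_insn (gen_jump (label));
	...
	emit_label (label);

   gen_label_rtx only creates the CODE_LABEL; it reaches the insn
   stream when emit_label adds it.  */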
2048 \f
2049 /* For procedure integration. */
2050
2051 /* Install new pointers to the first and last insns in the chain.
2052 Also, set cur_insn_uid to one higher than the last in use.
2053 Used for an inline-procedure after copying the insn chain. */
2054
2055 void
2056 set_new_first_and_last_insn (rtx first, rtx last)
2057 {
2058 rtx insn;
2059
2060 first_insn = first;
2061 last_insn = last;
2062 cur_insn_uid = 0;
2063
2064 for (insn = first; insn; insn = NEXT_INSN (insn))
2065 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2066
2067 cur_insn_uid++;
2068 }
2069
2070 /* Set the last label number found in the current function.
2071 This is used when belatedly compiling an inline function. */
2072
2073 void
2074 set_new_last_label_num (int last)
2075 {
2076 base_label_num = label_num;
2077 last_label_num = last;
2078 }
2079 \f
2080 /* Restore all variables describing the current status from the structure *P.
2081 This is used after a nested function. */
2082
2083 void
2084 restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
2085 {
2086 last_label_num = 0;
2087 }
2088 \f
2089 /* Go through all the RTL insn bodies and copy any invalid shared
2090 structure. This routine should only be called once. */
2091
2092 void
2093 unshare_all_rtl (tree fndecl, rtx insn)
2094 {
2095 tree decl;
2096
2097 /* Make sure that virtual parameters are not shared. */
2098 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2099 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2100
2101 /* Make sure that virtual stack slots are not shared. */
2102 unshare_all_decls (DECL_INITIAL (fndecl));
2103
2104 /* Unshare just about everything else. */
2105 unshare_all_rtl_in_chain (insn);
2106
2107 /* Make sure the addresses of stack slots found outside the insn chain
2108 (such as, in DECL_RTL of a variable) are not shared
2109 with the insn chain.
2110
2111 This special care is necessary when the stack slot MEM does not
2112 actually appear in the insn chain. If it does appear, its address
2113 is unshared from all else at that point. */
2114 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2115 }
2116
2117 /* Go through all the RTL insn bodies and copy any invalid shared
2118 structure, again. This is a fairly expensive thing to do so it
2119 should be done sparingly. */
2120
2121 void
2122 unshare_all_rtl_again (rtx insn)
2123 {
2124 rtx p;
2125 tree decl;
2126
2127 for (p = insn; p; p = NEXT_INSN (p))
2128 if (INSN_P (p))
2129 {
2130 reset_used_flags (PATTERN (p));
2131 reset_used_flags (REG_NOTES (p));
2132 reset_used_flags (LOG_LINKS (p));
2133 }
2134
2135 /* Make sure that virtual stack slots are not shared. */
2136 reset_used_decls (DECL_INITIAL (cfun->decl));
2137
2138 /* Make sure that virtual parameters are not shared. */
2139 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2140 reset_used_flags (DECL_RTL (decl));
2141
2142 reset_used_flags (stack_slot_list);
2143
2144 unshare_all_rtl (cfun->decl, insn);
2145 }
2146
2147 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2148 Recursively does the same for subexpressions. */
2149
2150 static void
2151 verify_rtx_sharing (rtx orig, rtx insn)
2152 {
2153 rtx x = orig;
2154 int i;
2155 enum rtx_code code;
2156 const char *format_ptr;
2157
2158 if (x == 0)
2159 return;
2160
2161 code = GET_CODE (x);
2162
2163 /* These types may be freely shared. */
2164
2165 switch (code)
2166 {
2167 case REG:
2168 case QUEUED:
2169 case CONST_INT:
2170 case CONST_DOUBLE:
2171 case CONST_VECTOR:
2172 case SYMBOL_REF:
2173 case LABEL_REF:
2174 case CODE_LABEL:
2175 case PC:
2176 case CC0:
2177 case SCRATCH:
2178 return;
2179 /* A SCRATCH must be shared because it represents a distinct value. */
2180 case CLOBBER:
2181 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2182 return;
2183 break;
2184
2185 case CONST:
2186 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2187 a LABEL_REF, it isn't sharable. */
2188 if (GET_CODE (XEXP (x, 0)) == PLUS
2189 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2190 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2191 return;
2192 break;
2193
2194 case MEM:
2195 /* A MEM is allowed to be shared if its address is constant. */
2196 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2197 || reload_completed || reload_in_progress)
2198 return;
2199
2200 break;
2201
2202 default:
2203 break;
2204 }
2205
2206 /* This rtx may not be shared. If it has already been seen,
2207 report the invalid sharing and abort. */
2208
2209 if (RTX_FLAG (x, used))
2210 {
2211 error ("Invalid rtl sharing found in the insn");
2212 debug_rtx (insn);
2213 error ("Shared rtx");
2214 debug_rtx (x);
2215 abort ();
2216 }
2217 RTX_FLAG (x, used) = 1;
2218
2219 /* Now scan the subexpressions recursively. */
2220
2221 format_ptr = GET_RTX_FORMAT (code);
2222
2223 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2224 {
2225 switch (*format_ptr++)
2226 {
2227 case 'e':
2228 verify_rtx_sharing (XEXP (x, i), insn);
2229 break;
2230
2231 case 'E':
2232 if (XVEC (x, i) != NULL)
2233 {
2234 int j;
2235 int len = XVECLEN (x, i);
2236
2237 for (j = 0; j < len; j++)
2238 {
2239 /* We allow sharing of ASM_OPERANDS inside a single instruction. */
2240 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2241 && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
2242 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2243 else
2244 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2245 }
2246 }
2247 break;
2248 }
2249 }
2250 return;
2251 }
2252
2253 /* Go through all the RTL insn bodies and check that there is no unexpected
2254 sharing between the subexpressions. */
2255
2256 void
2257 verify_rtl_sharing (void)
2258 {
2259 rtx p;
2260
2261 for (p = get_insns (); p; p = NEXT_INSN (p))
2262 if (INSN_P (p))
2263 {
2264 reset_used_flags (PATTERN (p));
2265 reset_used_flags (REG_NOTES (p));
2266 reset_used_flags (LOG_LINKS (p));
2267 }
2268
2269 for (p = get_insns (); p; p = NEXT_INSN (p))
2270 if (INSN_P (p))
2271 {
2272 verify_rtx_sharing (PATTERN (p), p);
2273 verify_rtx_sharing (REG_NOTES (p), p);
2274 verify_rtx_sharing (LOG_LINKS (p), p);
2275 }
2276 }
2277
2278 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2279 Assumes the mark bits are cleared at entry. */
2280
2281 void
2282 unshare_all_rtl_in_chain (rtx insn)
2283 {
2284 for (; insn; insn = NEXT_INSN (insn))
2285 if (INSN_P (insn))
2286 {
2287 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2288 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2289 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2290 }
2291 }
2292
2293 /* Go through all virtual stack slots of a function and copy any
2294 shared structure. */
2295 static void
2296 unshare_all_decls (tree blk)
2297 {
2298 tree t;
2299
2300 /* Copy shared decls. */
2301 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2302 if (DECL_RTL_SET_P (t))
2303 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2304
2305 /* Now process sub-blocks. */
2306 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2307 unshare_all_decls (t);
2308 }
2309
2310 /* Go through all virtual stack slots of a function and mark them as
2311 not shared. */
2312 static void
2313 reset_used_decls (tree blk)
2314 {
2315 tree t;
2316
2317 /* Mark decls. */
2318 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2319 if (DECL_RTL_SET_P (t))
2320 reset_used_flags (DECL_RTL (t));
2321
2322 /* Now process sub-blocks. */
2323 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2324 reset_used_decls (t);
2325 }
2326
2327 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2328 placed in the result directly, rather than being copied. MAY_SHARE is
2329 either a MEM or an EXPR_LIST of MEMs. */
2330
2331 rtx
2332 copy_most_rtx (rtx orig, rtx may_share)
2333 {
2334 rtx copy;
2335 int i, j;
2336 RTX_CODE code;
2337 const char *format_ptr;
2338
2339 if (orig == may_share
2340 || (GET_CODE (may_share) == EXPR_LIST
2341 && in_expr_list_p (may_share, orig)))
2342 return orig;
2343
2344 code = GET_CODE (orig);
2345
2346 switch (code)
2347 {
2348 case REG:
2349 case QUEUED:
2350 case CONST_INT:
2351 case CONST_DOUBLE:
2352 case CONST_VECTOR:
2353 case SYMBOL_REF:
2354 case CODE_LABEL:
2355 case PC:
2356 case CC0:
2357 return orig;
2358 default:
2359 break;
2360 }
2361
2362 copy = rtx_alloc (code);
2363 PUT_MODE (copy, GET_MODE (orig));
2364 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2365 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2366 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2367 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2368 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2369
2370 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2371
2372 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2373 {
2374 switch (*format_ptr++)
2375 {
2376 case 'e':
2377 XEXP (copy, i) = XEXP (orig, i);
2378 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2379 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2380 break;
2381
2382 case 'u':
2383 XEXP (copy, i) = XEXP (orig, i);
2384 break;
2385
2386 case 'E':
2387 case 'V':
2388 XVEC (copy, i) = XVEC (orig, i);
2389 if (XVEC (orig, i) != NULL)
2390 {
2391 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2392 for (j = 0; j < XVECLEN (copy, i); j++)
2393 XVECEXP (copy, i, j)
2394 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2395 }
2396 break;
2397
2398 case 'w':
2399 XWINT (copy, i) = XWINT (orig, i);
2400 break;
2401
2402 case 'n':
2403 case 'i':
2404 XINT (copy, i) = XINT (orig, i);
2405 break;
2406
2407 case 't':
2408 XTREE (copy, i) = XTREE (orig, i);
2409 break;
2410
2411 case 's':
2412 case 'S':
2413 XSTR (copy, i) = XSTR (orig, i);
2414 break;
2415
2416 case '0':
2417 X0ANY (copy, i) = X0ANY (orig, i);
2418 break;
2419
2420 default:
2421 abort ();
2422 }
2423 }
2424 return copy;
2425 }
2426
2427 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2428 Recursively does the same for subexpressions. Uses
2429 copy_rtx_if_shared_1 to reduce stack space. */
2430
2431 rtx
2432 copy_rtx_if_shared (rtx orig)
2433 {
2434 copy_rtx_if_shared_1 (&orig);
2435 return orig;
2436 }
2437
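/* Illustrative sketch (editorial note, not part of the original source):
   the used-flag protocol matters here.  A caller first clears the
   flags over everything that may legitimately share structure, then
   copies; PAT is a hypothetical pattern:

	reset_used_flags (pat);
	pat = copy_rtx_if_shared (pat);

   unshare_all_rtl_again above applies the same sequence to whole
   insn chains.  */
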
2438 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2439 use. Recursively does the same for subexpressions. */
2440
2441 static void
2442 copy_rtx_if_shared_1 (rtx *orig1)
2443 {
2444 rtx x;
2445 int i;
2446 enum rtx_code code;
2447 rtx *last_ptr;
2448 const char *format_ptr;
2449 int copied = 0;
2450 int length;
2451
2452 /* Repeat is used to turn tail-recursion into iteration. */
2453 repeat:
2454 x = *orig1;
2455
2456 if (x == 0)
2457 return;
2458
2459 code = GET_CODE (x);
2460
2461 /* These types may be freely shared. */
2462
2463 switch (code)
2464 {
2465 case REG:
2466 case QUEUED:
2467 case CONST_INT:
2468 case CONST_DOUBLE:
2469 case CONST_VECTOR:
2470 case SYMBOL_REF:
2471 case LABEL_REF:
2472 case CODE_LABEL:
2473 case PC:
2474 case CC0:
2475 case SCRATCH:
2476 /* A SCRATCH must be shared because it represents a distinct value. */
2477 return;
2478 case CLOBBER:
2479 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2480 return;
2481 break;
2482
2483 case CONST:
2484 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2485 a LABEL_REF, it isn't sharable. */
2486 if (GET_CODE (XEXP (x, 0)) == PLUS
2487 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2488 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2489 return;
2490 break;
2491
2492 case INSN:
2493 case JUMP_INSN:
2494 case CALL_INSN:
2495 case NOTE:
2496 case BARRIER:
2497 /* The chain of insns is not being copied. */
2498 return;
2499
2500 default:
2501 break;
2502 }
2503
2504 /* This rtx may not be shared. If it has already been seen,
2505 replace it with a copy of itself. */
2506
2507 if (RTX_FLAG (x, used))
2508 {
2509 rtx copy;
2510
2511 copy = rtx_alloc (code);
2512 memcpy (copy, x, RTX_SIZE (code));
2513 x = copy;
2514 copied = 1;
2515 }
2516 RTX_FLAG (x, used) = 1;
2517
2518 /* Now scan the subexpressions recursively.
2519 We can store any replaced subexpressions directly into X
2520 since we know X is not shared! Any vectors in X
2521 must be copied if X was copied. */
2522
2523 format_ptr = GET_RTX_FORMAT (code);
2524 length = GET_RTX_LENGTH (code);
2525 last_ptr = NULL;
2526
2527 for (i = 0; i < length; i++)
2528 {
2529 switch (*format_ptr++)
2530 {
2531 case 'e':
2532 if (last_ptr)
2533 copy_rtx_if_shared_1 (last_ptr);
2534 last_ptr = &XEXP (x, i);
2535 break;
2536
2537 case 'E':
2538 if (XVEC (x, i) != NULL)
2539 {
2540 int j;
2541 int len = XVECLEN (x, i);
2542
2543 /* Copy the vector iff we copied the rtx and the length
2544 is nonzero. */
2545 if (copied && len > 0)
2546 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2547
2548 /* Call recursively on all inside the vector. */
2549 for (j = 0; j < len; j++)
2550 {
2551 if (last_ptr)
2552 copy_rtx_if_shared_1 (last_ptr);
2553 last_ptr = &XVECEXP (x, i, j);
2554 }
2555 }
2556 break;
2557 }
2558 }
2559 *orig1 = x;
2560 if (last_ptr)
2561 {
2562 orig1 = last_ptr;
2563 goto repeat;
2564 }
2565 return;
2566 }
2567
2568 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2569 to look for shared sub-parts. */
2570
2571 void
2572 reset_used_flags (rtx x)
2573 {
2574 int i, j;
2575 enum rtx_code code;
2576 const char *format_ptr;
2577 int length;
2578
2579 /* Repeat is used to turn tail-recursion into iteration. */
2580 repeat:
2581 if (x == 0)
2582 return;
2583
2584 code = GET_CODE (x);
2585
2586 /* These types may be freely shared so we needn't do any resetting
2587 for them. */
2588
2589 switch (code)
2590 {
2591 case REG:
2592 case QUEUED:
2593 case CONST_INT:
2594 case CONST_DOUBLE:
2595 case CONST_VECTOR:
2596 case SYMBOL_REF:
2597 case CODE_LABEL:
2598 case PC:
2599 case CC0:
2600 return;
2601
2602 case INSN:
2603 case JUMP_INSN:
2604 case CALL_INSN:
2605 case NOTE:
2606 case LABEL_REF:
2607 case BARRIER:
2608 /* The chain of insns is not being copied. */
2609 return;
2610
2611 default:
2612 break;
2613 }
2614
2615 RTX_FLAG (x, used) = 0;
2616
2617 format_ptr = GET_RTX_FORMAT (code);
2618 length = GET_RTX_LENGTH (code);
2619
2620 for (i = 0; i < length; i++)
2621 {
2622 switch (*format_ptr++)
2623 {
2624 case 'e':
2625 if (i == length - 1)
2626 {
2627 x = XEXP (x, i);
2628 goto repeat;
2629 }
2630 reset_used_flags (XEXP (x, i));
2631 break;
2632
2633 case 'E':
2634 for (j = 0; j < XVECLEN (x, i); j++)
2635 reset_used_flags (XVECEXP (x, i, j));
2636 break;
2637 }
2638 }
2639 }
2640
2641 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2642 to look for shared sub-parts. */
2643
2644 void
2645 set_used_flags (rtx x)
2646 {
2647 int i, j;
2648 enum rtx_code code;
2649 const char *format_ptr;
2650
2651 if (x == 0)
2652 return;
2653
2654 code = GET_CODE (x);
2655
2656 /* These types may be freely shared so we needn't do any resetting
2657 for them. */
2658
2659 switch (code)
2660 {
2661 case REG:
2662 case QUEUED:
2663 case CONST_INT:
2664 case CONST_DOUBLE:
2665 case CONST_VECTOR:
2666 case SYMBOL_REF:
2667 case CODE_LABEL:
2668 case PC:
2669 case CC0:
2670 return;
2671
2672 case INSN:
2673 case JUMP_INSN:
2674 case CALL_INSN:
2675 case NOTE:
2676 case LABEL_REF:
2677 case BARRIER:
2678 /* The chain of insns is not being copied. */
2679 return;
2680
2681 default:
2682 break;
2683 }
2684
2685 RTX_FLAG (x, used) = 1;
2686
2687 format_ptr = GET_RTX_FORMAT (code);
2688 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2689 {
2690 switch (*format_ptr++)
2691 {
2692 case 'e':
2693 set_used_flags (XEXP (x, i));
2694 break;
2695
2696 case 'E':
2697 for (j = 0; j < XVECLEN (x, i); j++)
2698 set_used_flags (XVECEXP (x, i, j));
2699 break;
2700 }
2701 }
2702 }
2703 \f
2704 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2705 Return X or the rtx for the pseudo reg the value of X was copied into.
2706 OTHER must be valid as a SET_DEST. */
2707
2708 rtx
2709 make_safe_from (rtx x, rtx other)
2710 {
2711 while (1)
2712 switch (GET_CODE (other))
2713 {
2714 case SUBREG:
2715 other = SUBREG_REG (other);
2716 break;
2717 case STRICT_LOW_PART:
2718 case SIGN_EXTEND:
2719 case ZERO_EXTEND:
2720 other = XEXP (other, 0);
2721 break;
2722 default:
2723 goto done;
2724 }
2725 done:
2726 if ((GET_CODE (other) == MEM
2727 && ! CONSTANT_P (x)
2728 && GET_CODE (x) != REG
2729 && GET_CODE (x) != SUBREG)
2730 || (GET_CODE (other) == REG
2731 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2732 || reg_mentioned_p (other, x))))
2733 {
2734 rtx temp = gen_reg_rtx (GET_MODE (x));
2735 emit_move_insn (temp, x);
2736 return temp;
2737 }
2738 return x;
2739 }
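
/* Illustrative sketch (editorial note, not part of the original source):
   before storing an intermediate result into OTHER, a caller copies
   any input the store could clobber; X and TARGET are hypothetical:

	x = make_safe_from (x, target);

   If X mentions TARGET (or TARGET is a MEM that may overlap X), X is
   first moved into a fresh pseudo; otherwise it comes back unchanged.  */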
2740 \f
2741 /* Emission of insns (adding them to the doubly-linked list). */
2742
2743 /* Return the first insn of the current sequence or current function. */
2744
2745 rtx
2746 get_insns (void)
2747 {
2748 return first_insn;
2749 }
2750
2751 /* Specify a new insn as the first in the chain. */
2752
2753 void
2754 set_first_insn (rtx insn)
2755 {
2756 if (PREV_INSN (insn) != 0)
2757 abort ();
2758 first_insn = insn;
2759 }
2760
2761 /* Return the last insn emitted in current sequence or current function. */
2762
2763 rtx
2764 get_last_insn (void)
2765 {
2766 return last_insn;
2767 }
2768
2769 /* Specify a new insn as the last in the chain. */
2770
2771 void
2772 set_last_insn (rtx insn)
2773 {
2774 if (NEXT_INSN (insn) != 0)
2775 abort ();
2776 last_insn = insn;
2777 }
2778
2779 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2780
2781 rtx
2782 get_last_insn_anywhere (void)
2783 {
2784 struct sequence_stack *stack;
2785 if (last_insn)
2786 return last_insn;
2787 for (stack = seq_stack; stack; stack = stack->next)
2788 if (stack->last != 0)
2789 return stack->last;
2790 return 0;
2791 }
2792
2793 /* Return the first nonnote insn emitted in current sequence or current
2794 function. This routine looks inside SEQUENCEs. */
2795
2796 rtx
2797 get_first_nonnote_insn (void)
2798 {
2799 rtx insn = first_insn;
2800
2801 while (insn)
2802 {
2803 insn = next_insn (insn);
2804 if (insn == 0 || GET_CODE (insn) != NOTE)
2805 break;
2806 }
2807
2808 return insn;
2809 }
2810
2811 /* Return the last nonnote insn emitted in current sequence or current
2812 function. This routine looks inside SEQUENCEs. */
2813
2814 rtx
2815 get_last_nonnote_insn (void)
2816 {
2817 rtx insn = last_insn;
2818
2819 while (insn)
2820 {
2821 insn = previous_insn (insn);
2822 if (insn == 0 || GET_CODE (insn) != NOTE)
2823 break;
2824 }
2825
2826 return insn;
2827 }
2828
2829 /* Return a number larger than any instruction's uid in this function. */
2830
2831 int
2832 get_max_uid (void)
2833 {
2834 return cur_insn_uid;
2835 }
2836
2837 /* Renumber instructions so that no instruction UIDs are wasted. */
2838
2839 void
2840 renumber_insns (FILE *stream)
2841 {
2842 rtx insn;
2843
2844 /* If we're not supposed to renumber instructions, don't. */
2845 if (!flag_renumber_insns)
2846 return;
2847
2848 /* If there aren't that many instructions, then it's not really
2849 worth renumbering them. */
2850 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2851 return;
2852
2853 cur_insn_uid = 1;
2854
2855 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2856 {
2857 if (stream)
2858 fprintf (stream, "Renumbering insn %d to %d\n",
2859 INSN_UID (insn), cur_insn_uid);
2860 INSN_UID (insn) = cur_insn_uid++;
2861 }
2862 }
2863 \f
2864 /* Return the next insn. If it is a SEQUENCE, return the first insn
2865 of the sequence. */
2866
2867 rtx
2868 next_insn (rtx insn)
2869 {
2870 if (insn)
2871 {
2872 insn = NEXT_INSN (insn);
2873 if (insn && GET_CODE (insn) == INSN
2874 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2875 insn = XVECEXP (PATTERN (insn), 0, 0);
2876 }
2877
2878 return insn;
2879 }
2880
2881 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2882 of the sequence. */
2883
2884 rtx
2885 previous_insn (rtx insn)
2886 {
2887 if (insn)
2888 {
2889 insn = PREV_INSN (insn);
2890 if (insn && GET_CODE (insn) == INSN
2891 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2892 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2893 }
2894
2895 return insn;
2896 }
2897
2898 /* Return the next insn after INSN that is not a NOTE. This routine does not
2899 look inside SEQUENCEs. */
2900
2901 rtx
2902 next_nonnote_insn (rtx insn)
2903 {
2904 while (insn)
2905 {
2906 insn = NEXT_INSN (insn);
2907 if (insn == 0 || GET_CODE (insn) != NOTE)
2908 break;
2909 }
2910
2911 return insn;
2912 }
2913
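/* Illustrative sketch (editorial note, not part of the original source):
   next_nonnote_insn above and its PREV counterpart below support
   loops such as

	for (insn = get_insns (); insn; insn = next_nonnote_insn (insn))
	  ...

   which visit only material insns of the current function.  */
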
2914 /* Return the previous insn before INSN that is not a NOTE. This routine does
2915 not look inside SEQUENCEs. */
2916
2917 rtx
2918 prev_nonnote_insn (rtx insn)
2919 {
2920 while (insn)
2921 {
2922 insn = PREV_INSN (insn);
2923 if (insn == 0 || GET_CODE (insn) != NOTE)
2924 break;
2925 }
2926
2927 return insn;
2928 }
2929
2930 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2931 or 0, if there is none. This routine does not look inside
2932 SEQUENCEs. */
2933
2934 rtx
2935 next_real_insn (rtx insn)
2936 {
2937 while (insn)
2938 {
2939 insn = NEXT_INSN (insn);
2940 if (insn == 0 || GET_CODE (insn) == INSN
2941 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2942 break;
2943 }
2944
2945 return insn;
2946 }
2947
2948 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2949 or 0, if there is none. This routine does not look inside
2950 SEQUENCEs. */
2951
2952 rtx
2953 prev_real_insn (rtx insn)
2954 {
2955 while (insn)
2956 {
2957 insn = PREV_INSN (insn);
2958 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2959 || GET_CODE (insn) == JUMP_INSN)
2960 break;
2961 }
2962
2963 return insn;
2964 }
2965
2966 /* Return the last CALL_INSN in the current list, or 0 if there is none.
2967 This routine does not look inside SEQUENCEs. */
2968
2969 rtx
2970 last_call_insn (void)
2971 {
2972 rtx insn;
2973
2974 for (insn = get_last_insn ();
2975 insn && GET_CODE (insn) != CALL_INSN;
2976 insn = PREV_INSN (insn))
2977 ;
2978
2979 return insn;
2980 }
2981
2982 /* Return nonzero if INSN really does something: a CALL_INSN or JUMP_INSN,
2983 or an INSN whose pattern is not merely a USE or CLOBBER once reload has
2984 completed (before reload, any INSN qualifies). */
2985
2986 int
2987 active_insn_p (rtx insn)
2988 {
2989 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2990 || (GET_CODE (insn) == INSN
2991 && (! reload_completed
2992 || (GET_CODE (PATTERN (insn)) != USE
2993 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2994 }
2995
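/* Find the next insn after INSN that really does something.  This
   routine does not look inside SEQUENCEs.  Until reload has completed,
   this is the same as next_real_insn.  */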
2996 rtx
2997 next_active_insn (rtx insn)
2998 {
2999 while (insn)
3000 {
3001 insn = NEXT_INSN (insn);
3002 if (insn == 0 || active_insn_p (insn))
3003 break;
3004 }
3005
3006 return insn;
3007 }
3008
3009 /* Find the last insn before INSN that really does something. This routine
3010 does not look inside SEQUENCEs. Until reload has completed, this is the
3011 same as prev_real_insn. */
3012
3013 rtx
3014 prev_active_insn (rtx insn)
3015 {
3016 while (insn)
3017 {
3018 insn = PREV_INSN (insn);
3019 if (insn == 0 || active_insn_p (insn))
3020 break;
3021 }
3022
3023 return insn;
3024 }
3025
3026 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3027
3028 rtx
3029 next_label (rtx insn)
3030 {
3031 while (insn)
3032 {
3033 insn = NEXT_INSN (insn);
3034 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3035 break;
3036 }
3037
3038 return insn;
3039 }
3040
3041 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3042
3043 rtx
3044 prev_label (rtx insn)
3045 {
3046 while (insn)
3047 {
3048 insn = PREV_INSN (insn);
3049 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3050 break;
3051 }
3052
3053 return insn;
3054 }
3055 \f
3056 #ifdef HAVE_cc0
3057 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3058 and REG_CC_USER notes so we can find it. */
3059
3060 void
3061 link_cc0_insns (rtx insn)
3062 {
3063 rtx user = next_nonnote_insn (insn);
3064
3065 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3066 user = XVECEXP (PATTERN (user), 0, 0);
3067
3068 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3069 REG_NOTES (user));
3070 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3071 }
3072
3073 /* Return the next insn that uses CC0 after INSN, which is assumed to
3074 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3075 applied to the result of this function should yield INSN).
3076
3077 Normally, this is simply the next insn. However, if a REG_CC_USER note
3078 is present, it contains the insn that uses CC0.
3079
3080 Return 0 if we can't find the insn. */
3081
3082 rtx
3083 next_cc0_user (rtx insn)
3084 {
3085 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3086
3087 if (note)
3088 return XEXP (note, 0);
3089
3090 insn = next_nonnote_insn (insn);
3091 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3092 insn = XVECEXP (PATTERN (insn), 0, 0);
3093
3094 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3095 return insn;
3096
3097 return 0;
3098 }
3099
3100 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3101 note, it is the previous insn. */
3102
3103 rtx
3104 prev_cc0_setter (rtx insn)
3105 {
3106 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3107
3108 if (note)
3109 return XEXP (note, 0);
3110
3111 insn = prev_nonnote_insn (insn);
3112 if (! sets_cc0_p (PATTERN (insn)))
3113 abort ();
3114
3115 return insn;
3116 }
3117 #endif
3118
3119 /* Increment the label uses for all labels present in rtx. */
3120
3121 static void
3122 mark_label_nuses (rtx x)
3123 {
3124 enum rtx_code code;
3125 int i, j;
3126 const char *fmt;
3127
3128 code = GET_CODE (x);
3129 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3130 LABEL_NUSES (XEXP (x, 0))++;
3131
3132 fmt = GET_RTX_FORMAT (code);
3133 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3134 {
3135 if (fmt[i] == 'e')
3136 mark_label_nuses (XEXP (x, i));
3137 else if (fmt[i] == 'E')
3138 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3139 mark_label_nuses (XVECEXP (x, i, j));
3140 }
3141 }
3142
3143 \f
3144 /* Try splitting insns that can be split for better scheduling.
3145 PAT is the pattern which might split.
3146 TRIAL is the insn providing PAT.
3147 LAST is nonzero if we should return the last insn of the sequence produced.
3148
3149 If this routine succeeds in splitting, it returns the first or last
3150 replacement insn depending on the value of LAST. Otherwise, it
3151 returns TRIAL. If the insn to be returned can be split, it will be. */
3152
3153 rtx
3154 try_split (rtx pat, rtx trial, int last)
3155 {
3156 rtx before = PREV_INSN (trial);
3157 rtx after = NEXT_INSN (trial);
3158 int has_barrier = 0;
3159 rtx tem;
3160 rtx note, seq;
3161 int probability;
3162 rtx insn_last, insn;
3163 int njumps = 0;
3164
3165 if (any_condjump_p (trial)
3166 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3167 split_branch_probability = INTVAL (XEXP (note, 0));
3168 probability = split_branch_probability;
3169
3170 seq = split_insns (pat, trial);
3171
3172 split_branch_probability = -1;
3173
3174 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3175 We may need to handle this specially. */
3176 if (after && GET_CODE (after) == BARRIER)
3177 {
3178 has_barrier = 1;
3179 after = NEXT_INSN (after);
3180 }
3181
3182 if (!seq)
3183 return trial;
3184
3185 /* Avoid infinite loop if any insn of the result matches
3186 the original pattern. */
3187 insn_last = seq;
3188 while (1)
3189 {
3190 if (INSN_P (insn_last)
3191 && rtx_equal_p (PATTERN (insn_last), pat))
3192 return trial;
3193 if (!NEXT_INSN (insn_last))
3194 break;
3195 insn_last = NEXT_INSN (insn_last);
3196 }
3197
3198 /* Mark labels. */
3199 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3200 {
3201 if (GET_CODE (insn) == JUMP_INSN)
3202 {
3203 mark_jump_label (PATTERN (insn), insn, 0);
3204 njumps++;
3205 if (probability != -1
3206 && any_condjump_p (insn)
3207 && !find_reg_note (insn, REG_BR_PROB, 0))
3208 {
3209 /* We can preserve the REG_BR_PROB notes only if exactly
3210 one jump is created, otherwise the machine description
3211 is responsible for this step using the
3212 split_branch_probability variable. */
3213 if (njumps != 1)
3214 abort ();
3215 REG_NOTES (insn)
3216 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3217 GEN_INT (probability),
3218 REG_NOTES (insn));
3219 }
3220 }
3221 }
3222
3223 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3224 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3225 if (GET_CODE (trial) == CALL_INSN)
3226 {
3227 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3228 if (GET_CODE (insn) == CALL_INSN)
3229 {
3230 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3231 while (*p)
3232 p = &XEXP (*p, 1);
3233 *p = CALL_INSN_FUNCTION_USAGE (trial);
3234 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3235 }
3236 }
3237
3238 /* Copy notes, particularly those related to the CFG. */
3239 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3240 {
3241 switch (REG_NOTE_KIND (note))
3242 {
3243 case REG_EH_REGION:
3244 insn = insn_last;
3245 while (insn != NULL_RTX)
3246 {
3247 if (GET_CODE (insn) == CALL_INSN
3248 || (flag_non_call_exceptions
3249 && may_trap_p (PATTERN (insn))))
3250 REG_NOTES (insn)
3251 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3252 XEXP (note, 0),
3253 REG_NOTES (insn));
3254 insn = PREV_INSN (insn);
3255 }
3256 break;
3257
3258 case REG_NORETURN:
3259 case REG_SETJMP:
3260 case REG_ALWAYS_RETURN:
3261 insn = insn_last;
3262 while (insn != NULL_RTX)
3263 {
3264 if (GET_CODE (insn) == CALL_INSN)
3265 REG_NOTES (insn)
3266 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3267 XEXP (note, 0),
3268 REG_NOTES (insn));
3269 insn = PREV_INSN (insn);
3270 }
3271 break;
3272
3273 case REG_NON_LOCAL_GOTO:
3274 insn = insn_last;
3275 while (insn != NULL_RTX)
3276 {
3277 if (GET_CODE (insn) == JUMP_INSN)
3278 REG_NOTES (insn)
3279 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3280 XEXP (note, 0),
3281 REG_NOTES (insn));
3282 insn = PREV_INSN (insn);
3283 }
3284 break;
3285
3286 default:
3287 break;
3288 }
3289 }
3290
3291 /* If there are LABELS inside the split insns, increment the
3292 usage count so we don't delete the label. */
3293 if (GET_CODE (trial) == INSN)
3294 {
3295 insn = insn_last;
3296 while (insn != NULL_RTX)
3297 {
3298 if (GET_CODE (insn) == INSN)
3299 mark_label_nuses (PATTERN (insn));
3300
3301 insn = PREV_INSN (insn);
3302 }
3303 }
3304
3305 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3306
3307 delete_insn (trial);
3308 if (has_barrier)
3309 emit_barrier_after (tem);
3310
3311 /* Recursively call try_split for each new insn created; by the
3312 time control returns here that insn will be fully split, so
3313 set LAST and continue from the insn after the one returned.
3314 We can't use next_active_insn here since AFTER may be a note.
3315 Ignore deleted insns, which can occur if not optimizing. */
3316 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3317 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3318 tem = try_split (PATTERN (tem), tem, 1);
3319
3320 /* Return either the first or the last insn, depending on which was
3321 requested. */
3322 return last
3323 ? (after ? PREV_INSN (after) : last_insn)
3324 : NEXT_INSN (before);
3325 }
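
/* Illustrative sketch (editorial note, not part of the original source):
   a pass that wants insns broken up as far as possible runs each one
   through try_split:

	insn = try_split (PATTERN (insn), insn, 1);

   If the machine description provides no split, the original insn
   comes back; otherwise INSN now points at the last replacement.  */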
3326 \f
3327 /* Make and return an INSN rtx, initializing all its slots.
3328 Store PATTERN in the pattern slots. */
3329
3330 rtx
3331 make_insn_raw (rtx pattern)
3332 {
3333 rtx insn;
3334
3335 insn = rtx_alloc (INSN);
3336
3337 INSN_UID (insn) = cur_insn_uid++;
3338 PATTERN (insn) = pattern;
3339 INSN_CODE (insn) = -1;
3340 LOG_LINKS (insn) = NULL;
3341 REG_NOTES (insn) = NULL;
3342 INSN_LOCATOR (insn) = 0;
3343 BLOCK_FOR_INSN (insn) = NULL;
3344
3345 #ifdef ENABLE_RTL_CHECKING
3346 if (insn
3347 && INSN_P (insn)
3348 && (returnjump_p (insn)
3349 || (GET_CODE (insn) == SET
3350 && SET_DEST (insn) == pc_rtx)))
3351 {
3352 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3353 debug_rtx (insn);
3354 }
3355 #endif
3356
3357 return insn;
3358 }
3359
3360 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3361
3362 static rtx
3363 make_jump_insn_raw (rtx pattern)
3364 {
3365 rtx insn;
3366
3367 insn = rtx_alloc (JUMP_INSN);
3368 INSN_UID (insn) = cur_insn_uid++;
3369
3370 PATTERN (insn) = pattern;
3371 INSN_CODE (insn) = -1;
3372 LOG_LINKS (insn) = NULL;
3373 REG_NOTES (insn) = NULL;
3374 JUMP_LABEL (insn) = NULL;
3375 INSN_LOCATOR (insn) = 0;
3376 BLOCK_FOR_INSN (insn) = NULL;
3377
3378 return insn;
3379 }
3380
3381 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3382
3383 static rtx
3384 make_call_insn_raw (rtx pattern)
3385 {
3386 rtx insn;
3387
3388 insn = rtx_alloc (CALL_INSN);
3389 INSN_UID (insn) = cur_insn_uid++;
3390
3391 PATTERN (insn) = pattern;
3392 INSN_CODE (insn) = -1;
3393 LOG_LINKS (insn) = NULL;
3394 REG_NOTES (insn) = NULL;
3395 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3396 INSN_LOCATOR (insn) = 0;
3397 BLOCK_FOR_INSN (insn) = NULL;
3398
3399 return insn;
3400 }
3401 \f
3402 /* Add INSN to the end of the doubly-linked list.
3403 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3404
3405 void
3406 add_insn (rtx insn)
3407 {
3408 PREV_INSN (insn) = last_insn;
3409 NEXT_INSN (insn) = 0;
3410
3411 if (NULL != last_insn)
3412 NEXT_INSN (last_insn) = insn;
3413
3414 if (NULL == first_insn)
3415 first_insn = insn;
3416
3417 last_insn = insn;
3418 }
3419
3420 /* Add INSN into the doubly-linked list after insn AFTER. This and
3421 the next should be the only functions called to insert an insn once
3422 delay slots have been filled since only they know how to update a
3423 SEQUENCE. */
3424
3425 void
3426 add_insn_after (rtx insn, rtx after)
3427 {
3428 rtx next = NEXT_INSN (after);
3429 basic_block bb;
3430
3431 if (optimize && INSN_DELETED_P (after))
3432 abort ();
3433
3434 NEXT_INSN (insn) = next;
3435 PREV_INSN (insn) = after;
3436
3437 if (next)
3438 {
3439 PREV_INSN (next) = insn;
3440 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3441 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3442 }
3443 else if (last_insn == after)
3444 last_insn = insn;
3445 else
3446 {
3447 struct sequence_stack *stack = seq_stack;
3448 /* Scan all pending sequences too. */
3449 for (; stack; stack = stack->next)
3450 if (after == stack->last)
3451 {
3452 stack->last = insn;
3453 break;
3454 }
3455
3456 if (stack == 0)
3457 abort ();
3458 }
3459
3460 if (GET_CODE (after) != BARRIER
3461 && GET_CODE (insn) != BARRIER
3462 && (bb = BLOCK_FOR_INSN (after)))
3463 {
3464 set_block_for_insn (insn, bb);
3465 if (INSN_P (insn))
3466 bb->flags |= BB_DIRTY;
3467 /* Should not happen as first in the BB is always
3468 either NOTE or LABEL. */
3469 if (BB_END (bb) == after
3470 /* Avoid clobbering of structure when creating new BB. */
3471 && GET_CODE (insn) != BARRIER
3472 && (GET_CODE (insn) != NOTE
3473 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3474 BB_END (bb) = insn;
3475 }
3476
3477 NEXT_INSN (after) = insn;
3478 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3479 {
3480 rtx sequence = PATTERN (after);
3481 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3482 }
3483 }
3484
3485 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3486 the previous should be the only functions called to insert an insn once
3487 delay slots have been filled since only they know how to update a
3488 SEQUENCE. */
3489
3490 void
3491 add_insn_before (rtx insn, rtx before)
3492 {
3493 rtx prev = PREV_INSN (before);
3494 basic_block bb;
3495
3496 if (optimize && INSN_DELETED_P (before))
3497 abort ();
3498
3499 PREV_INSN (insn) = prev;
3500 NEXT_INSN (insn) = before;
3501
3502 if (prev)
3503 {
3504 NEXT_INSN (prev) = insn;
3505 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3506 {
3507 rtx sequence = PATTERN (prev);
3508 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3509 }
3510 }
3511 else if (first_insn == before)
3512 first_insn = insn;
3513 else
3514 {
3515 struct sequence_stack *stack = seq_stack;
3516 /* Scan all pending sequences too. */
3517 for (; stack; stack = stack->next)
3518 if (before == stack->first)
3519 {
3520 stack->first = insn;
3521 break;
3522 }
3523
3524 if (stack == 0)
3525 abort ();
3526 }
3527
3528 if (GET_CODE (before) != BARRIER
3529 && GET_CODE (insn) != BARRIER
3530 && (bb = BLOCK_FOR_INSN (before)))
3531 {
3532 set_block_for_insn (insn, bb);
3533 if (INSN_P (insn))
3534 bb->flags |= BB_DIRTY;
3535 /* Should not happen as first in the BB is always
3536 either NOTE or LABEL. */
3537 if (BB_HEAD (bb) == insn
3538 /* Avoid clobbering of structure when creating new BB. */
3539 && GET_CODE (insn) != BARRIER
3540 && (GET_CODE (insn) != NOTE
3541 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3542 abort ();
3543 }
3544
3545 PREV_INSN (before) = insn;
3546 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3547 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3548 }
3549
3550 /* Remove an insn from its doubly-linked list. This function knows how
3551 to handle sequences. */
3552 void
3553 remove_insn (rtx insn)
3554 {
3555 rtx next = NEXT_INSN (insn);
3556 rtx prev = PREV_INSN (insn);
3557 basic_block bb;
3558
3559 if (prev)
3560 {
3561 NEXT_INSN (prev) = next;
3562 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3563 {
3564 rtx sequence = PATTERN (prev);
3565 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3566 }
3567 }
3568 else if (first_insn == insn)
3569 first_insn = next;
3570 else
3571 {
3572 struct sequence_stack *stack = seq_stack;
3573 /* Scan all pending sequences too. */
3574 for (; stack; stack = stack->next)
3575 if (insn == stack->first)
3576 {
3577 stack->first = next;
3578 break;
3579 }
3580
3581 if (stack == 0)
3582 abort ();
3583 }
3584
3585 if (next)
3586 {
3587 PREV_INSN (next) = prev;
3588 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3589 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3590 }
3591 else if (last_insn == insn)
3592 last_insn = prev;
3593 else
3594 {
3595 struct sequence_stack *stack = seq_stack;
3596 /* Scan all pending sequences too. */
3597 for (; stack; stack = stack->next)
3598 if (insn == stack->last)
3599 {
3600 stack->last = prev;
3601 break;
3602 }
3603
3604 if (stack == 0)
3605 abort ();
3606 }
3607 if (GET_CODE (insn) != BARRIER
3608 && (bb = BLOCK_FOR_INSN (insn)))
3609 {
3610 if (INSN_P (insn))
3611 bb->flags |= BB_DIRTY;
3612 if (BB_HEAD (bb) == insn)
3613 {
3614 /* Never ever delete the basic block note without deleting the whole
3615 basic block. */
3616 if (GET_CODE (insn) == NOTE)
3617 abort ();
3618 BB_HEAD (bb) = next;
3619 }
3620 if (BB_END (bb) == insn)
3621 BB_END (bb) = prev;
3622 }
3623 }
3624
3625 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3626
3627 void
3628 add_function_usage_to (rtx call_insn, rtx call_fusage)
3629 {
3630 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3631 abort ();
3632
3633 /* Put the register usage information on the CALL. If there is already
3634 some usage information, put ours at the end. */
3635 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3636 {
3637 rtx link;
3638
3639 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3640 link = XEXP (link, 1))
3641 ;
3642
3643 XEXP (link, 1) = call_fusage;
3644 }
3645 else
3646 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3647 }
3648
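/* Illustrative sketch (editorial note, not part of the original source):
   a caller building a CALL_INSN by hand records hard-register uses in
   a fusage list, e.g. with use_reg from expr.c on a target defining
   STATIC_CHAIN_REGNUM:

	rtx call_fusage = NULL_RTX;
	use_reg (&call_fusage, gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM));
	add_function_usage_to (call_insn, call_fusage);

   so later passes know the call reads that register.  */
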
3649 /* Delete all insns made since FROM.
3650 FROM becomes the new last instruction. */
3651
3652 void
3653 delete_insns_since (rtx from)
3654 {
3655 if (from == 0)
3656 first_insn = 0;
3657 else
3658 NEXT_INSN (from) = 0;
3659 last_insn = from;
3660 }
3661
3662 /* This function is deprecated, please use sequences instead.
3663
3664 Move a consecutive bunch of insns to a different place in the chain.
3665 The insns to be moved are those between FROM and TO.
3666 They are moved to a new position after the insn AFTER.
3667 AFTER must not be FROM or TO or any insn in between.
3668
3669 This function does not know about SEQUENCEs and hence should not be
3670 called after delay-slot filling has been done. */
3671
3672 void
3673 reorder_insns_nobb (rtx from, rtx to, rtx after)
3674 {
3675 /* Splice this bunch out of where it is now. */
3676 if (PREV_INSN (from))
3677 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3678 if (NEXT_INSN (to))
3679 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3680 if (last_insn == to)
3681 last_insn = PREV_INSN (from);
3682 if (first_insn == from)
3683 first_insn = NEXT_INSN (to);
3684
3685 /* Make the new neighbors point to it and it to them. */
3686 if (NEXT_INSN (after))
3687 PREV_INSN (NEXT_INSN (after)) = to;
3688
3689 NEXT_INSN (to) = NEXT_INSN (after);
3690 PREV_INSN (from) = after;
3691 NEXT_INSN (after) = from;
3692 if (after == last_insn)
3693 last_insn = to;
3694 }
3695
3696 /* Same as the function above, but take care to update BB boundaries. */
3697 void
3698 reorder_insns (rtx from, rtx to, rtx after)
3699 {
3700 rtx prev = PREV_INSN (from);
3701 basic_block bb, bb2;
3702
3703 reorder_insns_nobb (from, to, after);
3704
3705 if (GET_CODE (after) != BARRIER
3706 && (bb = BLOCK_FOR_INSN (after)))
3707 {
3708 rtx x;
3709 bb->flags |= BB_DIRTY;
3710
3711 if (GET_CODE (from) != BARRIER
3712 && (bb2 = BLOCK_FOR_INSN (from)))
3713 {
3714 if (BB_END (bb2) == to)
3715 BB_END (bb2) = prev;
3716 bb2->flags |= BB_DIRTY;
3717 }
3718
3719 if (BB_END (bb) == after)
3720 BB_END (bb) = to;
3721
3722 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3723 set_block_for_insn (x, bb);
3724 }
3725 }
3726
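/* Illustrative sketch (editorial note, not part of the original source):
   hoisting a run of insns FROM .. TO to just after insn AFTER is

	reorder_insns (from, to, after);

   provided AFTER lies outside FROM .. TO; the BB-aware version above
   also moves basic-block boundaries and marks both blocks dirty.  */
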
3727 /* Return the line note insn preceding INSN. */
3728
3729 static rtx
3730 find_line_note (rtx insn)
3731 {
3732 if (no_line_numbers)
3733 return 0;
3734
3735 for (; insn; insn = PREV_INSN (insn))
3736 if (GET_CODE (insn) == NOTE
3737 && NOTE_LINE_NUMBER (insn) >= 0)
3738 break;
3739
3740 return insn;
3741 }
3742
3743 /* Remove unnecessary notes from the instruction stream. */
3744
3745 void
3746 remove_unnecessary_notes (void)
3747 {
3748 rtx block_stack = NULL_RTX;
3749 rtx eh_stack = NULL_RTX;
3750 rtx insn;
3751 rtx next;
3752 rtx tmp;
3753
3754 /* We must not remove the first instruction in the function because
3755 the compiler depends on the first instruction being a note. */
3756 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3757 {
3758 /* Remember what's next. */
3759 next = NEXT_INSN (insn);
3760
3761 /* We're only interested in notes. */
3762 if (GET_CODE (insn) != NOTE)
3763 continue;
3764
3765 switch (NOTE_LINE_NUMBER (insn))
3766 {
3767 case NOTE_INSN_DELETED:
3768 case NOTE_INSN_LOOP_END_TOP_COND:
3769 remove_insn (insn);
3770 break;
3771
3772 case NOTE_INSN_EH_REGION_BEG:
3773 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3774 break;
3775
3776 case NOTE_INSN_EH_REGION_END:
3777 /* Too many end notes. */
3778 if (eh_stack == NULL_RTX)
3779 abort ();
3780 /* Mismatched nesting. */
3781 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3782 abort ();
3783 tmp = eh_stack;
3784 eh_stack = XEXP (eh_stack, 1);
3785 free_INSN_LIST_node (tmp);
3786 break;
3787
3788 case NOTE_INSN_BLOCK_BEG:
3789 /* By now, all notes indicating lexical blocks should have
3790 NOTE_BLOCK filled in. */
3791 if (NOTE_BLOCK (insn) == NULL_TREE)
3792 abort ();
3793 block_stack = alloc_INSN_LIST (insn, block_stack);
3794 break;
3795
3796 case NOTE_INSN_BLOCK_END:
3797 /* Too many end notes. */
3798 if (block_stack == NULL_RTX)
3799 abort ();
3800 /* Mismatched nesting. */
3801 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3802 abort ();
3803 tmp = block_stack;
3804 block_stack = XEXP (block_stack, 1);
3805 free_INSN_LIST_node (tmp);
3806
3807 /* Scan back to see if there are any non-note instructions
3808 between INSN and the beginning of this block. If not,
3809 then there is no PC range in the generated code that will
3810 actually be in this block, so there's no point in
3811 remembering the existence of the block. */
3812 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3813 {
3814 /* This block contains a real instruction. Note that we
3815 don't include labels; if the only thing in the block
3816 is a label, then there are still no PC values that
3817 lie within the block. */
3818 if (INSN_P (tmp))
3819 break;
3820
3821 /* We're only interested in NOTEs. */
3822 if (GET_CODE (tmp) != NOTE)
3823 continue;
3824
3825 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3826 {
3827 /* We just verified that this BLOCK matches us with
3828 the block_stack check above. Never delete the
3829 BLOCK for the outermost scope of the function; we
3830 can refer to names from that scope even if the
3831 block notes are messed up. */
3832 if (! is_body_block (NOTE_BLOCK (insn))
3833 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3834 {
3835 remove_insn (tmp);
3836 remove_insn (insn);
3837 }
3838 break;
3839 }
3840 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3841 /* There's a nested block. We need to leave the
3842 current block in place since otherwise the debugger
3843 wouldn't be able to show symbols from our block in
3844 the nested block. */
3845 break;
3846 }
3847 }
3848 }
3849
3850 /* Too many begin notes. */
3851 if (block_stack || eh_stack)
3852 abort ();
3853 }
3854
3855 \f
3856 /* Emit insn(s) of given code and pattern
3857 at a specified place within the doubly-linked list.
3858
3859 All of the emit_foo global entry points accept an object
3860 X which is either an insn list or a PATTERN of a single
3861 instruction.
3862
3863 There are thus a few canonical ways to generate code and
3864 emit it at a specific place in the instruction stream. For
3865 example, consider the instruction named SPOT and the fact that
3866 we would like to emit some instructions before SPOT. We might
3867 do it like this:
3868
3869 start_sequence ();
3870 ... emit the new instructions ...
3871 insns_head = get_insns ();
3872 end_sequence ();
3873
3874 emit_insn_before (insns_head, SPOT);
3875
3876 It used to be common to generate SEQUENCE rtl instead, but that
3877 is a relic of the past which no longer occurs. The reason is that
3878 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
3879 generated would almost certainly die right after it was created. */
3880
3881 /* Make X be output before the instruction BEFORE. */
3882
3883 rtx
3884 emit_insn_before (rtx x, rtx before)
3885 {
3886 rtx last = before;
3887 rtx insn;
3888
3889 #ifdef ENABLE_RTL_CHECKING
3890 if (before == NULL_RTX)
3891 abort ();
3892 #endif
3893
3894 if (x == NULL_RTX)
3895 return last;
3896
3897 switch (GET_CODE (x))
3898 {
3899 case INSN:
3900 case JUMP_INSN:
3901 case CALL_INSN:
3902 case CODE_LABEL:
3903 case BARRIER:
3904 case NOTE:
3905 insn = x;
3906 while (insn)
3907 {
3908 rtx next = NEXT_INSN (insn);
3909 add_insn_before (insn, before);
3910 last = insn;
3911 insn = next;
3912 }
3913 break;
3914
3915 #ifdef ENABLE_RTL_CHECKING
3916 case SEQUENCE:
3917 abort ();
3918 break;
3919 #endif
3920
3921 default:
3922 last = make_insn_raw (x);
3923 add_insn_before (last, before);
3924 break;
3925 }
3926
3927 return last;
3928 }
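
/* An illustrative sketch (not code from this file; REG_A, REG_B and
   SPOT are assumed pseudos and an existing insn supplied by the
   caller). Both calling styles are accepted: a pre-built insn list,

       start_sequence ();
       emit_move_insn (reg_a, reg_b);
       seq = get_insns ();
       end_sequence ();
       emit_insn_before (seq, spot);

   or a bare pattern, which is wrapped in a fresh INSN by make_insn_raw:

       emit_insn_before (gen_rtx_SET (VOIDmode, reg_a, reg_b), spot);  */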
3929
3930 /* Make an instruction with body X and code JUMP_INSN
3931 and output it before the instruction BEFORE. */
3932
3933 rtx
3934 emit_jump_insn_before (rtx x, rtx before)
3935 {
3936 rtx insn, last = NULL_RTX;
3937
3938 #ifdef ENABLE_RTL_CHECKING
3939 if (before == NULL_RTX)
3940 abort ();
3941 #endif
3942
3943 switch (GET_CODE (x))
3944 {
3945 case INSN:
3946 case JUMP_INSN:
3947 case CALL_INSN:
3948 case CODE_LABEL:
3949 case BARRIER:
3950 case NOTE:
3951 insn = x;
3952 while (insn)
3953 {
3954 rtx next = NEXT_INSN (insn);
3955 add_insn_before (insn, before);
3956 last = insn;
3957 insn = next;
3958 }
3959 break;
3960
3961 #ifdef ENABLE_RTL_CHECKING
3962 case SEQUENCE:
3963 abort ();
3964 break;
3965 #endif
3966
3967 default:
3968 last = make_jump_insn_raw (x);
3969 add_insn_before (last, before);
3970 break;
3971 }
3972
3973 return last;
3974 }
3975
3976 /* Make an instruction with body X and code CALL_INSN
3977 and output it before the instruction BEFORE. */
3978
3979 rtx
3980 emit_call_insn_before (rtx x, rtx before)
3981 {
3982 rtx last = NULL_RTX, insn;
3983
3984 #ifdef ENABLE_RTL_CHECKING
3985 if (before == NULL_RTX)
3986 abort ();
3987 #endif
3988
3989 switch (GET_CODE (x))
3990 {
3991 case INSN:
3992 case JUMP_INSN:
3993 case CALL_INSN:
3994 case CODE_LABEL:
3995 case BARRIER:
3996 case NOTE:
3997 insn = x;
3998 while (insn)
3999 {
4000 rtx next = NEXT_INSN (insn);
4001 add_insn_before (insn, before);
4002 last = insn;
4003 insn = next;
4004 }
4005 break;
4006
4007 #ifdef ENABLE_RTL_CHECKING
4008 case SEQUENCE:
4009 abort ();
4010 break;
4011 #endif
4012
4013 default:
4014 last = make_call_insn_raw (x);
4015 add_insn_before (last, before);
4016 break;
4017 }
4018
4019 return last;
4020 }
4021
4022 /* Make an insn of code BARRIER
4023 and output it before the insn BEFORE. */
4024
4025 rtx
4026 emit_barrier_before (rtx before)
4027 {
4028 rtx insn = rtx_alloc (BARRIER);
4029
4030 INSN_UID (insn) = cur_insn_uid++;
4031
4032 add_insn_before (insn, before);
4033 return insn;
4034 }
4035
4036 /* Emit the label LABEL before the insn BEFORE. */
4037
4038 rtx
4039 emit_label_before (rtx label, rtx before)
4040 {
4041 /* This can be called twice for the same label as a result of the
4042 confusion that follows a syntax error! So make it harmless. */
4043 if (INSN_UID (label) == 0)
4044 {
4045 INSN_UID (label) = cur_insn_uid++;
4046 add_insn_before (label, before);
4047 }
4048
4049 return label;
4050 }
4051
4052 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4053
4054 rtx
4055 emit_note_before (int subtype, rtx before)
4056 {
4057 rtx note = rtx_alloc (NOTE);
4058 INSN_UID (note) = cur_insn_uid++;
4059 NOTE_SOURCE_FILE (note) = 0;
4060 NOTE_LINE_NUMBER (note) = subtype;
4061 BLOCK_FOR_INSN (note) = NULL;
4062
4063 add_insn_before (note, before);
4064 return note;
4065 }
4066 \f
4067 /* Helper for emit_insn_after; handles lists of instructions
4068 efficiently. */
4069
4070 static rtx emit_insn_after_1 (rtx, rtx);
4071
4072 static rtx
4073 emit_insn_after_1 (rtx first, rtx after)
4074 {
4075 rtx last;
4076 rtx after_after;
4077 basic_block bb;
4078
4079 if (GET_CODE (after) != BARRIER
4080 && (bb = BLOCK_FOR_INSN (after)))
4081 {
4082 bb->flags |= BB_DIRTY;
4083 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4084 if (GET_CODE (last) != BARRIER)
4085 set_block_for_insn (last, bb);
4086 if (GET_CODE (last) != BARRIER) /* The loop above skips the final insn. */
4087 set_block_for_insn (last, bb);
4088 if (BB_END (bb) == after)
4089 BB_END (bb) = last;
4090 }
4091 else
4092 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4093 continue;
4094
4095 after_after = NEXT_INSN (after);
4096
4097 NEXT_INSN (after) = first;
4098 PREV_INSN (first) = after;
4099 NEXT_INSN (last) = after_after;
4100 if (after_after)
4101 PREV_INSN (after_after) = last;
4102
4103 if (after == last_insn)
4104 last_insn = last;
4105 return last;
4106 }
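
/* In picture form, the splice above takes the chain
   ... <-> AFTER <-> AFTER_AFTER <-> ... and the detached run
   FIRST <-> ... <-> LAST, and produces
   ... <-> AFTER <-> FIRST <-> ... <-> LAST <-> AFTER_AFTER <-> ...,
   advancing last_insn when AFTER was the end of the chain.  */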
4107
4108 /* Make X be output after the insn AFTER. */
4109
4110 rtx
4111 emit_insn_after (rtx x, rtx after)
4112 {
4113 rtx last = after;
4114
4115 #ifdef ENABLE_RTL_CHECKING
4116 if (after == NULL_RTX)
4117 abort ();
4118 #endif
4119
4120 if (x == NULL_RTX)
4121 return last;
4122
4123 switch (GET_CODE (x))
4124 {
4125 case INSN:
4126 case JUMP_INSN:
4127 case CALL_INSN:
4128 case CODE_LABEL:
4129 case BARRIER:
4130 case NOTE:
4131 last = emit_insn_after_1 (x, after);
4132 break;
4133
4134 #ifdef ENABLE_RTL_CHECKING
4135 case SEQUENCE:
4136 abort ();
4137 break;
4138 #endif
4139
4140 default:
4141 last = make_insn_raw (x);
4142 add_insn_after (last, after);
4143 break;
4144 }
4145
4146 return last;
4147 }
4148
4149 /* Similar to emit_insn_after, except that line notes are inserted so
4150 as to make the new insn act as if it had been emitted at FROM. */
4151
4152 void
4153 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4154 {
4155 rtx from_line = find_line_note (from);
4156 rtx after_line = find_line_note (after);
4157 rtx insn = emit_insn_after (x, after);
4158
4159 if (from_line)
4160 emit_note_copy_after (from_line, after);
4161
4162 if (after_line)
4163 emit_note_copy_after (after_line, insn);
4164 }
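
/* A usage sketch: when moving code originally emitted at FROM to a
   point after AFTER, this keeps the line-note bookkeeping consistent,
   e.g.

       emit_insn_after_with_line_notes (PATTERN (from), after, from);

   FROM's line note is inserted just before the moved code and AFTER's
   own line is restated following it.  */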
4165
4166 /* Make an insn of code JUMP_INSN with body X
4167 and output it after the insn AFTER. */
4168
4169 rtx
4170 emit_jump_insn_after (rtx x, rtx after)
4171 {
4172 rtx last;
4173
4174 #ifdef ENABLE_RTL_CHECKING
4175 if (after == NULL_RTX)
4176 abort ();
4177 #endif
4178
4179 switch (GET_CODE (x))
4180 {
4181 case INSN:
4182 case JUMP_INSN:
4183 case CALL_INSN:
4184 case CODE_LABEL:
4185 case BARRIER:
4186 case NOTE:
4187 last = emit_insn_after_1 (x, after);
4188 break;
4189
4190 #ifdef ENABLE_RTL_CHECKING
4191 case SEQUENCE:
4192 abort ();
4193 break;
4194 #endif
4195
4196 default:
4197 last = make_jump_insn_raw (x);
4198 add_insn_after (last, after);
4199 break;
4200 }
4201
4202 return last;
4203 }
4204
4205 /* Make an instruction with body X and code CALL_INSN
4206 and output it after the instruction AFTER. */
4207
4208 rtx
4209 emit_call_insn_after (rtx x, rtx after)
4210 {
4211 rtx last;
4212
4213 #ifdef ENABLE_RTL_CHECKING
4214 if (after == NULL_RTX)
4215 abort ();
4216 #endif
4217
4218 switch (GET_CODE (x))
4219 {
4220 case INSN:
4221 case JUMP_INSN:
4222 case CALL_INSN:
4223 case CODE_LABEL:
4224 case BARRIER:
4225 case NOTE:
4226 last = emit_insn_after_1 (x, after);
4227 break;
4228
4229 #ifdef ENABLE_RTL_CHECKING
4230 case SEQUENCE:
4231 abort ();
4232 break;
4233 #endif
4234
4235 default:
4236 last = make_call_insn_raw (x);
4237 add_insn_after (last, after);
4238 break;
4239 }
4240
4241 return last;
4242 }
4243
4244 /* Make an insn of code BARRIER
4245 and output it after the insn AFTER. */
4246
4247 rtx
4248 emit_barrier_after (rtx after)
4249 {
4250 rtx insn = rtx_alloc (BARRIER);
4251
4252 INSN_UID (insn) = cur_insn_uid++;
4253
4254 add_insn_after (insn, after);
4255 return insn;
4256 }
4257
4258 /* Emit the label LABEL after the insn AFTER. */
4259
4260 rtx
4261 emit_label_after (rtx label, rtx after)
4262 {
4263 /* This can be called twice for the same label
4264 as a result of the confusion that follows a syntax error!
4265 So make it harmless. */
4266 if (INSN_UID (label) == 0)
4267 {
4268 INSN_UID (label) = cur_insn_uid++;
4269 add_insn_after (label, after);
4270 }
4271
4272 return label;
4273 }
4274
4275 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4276
4277 rtx
4278 emit_note_after (int subtype, rtx after)
4279 {
4280 rtx note = rtx_alloc (NOTE);
4281 INSN_UID (note) = cur_insn_uid++;
4282 NOTE_SOURCE_FILE (note) = 0;
4283 NOTE_LINE_NUMBER (note) = subtype;
4284 BLOCK_FOR_INSN (note) = NULL;
4285 add_insn_after (note, after);
4286 return note;
4287 }
4288
4289 /* Emit a copy of note ORIG after the insn AFTER. */
4290
4291 rtx
4292 emit_note_copy_after (rtx orig, rtx after)
4293 {
4294 rtx note;
4295
4296 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4297 {
4298 cur_insn_uid++;
4299 return 0;
4300 }
4301
4302 note = rtx_alloc (NOTE);
4303 INSN_UID (note) = cur_insn_uid++;
4304 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4305 NOTE_DATA (note) = NOTE_DATA (orig);
4306 BLOCK_FOR_INSN (note) = NULL;
4307 add_insn_after (note, after);
4308 return note;
4309 }
4310 \f
4311 /* Like emit_insn_after, but set INSN_LOCATOR according to LOC. */
4312 rtx
4313 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4314 {
4315 rtx last = emit_insn_after (pattern, after);
4316
4317 if (pattern == NULL_RTX)
4318 return last;
4319
4320 after = NEXT_INSN (after);
4321 while (1)
4322 {
4323 if (active_insn_p (after))
4324 INSN_LOCATOR (after) = loc;
4325 if (after == last)
4326 break;
4327 after = NEXT_INSN (after);
4328 }
4329 return last;
4330 }
4331
4332 /* Like emit_jump_insn_after, but set INSN_LOCATOR according to LOC. */
4333 rtx
4334 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4335 {
4336 rtx last = emit_jump_insn_after (pattern, after);
4337
4338 if (pattern == NULL_RTX)
4339 return last;
4340
4341 after = NEXT_INSN (after);
4342 while (1)
4343 {
4344 if (active_insn_p (after))
4345 INSN_LOCATOR (after) = loc;
4346 if (after == last)
4347 break;
4348 after = NEXT_INSN (after);
4349 }
4350 return last;
4351 }
4352
4353 /* Like emit_call_insn_after, but set INSN_LOCATOR according to LOC. */
4354 rtx
4355 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4356 {
4357 rtx last = emit_call_insn_after (pattern, after);
4358
4359 if (pattern == NULL_RTX)
4360 return last;
4361
4362 after = NEXT_INSN (after);
4363 while (1)
4364 {
4365 if (active_insn_p (after))
4366 INSN_LOCATOR (after) = loc;
4367 if (after == last)
4368 break;
4369 after = NEXT_INSN (after);
4370 }
4371 return last;
4372 }
4373
4374 /* Like emit_insn_before, but set INSN_LOCATOR according to LOC. */
4375 rtx
4376 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4377 {
4378 rtx first = PREV_INSN (before);
4379 rtx last = emit_insn_before (pattern, before);
4380
4381 if (pattern == NULL_RTX)
4382 return last;
4383
4384 first = NEXT_INSN (first);
4385 while (1)
4386 {
4387 if (active_insn_p (first))
4388 INSN_LOCATOR (first) = loc;
4389 if (first == last)
4390 break;
4391 first = NEXT_INSN (first);
4392 }
4393 return last;
4394 }
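
/* A minimal sketch of the *_setloc variants: to give the code emitted
   for PAT the same source location as an existing insn WHERE (assumed
   supplied by the caller), pass along its locator:

       emit_insn_before_setloc (pat, where, INSN_LOCATOR (where));

   Every active insn emitted for PAT then carries that locator.  */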
4395 \f
4396 /* Take X and emit it at the end of the doubly-linked
4397 INSN list.
4398
4399 Returns the last insn emitted. */
4400
4401 rtx
4402 emit_insn (rtx x)
4403 {
4404 rtx last = last_insn;
4405 rtx insn;
4406
4407 if (x == NULL_RTX)
4408 return last;
4409
4410 switch (GET_CODE (x))
4411 {
4412 case INSN:
4413 case JUMP_INSN:
4414 case CALL_INSN:
4415 case CODE_LABEL:
4416 case BARRIER:
4417 case NOTE:
4418 insn = x;
4419 while (insn)
4420 {
4421 rtx next = NEXT_INSN (insn);
4422 add_insn (insn);
4423 last = insn;
4424 insn = next;
4425 }
4426 break;
4427
4428 #ifdef ENABLE_RTL_CHECKING
4429 case SEQUENCE:
4430 abort ();
4431 break;
4432 #endif
4433
4434 default:
4435 last = make_insn_raw (x);
4436 add_insn (last);
4437 break;
4438 }
4439
4440 return last;
4441 }
4442
4443 /* Make an insn of code JUMP_INSN with pattern X
4444 and add it to the end of the doubly-linked list. */
4445
4446 rtx
4447 emit_jump_insn (rtx x)
4448 {
4449 rtx last = NULL_RTX, insn;
4450
4451 switch (GET_CODE (x))
4452 {
4453 case INSN:
4454 case JUMP_INSN:
4455 case CALL_INSN:
4456 case CODE_LABEL:
4457 case BARRIER:
4458 case NOTE:
4459 insn = x;
4460 while (insn)
4461 {
4462 rtx next = NEXT_INSN (insn);
4463 add_insn (insn);
4464 last = insn;
4465 insn = next;
4466 }
4467 break;
4468
4469 #ifdef ENABLE_RTL_CHECKING
4470 case SEQUENCE:
4471 abort ();
4472 break;
4473 #endif
4474
4475 default:
4476 last = make_jump_insn_raw (x);
4477 add_insn (last);
4478 break;
4479 }
4480
4481 return last;
4482 }
4483
4484 /* Make an insn of code CALL_INSN with pattern X
4485 and add it to the end of the doubly-linked list. */
4486
4487 rtx
4488 emit_call_insn (rtx x)
4489 {
4490 rtx insn;
4491
4492 switch (GET_CODE (x))
4493 {
4494 case INSN:
4495 case JUMP_INSN:
4496 case CALL_INSN:
4497 case CODE_LABEL:
4498 case BARRIER:
4499 case NOTE:
4500 insn = emit_insn (x);
4501 break;
4502
4503 #ifdef ENABLE_RTL_CHECKING
4504 case SEQUENCE:
4505 abort ();
4506 break;
4507 #endif
4508
4509 default:
4510 insn = make_call_insn_raw (x);
4511 add_insn (insn);
4512 break;
4513 }
4514
4515 return insn;
4516 }
4517
4518 /* Add the label LABEL to the end of the doubly-linked list. */
4519
4520 rtx
4521 emit_label (rtx label)
4522 {
4523 /* This can be called twice for the same label
4524 as a result of the confusion that follows a syntax error!
4525 So make it harmless. */
4526 if (INSN_UID (label) == 0)
4527 {
4528 INSN_UID (label) = cur_insn_uid++;
4529 add_insn (label);
4530 }
4531 return label;
4532 }
4533
4534 /* Make an insn of code BARRIER
4535 and add it to the end of the doubly-linked list. */
4536
4537 rtx
4538 emit_barrier (void)
4539 {
4540 rtx barrier = rtx_alloc (BARRIER);
4541 INSN_UID (barrier) = cur_insn_uid++;
4542 add_insn (barrier);
4543 return barrier;
4544 }
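
/* Sketch: a hand-emitted unconditional jump must be followed by a
   barrier so later passes know control cannot fall through (the emit
   function below does this automatically for such jumps):

       emit_jump_insn (gen_jump (label));
       emit_barrier ();  */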
4545
4546 /* Make a line-number NOTE insn for LOCATION and add it to the end
4547 of the doubly-linked list, but only if line numbers are desired for
4548 debugging info and LOCATION doesn't match the previous one. */
4549
4550 rtx
4551 emit_line_note (location_t location)
4552 {
4553 rtx note;
4554
4555 set_file_and_line_for_stmt (location);
4556
4557 if (location.file && last_location.file
4558 && !strcmp (location.file, last_location.file)
4559 && location.line == last_location.line)
4560 return NULL_RTX;
4561 last_location = location;
4562
4563 if (no_line_numbers)
4564 {
4565 cur_insn_uid++;
4566 return NULL_RTX;
4567 }
4568
4569 note = emit_note (location.line);
4570 NOTE_SOURCE_FILE (note) = location.file;
4571
4572 return note;
4573 }
4574
4575 /* Emit a copy of note ORIG. */
4576
4577 rtx
4578 emit_note_copy (rtx orig)
4579 {
4580 rtx note;
4581
4582 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4583 {
4584 cur_insn_uid++;
4585 return NULL_RTX;
4586 }
4587
4588 note = rtx_alloc (NOTE);
4589
4590 INSN_UID (note) = cur_insn_uid++;
4591 NOTE_DATA (note) = NOTE_DATA (orig);
4592 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4593 BLOCK_FOR_INSN (note) = NULL;
4594 add_insn (note);
4595
4596 return note;
4597 }
4598
4599 /* Make an insn of code NOTE or type NOTE_NO
4600 and add it to the end of the doubly-linked list. */
4601
4602 rtx
4603 emit_note (int note_no)
4604 {
4605 rtx note;
4606
4607 note = rtx_alloc (NOTE);
4608 INSN_UID (note) = cur_insn_uid++;
4609 NOTE_LINE_NUMBER (note) = note_no;
4610 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4611 BLOCK_FOR_INSN (note) = NULL;
4612 add_insn (note);
4613 return note;
4614 }
4615
4616 /* Cause next statement to emit a line note even if the line number
4617 has not changed. */
4618
4619 void
4620 force_next_line_note (void)
4621 {
4622 last_location.line = -1;
4623 }
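
/* Usage sketch, assuming input_location holds the front end's current
   position as elsewhere in GCC: to restate a line note even though the
   line has not changed, invalidate the cached location first:

       force_next_line_note ();
       emit_line_note (input_location);  */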
4624
4625 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4626 note of this type already exists, remove it first. */
4627
4628 rtx
4629 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4630 {
4631 rtx note = find_reg_note (insn, kind, NULL_RTX);
4632
4633 switch (kind)
4634 {
4635 case REG_EQUAL:
4636 case REG_EQUIV:
4637 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4638 has multiple sets (some callers assume single_set
4639 means the insn only has one set, when in fact it
4640 means the insn only has one *useful* set). */
4641 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4642 {
4643 if (note)
4644 abort ();
4645 return NULL_RTX;
4646 }
4647
4648 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4649 It serves no useful purpose and breaks eliminate_regs. */
4650 if (GET_CODE (datum) == ASM_OPERANDS)
4651 return NULL_RTX;
4652 break;
4653
4654 default:
4655 break;
4656 }
4657
4658 if (note)
4659 {
4660 XEXP (note, 0) = datum;
4661 return note;
4662 }
4663
4664 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4665 return REG_NOTES (insn);
4666 }
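
/* A typical use (sketch): record that the value computed by INSN is
   known to equal the constant 42, replacing any stale REG_EQUAL note:

       set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */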
4667 \f
4668 /* Return an indication of which type of insn should have X as a body.
4669 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4670
4671 enum rtx_code
4672 classify_insn (rtx x)
4673 {
4674 if (GET_CODE (x) == CODE_LABEL)
4675 return CODE_LABEL;
4676 if (GET_CODE (x) == CALL)
4677 return CALL_INSN;
4678 if (GET_CODE (x) == RETURN)
4679 return JUMP_INSN;
4680 if (GET_CODE (x) == SET)
4681 {
4682 if (SET_DEST (x) == pc_rtx)
4683 return JUMP_INSN;
4684 else if (GET_CODE (SET_SRC (x)) == CALL)
4685 return CALL_INSN;
4686 else
4687 return INSN;
4688 }
4689 if (GET_CODE (x) == PARALLEL)
4690 {
4691 int j;
4692 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4693 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4694 return CALL_INSN;
4695 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4696 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4697 return JUMP_INSN;
4698 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4699 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4700 return CALL_INSN;
4701 }
4702 return INSN;
4703 }
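
/* Examples of the classification (a sketch of representative bodies):

       (set (reg) (reg))                  -> INSN
       (set (pc) (label_ref ...))         -> JUMP_INSN
       (call (mem ...) ...)               -> CALL_INSN
       (set (reg) (call (mem ...) ...))   -> CALL_INSN
       (code_label)                       -> CODE_LABEL  */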
4704
4705 /* Emit the rtl pattern X as an appropriate kind of insn.
4706 If X is a label, it is simply added into the insn chain. */
4707
4708 rtx
4709 emit (rtx x)
4710 {
4711 enum rtx_code code = classify_insn (x);
4712
4713 if (code == CODE_LABEL)
4714 return emit_label (x);
4715 else if (code == INSN)
4716 return emit_insn (x);
4717 else if (code == JUMP_INSN)
4718 {
4719 rtx insn = emit_jump_insn (x);
4720 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4721 return emit_barrier ();
4722 return insn;
4723 }
4724 else if (code == CALL_INSN)
4725 return emit_call_insn (x);
4726 else
4727 abort ();
4728 }
4729 \f
4730 /* Space for free sequence stack entries. */
4731 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4732
4733 /* Begin emitting insns to a sequence which can be packaged in an
4734 RTL_EXPR. If this sequence will contain something that might cause
4735 the compiler to pop arguments to function calls (because those
4736 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4737 details), use do_pending_stack_adjust before calling this function.
4738 That will ensure that the deferred pops are not accidentally
4739 emitted in the middle of this sequence. */
4740
4741 void
4742 start_sequence (void)
4743 {
4744 struct sequence_stack *tem;
4745
4746 if (free_sequence_stack != NULL)
4747 {
4748 tem = free_sequence_stack;
4749 free_sequence_stack = tem->next;
4750 }
4751 else
4752 tem = ggc_alloc (sizeof (struct sequence_stack));
4753
4754 tem->next = seq_stack;
4755 tem->first = first_insn;
4756 tem->last = last_insn;
4757 tem->sequence_rtl_expr = seq_rtl_expr;
4758
4759 seq_stack = tem;
4760
4761 first_insn = 0;
4762 last_insn = 0;
4763 }
4764
4765 /* Similarly, but indicate that this sequence will be placed in T, an
4766 RTL_EXPR. See the documentation for start_sequence for more
4767 information about how to use this function. */
4768
4769 void
4770 start_sequence_for_rtl_expr (tree t)
4771 {
4772 start_sequence ();
4773
4774 seq_rtl_expr = t;
4775 }
4776
4777 /* Set up the insn chain starting with FIRST as the current sequence,
4778 saving the previously current one. See the documentation for
4779 start_sequence for more information about how to use this function. */
4780
4781 void
4782 push_to_sequence (rtx first)
4783 {
4784 rtx last;
4785
4786 start_sequence ();
4787
4788 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4789
4790 first_insn = first;
4791 last_insn = last;
4792 }
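
/* Sketch: append further insns to a previously saved chain SAVED
   (assumed to have been captured earlier with get_insns) without
   disturbing whatever is currently being emitted:

       push_to_sequence (saved);
       emit_insn (pat);
       saved = get_insns ();
       end_sequence ();  */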
4793
4794 /* Set up the insn chain from a chain starting at FIRST and ending at LAST. */
4795
4796 void
4797 push_to_full_sequence (rtx first, rtx last)
4798 {
4799 start_sequence ();
4800 first_insn = first;
4801 last_insn = last;
4802 /* We really should have the end of the insn chain here. */
4803 if (last && NEXT_INSN (last))
4804 abort ();
4805 }
4806
4807 /* Set up the outer-level insn chain
4808 as the current sequence, saving the previously current one. */
4809
4810 void
4811 push_topmost_sequence (void)
4812 {
4813 struct sequence_stack *stack, *top = NULL;
4814
4815 start_sequence ();
4816
4817 for (stack = seq_stack; stack; stack = stack->next)
4818 top = stack;
4819
4820 first_insn = top->first;
4821 last_insn = top->last;
4822 seq_rtl_expr = top->sequence_rtl_expr;
4823 }
4824
4825 /* After emitting to the outer-level insn chain, update the outer-level
4826 insn chain, and restore the previous saved state. */
4827
4828 void
4829 pop_topmost_sequence (void)
4830 {
4831 struct sequence_stack *stack, *top = NULL;
4832
4833 for (stack = seq_stack; stack; stack = stack->next)
4834 top = stack;
4835
4836 top->first = first_insn;
4837 top->last = last_insn;
4838 /* ??? Why don't we save seq_rtl_expr here? */
4839
4840 end_sequence ();
4841 }
4842
4843 /* After emitting to a sequence, restore previous saved state.
4844
4845 To get the contents of the sequence just made, you must call
4846 `get_insns' *before* calling here.
4847
4848 If the compiler might have deferred popping arguments while
4849 generating this sequence, and this sequence will not be immediately
4850 inserted into the instruction stream, use do_pending_stack_adjust
4851 before calling get_insns. That will ensure that the deferred
4852 pops are inserted into this sequence, and not into some random
4853 location in the instruction stream. See INHIBIT_DEFER_POP for more
4854 information about deferred popping of arguments. */
4855
4856 void
4857 end_sequence (void)
4858 {
4859 struct sequence_stack *tem = seq_stack;
4860
4861 first_insn = tem->first;
4862 last_insn = tem->last;
4863 seq_rtl_expr = tem->sequence_rtl_expr;
4864 seq_stack = tem->next;
4865
4866 memset (tem, 0, sizeof (*tem));
4867 tem->next = free_sequence_stack;
4868 free_sequence_stack = tem;
4869 }
4870
4871 /* Return 1 if currently emitting into a sequence. */
4872
4873 int
4874 in_sequence_p (void)
4875 {
4876 return seq_stack != 0;
4877 }
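
/* Sequences nest; a sketch of building an inner sequence while an
   outer one is open (each start_sequence pushes the current chain onto
   seq_stack, each end_sequence pops it back):

       start_sequence ();
       ... emit outer insns ...
       start_sequence ();
       ... emit inner insns ...
       inner = get_insns ();
       end_sequence ();
       emit_insn (inner);       appended to the outer sequence
       outer = get_insns ();
       end_sequence ();  */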
4878 \f
4879 /* Put the various virtual registers into REGNO_REG_RTX. */
4880
4881 void
4882 init_virtual_regs (struct emit_status *es)
4883 {
4884 rtx *ptr = es->x_regno_reg_rtx;
4885 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4886 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4887 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4888 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4889 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4890 }
4891
4892 \f
4893 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4894 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4895 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4896 static int copy_insn_n_scratches;
4897
4898 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4899 copied an ASM_OPERANDS.
4900 In that case, it is the original input-operand vector. */
4901 static rtvec orig_asm_operands_vector;
4902
4903 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4904 copied an ASM_OPERANDS.
4905 In that case, it is the copied input-operand vector. */
4906 static rtvec copy_asm_operands_vector;
4907
4908 /* Likewise for the constraints vector. */
4909 static rtvec orig_asm_constraints_vector;
4910 static rtvec copy_asm_constraints_vector;
4911
4912 /* Recursively create a new copy of an rtx for copy_insn.
4913 This function differs from copy_rtx in that it handles SCRATCHes and
4914 ASM_OPERANDs properly.
4915 Normally, this function is not used directly; use copy_insn as front end.
4916 However, you could first copy an insn pattern with copy_insn and then use
4917 this function afterwards to properly copy any REG_NOTEs containing
4918 SCRATCHes. */
4919
4920 rtx
4921 copy_insn_1 (rtx orig)
4922 {
4923 rtx copy;
4924 int i, j;
4925 RTX_CODE code;
4926 const char *format_ptr;
4927
4928 code = GET_CODE (orig);
4929
4930 switch (code)
4931 {
4932 case REG:
4933 case QUEUED:
4934 case CONST_INT:
4935 case CONST_DOUBLE:
4936 case CONST_VECTOR:
4937 case SYMBOL_REF:
4938 case CODE_LABEL:
4939 case PC:
4940 case CC0:
4941 case ADDRESSOF:
4942 return orig;
4943 case CLOBBER:
4944 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4945 return orig;
4946 break;
4947
4948 case SCRATCH:
4949 for (i = 0; i < copy_insn_n_scratches; i++)
4950 if (copy_insn_scratch_in[i] == orig)
4951 return copy_insn_scratch_out[i];
4952 break;
4953
4954 case CONST:
4955 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4956 a LABEL_REF, it isn't sharable. */
4957 if (GET_CODE (XEXP (orig, 0)) == PLUS
4958 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4959 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4960 return orig;
4961 break;
4962
4963 /* A MEM with a constant address is not sharable. The problem is that
4964 the constant address may need to be reloaded. If the mem is shared,
4965 then reloading one copy of this mem will cause all copies to appear
4966 to have been reloaded. */
4967
4968 default:
4969 break;
4970 }
4971
4972 copy = rtx_alloc (code);
4973
4974 /* Copy the various flags, and other information. We assume that
4975 all fields need copying, and then clear the fields that should
4976 not be copied. That is the sensible default behavior, and forces
4977 us to explicitly document why we are *not* copying a flag. */
4978 memcpy (copy, orig, RTX_HDR_SIZE);
4979
4980 /* We do not copy the USED flag, which is used as a mark bit during
4981 walks over the RTL. */
4982 RTX_FLAG (copy, used) = 0;
4983
4984 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4985 if (INSN_P (orig))
4986 {
4987 RTX_FLAG (copy, jump) = 0;
4988 RTX_FLAG (copy, call) = 0;
4989 RTX_FLAG (copy, frame_related) = 0;
4990 }
4991
4992 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4993
4994 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4995 {
4996 copy->u.fld[i] = orig->u.fld[i];
4997 switch (*format_ptr++)
4998 {
4999 case 'e':
5000 if (XEXP (orig, i) != NULL)
5001 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5002 break;
5003
5004 case 'E':
5005 case 'V':
5006 if (XVEC (orig, i) == orig_asm_constraints_vector)
5007 XVEC (copy, i) = copy_asm_constraints_vector;
5008 else if (XVEC (orig, i) == orig_asm_operands_vector)
5009 XVEC (copy, i) = copy_asm_operands_vector;
5010 else if (XVEC (orig, i) != NULL)
5011 {
5012 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5013 for (j = 0; j < XVECLEN (copy, i); j++)
5014 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5015 }
5016 break;
5017
5018 case 't':
5019 case 'w':
5020 case 'i':
5021 case 's':
5022 case 'S':
5023 case 'u':
5024 case '0':
5025 /* These are left unchanged. */
5026 break;
5027
5028 default:
5029 abort ();
5030 }
5031 }
5032
5033 if (code == SCRATCH)
5034 {
5035 i = copy_insn_n_scratches++;
5036 if (i >= MAX_RECOG_OPERANDS)
5037 abort ();
5038 copy_insn_scratch_in[i] = orig;
5039 copy_insn_scratch_out[i] = copy;
5040 }
5041 else if (code == ASM_OPERANDS)
5042 {
5043 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5044 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5045 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5046 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5047 }
5048
5049 return copy;
5050 }
5051
5052 /* Create a new copy of an rtx.
5053 This function differs from copy_rtx in that it handles SCRATCHes and
5054 ASM_OPERANDs properly.
5055 INSN doesn't really have to be a full INSN; it could be just the
5056 pattern. */
5057 rtx
5058 copy_insn (rtx insn)
5059 {
5060 copy_insn_n_scratches = 0;
5061 orig_asm_operands_vector = 0;
5062 orig_asm_constraints_vector = 0;
5063 copy_asm_operands_vector = 0;
5064 copy_asm_constraints_vector = 0;
5065 return copy_insn_1 (insn);
5066 }
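
/* Sketch: duplicate an insn's pattern before re-emitting it; unlike
   copy_rtx, copy_insn keeps any SCRATCHes shared consistently within
   the one copy and handles ASM_OPERANDS vectors:

       emit_insn_after (copy_insn (PATTERN (insn)), after);  */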
5067
5068 /* Initialize data structures and variables in this file
5069 before generating rtl for each function. */
5070
5071 void
5072 init_emit (void)
5073 {
5074 struct function *f = cfun;
5075
5076 f->emit = ggc_alloc (sizeof (struct emit_status));
5077 first_insn = NULL;
5078 last_insn = NULL;
5079 seq_rtl_expr = NULL;
5080 cur_insn_uid = 1;
5081 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5082 last_location.line = 0;
5083 last_location.file = 0;
5084 first_label_num = label_num;
5085 last_label_num = 0;
5086 seq_stack = NULL;
5087
5088 /* Init the tables that describe all the pseudo regs. */
5089
5090 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5091
5092 f->emit->regno_pointer_align
5093 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5094 * sizeof (unsigned char));
5095
5096 regno_reg_rtx
5097 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5098
5099 /* Put copies of all the hard registers into regno_reg_rtx. */
5100 memcpy (regno_reg_rtx,
5101 static_regno_reg_rtx,
5102 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5103
5104 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5105 init_virtual_regs (f->emit);
5106
5107 /* Indicate that the virtual registers and stack locations are
5108 all pointers. */
5109 REG_POINTER (stack_pointer_rtx) = 1;
5110 REG_POINTER (frame_pointer_rtx) = 1;
5111 REG_POINTER (hard_frame_pointer_rtx) = 1;
5112 REG_POINTER (arg_pointer_rtx) = 1;
5113
5114 REG_POINTER (virtual_incoming_args_rtx) = 1;
5115 REG_POINTER (virtual_stack_vars_rtx) = 1;
5116 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5117 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5118 REG_POINTER (virtual_cfa_rtx) = 1;
5119
5120 #ifdef STACK_BOUNDARY
5121 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5122 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5123 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5124 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5125
5126 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5127 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5128 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5129 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5130 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5131 #endif
5132
5133 #ifdef INIT_EXPANDERS
5134 INIT_EXPANDERS;
5135 #endif
5136 }
5137
5138 /* Generate the zero CONST_VECTOR for vector mode MODE. */
5139
5140 static rtx
5141 gen_const_vector_0 (enum machine_mode mode)
5142 {
5143 rtx tem;
5144 rtvec v;
5145 int units, i;
5146 enum machine_mode inner;
5147
5148 units = GET_MODE_NUNITS (mode);
5149 inner = GET_MODE_INNER (mode);
5150
5151 v = rtvec_alloc (units);
5152
5153 /* CONST0_RTX for the inner mode must already be set when we are called. */
5154 if (!CONST0_RTX (inner))
5155 abort ();
5156
5157 for (i = 0; i < units; ++i)
5158 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5159
5160 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5161 return tem;
5162 }
5163
5164 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared
5165 zero vector when all elements are zero. */
5166 rtx
5167 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5168 {
5169 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5170 int i;
5171
5172 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5173 if (RTVEC_ELT (v, i) != inner_zero)
5174 return gen_rtx_raw_CONST_VECTOR (mode, v);
5175 return CONST0_RTX (mode);
5176 }
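
/* For example (sketch; V4SImode is assumed to exist on the target),
   an all-zero vector folds to the shared constant:

       gen_rtx_CONST_VECTOR (V4SImode,
                             gen_rtvec (4, const0_rtx, const0_rtx,
                                        const0_rtx, const0_rtx))
         == CONST0_RTX (V4SImode)  */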
5177
5178 /* Create some permanent unique rtl objects shared between all functions.
5179 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5180
5181 void
5182 init_emit_once (int line_numbers)
5183 {
5184 int i;
5185 enum machine_mode mode;
5186 enum machine_mode double_mode;
5187
5188 /* We need reg_raw_mode, so initialize the modes now. */
5189 init_reg_modes_once ();
5190
5191 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5192 tables. */
5193 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5194 const_int_htab_eq, NULL);
5195
5196 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5197 const_double_htab_eq, NULL);
5198
5199 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5200 mem_attrs_htab_eq, NULL);
5201 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5202 reg_attrs_htab_eq, NULL);
5203
5204 no_line_numbers = ! line_numbers;
5205
5206 /* Compute the word and byte modes. */
5207
5208 byte_mode = VOIDmode;
5209 word_mode = VOIDmode;
5210 double_mode = VOIDmode;
5211
5212 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5213 mode = GET_MODE_WIDER_MODE (mode))
5214 {
5215 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5216 && byte_mode == VOIDmode)
5217 byte_mode = mode;
5218
5219 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5220 && word_mode == VOIDmode)
5221 word_mode = mode;
5222 }
5223
5224 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5225 mode = GET_MODE_WIDER_MODE (mode))
5226 {
5227 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5228 && double_mode == VOIDmode)
5229 double_mode = mode;
5230 }
5231
5232 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5233
5234 /* Assign register numbers to the globally defined register rtx.
5235 This must be done at runtime because the register number field
5236 is in a union and some compilers can't initialize unions. */
5237
5238 pc_rtx = gen_rtx_PC (VOIDmode);
5239 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5240 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5241 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5242 if (hard_frame_pointer_rtx == 0)
5243 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5244 HARD_FRAME_POINTER_REGNUM);
5245 if (arg_pointer_rtx == 0)
5246 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5247 virtual_incoming_args_rtx =
5248 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5249 virtual_stack_vars_rtx =
5250 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5251 virtual_stack_dynamic_rtx =
5252 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5253 virtual_outgoing_args_rtx =
5254 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5255 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5256
5257 /* Initialize RTL for commonly used hard registers. These are
5258 copied into regno_reg_rtx as we begin to compile each function. */
5259 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5260 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5261
5262 #ifdef INIT_EXPANDERS
5263 /* This is to initialize {init|mark|free}_machine_status before the first
5264 call to push_function_context_to. This is needed by the Chill front
5265 end which calls push_function_context_to before the first call to
5266 init_function_start. */
5267 INIT_EXPANDERS;
5268 #endif
5269
5270 /* Create the unique rtx's for certain rtx codes and operand values. */
5271
5272 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5273 tries to use these variables. */
5274 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5275 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5276 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5277
5278 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5279 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5280 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5281 else
5282 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5283
5284 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5285 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5286 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5287 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5288 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5289 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5290 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5291
5292 dconsthalf = dconst1;
5293 dconsthalf.exp--;
5294
5295 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5296
5297 /* Initialize mathematical constants for constant folding builtins.
5298 These constants need to be given at least 160 bits of precision. */
5299 real_from_string (&dconstpi,
5300 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5301 real_from_string (&dconste,
5302 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5303
5304 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5305 {
5306 REAL_VALUE_TYPE *r =
5307 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5308
5309 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5310 mode = GET_MODE_WIDER_MODE (mode))
5311 const_tiny_rtx[i][(int) mode] =
5312 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5313
5314 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5315
5316 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5317 mode = GET_MODE_WIDER_MODE (mode))
5318 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5319
5320 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5321 mode != VOIDmode;
5322 mode = GET_MODE_WIDER_MODE (mode))
5323 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5324 }
5325
5326 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5327 mode != VOIDmode;
5328 mode = GET_MODE_WIDER_MODE (mode))
5329 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5330
5331 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5332 mode != VOIDmode;
5333 mode = GET_MODE_WIDER_MODE (mode))
5334 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5335
5336 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5337 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5338 const_tiny_rtx[0][i] = const0_rtx;
5339
5340 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5341 if (STORE_FLAG_VALUE == 1)
5342 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5343
5344 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5345 return_address_pointer_rtx
5346 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5347 #endif
5348
5349 #ifdef STATIC_CHAIN_REGNUM
5350 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5351
5352 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5353 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5354 static_chain_incoming_rtx
5355 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5356 else
5357 #endif
5358 static_chain_incoming_rtx = static_chain_rtx;
5359 #endif
5360
5361 #ifdef STATIC_CHAIN
5362 static_chain_rtx = STATIC_CHAIN;
5363
5364 #ifdef STATIC_CHAIN_INCOMING
5365 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5366 #else
5367 static_chain_incoming_rtx = static_chain_rtx;
5368 #endif
5369 #endif
5370
5371 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5372 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5373 }
5374 \f
5375 /* Query and clear / restore no_line_numbers. This is used by the
5376 switch / case handling in stmt.c to give proper line numbers in
5377 warnings about unreachable code. */
5378
5379 int
5380 force_line_numbers (void)
5381 {
5382 int old = no_line_numbers;
5383
5384 no_line_numbers = 0;
5385 if (old)
5386 force_next_line_note ();
5387 return old;
5388 }
5389
5390 void
5391 restore_line_number_status (int old_value)
5392 {
5393 no_line_numbers = old_value;
5394 }
5395
5396 /* Produce an exact duplicate of insn INSN after AFTER, taking care
5397 to update any libcall regions if present. */
5398
5399 rtx
5400 emit_copy_of_insn_after (rtx insn, rtx after)
5401 {
5402 rtx new;
5403 rtx note1, note2, link;
5404
5405 switch (GET_CODE (insn))
5406 {
5407 case INSN:
5408 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5409 break;
5410
5411 case JUMP_INSN:
5412 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5413 break;
5414
5415 case CALL_INSN:
5416 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5417 if (CALL_INSN_FUNCTION_USAGE (insn))
5418 CALL_INSN_FUNCTION_USAGE (new)
5419 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5420 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5421 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5422 break;
5423
5424 default:
5425 abort ();
5426 }
5427
5428 /* Update LABEL_NUSES. */
5429 mark_jump_label (PATTERN (new), new, 0);
5430
5431 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5432
5433 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5434 make them. */
5435 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5436 if (REG_NOTE_KIND (link) != REG_LABEL)
5437 {
5438 if (GET_CODE (link) == EXPR_LIST)
5439 REG_NOTES (new)
5440 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5441 XEXP (link, 0),
5442 REG_NOTES (new)));
5443 else
5444 REG_NOTES (new)
5445 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5446 XEXP (link, 0),
5447 REG_NOTES (new)));
5448 }
5449
5450 /* Fix the libcall sequences. */
5451 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5452 {
5453 rtx p = new;
5454 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5455 p = PREV_INSN (p);
5456 XEXP (note1, 0) = p;
5457 XEXP (note2, 0) = new;
5458 }
5459 INSN_CODE (new) = INSN_CODE (insn);
5460 return new;
5461 }
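
/* Sketch: duplicate INSN immediately after itself, e.g. when unrolling
   code by hand:

       copy = emit_copy_of_insn_after (insn, insn);

   REG_NOTES and, for calls, CALL_INSN_FUNCTION_USAGE travel with the
   copy; REG_LABEL notes are recreated by mark_jump_label instead.  */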
5462
5463 static GTY((deletable(""))) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5464 rtx
5465 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5466 {
5467 if (hard_reg_clobbers[mode][regno])
5468 return hard_reg_clobbers[mode][regno];
5469 else
5470 return (hard_reg_clobbers[mode][regno] =
5471 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5472 }
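
/* Usage sketch (REGNO stands for some hard register number chosen by
   the caller): repeated requests share a single cached rtx, so this is
   cheap to call from pattern expanders:

       emit_insn (gen_hard_reg_clobber (word_mode, regno));  */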
5473
5474 #include "gt-emit-rtl.h"