/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconst3;
REAL_VALUE_TYPE dconst10;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), the
   two rtx objects are likewise the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;           /* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;  /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;       /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw (rtx);
static rtx make_call_insn_raw (rtx);
static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void unshare_all_rtl_1 (rtx);
static void unshare_all_decls (tree);
static void reset_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
                                 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector_0 (enum machine_mode);
static rtx gen_complex_constant_part (enum machine_mode, rtx, int);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_DOUBLE) is the same as that represented by Y (which is also
   really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  rtx a = (rtx) x, b = (rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
          ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
          ^ (size_t) p->expr);
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
          && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
               unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
          || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
          ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
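
/* Illustrative sketch only, kept out of the build: thanks to the
   hash-consing above, requesting the same attributes twice yields the
   same GC-allocated structure, and all-default attributes are
   represented by a null pointer.  The variables below are
   hypothetical.  */
#if 0
  mem_attrs *a = get_mem_attrs (alias, expr, offset, size, align, SImode);
  mem_attrs *b = get_mem_attrs (alias, expr, offset, size, align, SImode);
  /* Here a == b: pointer equality substitutes for a field-by-field
     comparison.  Fully-default arguments would yield a null result.  */
#endif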

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG whose decl is DECL and whose offset within it is OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
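
/* Illustrative sketch only, kept out of the build: small CONST_INTs
   are preallocated in const_int_rtx, so equal values compare equal as
   pointers, and gen_int_mode truncates its argument to MODE first.
   Assumes a 16-bit HImode.  */
#if 0
  rtx x = GEN_INT (0);                    /* The same object as const0_rtx.  */
  rtx y = gen_int_mode (0x10000, HImode); /* Truncates to (const_int 0).  */
#endif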

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}
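
/* Illustrative sketch only, kept out of the build: callers normally
   reach the function above through the CONST_DOUBLE_FROM_REAL_VALUE
   macro; identical values in the same mode share one rtx via
   lookup_const_double.  */
#if 0
  rtx a = CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode);
  rtx b = CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode);
  /* Here a == b holds by pointer equality.  */
#endif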

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
          /* We can get a 0 for an error mark.  */
          && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
          && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
        abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
         our sign bit are all one.  So we get either a reasonable negative
         value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
          && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
              != ((HOST_WIDE_INT) (-1) << (width - 1))))
        i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
               && ! (i1 == ~0 && i0 < 0))
        i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
        /* We cannot represent this value as a constant.  */
        abort ();

      /* If this would be an entire word for the target, but is not for
         the host, then sign-extend on the host so that the number will
         look the same way on the host that it would on the target.

         For example, when building a 64 bit alpha hosted 32 bit sparc
         targeted compiler, then we want the 32 bit unsigned value -1 to be
         represented as a 64 bit value -1, and not as 0x00000000ffffffff.
         The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
          && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
        i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
         CONST_INT.

         ??? Strictly speaking, this is wrong if we create a CONST_INT for
         a large unsigned constant with the size of MODE being
         HOST_BITS_PER_WIDE_INT and later try to interpret that constant
         in a wider mode.  In that case we will mis-interpret it as a
         negative number.

         Unfortunately, the only alternative is to make a CONST_DOUBLE for
         any constant in any mode if it is an unsigned constant larger
         than the maximum signed integer in an int on the host.  However,
         doing this will break everyone that always expects to see a
         CONST_INT for SImode and smaller.

         We have always been making CONST_INTs in this case, so nothing
         new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
        i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
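
/* Illustrative sketch only, kept out of the build: on a host whose
   HOST_WIDE_INT is 32 bits, a 64-bit target constant is assembled from
   its two halves, while values that fit one word come back as a shared
   CONST_INT rather than a CONST_DOUBLE.  */
#if 0
  rtx big = immed_double_const (0, 1, DImode);    /* 2^32: a CONST_DOUBLE.  */
  rtx small = immed_double_const (42, 0, DImode); /* (const_int 42).  */
#endif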

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical
   SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
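
/* Illustrative sketch only, kept out of the build: the byte offset of
   the low part depends on endianness, which subreg_lowpart_offset
   hides from the caller.  Assuming 4-byte words and REG a hypothetical
   DImode pseudo:  */
#if 0
  rtx lo = gen_lowpart_SUBREG (SImode, reg);
  /* Little endian: (subreg:SI (reg:DI) 0).
     Big endian:    (subreg:SI (reg:DI) 4).  */
#endif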
\f
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**          This routine generates an RTX of the size specified by
**      <code>, which is an RTX code.  The RTX structure is initialized
**      from the arguments <element1> through <elementn>, which are
**      interpreted according to the specific RTX type's format.  The
**      special machine mode associated with the rtx (if any) is specified
**      in <mode>.
**
**          gen_rtx can be invoked in a way which resembles the lisp-like
**      rtx it will generate.  For example, the following rtx structure:
**
**            (plus:QI (mem:QI (reg:SI 1))
**                     (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**          ...would be generated by the following C code:
**
**      gen_rtx (PLUS, QImode,
**          gen_rtx (MEM, QImode,
**              gen_rtx (REG, SImode, 1)),
**          gen_rtx (MEM, QImode,
**              gen_rtx (PLUS, SImode,
**                  gen_rtx (REG, SImode, 2),
**                  gen_rtx (REG, SImode, 3))));
*/

/*VARARGS2*/
rtx
gen_rtx (enum rtx_code code, enum machine_mode mode, ...)
{
  int i;                /* Array indices...  */
  const char *fmt;      /* Current rtx's format...  */
  rtx rt_val;           /* RTX to return to caller...  */
  va_list p;

  va_start (p, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
        HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
        HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

        rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);        /* Allocate the storage space.  */
      rt_val->mode = mode;              /* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);      /* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
        {
          switch (*fmt++)
            {
            case '0':           /* Field with unknown use.  Zero it.  */
              X0EXP (rt_val, i) = NULL_RTX;
              break;

            case 'i':           /* An integer?  */
              XINT (rt_val, i) = va_arg (p, int);
              break;

            case 'w':           /* A wide integer?  */
              XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
              break;

            case 's':           /* A string?  */
              XSTR (rt_val, i) = va_arg (p, char *);
              break;

            case 'e':           /* An expression?  */
            case 'u':           /* An insn?  Same except when printing.  */
              XEXP (rt_val, i) = va_arg (p, rtx);
              break;

            case 'E':           /* An RTX vector?  */
              XVEC (rt_val, i) = va_arg (p, rtvec);
              break;

            case 'b':           /* A bitmap?  */
              XBITMAP (rt_val, i) = va_arg (p, bitmap);
              break;

            case 't':           /* A tree?  */
              XTREE (rt_val, i) = va_arg (p, tree);
              break;

            default:
              abort ();
            }
        }
      break;
    }

  va_end (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**          This routine creates an rtvec and stores within it the
**      pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  vector = alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);     /* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
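
/* Illustrative sketch only, kept out of the build: rtvecs mostly serve
   as operand vectors of rtxs such as PARALLEL.  With set0 and set1
   hypothetical SET rtxs, a two-set PARALLEL would be built as:  */
#if 0
  rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));
#endif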
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
                          old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
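
/* Illustrative sketch only, kept out of the build: scalar modes yield
   a single fresh pseudo, while complex modes (with generating_concat_p
   set) yield a CONCAT of two independent pseudos, one per part.  */
#if 0
  rtx r = gen_reg_rtx (SImode); /* (reg:SI N) for the next free N.  */
  rtx c = gen_reg_rtx (DCmode); /* (concat:DC (reg:DF N+1) (reg:DF N+2)).  */
#endif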

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the register's offset.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
{
  rtx new = gen_rtx_REG (mode, regno);
  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
                                   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes of REG from the memory reference MEM.  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (GET_CODE (parm_rtx) == REG)
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (GET_CODE (XEXP (x, 0)) == REG)
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (GET_CODE (x) == REG)
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (rtx x, int check_mode)
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();
#ifdef ENABLE_CHECKING
  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
                                      SUBREG_BYTE (x), mode))
    abort ();
#endif
  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
          > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE (x) != VOIDmode && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  else if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
            || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
           && GET_MODE (x) == VOIDmode
           && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
         representation.  Otherwise, if MODE is wider than a word, we can't
         do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
        return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
        return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
        return (GET_CODE (x) == CONST_INT ? x
                : GEN_INT (CONST_DOUBLE_LOW (x)));
      else
        {
          /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
          HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
                               : CONST_DOUBLE_LOW (x));

          /* Sign extend to HOST_WIDE_INT.  */
          val = trunc_int_for_mode (val, mode);

          return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
                  : GEN_INT (val));
        }
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32 bits and double-precision
     floats are always 64 bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
           && GET_MODE_BITSIZE (mode) == 32
           && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      long i = INTVAL (x);

      real_from_target (&r, &i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
           && GET_MODE_BITSIZE (mode) == 64
           && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
           && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT low, high;
      long i[2];

      if (GET_CODE (x) == CONST_INT)
        {
          low = INTVAL (x);
          high = low >> (HOST_BITS_PER_WIDE_INT - 1);
        }
      else
        {
          low = CONST_DOUBLE_LOW (x);
          high = CONST_DOUBLE_HIGH (x);
        }

      if (HOST_BITS_PER_WIDE_INT > 32)
        high = low >> 31 >> 1;

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
         target machine.  */
      if (WORDS_BIG_ENDIAN)
        i[0] = high, i[1] = low;
      else
        i[0] = low, i[1] = high;

      real_from_target (&r, i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
            || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
           && GET_CODE (x) == CONST_DOUBLE
           && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
         order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
        {
        case 32:
          REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
          i[1] = 0;
          i[2] = 0;
          i[3 - 3 * endian] = 0;
          break;
        case 64:
          REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
          i[2 - 2 * endian] = 0;
          i[3 - 2 * endian] = 0;
          break;
        case 96:
          REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
          i[3 - 3 * endian] = 0;
          break;
        case 128:
          REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
          break;
        default:
          abort ();
        }
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
         and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
        abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
                                  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
                                 (((unsigned long) i[2 - endian])
                                  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
                                 mode);
#endif
    }
  /* If MODE is a condition code and X is a CONST_INT, the value of X
     must already have been "recognized" by the back-end, and we can
     assume that it is valid for this mode.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
           && GET_CODE (x) == CONST_INT)
    return x;

  /* Otherwise, we can't do this.  */
  return 0;
}
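
/* Illustrative sketch only, kept out of the build: two common
   successes of gen_lowpart_common are stripping a same-mode extension
   and truncating an integer constant.  REG is a hypothetical SImode
   pseudo.  */
#if 0
  /* (zero_extend:DI (reg:SI r)) -> (reg:SI r).  */
  rtx a = gen_lowpart_common (SImode, gen_rtx_ZERO_EXTEND (DImode, reg));
  /* QImode low part of (const_int 0x1234) -> (const_int 0x34).  */
  rtx b = gen_lowpart_common (QImode, GEN_INT (0x1234));
#endif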
\f
/* Return the constant real or imaginary part (which has mode MODE)
   of a complex value X.  The IMAGPART_P argument determines whether
   the real or imaginary component should be returned.  This function
   returns NULL_RTX if the component isn't a constant.  */

static rtx
gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
{
  tree decl, part;

  if (GET_CODE (x) == MEM
      && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
    {
      decl = SYMBOL_REF_DECL (XEXP (x, 0));
      if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
        {
          part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (TREE_CODE (part) == REAL_CST
              || TREE_CODE (part) == INTEGER_CST)
            return expand_expr (part, NULL_RTX, mode, 0);
        }
    }
  return NULL_RTX;
}

/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (enum machine_mode mode, rtx x)
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 0);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (enum machine_mode mode, rtx x)
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 1);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
           && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
           && REG_P (x)
           && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
          < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (enum machine_mode mode, rtx x)
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
        abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
          && SCALAR_INT_MODE_P (GET_MODE (x))
          && ! no_new_pseudos)
        return gen_lowpart (mode, force_reg (GET_MODE (x), x));

      if (WORDS_BIG_ENDIAN)
        offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
                  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
        /* Adjust the address so that the address-after-the-data
           is unchanged.  */
        offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
                   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand explicitly,
   in case EXP is a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
        abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
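
/* Illustrative sketch only, kept out of the build: with 4-byte words,
   the SImode parts of a DImode value sit at these byte offsets.  */
#if 0
  subreg_lowpart_offset (SImode, DImode);  /* 0 little-endian, 4 big-endian.  */
  subreg_highpart_offset (SImode, DImode); /* 4 little-endian, 0 big-endian.  */
#endif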

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}
\f

/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (rtx op, int offset, enum machine_mode mode)
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
         which the words are written depends on the word endianness.
         ??? This is a potential portability problem and should
         be fixed at some point.

         We must exercise caution with the sign bit.  By definition there
         are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
         Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
         So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
        {
          val = k[offset];
          val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
          return GEN_INT (val);
        }
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
        {
          val = k[! WORDS_BIG_ENDIAN];
          val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
          val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
          return GEN_INT (val);
        }
#endif
      else if (BITS_PER_WORD == 16)
        {
          val = k[offset >> 1];
          if ((offset & 1) == ! WORDS_BIG_ENDIAN)
            val >>= 16;
          val = ((val & 0xffff) ^ 0x8000) - 0x8000;
          return GEN_INT (val);
        }
      else
        abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
           && GET_MODE_CLASS (mode) == MODE_FLOAT
           && GET_MODE_BITSIZE (mode) > 64
           && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
        {
          val = k[offset];
          val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
          return GEN_INT (val);
        }
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
        {
          val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
          val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
          val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
          return GEN_INT (val);
        }
#endif
      else
        abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
        {
          if ((offset & 1) == ! WORDS_BIG_ENDIAN)
            val >>= 16;
          val = ((val & 0xffff) ^ 0x8000) - 0x8000;
        }

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
         ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
         : (GET_CODE (op) == CONST_INT
            ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}
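
/* Illustrative sketch only, kept out of the build: picking apart an
   integer constant, assuming 32-bit target words, a 64-bit
   HOST_WIDE_INT and a little-endian target.  */
#if 0
  rtx op = GEN_INT ((HOST_WIDE_INT) 5 << 32 | 7);
  rtx w0 = constant_subword (op, 0, DImode);    /* (const_int 7).  */
  rtx w1 = constant_subword (op, 1, DImode);    /* (const_int 5).  */
#endif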

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new;

      else if (reload_completed)
        {
          if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
            return 0;
        }
      else
        return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (GET_CODE (op) == REG)
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
\f
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (rtx insn)
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
                                 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
        SET_SRC (body) = new;
      else
        SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
             || TREE_CODE (inner) == NON_LVALUE_EXPR
             || TREE_CODE (inner) == VIEW_CONVERT_EXPR
             || TREE_CODE (inner) == SAVE_EXPR
             || TREE_CODE (inner) == PLACEHOLDER_EXPR)
        if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
          inner = find_placeholder (inner, &placeholder_ptr);
        else
          inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
        inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
                  TREE_OPERAND (ref, 1));
}
1801
1802 /* Given REF, a MEM, and T, either the type of REF or the expression
1803 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1804 if we are making a new object of this type. BITPOS is the outstanding
1805 bit position offset on T that will be applied later. */
1806
1807 void
1808 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1809 HOST_WIDE_INT bitpos)
1810 {
1811 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1812 tree expr = MEM_EXPR (ref);
1813 rtx offset = MEM_OFFSET (ref);
1814 rtx size = MEM_SIZE (ref);
1815 unsigned int align = MEM_ALIGN (ref);
1816 HOST_WIDE_INT apply_bitpos = 0;
1817 tree type;
1818
1819 /* It can happen that type_for_mode was given a mode for which there
1820 is no language-level type. In that case it returns NULL, which we
1821 can see here. */
1822 if (t == NULL_TREE)
1823 return;
1824
1825 type = TYPE_P (t) ? t : TREE_TYPE (t);
1826
1827 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1828 wrong answer, as it assumes that DECL_RTL already has the right alias
1829 info. Callers should not set DECL_RTL until after the call to
1830 set_mem_attributes. */
1831 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1832 abort ();
1833
1834 /* Get the alias set from the expression or type (perhaps using a
1835 front-end routine) and use it. */
1836 alias = get_alias_set (t);
1837
1838 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1839 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1840 RTX_UNCHANGING_P (ref)
1841 |= ((lang_hooks.honor_readonly
1842 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1843 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1844
1845 /* If we are making an object of this type, or if this is a DECL, we know
1846 that it is a scalar if the type is not an aggregate. */
1847 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1848 MEM_SCALAR_P (ref) = 1;
1849
1850 /* We can set the alignment from the type if we are making an object,
1851 if this is an INDIRECT_REF, or if TYPE_ALIGN_OK is set. */
1852 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1853 align = MAX (align, TYPE_ALIGN (type));
1854
1855 /* If the size is known, we can set that. */
1856 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1857 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1858
1859 /* If T is not a type, we may be able to deduce some more information about
1860 the expression. */
1861 if (! TYPE_P (t))
1862 {
1863 maybe_set_unchanging (ref, t);
1864 if (TREE_THIS_VOLATILE (t))
1865 MEM_VOLATILE_P (ref) = 1;
1866
1867 /* Now remove any conversions: they don't change what the underlying
1868 object is. Likewise for SAVE_EXPR. */
1869 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1870 || TREE_CODE (t) == NON_LVALUE_EXPR
1871 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1872 || TREE_CODE (t) == SAVE_EXPR)
1873 t = TREE_OPERAND (t, 0);
1874
1875 /* If this expression can't be addressed (e.g., it contains a reference
1876 to a non-addressable field), show we don't change its alias set. */
1877 if (! can_address_p (t))
1878 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1879
1880 /* If this is a decl, set the attributes of the MEM from it. */
1881 if (DECL_P (t))
1882 {
1883 expr = t;
1884 offset = const0_rtx;
1885 apply_bitpos = bitpos;
1886 size = (DECL_SIZE_UNIT (t)
1887 && host_integerp (DECL_SIZE_UNIT (t), 1)
1888 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1889 align = DECL_ALIGN (t);
1890 }
1891
1892 /* If this is a constant, we know the alignment. */
1893 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1894 {
1895 align = TYPE_ALIGN (type);
1896 #ifdef CONSTANT_ALIGNMENT
1897 align = CONSTANT_ALIGNMENT (t, align);
1898 #endif
1899 }
1900
1901 /* If this is a field reference and not a bit-field, record it. */
1902 /* ??? There is some information that can be gleaned from bit-fields,
1903 such as the word offset in the structure that might be modified.
1904 But skip it for now. */
1905 else if (TREE_CODE (t) == COMPONENT_REF
1906 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1907 {
1908 expr = component_ref_for_mem_expr (t);
1909 offset = const0_rtx;
1910 apply_bitpos = bitpos;
1911 /* ??? Any reason the field size would be different from
1912 the size we got from the type? */
1913 }
1914
1915 /* If this is an array reference, look for an outer field reference. */
1916 else if (TREE_CODE (t) == ARRAY_REF)
1917 {
1918 tree off_tree = size_zero_node;
1919 /* We can't modify t, because we use it at the end of the
1920 function. */
1921 tree t2 = t;
1922
1923 do
1924 {
1925 tree index = TREE_OPERAND (t2, 1);
1926 tree array = TREE_OPERAND (t2, 0);
1927 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1928 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1929 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1930
1931 /* We assume all arrays have sizes that are a multiple of a byte.
1932 First subtract the lower bound, if any, in the type of the
1933 index, then convert to sizetype and multiply by the size of the
1934 array element. */
1935 if (low_bound != 0 && ! integer_zerop (low_bound))
1936 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1937 index, low_bound));
1938
1939 /* If the index has a self-referential type, pass it to a
1940 WITH_RECORD_EXPR; if the component size has one too, pass our
1941 component to one. */
1942 if (CONTAINS_PLACEHOLDER_P (index))
1943 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t2);
1944 if (CONTAINS_PLACEHOLDER_P (unit_size))
1945 unit_size = build (WITH_RECORD_EXPR, sizetype,
1946 unit_size, array);
1947
1948 off_tree
1949 = fold (build (PLUS_EXPR, sizetype,
1950 fold (build (MULT_EXPR, sizetype,
1951 index,
1952 unit_size)),
1953 off_tree));
1954 t2 = TREE_OPERAND (t2, 0);
1955 }
1956 while (TREE_CODE (t2) == ARRAY_REF);
1957
1958 if (DECL_P (t2))
1959 {
1960 expr = t2;
1961 offset = NULL;
1962 if (host_integerp (off_tree, 1))
1963 {
1964 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1965 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1966 align = DECL_ALIGN (t2);
1967 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1968 align = aoff;
1969 offset = GEN_INT (ioff);
1970 apply_bitpos = bitpos;
1971 }
1972 }
1973 else if (TREE_CODE (t2) == COMPONENT_REF)
1974 {
1975 expr = component_ref_for_mem_expr (t2);
1976 if (host_integerp (off_tree, 1))
1977 {
1978 offset = GEN_INT (tree_low_cst (off_tree, 1));
1979 apply_bitpos = bitpos;
1980 }
1981 /* ??? Any reason the field size would be different from
1982 the size we got from the type? */
1983 }
1984 else if (flag_argument_noalias > 1
1985 && TREE_CODE (t2) == INDIRECT_REF
1986 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1987 {
1988 expr = t2;
1989 offset = NULL;
1990 }
1991 }
1992
1993 /* If this is a Fortran indirect argument reference, record the
1994 parameter decl. */
1995 else if (flag_argument_noalias > 1
1996 && TREE_CODE (t) == INDIRECT_REF
1997 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1998 {
1999 expr = t;
2000 offset = NULL;
2001 }
2002 }
2003
2004 /* If we modified OFFSET based on T, then subtract the outstanding
2005 bit position offset. Similarly, increase the size of the accessed
2006 object to contain the negative offset. */
2007 if (apply_bitpos)
2008 {
2009 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
2010 if (size)
2011 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
2012 }
2013
2014 /* Now set the attributes we computed above. */
2015 MEM_ATTRS (ref)
2016 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
2017
2018 /* If this is already known to be a scalar or aggregate, we are done. */
2019 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
2020 return;
2021
2022 /* If it is a reference into an aggregate, this is part of an aggregate.
2023 Otherwise we don't know. */
2024 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
2025 || TREE_CODE (t) == ARRAY_RANGE_REF
2026 || TREE_CODE (t) == BIT_FIELD_REF)
2027 MEM_IN_STRUCT_P (ref) = 1;
2028 }
2029
2030 void
2031 set_mem_attributes (rtx ref, tree t, int objectp)
2032 {
2033 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2034 }
2035
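/* For instance, when expansion first builds a MEM for a declaration,
 a typical sequence is something like

	rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
	set_mem_attributes (mem, decl, 1);

 (DECL and ADDR being illustrative), which derives the alias set,
 volatility, alignment, size and MEM_EXPR of the MEM from the decl
 before DECL_RTL is installed. */
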
2036 /* Set the decl for MEM to DECL. */
2037
2038 void
2039 set_mem_attrs_from_reg (rtx mem, rtx reg)
2040 {
2041 MEM_ATTRS (mem)
2042 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
2043 GEN_INT (REG_OFFSET (reg)),
2044 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2045 }
2046
2047 /* Set the alias set of MEM to SET. */
2048
2049 void
2050 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
2051 {
2052 #ifdef ENABLE_CHECKING
2053 /* If the new and old alias sets don't conflict, something is wrong. */
2054 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
2055 abort ();
2056 #endif
2057
2058 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
2059 MEM_SIZE (mem), MEM_ALIGN (mem),
2060 GET_MODE (mem));
2061 }
2062
2063 /* Set the alignment of MEM to ALIGN bits. */
2064
2065 void
2066 set_mem_align (rtx mem, unsigned int align)
2067 {
2068 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2069 MEM_OFFSET (mem), MEM_SIZE (mem), align,
2070 GET_MODE (mem));
2071 }
2072
2073 /* Set the expr for MEM to EXPR. */
2074
2075 void
2076 set_mem_expr (rtx mem, tree expr)
2077 {
2078 MEM_ATTRS (mem)
2079 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
2080 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2081 }
2082
2083 /* Set the offset of MEM to OFFSET. */
2084
2085 void
2086 set_mem_offset (rtx mem, rtx offset)
2087 {
2088 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2089 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
2090 GET_MODE (mem));
2091 }
2092
2093 /* Set the size of MEM to SIZE. */
2094
2095 void
2096 set_mem_size (rtx mem, rtx size)
2097 {
2098 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2099 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
2100 GET_MODE (mem));
2101 }
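
/* Each setter above rebuilds the whole attribute block with
 get_mem_attrs rather than modifying it in place, since mem_attrs
 structures are shared. For example, a caller that has proved a
 stricter alignment might use

	set_mem_align (mem, BITS_PER_WORD);

 and leave every other attribute of MEM unchanged. */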
2102 \f
2103 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2104 and its address changed to ADDR. (VOIDmode means don't change the mode.
2105 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2106 returned memory location is required to be valid. The memory
2107 attributes are not changed. */
2108
2109 static rtx
2110 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
2111 {
2112 rtx new;
2113
2114 if (GET_CODE (memref) != MEM)
2115 abort ();
2116 if (mode == VOIDmode)
2117 mode = GET_MODE (memref);
2118 if (addr == 0)
2119 addr = XEXP (memref, 0);
2120
2121 if (validate)
2122 {
2123 if (reload_in_progress || reload_completed)
2124 {
2125 if (! memory_address_p (mode, addr))
2126 abort ();
2127 }
2128 else
2129 addr = memory_address (mode, addr);
2130 }
2131
2132 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2133 return memref;
2134
2135 new = gen_rtx_MEM (mode, addr);
2136 MEM_COPY_ATTRIBUTES (new, memref);
2137 return new;
2138 }
2139
2140 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2141 way we are changing MEMREF, so we only preserve the alias set. */
2142
2143 rtx
2144 change_address (rtx memref, enum machine_mode mode, rtx addr)
2145 {
2146 rtx new = change_address_1 (memref, mode, addr, 1);
2147 enum machine_mode mmode = GET_MODE (new);
2148
2149 MEM_ATTRS (new)
2150 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2151 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2152 (mmode == BLKmode ? BITS_PER_UNIT
2153 : GET_MODE_ALIGNMENT (mmode)),
2154 mmode);
2155
2156 return new;
2157 }
2158
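/* For example, a block move expander that reads a BLKmode MEM one
 word at a time might use something like

	rtx word = change_address (blk_mem, word_mode, new_addr);

 (illustrative names); only the alias set survives, since we cannot
 tell which part of the original object the new reference touches. */
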
2159 /* Return a memory reference like MEMREF, but with its mode changed
2160 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2161 nonzero, the memory address is forced to be valid.
2162 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2163 and the caller is responsible for adjusting the MEMREF base register. */
2164
2165 rtx
2166 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2167 int validate, int adjust)
2168 {
2169 rtx addr = XEXP (memref, 0);
2170 rtx new;
2171 rtx memoffset = MEM_OFFSET (memref);
2172 rtx size = 0;
2173 unsigned int memalign = MEM_ALIGN (memref);
2174
2175 /* ??? Prefer to create garbage instead of creating shared rtl.
2176 This may happen even if offset is nonzero -- consider
2177 (plus (plus reg reg) const_int) -- so do this always. */
2178 addr = copy_rtx (addr);
2179
2180 if (adjust)
2181 {
2182 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2183 object, we can merge it into the LO_SUM. */
2184 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2185 && offset >= 0
2186 && (unsigned HOST_WIDE_INT) offset
2187 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2188 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2189 plus_constant (XEXP (addr, 1), offset));
2190 else
2191 addr = plus_constant (addr, offset);
2192 }
2193
2194 new = change_address_1 (memref, mode, addr, validate);
2195
2196 /* Compute the new values of the memory attributes due to this adjustment.
2197 We add the offsets and update the alignment. */
2198 if (memoffset)
2199 memoffset = GEN_INT (offset + INTVAL (memoffset));
2200
2201 /* Compute the new alignment by taking the MIN of the alignment and the
2202 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2203 is zero. */
2204 if (offset != 0)
2205 memalign
2206 = MIN (memalign,
2207 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2208
2209 /* We can compute the size in a number of ways. */
2210 if (GET_MODE (new) != BLKmode)
2211 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2212 else if (MEM_SIZE (memref))
2213 size = plus_constant (MEM_SIZE (memref), -offset);
2214
2215 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2216 memoffset, size, memalign, GET_MODE (new));
2217
2218 /* At some point, we should validate that this offset is within the object,
2219 if all the appropriate values are known. */
2220 return new;
2221 }
2222
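/* Most callers use the adjust_address and adjust_address_nv macros
 from expr.h, which fix ADJUST to 1; e.g. the SImode word at byte
 offset 4 of a DImode MEM could be accessed with

	rtx part = adjust_address (dimem, SImode, 4);

 where DIMEM stands for any DImode memory reference. */
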
2223 /* Return a memory reference like MEMREF, but with its mode changed
2224 to MODE and its address changed to ADDR, which is assumed to be
2225 MEMREF offset by OFFSET bytes. If VALIDATE is
2226 nonzero, the memory address is forced to be valid. */
2227
2228 rtx
2229 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2230 HOST_WIDE_INT offset, int validate)
2231 {
2232 memref = change_address_1 (memref, VOIDmode, addr, validate);
2233 return adjust_address_1 (memref, mode, offset, validate, 0);
2234 }
2235
2236 /* Return a memory reference like MEMREF, but whose address is changed by
2237 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2238 known to be in OFFSET (possibly 1). */
2239
2240 rtx
2241 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2242 {
2243 rtx new, addr = XEXP (memref, 0);
2244
2245 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2246
2247 /* At this point we don't know _why_ the address is invalid. It
2248 could have secondary memory references, multiplies or anything.
2249
2250 However, if we did go and rearrange things, we can wind up not
2251 being able to recognize the magic around pic_offset_table_rtx.
2252 This stuff is fragile, and is yet another example of why it is
2253 bad to expose PIC machinery too early. */
2254 if (! memory_address_p (GET_MODE (memref), new)
2255 && GET_CODE (addr) == PLUS
2256 && XEXP (addr, 0) == pic_offset_table_rtx)
2257 {
2258 addr = force_reg (GET_MODE (addr), addr);
2259 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2260 }
2261
2262 update_temp_slot_address (XEXP (memref, 0), new);
2263 new = change_address_1 (memref, VOIDmode, new, 1);
2264
2265 /* Update the alignment to reflect the offset. Reset the offset, which
2266 we don't know. */
2267 MEM_ATTRS (new)
2268 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2269 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2270 GET_MODE (new));
2271 return new;
2272 }
2273
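/* For example, indexing into a buffer by a run-time offset known to
 be a multiple of 4 might look like

	rtx elt = offset_address (buf_mem, index_reg, 4);

 with buf_mem and index_reg as illustrative names. */
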
2274 /* Return a memory reference like MEMREF, but with its address changed to
2275 ADDR. The caller is asserting that the actual piece of memory pointed
2276 to is the same, just the form of the address is being changed, such as
2277 by putting something into a register. */
2278
2279 rtx
2280 replace_equiv_address (rtx memref, rtx addr)
2281 {
2282 /* change_address_1 copies the memory attribute structure without change
2283 and that's exactly what we want here. */
2284 update_temp_slot_address (XEXP (memref, 0), addr);
2285 return change_address_1 (memref, VOIDmode, addr, 1);
2286 }
2287
2288 /* Likewise, but the reference is not required to be valid. */
2289
2290 rtx
2291 replace_equiv_address_nv (rtx memref, rtx addr)
2292 {
2293 return change_address_1 (memref, VOIDmode, addr, 0);
2294 }
2295
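/* A common use is to force a complex but equivalent address into a
 register without losing the memory attributes, e.g.

	mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));

 which keeps the MEM_EXPR, size and alignment intact. */
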
2296 /* Return a memory reference like MEMREF, but with its mode widened to
2297 MODE and offset by OFFSET. This would be used by targets that e.g.
2298 cannot issue QImode memory operations and have to use SImode memory
2299 operations plus masking logic. */
2300
2301 rtx
2302 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2303 {
2304 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2305 tree expr = MEM_EXPR (new);
2306 rtx memoffset = MEM_OFFSET (new);
2307 unsigned int size = GET_MODE_SIZE (mode);
2308
2309 /* If we don't know what offset we were at within the expression, then
2310 we can't know if we've overstepped the bounds. */
2311 if (! memoffset)
2312 expr = NULL_TREE;
2313
2314 while (expr)
2315 {
2316 if (TREE_CODE (expr) == COMPONENT_REF)
2317 {
2318 tree field = TREE_OPERAND (expr, 1);
2319
2320 if (! DECL_SIZE_UNIT (field))
2321 {
2322 expr = NULL_TREE;
2323 break;
2324 }
2325
2326 /* Is the field at least as large as the access? If so, we are done;
2327 otherwise strip back to the containing structure. */
2328 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2329 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2330 && INTVAL (memoffset) >= 0)
2331 break;
2332
2333 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2334 {
2335 expr = NULL_TREE;
2336 break;
2337 }
2338
2339 expr = TREE_OPERAND (expr, 0);
2340 memoffset = (GEN_INT (INTVAL (memoffset)
2341 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2342 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2343 / BITS_PER_UNIT)));
2344 }
2345 /* Similarly for the decl. */
2346 else if (DECL_P (expr)
2347 && DECL_SIZE_UNIT (expr)
2348 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2349 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2350 && (! memoffset || INTVAL (memoffset) >= 0))
2351 break;
2352 else
2353 {
2354 /* The widened memory access overflows the expression, which means
2355 that it could alias another expression. Zap it. */
2356 expr = NULL_TREE;
2357 break;
2358 }
2359 }
2360
2361 if (! expr)
2362 memoffset = NULL_RTX;
2363
2364 /* The widened memory may alias other stuff, so zap the alias set. */
2365 /* ??? Maybe use get_alias_set on any remaining expression. */
2366
2367 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2368 MEM_ALIGN (new), mode);
2369
2370 return new;
2371 }
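
/* For instance, a target without QImode loads might widen a byte
 access with something like

	rtx wide = widen_memory_access (byte_mem, SImode, 0);

 and extract the byte with shifts and masks; the checks above keep
 the recorded attributes conservatively correct. */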
2372 \f
2373 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2374
2375 rtx
2376 gen_label_rtx (void)
2377 {
2378 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2379 NULL, label_num++, NULL);
2380 }
2381 \f
2382 /* For procedure integration. */
2383
2384 /* Install new pointers to the first and last insns in the chain.
2385 Also, set cur_insn_uid to one higher than the last in use.
2386 Used for an inline-procedure after copying the insn chain. */
2387
2388 void
2389 set_new_first_and_last_insn (rtx first, rtx last)
2390 {
2391 rtx insn;
2392
2393 first_insn = first;
2394 last_insn = last;
2395 cur_insn_uid = 0;
2396
2397 for (insn = first; insn; insn = NEXT_INSN (insn))
2398 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2399
2400 cur_insn_uid++;
2401 }
2402
2403 /* Set the range of label numbers found in the current function.
2404 This is used when belatedly compiling an inline function. */
2405
2406 void
2407 set_new_first_and_last_label_num (int first, int last)
2408 {
2409 base_label_num = label_num;
2410 first_label_num = first;
2411 last_label_num = last;
2412 }
2413
2414 /* Set the last label number found in the current function.
2415 This is used when belatedly compiling an inline function. */
2416
2417 void
2418 set_new_last_label_num (int last)
2419 {
2420 base_label_num = label_num;
2421 last_label_num = last;
2422 }
2423 \f
2424 /* Restore all variables describing the current status from the structure *P.
2425 This is used after a nested function. */
2426
2427 void
2428 restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
2429 {
2430 last_label_num = 0;
2431 }
2432 \f
2433 /* Go through all the RTL insn bodies and copy any invalid shared
2434 structure. This routine should only be called once. */
2435
2436 void
2437 unshare_all_rtl (tree fndecl, rtx insn)
2438 {
2439 tree decl;
2440
2441 /* Make sure that virtual parameters are not shared. */
2442 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2443 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2444
2445 /* Make sure that virtual stack slots are not shared. */
2446 unshare_all_decls (DECL_INITIAL (fndecl));
2447
2448 /* Unshare just about everything else. */
2449 unshare_all_rtl_1 (insn);
2450
2451 /* Make sure the addresses of stack slots found outside the insn chain
2452 (such as, in DECL_RTL of a variable) are not shared
2453 with the insn chain.
2454
2455 This special care is necessary when the stack slot MEM does not
2456 actually appear in the insn chain. If it does appear, its address
2457 is unshared from all else at that point. */
2458 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2459 }
2460
2461 /* Go through all the RTL insn bodies and copy any invalid shared
2462 structure, again. This is a fairly expensive thing to do so it
2463 should be done sparingly. */
2464
2465 void
2466 unshare_all_rtl_again (rtx insn)
2467 {
2468 rtx p;
2469 tree decl;
2470
2471 for (p = insn; p; p = NEXT_INSN (p))
2472 if (INSN_P (p))
2473 {
2474 reset_used_flags (PATTERN (p));
2475 reset_used_flags (REG_NOTES (p));
2476 reset_used_flags (LOG_LINKS (p));
2477 }
2478
2479 /* Make sure that virtual stack slots are not shared. */
2480 reset_used_decls (DECL_INITIAL (cfun->decl));
2481
2482 /* Make sure that virtual parameters are not shared. */
2483 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2484 reset_used_flags (DECL_RTL (decl));
2485
2486 reset_used_flags (stack_slot_list);
2487
2488 unshare_all_rtl (cfun->decl, insn);
2489 }
2490
2491 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2492 Assumes the mark bits are cleared at entry. */
2493
2494 static void
2495 unshare_all_rtl_1 (rtx insn)
2496 {
2497 for (; insn; insn = NEXT_INSN (insn))
2498 if (INSN_P (insn))
2499 {
2500 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2501 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2502 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2503 }
2504 }
2505
2506 /* Go through all virtual stack slots of a function and copy any
2507 shared structure. */
2508 static void
2509 unshare_all_decls (tree blk)
2510 {
2511 tree t;
2512
2513 /* Copy shared decls. */
2514 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2515 if (DECL_RTL_SET_P (t))
2516 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2517
2518 /* Now process sub-blocks. */
2519 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2520 unshare_all_decls (t);
2521 }
2522
2523 /* Go through all virtual stack slots of a function and mark them as
2524 not shared. */
2525 static void
2526 reset_used_decls (tree blk)
2527 {
2528 tree t;
2529
2530 /* Mark decls. */
2531 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2532 if (DECL_RTL_SET_P (t))
2533 reset_used_flags (DECL_RTL (t));
2534
2535 /* Now process sub-blocks. */
2536 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2537 reset_used_decls (t);
2538 }
2539
2540 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2541 placed in the result directly, rather than being copied. MAY_SHARE is
2542 either a MEM or an EXPR_LIST of MEMs. */
2543
2544 rtx
2545 copy_most_rtx (rtx orig, rtx may_share)
2546 {
2547 rtx copy;
2548 int i, j;
2549 RTX_CODE code;
2550 const char *format_ptr;
2551
2552 if (orig == may_share
2553 || (GET_CODE (may_share) == EXPR_LIST
2554 && in_expr_list_p (may_share, orig)))
2555 return orig;
2556
2557 code = GET_CODE (orig);
2558
2559 switch (code)
2560 {
2561 case REG:
2562 case QUEUED:
2563 case CONST_INT:
2564 case CONST_DOUBLE:
2565 case CONST_VECTOR:
2566 case SYMBOL_REF:
2567 case CODE_LABEL:
2568 case PC:
2569 case CC0:
2570 return orig;
2571 default:
2572 break;
2573 }
2574
2575 copy = rtx_alloc (code);
2576 PUT_MODE (copy, GET_MODE (orig));
2577 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2578 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2579 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2580 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2581 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2582
2583 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2584
2585 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2586 {
2587 switch (*format_ptr++)
2588 {
2589 case 'e':
2590 XEXP (copy, i) = XEXP (orig, i);
2591 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2592 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2593 break;
2594
2595 case 'u':
2596 XEXP (copy, i) = XEXP (orig, i);
2597 break;
2598
2599 case 'E':
2600 case 'V':
2601 XVEC (copy, i) = XVEC (orig, i);
2602 if (XVEC (orig, i) != NULL)
2603 {
2604 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2605 for (j = 0; j < XVECLEN (copy, i); j++)
2606 XVECEXP (copy, i, j)
2607 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2608 }
2609 break;
2610
2611 case 'w':
2612 XWINT (copy, i) = XWINT (orig, i);
2613 break;
2614
2615 case 'n':
2616 case 'i':
2617 XINT (copy, i) = XINT (orig, i);
2618 break;
2619
2620 case 't':
2621 XTREE (copy, i) = XTREE (orig, i);
2622 break;
2623
2624 case 's':
2625 case 'S':
2626 XSTR (copy, i) = XSTR (orig, i);
2627 break;
2628
2629 case '0':
2630 /* Copy this through the wide int field; that's safest. */
2631 X0WINT (copy, i) = X0WINT (orig, i);
2632 break;
2633
2634 default:
2635 abort ();
2636 }
2637 }
2638 return copy;
2639 }
2640
2641 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2642 Recursively does the same for subexpressions. */
2643
2644 rtx
2645 copy_rtx_if_shared (rtx orig)
2646 {
2647 rtx x = orig;
2648 int i;
2649 enum rtx_code code;
2650 const char *format_ptr;
2651 int copied = 0;
2652
2653 if (x == 0)
2654 return 0;
2655
2656 code = GET_CODE (x);
2657
2658 /* These types may be freely shared. */
2659
2660 switch (code)
2661 {
2662 case REG:
2663 case QUEUED:
2664 case CONST_INT:
2665 case CONST_DOUBLE:
2666 case CONST_VECTOR:
2667 case SYMBOL_REF:
2668 case CODE_LABEL:
2669 case PC:
2670 case CC0:
2671 case SCRATCH:
2672 /* SCRATCH must be shared because each one represents a distinct value. */
2673 return x;
2674
2675 case CONST:
2676 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2677 a LABEL_REF, it isn't sharable. */
2678 if (GET_CODE (XEXP (x, 0)) == PLUS
2679 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2680 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2681 return x;
2682 break;
2683
2684 case INSN:
2685 case JUMP_INSN:
2686 case CALL_INSN:
2687 case NOTE:
2688 case BARRIER:
2689 /* The chain of insns is not being copied. */
2690 return x;
2691
2692 case MEM:
2693 /* A MEM is allowed to be shared if its address is constant.
2694
2695 We used to allow sharing of MEMs which referenced
2696 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2697 that can lose. instantiate_virtual_regs will not unshare
2698 the MEMs, and combine may change the structure of the address
2699 because it looks safe and profitable in one context, but
2700 in some other context it creates unrecognizable RTL. */
2701 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2702 return x;
2703
2704 break;
2705
2706 default:
2707 break;
2708 }
2709
2710 /* This rtx may not be shared. If it has already been seen,
2711 replace it with a copy of itself. */
2712
2713 if (RTX_FLAG (x, used))
2714 {
2715 rtx copy;
2716
2717 copy = rtx_alloc (code);
2718 memcpy (copy, x,
2719 (sizeof (*copy) - sizeof (copy->fld)
2720 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2721 x = copy;
2722 copied = 1;
2723 }
2724 RTX_FLAG (x, used) = 1;
2725
2726 /* Now scan the subexpressions recursively.
2727 We can store any replaced subexpressions directly into X
2728 since we know X is not shared! Any vectors in X
2729 must be copied if X was copied. */
2730
2731 format_ptr = GET_RTX_FORMAT (code);
2732
2733 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2734 {
2735 switch (*format_ptr++)
2736 {
2737 case 'e':
2738 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2739 break;
2740
2741 case 'E':
2742 if (XVEC (x, i) != NULL)
2743 {
2744 int j;
2745 int len = XVECLEN (x, i);
2746
2747 if (copied && len > 0)
2748 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2749 for (j = 0; j < len; j++)
2750 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2751 }
2752 break;
2753 }
2754 }
2755 return x;
2756 }
2757
2758 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2759 to look for shared sub-parts. */
2760
2761 void
2762 reset_used_flags (rtx x)
2763 {
2764 int i, j;
2765 enum rtx_code code;
2766 const char *format_ptr;
2767
2768 if (x == 0)
2769 return;
2770
2771 code = GET_CODE (x);
2772
2773 /* These types may be freely shared so we needn't do any resetting
2774 for them. */
2775
2776 switch (code)
2777 {
2778 case REG:
2779 case QUEUED:
2780 case CONST_INT:
2781 case CONST_DOUBLE:
2782 case CONST_VECTOR:
2783 case SYMBOL_REF:
2784 case CODE_LABEL:
2785 case PC:
2786 case CC0:
2787 return;
2788
2789 case INSN:
2790 case JUMP_INSN:
2791 case CALL_INSN:
2792 case NOTE:
2793 case LABEL_REF:
2794 case BARRIER:
2795 /* The chain of insns is not being copied. */
2796 return;
2797
2798 default:
2799 break;
2800 }
2801
2802 RTX_FLAG (x, used) = 0;
2803
2804 format_ptr = GET_RTX_FORMAT (code);
2805 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2806 {
2807 switch (*format_ptr++)
2808 {
2809 case 'e':
2810 reset_used_flags (XEXP (x, i));
2811 break;
2812
2813 case 'E':
2814 for (j = 0; j < XVECLEN (x, i); j++)
2815 reset_used_flags (XVECEXP (x, i, j));
2816 break;
2817 }
2818 }
2819 }
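
/* reset_used_flags and copy_rtx_if_shared work as a pair: the used
 bits must first be cleared over the whole region of interest, as in

	reset_used_flags (PATTERN (insn));
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

 which is exactly the protocol unshare_all_rtl_again follows above. */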
2820 \f
2821 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2822 Return X or the rtx for the pseudo reg the value of X was copied into.
2823 OTHER must be valid as a SET_DEST. */
2824
2825 rtx
2826 make_safe_from (rtx x, rtx other)
2827 {
2828 while (1)
2829 switch (GET_CODE (other))
2830 {
2831 case SUBREG:
2832 other = SUBREG_REG (other);
2833 break;
2834 case STRICT_LOW_PART:
2835 case SIGN_EXTEND:
2836 case ZERO_EXTEND:
2837 other = XEXP (other, 0);
2838 break;
2839 default:
2840 goto done;
2841 }
2842 done:
2843 if ((GET_CODE (other) == MEM
2844 && ! CONSTANT_P (x)
2845 && GET_CODE (x) != REG
2846 && GET_CODE (x) != SUBREG)
2847 || (GET_CODE (other) == REG
2848 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2849 || reg_mentioned_p (other, x))))
2850 {
2851 rtx temp = gen_reg_rtx (GET_MODE (x));
2852 emit_move_insn (temp, x);
2853 return temp;
2854 }
2855 return x;
2856 }
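
/* For example, an expander about to store into TARGET can guard an
 operand that might overlap it with something like

	op1 = make_safe_from (op1, target);

 copying OP1 to a fresh pseudo only when an overlap is possible. */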
2857 \f
2858 /* Emission of insns (adding them to the doubly-linked list). */
2859
2860 /* Return the first insn of the current sequence or current function. */
2861
2862 rtx
2863 get_insns (void)
2864 {
2865 return first_insn;
2866 }
2867
2868 /* Specify a new insn as the first in the chain. */
2869
2870 void
2871 set_first_insn (rtx insn)
2872 {
2873 if (PREV_INSN (insn) != 0)
2874 abort ();
2875 first_insn = insn;
2876 }
2877
2878 /* Return the last insn emitted in current sequence or current function. */
2879
2880 rtx
2881 get_last_insn (void)
2882 {
2883 return last_insn;
2884 }
2885
2886 /* Specify a new insn as the last in the chain. */
2887
2888 void
2889 set_last_insn (rtx insn)
2890 {
2891 if (NEXT_INSN (insn) != 0)
2892 abort ();
2893 last_insn = insn;
2894 }
2895
2896 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2897
2898 rtx
2899 get_last_insn_anywhere (void)
2900 {
2901 struct sequence_stack *stack;
2902 if (last_insn)
2903 return last_insn;
2904 for (stack = seq_stack; stack; stack = stack->next)
2905 if (stack->last != 0)
2906 return stack->last;
2907 return 0;
2908 }
2909
2910 /* Return the first nonnote insn emitted in current sequence or current
2911 function. This routine looks inside SEQUENCEs. */
2912
2913 rtx
2914 get_first_nonnote_insn (void)
2915 {
2916 rtx insn = first_insn;
2917
2918 while (insn)
2919 {
2920 insn = next_insn (insn);
2921 if (insn == 0 || GET_CODE (insn) != NOTE)
2922 break;
2923 }
2924
2925 return insn;
2926 }
2927
2928 /* Return the last nonnote insn emitted in current sequence or current
2929 function. This routine looks inside SEQUENCEs. */
2930
2931 rtx
2932 get_last_nonnote_insn (void)
2933 {
2934 rtx insn = last_insn;
2935
2936 while (insn)
2937 {
2938 insn = previous_insn (insn);
2939 if (insn == 0 || GET_CODE (insn) != NOTE)
2940 break;
2941 }
2942
2943 return insn;
2944 }
2945
2946 /* Return a number larger than any instruction's uid in this function. */
2947
2948 int
2949 get_max_uid (void)
2950 {
2951 return cur_insn_uid;
2952 }
2953
2954 /* Renumber instructions so that no instruction UIDs are wasted. */
2955
2956 void
2957 renumber_insns (FILE *stream)
2958 {
2959 rtx insn;
2960
2961 /* If we're not supposed to renumber instructions, don't. */
2962 if (!flag_renumber_insns)
2963 return;
2964
2965 /* If there aren't that many instructions, then it's not really
2966 worth renumbering them. */
2967 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2968 return;
2969
2970 cur_insn_uid = 1;
2971
2972 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2973 {
2974 if (stream)
2975 fprintf (stream, "Renumbering insn %d to %d\n",
2976 INSN_UID (insn), cur_insn_uid);
2977 INSN_UID (insn) = cur_insn_uid++;
2978 }
2979 }
2980 \f
2981 /* Return the next insn. If it is a SEQUENCE, return the first insn
2982 of the sequence. */
2983
2984 rtx
2985 next_insn (rtx insn)
2986 {
2987 if (insn)
2988 {
2989 insn = NEXT_INSN (insn);
2990 if (insn && GET_CODE (insn) == INSN
2991 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2992 insn = XVECEXP (PATTERN (insn), 0, 0);
2993 }
2994
2995 return insn;
2996 }
2997
2998 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2999 of the sequence. */
3000
3001 rtx
3002 previous_insn (rtx insn)
3003 {
3004 if (insn)
3005 {
3006 insn = PREV_INSN (insn);
3007 if (insn && GET_CODE (insn) == INSN
3008 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3009 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3010 }
3011
3012 return insn;
3013 }
3014
3015 /* Return the next insn after INSN that is not a NOTE. This routine does not
3016 look inside SEQUENCEs. */
3017
3018 rtx
3019 next_nonnote_insn (rtx insn)
3020 {
3021 while (insn)
3022 {
3023 insn = NEXT_INSN (insn);
3024 if (insn == 0 || GET_CODE (insn) != NOTE)
3025 break;
3026 }
3027
3028 return insn;
3029 }
3030
3031 /* Return the previous insn before INSN that is not a NOTE. This routine does
3032 not look inside SEQUENCEs. */
3033
3034 rtx
3035 prev_nonnote_insn (rtx insn)
3036 {
3037 while (insn)
3038 {
3039 insn = PREV_INSN (insn);
3040 if (insn == 0 || GET_CODE (insn) != NOTE)
3041 break;
3042 }
3043
3044 return insn;
3045 }
3046
3047 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3048 or 0, if there is none. This routine does not look inside
3049 SEQUENCEs. */
3050
3051 rtx
3052 next_real_insn (rtx insn)
3053 {
3054 while (insn)
3055 {
3056 insn = NEXT_INSN (insn);
3057 if (insn == 0 || GET_CODE (insn) == INSN
3058 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3059 break;
3060 }
3061
3062 return insn;
3063 }
3064
3065 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3066 or 0, if there is none. This routine does not look inside
3067 SEQUENCEs. */
3068
3069 rtx
3070 prev_real_insn (rtx insn)
3071 {
3072 while (insn)
3073 {
3074 insn = PREV_INSN (insn);
3075 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3076 || GET_CODE (insn) == JUMP_INSN)
3077 break;
3078 }
3079
3080 return insn;
3081 }
3082
3083 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3084 This routine does not look inside SEQUENCEs. */
3085
3086 rtx
3087 last_call_insn (void)
3088 {
3089 rtx insn;
3090
3091 for (insn = get_last_insn ();
3092 insn && GET_CODE (insn) != CALL_INSN;
3093 insn = PREV_INSN (insn))
3094 ;
3095
3096 return insn;
3097 }
3098
3099 /* Return nonzero if INSN really does something: it is a CALL_INSN or
3100 JUMP_INSN, or it is an INSN whose pattern is not merely a USE or
3101 CLOBBER once reload has completed. */
3102
3103 int
3104 active_insn_p (rtx insn)
3105 {
3106 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3107 || (GET_CODE (insn) == INSN
3108 && (! reload_completed
3109 || (GET_CODE (PATTERN (insn)) != USE
3110 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3111 }
3112
/* Find the next insn after INSN that really does something. This routine
 does not look inside SEQUENCEs. Until reload has completed, this is the
 same as next_real_insn. */

3113 rtx
3114 next_active_insn (rtx insn)
3115 {
3116 while (insn)
3117 {
3118 insn = NEXT_INSN (insn);
3119 if (insn == 0 || active_insn_p (insn))
3120 break;
3121 }
3122
3123 return insn;
3124 }
3125
3126 /* Find the last insn before INSN that really does something. This routine
3127 does not look inside SEQUENCEs. Until reload has completed, this is the
3128 same as prev_real_insn. */
3129
3130 rtx
3131 prev_active_insn (rtx insn)
3132 {
3133 while (insn)
3134 {
3135 insn = PREV_INSN (insn);
3136 if (insn == 0 || active_insn_p (insn))
3137 break;
3138 }
3139
3140 return insn;
3141 }
3142
3143 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3144
3145 rtx
3146 next_label (rtx insn)
3147 {
3148 while (insn)
3149 {
3150 insn = NEXT_INSN (insn);
3151 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3152 break;
3153 }
3154
3155 return insn;
3156 }
3157
3158 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3159
3160 rtx
3161 prev_label (rtx insn)
3162 {
3163 while (insn)
3164 {
3165 insn = PREV_INSN (insn);
3166 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3167 break;
3168 }
3169
3170 return insn;
3171 }
3172 \f
3173 #ifdef HAVE_cc0
3174 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3175 and REG_CC_USER notes so we can find them later. */
3176
3177 void
3178 link_cc0_insns (rtx insn)
3179 {
3180 rtx user = next_nonnote_insn (insn);
3181
3182 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3183 user = XVECEXP (PATTERN (user), 0, 0);
3184
3185 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3186 REG_NOTES (user));
3187 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3188 }
3189
3190 /* Return the next insn that uses CC0 after INSN, which is assumed to
3191 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3192 applied to the result of this function should yield INSN).
3193
3194 Normally, this is simply the next insn. However, if a REG_CC_USER note
3195 is present, it contains the insn that uses CC0.
3196
3197 Return 0 if we can't find the insn. */
3198
3199 rtx
3200 next_cc0_user (rtx insn)
3201 {
3202 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3203
3204 if (note)
3205 return XEXP (note, 0);
3206
3207 insn = next_nonnote_insn (insn);
3208 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3209 insn = XVECEXP (PATTERN (insn), 0, 0);
3210
3211 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3212 return insn;
3213
3214 return 0;
3215 }
3216
3217 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3218 note, it is the previous insn. */
3219
3220 rtx
3221 prev_cc0_setter (rtx insn)
3222 {
3223 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3224
3225 if (note)
3226 return XEXP (note, 0);
3227
3228 insn = prev_nonnote_insn (insn);
3229 if (! sets_cc0_p (PATTERN (insn)))
3230 abort ();
3231
3232 return insn;
3233 }
3234 #endif
3235
3236 /* Increment the label uses for all labels present in rtx. */
3237
3238 static void
3239 mark_label_nuses (rtx x)
3240 {
3241 enum rtx_code code;
3242 int i, j;
3243 const char *fmt;
3244
3245 code = GET_CODE (x);
3246 if (code == LABEL_REF)
3247 LABEL_NUSES (XEXP (x, 0))++;
3248
3249 fmt = GET_RTX_FORMAT (code);
3250 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3251 {
3252 if (fmt[i] == 'e')
3253 mark_label_nuses (XEXP (x, i));
3254 else if (fmt[i] == 'E')
3255 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3256 mark_label_nuses (XVECEXP (x, i, j));
3257 }
3258 }
3259
3260 \f
3261 /* Try splitting insns that can be split for better scheduling.
3262 PAT is the pattern which might split.
3263 TRIAL is the insn providing PAT.
3264 LAST is nonzero if we should return the last insn of the sequence produced.
3265
3266 If this routine succeeds in splitting, it returns the first or last
3267 replacement insn depending on the value of LAST. Otherwise, it
3268 returns TRIAL. If the insn to be returned can be split, it will be. */
3269
3270 rtx
3271 try_split (rtx pat, rtx trial, int last)
3272 {
3273 rtx before = PREV_INSN (trial);
3274 rtx after = NEXT_INSN (trial);
3275 int has_barrier = 0;
3276 rtx tem;
3277 rtx note, seq;
3278 int probability;
3279 rtx insn_last, insn;
3280 int njumps = 0;
3281
3282 if (any_condjump_p (trial)
3283 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3284 split_branch_probability = INTVAL (XEXP (note, 0));
3285 probability = split_branch_probability;
3286
3287 seq = split_insns (pat, trial);
3288
3289 split_branch_probability = -1;
3290
3291 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3292 We may need to handle this specially. */
3293 if (after && GET_CODE (after) == BARRIER)
3294 {
3295 has_barrier = 1;
3296 after = NEXT_INSN (after);
3297 }
3298
3299 if (!seq)
3300 return trial;
3301
3302 /* Avoid infinite loop if any insn of the result matches
3303 the original pattern. */
3304 insn_last = seq;
3305 while (1)
3306 {
3307 if (INSN_P (insn_last)
3308 && rtx_equal_p (PATTERN (insn_last), pat))
3309 return trial;
3310 if (!NEXT_INSN (insn_last))
3311 break;
3312 insn_last = NEXT_INSN (insn_last);
3313 }
3314
3315 /* Mark labels. */
3316 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3317 {
3318 if (GET_CODE (insn) == JUMP_INSN)
3319 {
3320 mark_jump_label (PATTERN (insn), insn, 0);
3321 njumps++;
3322 if (probability != -1
3323 && any_condjump_p (insn)
3324 && !find_reg_note (insn, REG_BR_PROB, 0))
3325 {
3326 /* We can preserve the REG_BR_PROB notes only if exactly
3327 one jump is created; otherwise the machine description
3328 is responsible for this step, using the
3329 split_branch_probability variable. */
3330 if (njumps != 1)
3331 abort ();
3332 REG_NOTES (insn)
3333 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3334 GEN_INT (probability),
3335 REG_NOTES (insn));
3336 }
3337 }
3338 }
3339
3340 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3341 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3342 if (GET_CODE (trial) == CALL_INSN)
3343 {
3344 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3345 if (GET_CODE (insn) == CALL_INSN)
3346 {
3347 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3348 while (*p)
3349 p = &XEXP (*p, 1);
3350 *p = CALL_INSN_FUNCTION_USAGE (trial);
3351 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3352 }
3353 }
3354
3355 /* Copy notes, particularly those related to the CFG. */
3356 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3357 {
3358 switch (REG_NOTE_KIND (note))
3359 {
3360 case REG_EH_REGION:
3361 insn = insn_last;
3362 while (insn != NULL_RTX)
3363 {
3364 if (GET_CODE (insn) == CALL_INSN
3365 || (flag_non_call_exceptions
3366 && may_trap_p (PATTERN (insn))))
3367 REG_NOTES (insn)
3368 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3369 XEXP (note, 0),
3370 REG_NOTES (insn));
3371 insn = PREV_INSN (insn);
3372 }
3373 break;
3374
3375 case REG_NORETURN:
3376 case REG_SETJMP:
3377 case REG_ALWAYS_RETURN:
3378 insn = insn_last;
3379 while (insn != NULL_RTX)
3380 {
3381 if (GET_CODE (insn) == CALL_INSN)
3382 REG_NOTES (insn)
3383 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3384 XEXP (note, 0),
3385 REG_NOTES (insn));
3386 insn = PREV_INSN (insn);
3387 }
3388 break;
3389
3390 case REG_NON_LOCAL_GOTO:
3391 insn = insn_last;
3392 while (insn != NULL_RTX)
3393 {
3394 if (GET_CODE (insn) == JUMP_INSN)
3395 REG_NOTES (insn)
3396 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3397 XEXP (note, 0),
3398 REG_NOTES (insn));
3399 insn = PREV_INSN (insn);
3400 }
3401 break;
3402
3403 default:
3404 break;
3405 }
3406 }
3407
3408 /* If there are labels inside the split insns, increment their usage
3409 counts so we don't delete the labels. */
3410 if (GET_CODE (trial) == INSN)
3411 {
3412 insn = insn_last;
3413 while (insn != NULL_RTX)
3414 {
3415 if (GET_CODE (insn) == INSN)
3416 mark_label_nuses (PATTERN (insn));
3417
3418 insn = PREV_INSN (insn);
3419 }
3420 }
3421
3422 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3423
3424 delete_insn (trial);
3425 if (has_barrier)
3426 emit_barrier_after (tem);
3427
3428 /* Recursively call try_split for each new insn created; by the
3429 time control returns here that insn will be fully split, so
3430 set LAST and continue from the insn after the one returned.
3431 We can't use next_active_insn here since AFTER may be a note.
3432 Ignore deleted insns, which can occur when not optimizing. */
3433 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3434 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3435 tem = try_split (PATTERN (tem), tem, 1);
3436
3437 /* Return either the first or the last insn, depending on which was
3438 requested. */
3439 return last
3440 ? (after ? PREV_INSN (after) : last_insn)
3441 : NEXT_INSN (before);
3442 }
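
/* A pass that wants INSN fully split can simply call

	insn = try_split (PATTERN (insn), insn, 1);

 and if no splitter matches, the original insn comes back unchanged. */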
3443 \f
3444 /* Make and return an INSN rtx, initializing all its slots.
3445 Store PATTERN in the pattern slot. */
3446
3447 rtx
3448 make_insn_raw (rtx pattern)
3449 {
3450 rtx insn;
3451
3452 insn = rtx_alloc (INSN);
3453
3454 INSN_UID (insn) = cur_insn_uid++;
3455 PATTERN (insn) = pattern;
3456 INSN_CODE (insn) = -1;
3457 LOG_LINKS (insn) = NULL;
3458 REG_NOTES (insn) = NULL;
3459 INSN_LOCATOR (insn) = 0;
3460 BLOCK_FOR_INSN (insn) = NULL;
3461
3462 #ifdef ENABLE_RTL_CHECKING
3463 if (insn
3464 && INSN_P (insn)
3465 && (returnjump_p (insn)
3466 || (GET_CODE (insn) == SET
3467 && SET_DEST (insn) == pc_rtx)))
3468 {
3469 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3470 debug_rtx (insn);
3471 }
3472 #endif
3473
3474 return insn;
3475 }
3476
3477 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3478
3479 static rtx
3480 make_jump_insn_raw (rtx pattern)
3481 {
3482 rtx insn;
3483
3484 insn = rtx_alloc (JUMP_INSN);
3485 INSN_UID (insn) = cur_insn_uid++;
3486
3487 PATTERN (insn) = pattern;
3488 INSN_CODE (insn) = -1;
3489 LOG_LINKS (insn) = NULL;
3490 REG_NOTES (insn) = NULL;
3491 JUMP_LABEL (insn) = NULL;
3492 INSN_LOCATOR (insn) = 0;
3493 BLOCK_FOR_INSN (insn) = NULL;
3494
3495 return insn;
3496 }
3497
3498 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3499
3500 static rtx
3501 make_call_insn_raw (rtx pattern)
3502 {
3503 rtx insn;
3504
3505 insn = rtx_alloc (CALL_INSN);
3506 INSN_UID (insn) = cur_insn_uid++;
3507
3508 PATTERN (insn) = pattern;
3509 INSN_CODE (insn) = -1;
3510 LOG_LINKS (insn) = NULL;
3511 REG_NOTES (insn) = NULL;
3512 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3513 INSN_LOCATOR (insn) = 0;
3514 BLOCK_FOR_INSN (insn) = NULL;
3515
3516 return insn;
3517 }
3518 \f
3519 /* Add INSN to the end of the doubly-linked list.
3520 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3521
3522 void
3523 add_insn (rtx insn)
3524 {
3525 PREV_INSN (insn) = last_insn;
3526 NEXT_INSN (insn) = 0;
3527
3528 if (NULL != last_insn)
3529 NEXT_INSN (last_insn) = insn;
3530
3531 if (NULL == first_insn)
3532 first_insn = insn;
3533
3534 last_insn = insn;
3535 }
3536
3537 /* Add INSN into the doubly-linked list after insn AFTER. This and
3538 the next should be the only functions called to insert an insn once
3539 delay slots have been filled since only they know how to update a
3540 SEQUENCE. */
3541
3542 void
3543 add_insn_after (rtx insn, rtx after)
3544 {
3545 rtx next = NEXT_INSN (after);
3546 basic_block bb;
3547
3548 if (optimize && INSN_DELETED_P (after))
3549 abort ();
3550
3551 NEXT_INSN (insn) = next;
3552 PREV_INSN (insn) = after;
3553
3554 if (next)
3555 {
3556 PREV_INSN (next) = insn;
3557 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3558 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3559 }
3560 else if (last_insn == after)
3561 last_insn = insn;
3562 else
3563 {
3564 struct sequence_stack *stack = seq_stack;
3565 /* Scan all pending sequences too. */
3566 for (; stack; stack = stack->next)
3567 if (after == stack->last)
3568 {
3569 stack->last = insn;
3570 break;
3571 }
3572
3573 if (stack == 0)
3574 abort ();
3575 }
3576
3577 if (GET_CODE (after) != BARRIER
3578 && GET_CODE (insn) != BARRIER
3579 && (bb = BLOCK_FOR_INSN (after)))
3580 {
3581 set_block_for_insn (insn, bb);
3582 if (INSN_P (insn))
3583 bb->flags |= BB_DIRTY;
3584 /* This should not happen, as the first insn in the BB is always
3585 either a NOTE or a LABEL. */
3586 if (bb->end == after
3587 /* Avoid clobbering of structure when creating new BB. */
3588 && GET_CODE (insn) != BARRIER
3589 && (GET_CODE (insn) != NOTE
3590 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3591 bb->end = insn;
3592 }
3593
3594 NEXT_INSN (after) = insn;
3595 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3596 {
3597 rtx sequence = PATTERN (after);
3598 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3599 }
3600 }
3601
3602 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3603 the previous should be the only functions called to insert an insn once
3604 delay slots have been filled since only they know how to update a
3605 SEQUENCE. */
3606
3607 void
3608 add_insn_before (rtx insn, rtx before)
3609 {
3610 rtx prev = PREV_INSN (before);
3611 basic_block bb;
3612
3613 if (optimize && INSN_DELETED_P (before))
3614 abort ();
3615
3616 PREV_INSN (insn) = prev;
3617 NEXT_INSN (insn) = before;
3618
3619 if (prev)
3620 {
3621 NEXT_INSN (prev) = insn;
3622 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3623 {
3624 rtx sequence = PATTERN (prev);
3625 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3626 }
3627 }
3628 else if (first_insn == before)
3629 first_insn = insn;
3630 else
3631 {
3632 struct sequence_stack *stack = seq_stack;
3633 /* Scan all pending sequences too. */
3634 for (; stack; stack = stack->next)
3635 if (before == stack->first)
3636 {
3637 stack->first = insn;
3638 break;
3639 }
3640
3641 if (stack == 0)
3642 abort ();
3643 }
3644
3645 if (GET_CODE (before) != BARRIER
3646 && GET_CODE (insn) != BARRIER
3647 && (bb = BLOCK_FOR_INSN (before)))
3648 {
3649 set_block_for_insn (insn, bb);
3650 if (INSN_P (insn))
3651 bb->flags |= BB_DIRTY;
3652 /* This should not happen, as the first insn in the BB is always
3653 either a NOTE or a LABEL. */
3654 if (bb->head == insn
3655 /* Avoid clobbering of structure when creating new BB. */
3656 && GET_CODE (insn) != BARRIER
3657 && (GET_CODE (insn) != NOTE
3658 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3659 abort ();
3660 }
3661
3662 PREV_INSN (before) = insn;
3663 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3664 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3665 }
3666
3667 /* Remove an insn from its doubly-linked list. This function knows how
3668 to handle sequences. */
3669 void
3670 remove_insn (rtx insn)
3671 {
3672 rtx next = NEXT_INSN (insn);
3673 rtx prev = PREV_INSN (insn);
3674 basic_block bb;
3675
3676 if (prev)
3677 {
3678 NEXT_INSN (prev) = next;
3679 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3680 {
3681 rtx sequence = PATTERN (prev);
3682 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3683 }
3684 }
3685 else if (first_insn == insn)
3686 first_insn = next;
3687 else
3688 {
3689 struct sequence_stack *stack = seq_stack;
3690 /* Scan all pending sequences too. */
3691 for (; stack; stack = stack->next)
3692 if (insn == stack->first)
3693 {
3694 stack->first = next;
3695 break;
3696 }
3697
3698 if (stack == 0)
3699 abort ();
3700 }
3701
3702 if (next)
3703 {
3704 PREV_INSN (next) = prev;
3705 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3706 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3707 }
3708 else if (last_insn == insn)
3709 last_insn = prev;
3710 else
3711 {
3712 struct sequence_stack *stack = seq_stack;
3713 /* Scan all pending sequences too. */
3714 for (; stack; stack = stack->next)
3715 if (insn == stack->last)
3716 {
3717 stack->last = prev;
3718 break;
3719 }
3720
3721 if (stack == 0)
3722 abort ();
3723 }
3724 if (GET_CODE (insn) != BARRIER
3725 && (bb = BLOCK_FOR_INSN (insn)))
3726 {
3727 if (INSN_P (insn))
3728 bb->flags |= BB_DIRTY;
3729 if (bb->head == insn)
3730 {
3731 /* Never ever delete the basic block note without deleting the whole
3732 basic block. */
3733 if (GET_CODE (insn) == NOTE)
3734 abort ();
3735 bb->head = next;
3736 }
3737 if (bb->end == insn)
3738 bb->end = prev;
3739 }
3740 }
3741
3742 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3743
3744 void
3745 add_function_usage_to (rtx call_insn, rtx call_fusage)
3746 {
3747 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3748 abort ();
3749
3750 /* Put the register usage information on the CALL. If there is already
3751 some usage information, put ours at the end. */
3752 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3753 {
3754 rtx link;
3755
3756 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3757 link = XEXP (link, 1))
3758 ;
3759
3760 XEXP (link, 1) = call_fusage;
3761 }
3762 else
3763 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3764 }
3765
3766 /* Delete all insns made since FROM.
3767 FROM becomes the new last instruction. */
3768
3769 void
3770 delete_insns_since (rtx from)
3771 {
3772 if (from == 0)
3773 first_insn = 0;
3774 else
3775 NEXT_INSN (from) = 0;
3776 last_insn = from;
3777 }
3778
3779 /* This function is deprecated; please use sequences instead.
3780
3781 Move a consecutive bunch of insns to a different place in the chain.
3782 The insns to be moved are those between FROM and TO.
3783 They are moved to a new position after the insn AFTER.
3784 AFTER must not be FROM or TO or any insn in between.
3785
3786 This function does not know about SEQUENCEs and hence should not be
3787 called after delay-slot filling has been done. */
3788
3789 void
3790 reorder_insns_nobb (rtx from, rtx to, rtx after)
3791 {
3792 /* Splice this bunch out of where it is now. */
3793 if (PREV_INSN (from))
3794 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3795 if (NEXT_INSN (to))
3796 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3797 if (last_insn == to)
3798 last_insn = PREV_INSN (from);
3799 if (first_insn == from)
3800 first_insn = NEXT_INSN (to);
3801
3802 /* Make the new neighbors point to it and it to them. */
3803 if (NEXT_INSN (after))
3804 PREV_INSN (NEXT_INSN (after)) = to;
3805
3806 NEXT_INSN (to) = NEXT_INSN (after);
3807 PREV_INSN (from) = after;
3808 NEXT_INSN (after) = from;
3809 if (after == last_insn)
3810 last_insn = to;
3811 }
3812
3813 /* Same as the function above, but takes care to update BB boundaries. */
3814 void
3815 reorder_insns (rtx from, rtx to, rtx after)
3816 {
3817 rtx prev = PREV_INSN (from);
3818 basic_block bb, bb2;
3819
3820 reorder_insns_nobb (from, to, after);
3821
3822 if (GET_CODE (after) != BARRIER
3823 && (bb = BLOCK_FOR_INSN (after)))
3824 {
3825 rtx x;
3826 bb->flags |= BB_DIRTY;
3827
3828 if (GET_CODE (from) != BARRIER
3829 && (bb2 = BLOCK_FOR_INSN (from)))
3830 {
3831 if (bb2->end == to)
3832 bb2->end = prev;
3833 bb2->flags |= BB_DIRTY;
3834 }
3835
3836 if (bb->end == after)
3837 bb->end = to;
3838
3839 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3840 set_block_for_insn (x, bb);
3841 }
3842 }
3843
3844 /* Return the line note insn preceding INSN. */
3845
3846 static rtx
3847 find_line_note (rtx insn)
3848 {
3849 if (no_line_numbers)
3850 return 0;
3851
3852 for (; insn; insn = PREV_INSN (insn))
3853 if (GET_CODE (insn) == NOTE
3854 && NOTE_LINE_NUMBER (insn) >= 0)
3855 break;
3856
3857 return insn;
3858 }
3859
3860 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3861 of the moved insns when debugging. This may insert a note between AFTER
3862 and FROM, and another one after TO. */
3863
3864 void
3865 reorder_insns_with_line_notes (rtx from, rtx to, rtx after)
3866 {
3867 rtx from_line = find_line_note (from);
3868 rtx after_line = find_line_note (after);
3869
3870 reorder_insns (from, to, after);
3871
3872 if (from_line == after_line)
3873 return;
3874
3875 if (from_line)
3876 emit_note_copy_after (from_line, after);
3877 if (after_line)
3878 emit_note_copy_after (after_line, to);
3879 }
3880
3881 /* Remove unnecessary notes from the instruction stream. */
3882
3883 void
3884 remove_unnecessary_notes (void)
3885 {
3886 rtx block_stack = NULL_RTX;
3887 rtx eh_stack = NULL_RTX;
3888 rtx insn;
3889 rtx next;
3890 rtx tmp;
3891
3892 /* We must not remove the first instruction in the function because
3893 the compiler depends on the first instruction being a note. */
3894 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3895 {
3896 /* Remember what's next. */
3897 next = NEXT_INSN (insn);
3898
3899 /* We're only interested in notes. */
3900 if (GET_CODE (insn) != NOTE)
3901 continue;
3902
3903 switch (NOTE_LINE_NUMBER (insn))
3904 {
3905 case NOTE_INSN_DELETED:
3906 case NOTE_INSN_LOOP_END_TOP_COND:
3907 remove_insn (insn);
3908 break;
3909
3910 case NOTE_INSN_EH_REGION_BEG:
3911 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3912 break;
3913
3914 case NOTE_INSN_EH_REGION_END:
3915 /* Too many end notes. */
3916 if (eh_stack == NULL_RTX)
3917 abort ();
3918 /* Mismatched nesting. */
3919 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3920 abort ();
3921 tmp = eh_stack;
3922 eh_stack = XEXP (eh_stack, 1);
3923 free_INSN_LIST_node (tmp);
3924 break;
3925
3926 case NOTE_INSN_BLOCK_BEG:
3927 /* By now, all notes indicating lexical blocks should have
3928 NOTE_BLOCK filled in. */
3929 if (NOTE_BLOCK (insn) == NULL_TREE)
3930 abort ();
3931 block_stack = alloc_INSN_LIST (insn, block_stack);
3932 break;
3933
3934 case NOTE_INSN_BLOCK_END:
3935 /* Too many end notes. */
3936 if (block_stack == NULL_RTX)
3937 abort ();
3938 /* Mismatched nesting. */
3939 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3940 abort ();
3941 tmp = block_stack;
3942 block_stack = XEXP (block_stack, 1);
3943 free_INSN_LIST_node (tmp);
3944
3945 /* Scan back to see if there are any non-note instructions
3946 between INSN and the beginning of this block. If not,
3947 then there is no PC range in the generated code that will
3948 actually be in this block, so there's no point in
3949 remembering the existence of the block. */
3950 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3951 {
3952 /* This block contains a real instruction. Note that we
3953 don't include labels; if the only thing in the block
3954 is a label, then there are still no PC values that
3955 lie within the block. */
3956 if (INSN_P (tmp))
3957 break;
3958
3959 /* We're only interested in NOTEs. */
3960 if (GET_CODE (tmp) != NOTE)
3961 continue;
3962
3963 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3964 {
3965 /* The block_stack check above already verified that
3966 this BLOCK matches our BLOCK_END note. Never delete
3967 the BLOCK for the outermost scope of the function; we
3968 can refer to names from that scope even if the
3969 block notes are messed up. */
3970 if (! is_body_block (NOTE_BLOCK (insn))
3971 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3972 {
3973 remove_insn (tmp);
3974 remove_insn (insn);
3975 }
3976 break;
3977 }
3978 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3979 /* There's a nested block. We need to leave the
3980 current block in place since otherwise the debugger
3981 wouldn't be able to show symbols from our block in
3982 the nested block. */
3983 break;
3984 }
3985 }
3986 }
3987
3988 /* Too many begin notes. */
3989 if (block_stack || eh_stack)
3990 abort ();
3991 }
3992
3993 \f
3994 /* Emit insn(s) of given code and pattern
3995 at a specified place within the doubly-linked list.
3996
3997 All of the emit_foo global entry points accept an object
3998 X which is either an insn list or a PATTERN of a single
3999 instruction.
4000
4001 There are thus a few canonical ways to generate code and
4002 emit it at a specific place in the instruction stream. For
4003 example, consider the instruction named SPOT and the fact that
4004 we would like to emit some instructions before SPOT. We might
4005 do it like this:
4006
4007 start_sequence ();
4008 ... emit the new instructions ...
4009 insns_head = get_insns ();
4010 end_sequence ();
4011
4012 emit_insn_before (insns_head, SPOT);
4013
4014 It used to be common to generate SEQUENCE rtl instead, but that
4015 is a relic of the past which no longer occurs. The reason is that
4016 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4017 generated would almost certainly die right after it was created. */
4018
4019 /* Make X be output before the instruction BEFORE. */
4020
4021 rtx
4022 emit_insn_before (rtx x, rtx before)
4023 {
4024 rtx last = before;
4025 rtx insn;
4026
4027 #ifdef ENABLE_RTL_CHECKING
4028 if (before == NULL_RTX)
4029 abort ();
4030 #endif
4031
4032 if (x == NULL_RTX)
4033 return last;
4034
4035 switch (GET_CODE (x))
4036 {
4037 case INSN:
4038 case JUMP_INSN:
4039 case CALL_INSN:
4040 case CODE_LABEL:
4041 case BARRIER:
4042 case NOTE:
4043 insn = x;
4044 while (insn)
4045 {
4046 rtx next = NEXT_INSN (insn);
4047 add_insn_before (insn, before);
4048 last = insn;
4049 insn = next;
4050 }
4051 break;
4052
4053 #ifdef ENABLE_RTL_CHECKING
4054 case SEQUENCE:
4055 abort ();
4056 break;
4057 #endif
4058
4059 default:
4060 last = make_insn_raw (x);
4061 add_insn_before (last, before);
4062 break;
4063 }
4064
4065 return last;
4066 }
4067
4068 /* Make an instruction with body X and code JUMP_INSN
4069 and output it before the instruction BEFORE. */
4070
4071 rtx
4072 emit_jump_insn_before (rtx x, rtx before)
4073 {
4074 rtx insn, last = NULL_RTX;
4075
4076 #ifdef ENABLE_RTL_CHECKING
4077 if (before == NULL_RTX)
4078 abort ();
4079 #endif
4080
4081 switch (GET_CODE (x))
4082 {
4083 case INSN:
4084 case JUMP_INSN:
4085 case CALL_INSN:
4086 case CODE_LABEL:
4087 case BARRIER:
4088 case NOTE:
4089 insn = x;
4090 while (insn)
4091 {
4092 rtx next = NEXT_INSN (insn);
4093 add_insn_before (insn, before);
4094 last = insn;
4095 insn = next;
4096 }
4097 break;
4098
4099 #ifdef ENABLE_RTL_CHECKING
4100 case SEQUENCE:
4101 abort ();
4102 break;
4103 #endif
4104
4105 default:
4106 last = make_jump_insn_raw (x);
4107 add_insn_before (last, before);
4108 break;
4109 }
4110
4111 return last;
4112 }
4113
4114 /* Make an instruction with body X and code CALL_INSN
4115 and output it before the instruction BEFORE. */
4116
4117 rtx
4118 emit_call_insn_before (rtx x, rtx before)
4119 {
4120 rtx last = NULL_RTX, insn;
4121
4122 #ifdef ENABLE_RTL_CHECKING
4123 if (before == NULL_RTX)
4124 abort ();
4125 #endif
4126
4127 switch (GET_CODE (x))
4128 {
4129 case INSN:
4130 case JUMP_INSN:
4131 case CALL_INSN:
4132 case CODE_LABEL:
4133 case BARRIER:
4134 case NOTE:
4135 insn = x;
4136 while (insn)
4137 {
4138 rtx next = NEXT_INSN (insn);
4139 add_insn_before (insn, before);
4140 last = insn;
4141 insn = next;
4142 }
4143 break;
4144
4145 #ifdef ENABLE_RTL_CHECKING
4146 case SEQUENCE:
4147 abort ();
4148 break;
4149 #endif
4150
4151 default:
4152 last = make_call_insn_raw (x);
4153 add_insn_before (last, before);
4154 break;
4155 }
4156
4157 return last;
4158 }
4159
4160 /* Make an insn of code BARRIER
4161 and output it before the insn BEFORE. */
4162
4163 rtx
4164 emit_barrier_before (rtx before)
4165 {
4166 rtx insn = rtx_alloc (BARRIER);
4167
4168 INSN_UID (insn) = cur_insn_uid++;
4169
4170 add_insn_before (insn, before);
4171 return insn;
4172 }
4173
4174 /* Emit the label LABEL before the insn BEFORE. */
4175
4176 rtx
4177 emit_label_before (rtx label, rtx before)
4178 {
4179 /* This can be called twice for the same label as a result of the
4180 confusion that follows a syntax error! So make it harmless. */
4181 if (INSN_UID (label) == 0)
4182 {
4183 INSN_UID (label) = cur_insn_uid++;
4184 add_insn_before (label, before);
4185 }
4186
4187 return label;
4188 }
4189
4190 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4191
4192 rtx
4193 emit_note_before (int subtype, rtx before)
4194 {
4195 rtx note = rtx_alloc (NOTE);
4196 INSN_UID (note) = cur_insn_uid++;
4197 NOTE_SOURCE_FILE (note) = 0;
4198 NOTE_LINE_NUMBER (note) = subtype;
4199 BLOCK_FOR_INSN (note) = NULL;
4200
4201 add_insn_before (note, before);
4202 return note;
4203 }
4204 \f
4205 /* Helper for emit_insn_after, handles lists of instructions
4206 efficiently. */
4207
4208 static rtx emit_insn_after_1 (rtx, rtx);
4209
4210 static rtx
4211 emit_insn_after_1 (rtx first, rtx after)
4212 {
4213 rtx last;
4214 rtx after_after;
4215 basic_block bb;
4216
4217 if (GET_CODE (after) != BARRIER
4218 && (bb = BLOCK_FOR_INSN (after)))
4219 {
4220 bb->flags |= BB_DIRTY;
4221 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4222 if (GET_CODE (last) != BARRIER)
4223 set_block_for_insn (last, bb); /* Every insn but the final one.  */
4224 if (GET_CODE (last) != BARRIER) /* Now LAST itself, which the loop stopped before.  */
4225 set_block_for_insn (last, bb);
4226 if (bb->end == after)
4227 bb->end = last;
4228 }
4229 else
4230 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4231 continue;
4232
4233 after_after = NEXT_INSN (after);
4234
4235 NEXT_INSN (after) = first;
4236 PREV_INSN (first) = after;
4237 NEXT_INSN (last) = after_after;
4238 if (after_after)
4239 PREV_INSN (after_after) = last;
4240
4241 if (after == last_insn)
4242 last_insn = last;
4243 return last;
4244 }
4245
4246 /* Make X be output after the insn AFTER. */
4247
4248 rtx
4249 emit_insn_after (rtx x, rtx after)
4250 {
4251 rtx last = after;
4252
4253 #ifdef ENABLE_RTL_CHECKING
4254 if (after == NULL_RTX)
4255 abort ();
4256 #endif
4257
4258 if (x == NULL_RTX)
4259 return last;
4260
4261 switch (GET_CODE (x))
4262 {
4263 case INSN:
4264 case JUMP_INSN:
4265 case CALL_INSN:
4266 case CODE_LABEL:
4267 case BARRIER:
4268 case NOTE:
4269 last = emit_insn_after_1 (x, after);
4270 break;
4271
4272 #ifdef ENABLE_RTL_CHECKING
4273 case SEQUENCE:
4274 abort ();
4275 break;
4276 #endif
4277
4278 default:
4279 last = make_insn_raw (x);
4280 add_insn_after (last, after);
4281 break;
4282 }
4283
4284 return last;
4285 }
4286
4287 /* Similar to emit_insn_after, except that line notes are to be inserted so
4288 as to act as if this insn were at FROM. */
4289
4290 void
4291 emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4292 {
4293 rtx from_line = find_line_note (from);
4294 rtx after_line = find_line_note (after);
4295 rtx insn = emit_insn_after (x, after);
4296
4297 if (from_line)
4298 emit_note_copy_after (from_line, after);
4299
4300 if (after_line)
4301 emit_note_copy_after (after_line, insn);
4302 }
4303
4304 /* Make an insn of code JUMP_INSN with body X
4305 and output it after the insn AFTER. */
4306
4307 rtx
4308 emit_jump_insn_after (rtx x, rtx after)
4309 {
4310 rtx last;
4311
4312 #ifdef ENABLE_RTL_CHECKING
4313 if (after == NULL_RTX)
4314 abort ();
4315 #endif
4316
4317 switch (GET_CODE (x))
4318 {
4319 case INSN:
4320 case JUMP_INSN:
4321 case CALL_INSN:
4322 case CODE_LABEL:
4323 case BARRIER:
4324 case NOTE:
4325 last = emit_insn_after_1 (x, after);
4326 break;
4327
4328 #ifdef ENABLE_RTL_CHECKING
4329 case SEQUENCE:
4330 abort ();
4331 break;
4332 #endif
4333
4334 default:
4335 last = make_jump_insn_raw (x);
4336 add_insn_after (last, after);
4337 break;
4338 }
4339
4340 return last;
4341 }
4342
4343 /* Make an instruction with body X and code CALL_INSN
4344 and output it after the instruction AFTER. */
4345
4346 rtx
4347 emit_call_insn_after (rtx x, rtx after)
4348 {
4349 rtx last;
4350
4351 #ifdef ENABLE_RTL_CHECKING
4352 if (after == NULL_RTX)
4353 abort ();
4354 #endif
4355
4356 switch (GET_CODE (x))
4357 {
4358 case INSN:
4359 case JUMP_INSN:
4360 case CALL_INSN:
4361 case CODE_LABEL:
4362 case BARRIER:
4363 case NOTE:
4364 last = emit_insn_after_1 (x, after);
4365 break;
4366
4367 #ifdef ENABLE_RTL_CHECKING
4368 case SEQUENCE:
4369 abort ();
4370 break;
4371 #endif
4372
4373 default:
4374 last = make_call_insn_raw (x);
4375 add_insn_after (last, after);
4376 break;
4377 }
4378
4379 return last;
4380 }
4381
4382 /* Make an insn of code BARRIER
4383 and output it after the insn AFTER. */
4384
4385 rtx
4386 emit_barrier_after (rtx after)
4387 {
4388 rtx insn = rtx_alloc (BARRIER);
4389
4390 INSN_UID (insn) = cur_insn_uid++;
4391
4392 add_insn_after (insn, after);
4393 return insn;
4394 }
4395
4396 /* Emit the label LABEL after the insn AFTER. */
4397
4398 rtx
4399 emit_label_after (rtx label, rtx after)
4400 {
4401 /* This can be called twice for the same label
4402 as a result of the confusion that follows a syntax error!
4403 So make it harmless. */
4404 if (INSN_UID (label) == 0)
4405 {
4406 INSN_UID (label) = cur_insn_uid++;
4407 add_insn_after (label, after);
4408 }
4409
4410 return label;
4411 }
4412
4413 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4414
4415 rtx
4416 emit_note_after (int subtype, rtx after)
4417 {
4418 rtx note = rtx_alloc (NOTE);
4419 INSN_UID (note) = cur_insn_uid++;
4420 NOTE_SOURCE_FILE (note) = 0;
4421 NOTE_LINE_NUMBER (note) = subtype;
4422 BLOCK_FOR_INSN (note) = NULL;
4423 add_insn_after (note, after);
4424 return note;
4425 }
4426
4427 /* Emit a copy of note ORIG after the insn AFTER. */
4428
4429 rtx
4430 emit_note_copy_after (rtx orig, rtx after)
4431 {
4432 rtx note;
4433
4434 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4435 {
4436 cur_insn_uid++;
4437 return 0;
4438 }
4439
4440 note = rtx_alloc (NOTE);
4441 INSN_UID (note) = cur_insn_uid++;
4442 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4443 NOTE_DATA (note) = NOTE_DATA (orig);
4444 BLOCK_FOR_INSN (note) = NULL;
4445 add_insn_after (note, after);
4446 return note;
4447 }
4448 \f
4449 /* Like emit_insn_after, but set INSN_LOCATOR according to LOC. */
4450 rtx
4451 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4452 {
4453 rtx last = emit_insn_after (pattern, after);
4454
4455 after = NEXT_INSN (after);
4456 while (1)
4457 {
4458 if (active_insn_p (after))
4459 INSN_LOCATOR (after) = loc;
4460 if (after == last)
4461 break;
4462 after = NEXT_INSN (after);
4463 }
4464 return last;
4465 }
4466
4467 /* Like emit_jump_insn_after, but set INSN_LOCATOR according to LOC. */
4468 rtx
4469 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4470 {
4471 rtx last = emit_jump_insn_after (pattern, after);
4472
4473 after = NEXT_INSN (after);
4474 while (1)
4475 {
4476 if (active_insn_p (after))
4477 INSN_LOCATOR (after) = loc;
4478 if (after == last)
4479 break;
4480 after = NEXT_INSN (after);
4481 }
4482 return last;
4483 }
4484
4485 /* Like emit_call_insn_after, but set INSN_LOCATOR according to LOC. */
4486 rtx
4487 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4488 {
4489 rtx last = emit_call_insn_after (pattern, after);
4490
4491 after = NEXT_INSN (after);
4492 while (1)
4493 {
4494 if (active_insn_p (after))
4495 INSN_LOCATOR (after) = loc;
4496 if (after == last)
4497 break;
4498 after = NEXT_INSN (after);
4499 }
4500 return last;
4501 }
4502
4503 /* Like emit_insn_before, but set INSN_LOCATOR according to LOC. */
4504 rtx
4505 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4506 {
4507 rtx first = PREV_INSN (before);
4508 rtx last = emit_insn_before (pattern, before);
4509
4510 first = NEXT_INSN (first);
4511 while (1)
4512 {
4513 if (active_insn_p (first))
4514 INSN_LOCATOR (first) = loc;
4515 if (first == last)
4516 break;
4517 first = NEXT_INSN (first);
4518 }
4519 return last;
4520 }
4521 \f
4522 /* Take X and emit it at the end of the doubly-linked
4523 INSN list.
4524
4525 Returns the last insn emitted. */
4526
4527 rtx
4528 emit_insn (rtx x)
4529 {
4530 rtx last = last_insn;
4531 rtx insn;
4532
4533 if (x == NULL_RTX)
4534 return last;
4535
4536 switch (GET_CODE (x))
4537 {
4538 case INSN:
4539 case JUMP_INSN:
4540 case CALL_INSN:
4541 case CODE_LABEL:
4542 case BARRIER:
4543 case NOTE:
4544 insn = x;
4545 while (insn)
4546 {
4547 rtx next = NEXT_INSN (insn);
4548 add_insn (insn);
4549 last = insn;
4550 insn = next;
4551 }
4552 break;
4553
4554 #ifdef ENABLE_RTL_CHECKING
4555 case SEQUENCE:
4556 abort ();
4557 break;
4558 #endif
4559
4560 default:
4561 last = make_insn_raw (x);
4562 add_insn (last);
4563 break;
4564 }
4565
4566 return last;
4567 }
4568
4569 /* Make an insn of code JUMP_INSN with pattern X
4570 and add it to the end of the doubly-linked list. */
4571
4572 rtx
4573 emit_jump_insn (rtx x)
4574 {
4575 rtx last = NULL_RTX, insn;
4576
4577 switch (GET_CODE (x))
4578 {
4579 case INSN:
4580 case JUMP_INSN:
4581 case CALL_INSN:
4582 case CODE_LABEL:
4583 case BARRIER:
4584 case NOTE:
4585 insn = x;
4586 while (insn)
4587 {
4588 rtx next = NEXT_INSN (insn);
4589 add_insn (insn);
4590 last = insn;
4591 insn = next;
4592 }
4593 break;
4594
4595 #ifdef ENABLE_RTL_CHECKING
4596 case SEQUENCE:
4597 abort ();
4598 break;
4599 #endif
4600
4601 default:
4602 last = make_jump_insn_raw (x);
4603 add_insn (last);
4604 break;
4605 }
4606
4607 return last;
4608 }
4609
4610 /* Make an insn of code CALL_INSN with pattern X
4611 and add it to the end of the doubly-linked list. */
4612
4613 rtx
4614 emit_call_insn (rtx x)
4615 {
4616 rtx insn;
4617
4618 switch (GET_CODE (x))
4619 {
4620 case INSN:
4621 case JUMP_INSN:
4622 case CALL_INSN:
4623 case CODE_LABEL:
4624 case BARRIER:
4625 case NOTE:
4626 insn = emit_insn (x);
4627 break;
4628
4629 #ifdef ENABLE_RTL_CHECKING
4630 case SEQUENCE:
4631 abort ();
4632 break;
4633 #endif
4634
4635 default:
4636 insn = make_call_insn_raw (x);
4637 add_insn (insn);
4638 break;
4639 }
4640
4641 return insn;
4642 }
4643
4644 /* Add the label LABEL to the end of the doubly-linked list. */
4645
4646 rtx
4647 emit_label (rtx label)
4648 {
4649 /* This can be called twice for the same label
4650 as a result of the confusion that follows a syntax error!
4651 So make it harmless. */
4652 if (INSN_UID (label) == 0)
4653 {
4654 INSN_UID (label) = cur_insn_uid++;
4655 add_insn (label);
4656 }
4657 return label;
4658 }
4659
4660 /* Make an insn of code BARRIER
4661 and add it to the end of the doubly-linked list. */
4662
4663 rtx
4664 emit_barrier (void)
4665 {
4666 rtx barrier = rtx_alloc (BARRIER);
4667 INSN_UID (barrier) = cur_insn_uid++;
4668 add_insn (barrier);
4669 return barrier;
4670 }
4671
4672 /* Make a line-numbering NOTE insn for LOCATION and add it to the end
4673 of the doubly-linked list, but only if line numbers are desired for
4674 debugging info and it doesn't match the previous one. */
4675
4676 rtx
4677 emit_line_note (location_t location)
4678 {
4679 rtx note;
4680
4681 set_file_and_line_for_stmt (location);
4682
4683 if (location.file && last_location.file
4684 && !strcmp (location.file, last_location.file)
4685 && location.line == last_location.line)
4686 return NULL_RTX;
4687 last_location = location;
4688
4689 if (no_line_numbers)
4690 {
4691 cur_insn_uid++;
4692 return NULL_RTX;
4693 }
4694
4695 note = emit_note (location.line);
4696 NOTE_SOURCE_FILE (note) = location.file;
4697
4698 return note;
4699 }
4700
4701 /* Emit a copy of note ORIG. */
4702
4703 rtx
4704 emit_note_copy (rtx orig)
4705 {
4706 rtx note;
4707
4708 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4709 {
4710 cur_insn_uid++;
4711 return NULL_RTX;
4712 }
4713
4714 note = rtx_alloc (NOTE);
4715
4716 INSN_UID (note) = cur_insn_uid++;
4717 NOTE_DATA (note) = NOTE_DATA (orig);
4718 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4719 BLOCK_FOR_INSN (note) = NULL;
4720 add_insn (note);
4721
4722 return note;
4723 }
4724
4725 /* Make an insn of code NOTE with subtype NOTE_NO
4726 and add it to the end of the doubly-linked list. */
4727
4728 rtx
4729 emit_note (int note_no)
4730 {
4731 rtx note;
4732
4733 note = rtx_alloc (NOTE);
4734 INSN_UID (note) = cur_insn_uid++;
4735 NOTE_LINE_NUMBER (note) = note_no;
4736 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4737 BLOCK_FOR_INSN (note) = NULL;
4738 add_insn (note);
4739 return note;
4740 }
4741
4742 /* Cause next statement to emit a line note even if the line number
4743 has not changed. */
4744
4745 void
4746 force_next_line_note (void)
4747 {
4748 last_location.line = -1;
4749 }
4750
4751 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4752 note of this type already exists, remove it first. */
4753
4754 rtx
4755 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4756 {
4757 rtx note = find_reg_note (insn, kind, NULL_RTX);
4758
4759 switch (kind)
4760 {
4761 case REG_EQUAL:
4762 case REG_EQUIV:
4763 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4764 has multiple sets (some callers assume single_set
4765 means the insn only has one set, when in fact it
4766 means the insn only has one * useful * set). */
4767 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4768 {
4769 if (note)
4770 abort ();
4771 return NULL_RTX;
4772 }
4773
4774 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4775 It serves no useful purpose and breaks eliminate_regs. */
4776 if (GET_CODE (datum) == ASM_OPERANDS)
4777 return NULL_RTX;
4778 break;
4779
4780 default:
4781 break;
4782 }
4783
4784 if (note)
4785 {
4786 XEXP (note, 0) = datum;
4787 return note;
4788 }
4789
4790 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4791 return REG_NOTES (insn);
4792 }
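
/* For instance, a pass that knows INSN computes the constant 42
   might record that fact (a sketch; INSN is assumed to be a
   single-set insn the caller just emitted):

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   If INSN already carried a REG_EQUAL note, only its datum is
   replaced; no second note is added.  */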
4793 \f
4794 /* Return an indication of which type of insn should have X as a body.
4795 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4796
4797 enum rtx_code
4798 classify_insn (rtx x)
4799 {
4800 if (GET_CODE (x) == CODE_LABEL)
4801 return CODE_LABEL;
4802 if (GET_CODE (x) == CALL)
4803 return CALL_INSN;
4804 if (GET_CODE (x) == RETURN)
4805 return JUMP_INSN;
4806 if (GET_CODE (x) == SET)
4807 {
4808 if (SET_DEST (x) == pc_rtx)
4809 return JUMP_INSN;
4810 else if (GET_CODE (SET_SRC (x)) == CALL)
4811 return CALL_INSN;
4812 else
4813 return INSN;
4814 }
4815 if (GET_CODE (x) == PARALLEL)
4816 {
4817 int j;
4818 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4819 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4820 return CALL_INSN;
4821 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4822 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4823 return JUMP_INSN;
4824 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4825 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4826 return CALL_INSN;
4827 }
4828 return INSN;
4829 }
4830
4831 /* Emit the rtl pattern X as an appropriate kind of insn.
4832 If X is a label, it is simply added into the insn chain. */
4833
4834 rtx
4835 emit (rtx x)
4836 {
4837 enum rtx_code code = classify_insn (x);
4838
4839 if (code == CODE_LABEL)
4840 return emit_label (x);
4841 else if (code == INSN)
4842 return emit_insn (x);
4843 else if (code == JUMP_INSN)
4844 {
4845 rtx insn = emit_jump_insn (x);
4846 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4847 return emit_barrier ();
4848 return insn;
4849 }
4850 else if (code == CALL_INSN)
4851 return emit_call_insn (x);
4852 else
4853 abort ();
4854 }
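
/* As an illustration (a sketch; REG is some register rtx the
   caller owns), a plain SET comes out as an ordinary INSN:

	emit (gen_rtx_SET (VOIDmode, reg, const0_rtx));

   whereas a pattern whose SET_DEST is pc_rtx would be emitted as
   a JUMP_INSN, followed by a barrier if it is unconditional.  */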
4855 \f
4856 /* Space for free sequence stack entries. */
4857 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4858
4859 /* Begin emitting insns to a sequence which can be packaged in an
4860 RTL_EXPR. If this sequence will contain something that might cause
4861 the compiler to pop arguments to function calls (because those
4862 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4863 details), use do_pending_stack_adjust before calling this function.
4864 That will ensure that the deferred pops are not accidentally
4865 emitted in the middle of this sequence. */
4866
4867 void
4868 start_sequence (void)
4869 {
4870 struct sequence_stack *tem;
4871
4872 if (free_sequence_stack != NULL)
4873 {
4874 tem = free_sequence_stack;
4875 free_sequence_stack = tem->next;
4876 }
4877 else
4878 tem = ggc_alloc (sizeof (struct sequence_stack));
4879
4880 tem->next = seq_stack;
4881 tem->first = first_insn;
4882 tem->last = last_insn;
4883 tem->sequence_rtl_expr = seq_rtl_expr;
4884
4885 seq_stack = tem;
4886
4887 first_insn = 0;
4888 last_insn = 0;
4889 }
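
/* Sequences nest, so one may be started while another is in
   progress.  The usual shape (a sketch; PATTERN1 and PATTERN2 are
   placeholders for real insn patterns):

	start_sequence ();
	emit_insn (pattern1);
	emit_insn (pattern2);
	seq = get_insns ();
	end_sequence ();

   after which SEQ is an insn list that any of the emit_* entry
   points will splice into the enclosing chain as a unit.  */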
4890
4891 /* Similarly, but indicate that this sequence will be placed in T, an
4892 RTL_EXPR. See the documentation for start_sequence for more
4893 information about how to use this function. */
4894
4895 void
4896 start_sequence_for_rtl_expr (tree t)
4897 {
4898 start_sequence ();
4899
4900 seq_rtl_expr = t;
4901 }
4902
4903 /* Set up the insn chain starting with FIRST as the current sequence,
4904 saving the previously current one. See the documentation for
4905 start_sequence for more information about how to use this function. */
4906
4907 void
4908 push_to_sequence (rtx first)
4909 {
4910 rtx last;
4911
4912 start_sequence ();
4913
4914 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4915
4916 first_insn = first;
4917 last_insn = last;
4918 }
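
/* E.g. to append to a chain stashed away earlier (a sketch;
   SAVED_INSNS is an insn list the caller saved from a previous
   sequence, MORE a pattern to add):

	push_to_sequence (saved_insns);
	emit_insn (more);
	saved_insns = get_insns ();
	end_sequence ();
*/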
4919
4920 /* Set up the insn chain from FIRST to LAST as the current sequence. */
4921
4922 void
4923 push_to_full_sequence (rtx first, rtx last)
4924 {
4925 start_sequence ();
4926 first_insn = first;
4927 last_insn = last;
4928 /* We really should have the end of the insn chain here. */
4929 if (last && NEXT_INSN (last))
4930 abort ();
4931 }
4932
4933 /* Set up the outer-level insn chain
4934 as the current sequence, saving the previously current one. */
4935
4936 void
4937 push_topmost_sequence (void)
4938 {
4939 struct sequence_stack *stack, *top = NULL;
4940
4941 start_sequence ();
4942
4943 for (stack = seq_stack; stack; stack = stack->next)
4944 top = stack;
4945
4946 first_insn = top->first;
4947 last_insn = top->last;
4948 seq_rtl_expr = top->sequence_rtl_expr;
4949 }
4950
4951 /* After emitting to the outer-level insn chain, record its new
4952 bounds and restore the previously saved state. */
4953
4954 void
4955 pop_topmost_sequence (void)
4956 {
4957 struct sequence_stack *stack, *top = NULL;
4958
4959 for (stack = seq_stack; stack; stack = stack->next)
4960 top = stack;
4961
4962 top->first = first_insn;
4963 top->last = last_insn;
4964 /* ??? Why don't we save seq_rtl_expr here? */
4965
4966 end_sequence ();
4967 }
4968
4969 /* After emitting to a sequence, restore previous saved state.
4970
4971 To get the contents of the sequence just made, you must call
4972 `get_insns' *before* calling here.
4973
4974 If the compiler might have deferred popping arguments while
4975 generating this sequence, and this sequence will not be immediately
4976 inserted into the instruction stream, use do_pending_stack_adjust
4977 before calling get_insns. That will ensure that the deferred
4978 pops are inserted into this sequence, and not into some random
4979 location in the instruction stream. See INHIBIT_DEFER_POP for more
4980 information about deferred popping of arguments. */
4981
4982 void
4983 end_sequence (void)
4984 {
4985 struct sequence_stack *tem = seq_stack;
4986
4987 first_insn = tem->first;
4988 last_insn = tem->last;
4989 seq_rtl_expr = tem->sequence_rtl_expr;
4990 seq_stack = tem->next;
4991
4992 memset (tem, 0, sizeof (*tem));
4993 tem->next = free_sequence_stack;
4994 free_sequence_stack = tem;
4995 }
4996
4997 /* This works like end_sequence, but records the old sequence in FIRST
4998 and LAST. */
4999
5000 void
5001 end_full_sequence (rtx *first, rtx *last)
5002 {
5003 *first = first_insn;
5004 *last = last_insn;
5005 end_sequence ();
5006 }
5007
5008 /* Return 1 if currently emitting into a sequence. */
5009
5010 int
5011 in_sequence_p (void)
5012 {
5013 return seq_stack != 0;
5014 }
5015 \f
5016 /* Put the various virtual registers into REGNO_REG_RTX. */
5017
5018 void
5019 init_virtual_regs (struct emit_status *es)
5020 {
5021 rtx *ptr = es->x_regno_reg_rtx;
5022 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5023 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5024 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5025 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5026 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5027 }
5028
5029 \f
5030 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5031 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5032 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5033 static int copy_insn_n_scratches;
5034
5035 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5036 copied an ASM_OPERANDS.
5037 In that case, it is the original input-operand vector. */
5038 static rtvec orig_asm_operands_vector;
5039
5040 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5041 copied an ASM_OPERANDS.
5042 In that case, it is the copied input-operand vector. */
5043 static rtvec copy_asm_operands_vector;
5044
5045 /* Likewise for the constraints vector. */
5046 static rtvec orig_asm_constraints_vector;
5047 static rtvec copy_asm_constraints_vector;
5048
5049 /* Recursively create a new copy of an rtx for copy_insn.
5050 This function differs from copy_rtx in that it handles SCRATCHes and
5051 ASM_OPERANDs properly.
5052 Normally, this function is not used directly; use copy_insn as front end.
5053 However, you could first copy an insn pattern with copy_insn and then use
5054 this function afterwards to properly copy any REG_NOTEs containing
5055 SCRATCHes. */
5056
5057 rtx
5058 copy_insn_1 (rtx orig)
5059 {
5060 rtx copy;
5061 int i, j;
5062 RTX_CODE code;
5063 const char *format_ptr;
5064
5065 code = GET_CODE (orig);
5066
5067 switch (code)
5068 {
5069 case REG:
5070 case QUEUED:
5071 case CONST_INT:
5072 case CONST_DOUBLE:
5073 case CONST_VECTOR:
5074 case SYMBOL_REF:
5075 case CODE_LABEL:
5076 case PC:
5077 case CC0:
5078 case ADDRESSOF:
5079 return orig;
5080
5081 case SCRATCH:
5082 for (i = 0; i < copy_insn_n_scratches; i++)
5083 if (copy_insn_scratch_in[i] == orig)
5084 return copy_insn_scratch_out[i];
5085 break;
5086
5087 case CONST:
5088 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5089 a LABEL_REF, it isn't sharable. */
5090 if (GET_CODE (XEXP (orig, 0)) == PLUS
5091 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5092 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5093 return orig;
5094 break;
5095
5096 /* A MEM with a constant address is not sharable. The problem is that
5097 the constant address may need to be reloaded. If the mem is shared,
5098 then reloading one copy of this mem will cause all copies to appear
5099 to have been reloaded. */
5100
5101 default:
5102 break;
5103 }
5104
5105 copy = rtx_alloc (code);
5106
5107 /* Copy the various flags, and other information. We assume that
5108 all fields need copying, and then clear the fields that should
5109 not be copied. That is the sensible default behavior, and forces
5110 us to explicitly document why we are *not* copying a flag. */
5111 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5112
5113 /* We do not copy the USED flag, which is used as a mark bit during
5114 walks over the RTL. */
5115 RTX_FLAG (copy, used) = 0;
5116
5117 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5118 if (GET_RTX_CLASS (code) == 'i')
5119 {
5120 RTX_FLAG (copy, jump) = 0;
5121 RTX_FLAG (copy, call) = 0;
5122 RTX_FLAG (copy, frame_related) = 0;
5123 }
5124
5125 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5126
5127 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5128 {
5129 copy->fld[i] = orig->fld[i];
5130 switch (*format_ptr++)
5131 {
5132 case 'e':
5133 if (XEXP (orig, i) != NULL)
5134 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5135 break;
5136
5137 case 'E':
5138 case 'V':
5139 if (XVEC (orig, i) == orig_asm_constraints_vector)
5140 XVEC (copy, i) = copy_asm_constraints_vector;
5141 else if (XVEC (orig, i) == orig_asm_operands_vector)
5142 XVEC (copy, i) = copy_asm_operands_vector;
5143 else if (XVEC (orig, i) != NULL)
5144 {
5145 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5146 for (j = 0; j < XVECLEN (copy, i); j++)
5147 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5148 }
5149 break;
5150
5151 case 't':
5152 case 'w':
5153 case 'i':
5154 case 's':
5155 case 'S':
5156 case 'u':
5157 case '0':
5158 /* These are left unchanged. */
5159 break;
5160
5161 default:
5162 abort ();
5163 }
5164 }
5165
5166 if (code == SCRATCH)
5167 {
5168 i = copy_insn_n_scratches++;
5169 if (i >= MAX_RECOG_OPERANDS)
5170 abort ();
5171 copy_insn_scratch_in[i] = orig;
5172 copy_insn_scratch_out[i] = copy;
5173 }
5174 else if (code == ASM_OPERANDS)
5175 {
5176 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5177 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5178 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5179 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5180 }
5181
5182 return copy;
5183 }
5184
5185 /* Create a new copy of an rtx.
5186 This function differs from copy_rtx in that it handles SCRATCHes and
5187 ASM_OPERANDs properly.
5188 INSN doesn't really have to be a full INSN; it could be just the
5189 pattern. */
5190 rtx
5191 copy_insn (rtx insn)
5192 {
5193 copy_insn_n_scratches = 0;
5194 orig_asm_operands_vector = 0;
5195 orig_asm_constraints_vector = 0;
5196 copy_asm_operands_vector = 0;
5197 copy_asm_constraints_vector = 0;
5198 return copy_insn_1 (insn);
5199 }
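
/* The typical duplication idiom (a sketch; INSN and AFTER are the
   insn being copied and the insertion point):

	new = emit_insn_after (copy_insn (PATTERN (insn)), after);

   optionally followed by copy_insn_1 over REG_NOTEs that may
   contain SCRATCHes, so the notes share the replacement SCRATCH
   rtxs; emit_copy_of_insn_after below uses exactly this shape.  */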
5200
5201 /* Initialize data structures and variables in this file
5202 before generating rtl for each function. */
5203
5204 void
5205 init_emit (void)
5206 {
5207 struct function *f = cfun;
5208
5209 f->emit = ggc_alloc (sizeof (struct emit_status));
5210 first_insn = NULL;
5211 last_insn = NULL;
5212 seq_rtl_expr = NULL;
5213 cur_insn_uid = 1;
5214 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5215 last_location.line = 0;
5216 last_location.file = 0;
5217 first_label_num = label_num;
5218 last_label_num = 0;
5219 seq_stack = NULL;
5220
5221 /* Init the tables that describe all the pseudo regs. */
5222
5223 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5224
5225 f->emit->regno_pointer_align
5226 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5227 * sizeof (unsigned char));
5228
5229 regno_reg_rtx
5230 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5231
5232 /* Put copies of all the hard registers into regno_reg_rtx. */
5233 memcpy (regno_reg_rtx,
5234 static_regno_reg_rtx,
5235 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5236
5237 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5238 init_virtual_regs (f->emit);
5239
5240 /* Indicate that the virtual registers and stack locations are
5241 all pointers. */
5242 REG_POINTER (stack_pointer_rtx) = 1;
5243 REG_POINTER (frame_pointer_rtx) = 1;
5244 REG_POINTER (hard_frame_pointer_rtx) = 1;
5245 REG_POINTER (arg_pointer_rtx) = 1;
5246
5247 REG_POINTER (virtual_incoming_args_rtx) = 1;
5248 REG_POINTER (virtual_stack_vars_rtx) = 1;
5249 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5250 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5251 REG_POINTER (virtual_cfa_rtx) = 1;
5252
5253 #ifdef STACK_BOUNDARY
5254 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5255 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5256 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5257 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5258
5259 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5260 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5261 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5262 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5263 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5264 #endif
5265
5266 #ifdef INIT_EXPANDERS
5267 INIT_EXPANDERS;
5268 #endif
5269 }
5270
5271 /* Generate the constant 0 vector of mode MODE. */
5272
5273 static rtx
5274 gen_const_vector_0 (enum machine_mode mode)
5275 {
5276 rtx tem;
5277 rtvec v;
5278 int units, i;
5279 enum machine_mode inner;
5280
5281 units = GET_MODE_NUNITS (mode);
5282 inner = GET_MODE_INNER (mode);
5283
5284 v = rtvec_alloc (units);
5285
5286 /* CONST0_RTX for the inner mode must be set before this function is called. */
5287 if (!CONST0_RTX (inner))
5288 abort ();
5289
5290 for (i = 0; i < units; ++i)
5291 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5292
5293 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5294 return tem;
5295 }
5296
5297 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5298 all elements are zero. */
5299 rtx
5300 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5301 {
5302 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5303 int i;
5304
5305 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5306 if (RTVEC_ELT (v, i) != inner_zero)
5307 return gen_rtx_raw_CONST_VECTOR (mode, v);
5308 return CONST0_RTX (mode);
5309 }
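
/* For example, building an all-zero four-element vector (a sketch;
   V4SImode is assumed to be supported by the target):

	rtvec v = rtvec_alloc (4);
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   returns CONST0_RTX (V4SImode) itself, since every element is
   the shared inner zero.  */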
5310
5311 /* Create some permanent unique rtl objects shared between all functions.
5312 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5313
5314 void
5315 init_emit_once (int line_numbers)
5316 {
5317 int i;
5318 enum machine_mode mode;
5319 enum machine_mode double_mode;
5320
5321 /* We need reg_raw_mode, so initialize the modes now. */
5322 init_reg_modes_once ();
5323
5324 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5325 tables. */
5326 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5327 const_int_htab_eq, NULL);
5328
5329 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5330 const_double_htab_eq, NULL);
5331
5332 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5333 mem_attrs_htab_eq, NULL);
5334 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5335 reg_attrs_htab_eq, NULL);
5336
5337 no_line_numbers = ! line_numbers;
5338
5339 /* Compute the byte, word and double modes. */
5340
5341 byte_mode = VOIDmode;
5342 word_mode = VOIDmode;
5343 double_mode = VOIDmode;
5344
5345 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5346 mode = GET_MODE_WIDER_MODE (mode))
5347 {
5348 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5349 && byte_mode == VOIDmode)
5350 byte_mode = mode;
5351
5352 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5353 && word_mode == VOIDmode)
5354 word_mode = mode;
5355 }
5356
5357 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5358 mode = GET_MODE_WIDER_MODE (mode))
5359 {
5360 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5361 && double_mode == VOIDmode)
5362 double_mode = mode;
5363 }
5364
5365 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5366
5367 /* Assign register numbers to the globally defined register rtx.
5368 This must be done at runtime because the register number field
5369 is in a union and some compilers can't initialize unions. */
5370
5371 pc_rtx = gen_rtx (PC, VOIDmode);
5372 cc0_rtx = gen_rtx (CC0, VOIDmode);
5373 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5374 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5375 if (hard_frame_pointer_rtx == 0)
5376 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5377 HARD_FRAME_POINTER_REGNUM);
5378 if (arg_pointer_rtx == 0)
5379 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5380 virtual_incoming_args_rtx =
5381 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5382 virtual_stack_vars_rtx =
5383 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5384 virtual_stack_dynamic_rtx =
5385 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5386 virtual_outgoing_args_rtx =
5387 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5388 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5389
5390 /* Initialize RTL for commonly used hard registers. These are
5391 copied into regno_reg_rtx as we begin to compile each function. */
5392 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5393 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5394
5395 #ifdef INIT_EXPANDERS
5396 /* This is to initialize {init|mark|free}_machine_status before the first
5397 call to push_function_context_to. This is needed by the Chill front
5398 end which calls push_function_context_to before the first call to
5399 init_function_start. */
5400 INIT_EXPANDERS;
5401 #endif
5402
5403 /* Create the unique rtx's for certain rtx codes and operand values. */
5404
5405 /* Don't use gen_rtx here since gen_rtx in this case
5406 tries to use these variables. */
5407 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5408 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5409 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5410
5411 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5412 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5413 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5414 else
5415 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5416
5417 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5418 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5419 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5420 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5421 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5422 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5423 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5424
5425 dconsthalf = dconst1;
5426 dconsthalf.exp--;
5427
5428 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5429
5430 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5431 {
5432 REAL_VALUE_TYPE *r =
5433 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5434
5435 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5436 mode = GET_MODE_WIDER_MODE (mode))
5437 const_tiny_rtx[i][(int) mode] =
5438 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5439
5440 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5441
5442 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5443 mode = GET_MODE_WIDER_MODE (mode))
5444 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5445
5446 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5447 mode != VOIDmode;
5448 mode = GET_MODE_WIDER_MODE (mode))
5449 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5450 }
5451
5452 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5453 mode != VOIDmode;
5454 mode = GET_MODE_WIDER_MODE (mode))
5455 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5456
5457 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5458 mode != VOIDmode;
5459 mode = GET_MODE_WIDER_MODE (mode))
5460 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5461
5462 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5463 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5464 const_tiny_rtx[0][i] = const0_rtx;
5465
5466 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5467 if (STORE_FLAG_VALUE == 1)
5468 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5469
5470 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5471 return_address_pointer_rtx
5472 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5473 #endif
5474
5475 #ifdef STATIC_CHAIN_REGNUM
5476 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5477
5478 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5479 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5480 static_chain_incoming_rtx
5481 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5482 else
5483 #endif
5484 static_chain_incoming_rtx = static_chain_rtx;
5485 #endif
5486
5487 #ifdef STATIC_CHAIN
5488 static_chain_rtx = STATIC_CHAIN;
5489
5490 #ifdef STATIC_CHAIN_INCOMING
5491 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5492 #else
5493 static_chain_incoming_rtx = static_chain_rtx;
5494 #endif
5495 #endif
5496
5497 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5498 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5499 }
5500 \f
5501 /* Query and clear/restore no_line_numbers. This is used by the
5502 switch / case handling in stmt.c to give proper line numbers in
5503 warnings about unreachable code. */
5504
5505 int
5506 force_line_numbers (void)
5507 {
5508 int old = no_line_numbers;
5509
5510 no_line_numbers = 0;
5511 if (old)
5512 force_next_line_note ();
5513 return old;
5514 }
5515
5516 void
5517 restore_line_number_status (int old_value)
5518 {
5519 no_line_numbers = old_value;
5520 }
5521
5522 /* Produce an exact duplicate of insn INSN after AFTER.
5523 Take care to update any libcall regions if present. */
5524
5525 rtx
5526 emit_copy_of_insn_after (rtx insn, rtx after)
5527 {
5528 rtx new;
5529 rtx note1, note2, link;
5530
5531 switch (GET_CODE (insn))
5532 {
5533 case INSN:
5534 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5535 break;
5536
5537 case JUMP_INSN:
5538 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5539 break;
5540
5541 case CALL_INSN:
5542 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5543 if (CALL_INSN_FUNCTION_USAGE (insn))
5544 CALL_INSN_FUNCTION_USAGE (new)
5545 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5546 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5547 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5548 break;
5549
5550 default:
5551 abort ();
5552 }
5553
5554 /* Update LABEL_NUSES. */
5555 mark_jump_label (PATTERN (new), new, 0);
5556
5557 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5558
5559 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5560 make them. */
5561 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5562 if (REG_NOTE_KIND (link) != REG_LABEL)
5563 {
5564 if (GET_CODE (link) == EXPR_LIST)
5565 REG_NOTES (new)
5566 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5567 XEXP (link, 0),
5568 REG_NOTES (new)));
5569 else
5570 REG_NOTES (new)
5571 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5572 XEXP (link, 0),
5573 REG_NOTES (new)));
5574 }
5575
5576 /* Fix the libcall sequences. */
5577 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5578 {
5579 rtx p = new;
5580 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5581 p = PREV_INSN (p);
5582 XEXP (note1, 0) = p;
5583 XEXP (note2, 0) = new;
5584 }
5585 INSN_CODE (new) = INSN_CODE (insn);
5586 return new;
5587 }
5588
5589 #include "gt-emit-rtl.h"