/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "obstack.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these except perhaps the floating-point CONST_DOUBLEs
   are unique; no other rtx-object will be equal to any of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static htab_t mem_attrs_htab;

/* start_sequence and gen_sequence can make a lot of rtx expressions which are
   shortly thrown away.  We use two mechanisms to prevent this waste:

   For sizes up to 5 elements, we keep a SEQUENCE and its associated
   rtvec for use by gen_sequence.  One entry for each size is
   sufficient because most cases are calls to gen_sequence followed by
   immediately emitting the SEQUENCE.  Reuse is safe since emitting a
   sequence is destructive on the insn in it anyway and hence can't be
   redone.

   We do not bother to save this cached data over nested function calls.
   Instead, we just reinitialize them.  */

#define SEQUENCE_RESULT_SIZE 5

static rtx sequence_result[SEQUENCE_RESULT_SIZE];

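/* Usage sketch (illustrative): the cache above serves the common
   emit-into-a-sequence pattern, in which a short-lived SEQUENCE is
   built and immediately emitted.  Assuming TARGET and SRC are valid
   rtx operands:

	start_sequence ();
	emit_move_insn (target, src);
	seq = gen_sequence ();
	end_sequence ();
	emit_insn (seq);

   The SEQUENCE returned by gen_sequence reuses one of the cached
   entries when it has at most SEQUENCE_RESULT_SIZE elements.  */
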
/* During RTL generation, we also keep a list of free INSN rtl codes.  */
static rtx free_insn;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static void mark_sequence_stack		PARAMS ((struct sequence_stack *));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *, const void *));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *, const void *));
static void mem_attrs_mark		PARAMS ((const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((const struct rtx_def *) x);
}

/* Returns non-zero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}

/* Returns non-zero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* This routine is called when we determine that we need a mem_attrs entry.
   It marks the associated decl and RTL as being used, if present.  */

static void
mem_attrs_mark (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  if (p->expr)
    ggc_mark_tree (p->expr);

  if (p->offset)
    ggc_mark_rtx (p->offset);

  if (p->size)
    ggc_mark_rtx (p->size);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
	  || (STRICT_ALIGNMENT
	      && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}

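/* Note (illustrative): when every argument matches the defaults, no
   table entry is created at all.  For instance, for a word-sized MEM,
   something like

	MEM_ATTRS (mem) = get_mem_attrs (0, NULL_TREE, NULL_RTX,
					 GEN_INT (GET_MODE_SIZE (SImode)),
					 BITS_PER_UNIT, SImode);

   stores a null pointer, because a size equal to the mode size and an
   alignment of BITS_PER_UNIT carry no extra information.  */
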
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

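/* Consequence worth noting (illustrative): CONST_INTs are fully
   shared, so pointer equality is value equality.  Both of

	GEN_INT (0) == const0_rtx
	GEN_INT (7) == GEN_INT (7)

   hold, which is what lets the rest of the compiler compare integer
   constants with == instead of comparing INTVALs.  */
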
/* CONST_DOUBLEs need special handling because their length is known
   only at run-time.  */

rtx
gen_rtx_CONST_DOUBLE (mode, arg0, arg1)
     enum machine_mode mode;
     HOST_WIDE_INT arg0, arg1;
{
  rtx r = rtx_alloc (CONST_DOUBLE);
  int i;

  PUT_MODE (r, mode);
  X0EXP (r, 0) = NULL_RTX;
  XWINT (r, 1) = arg0;
  XWINT (r, 2) = arg1;

  for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 2; --i)
    XWINT (r, i) = 0;

  return r;
}

rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_raw_REG (mode, regno);
}

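/* Illustrative example: because of the sharing above, a Pmode
   reference to a fixed pointer register outside of reload yields the
   canonical rtx, e.g.

	gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx

   whereas asking for the same register in a non-Pmode mode produces a
   fresh REG via gen_raw_REG.  */
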
rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**		gen_rtx (PLUS, QImode,
**		    gen_rtx (MEM, QImode,
**			gen_rtx (REG, SImode, 1)),
**		    gen_rtx (MEM, QImode,
**			gen_rtx (PLUS, SImode,
**			    gen_rtx (REG, SImode, 2),
**			    gen_rtx (REG, SImode, 3))))
*/

/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
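/* Usage sketch (illustrative): rtvecs mostly show up as the operand
   vector of a PARALLEL or SEQUENCE.  Assuming INSN1 and INSN2 are
   valid rtx values, a two-element vector can be built either way:

	rtvec v = gen_rtvec (2, insn1, insn2);
	rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   gen_rtvec_v does the same starting from a pre-built array of rtx.  */
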
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;
      tree *new2;

      new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
			       old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      new2 = (tree *) xrealloc (f->emit->regno_decl,
				old_size * 2 * sizeof (tree));
      memset (new2 + old_size, 0, old_size * sizeof (tree));
      f->emit->regno_decl = new2;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

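/* Illustrative example: with generating_concat_p set, a request for a
   complex pseudo such as

	rtx c = gen_reg_rtx (DCmode);

   yields (concat:DC (reg:DF N) (reg:DF N+1)) rather than a single
   DCmode pseudo, so the real and imaginary halves can be allocated to
   unrelated hard registers.  */
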
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i;

      i = INTVAL (x);
      r = REAL_VALUE_FROM_TARGET_SINGLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i[2];
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

#if HOST_BITS_PER_WIDE_INT == 32
      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;
#else
      i[0] = low;
#endif

      r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

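/* Worked example (illustrative): for the SImode low part of a DImode
   value with UNITS_PER_WORD == 4, the difference is 8 - 4 = 4 bytes.
   On a little-endian target both endianness tests fail and the offset
   is 0; on a big-endian (WORDS_BIG_ENDIAN, BYTES_BIG_ENDIAN) target
   the low word is the second one, so the offset is
   (4 / 4) * 4 + 4 % 4 = 4.  */
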
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
\f

/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Most uses of this function can now be replaced by simplify_subreg.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

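/* Usage sketch (illustrative): on a target with 32-bit words, the two
   halves of a DImode operand OP can be fetched as

	rtx w0 = operand_subword (op, 0, 1, DImode);
	rtx w1 = operand_subword (op, 1, 1, DImode);

   OFFSET counts words from the low address, so word 0 is the
   high-order word on a WORDS_BIG_ENDIAN target.  Either call may
   return 0 when the word cannot be extracted; operand_subword_force
   below never does.  */
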
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
\f
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
\f
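/* Illustrative example: applied to a compare insn whose pattern is

	(set (cc0) (compare (reg:SI 1) (reg:SI 2)))

   this swaps the operands in place, giving

	(set (cc0) (compare (reg:SI 2) (reg:SI 1)))

   while a test such as (set (cc0) (reg:SI 1)) becomes a compare of 0
   against that operand.  */
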
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}

/* Given REF, a MEM, and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL; we can
     see that here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;

	  do
	    {
	      off_tree
		= fold (build (PLUS_EXPR, sizetype,
			       fold (build (MULT_EXPR, sizetype,
					    TREE_OPERAND (t, 1),
					    TYPE_SIZE_UNIT (TREE_TYPE (t)))),
			       off_tree));
	      t = TREE_OPERAND (t, 0);
	    }
	  while (TREE_CODE (t) == ARRAY_REF);

	  if (TREE_CODE (t) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t);
	      if (host_integerp (off_tree, 1))
		offset = GEN_INT (tree_low_cst (off_tree, 1));
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	}
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (mem, set)
     rtx mem;
     HOST_WIDE_INT set;
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (mem, align)
     rtx mem;
     unsigned int align;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (mem, expr)
     rtx mem;
     tree expr;
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (mem, offset)
     rtx mem, offset;
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}
\f
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (memref, mode, addr, validate)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
     int validate;
{
  rtx new;

  if (GET_CODE (memref) != MEM)
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	{
	  if (! memory_address_p (mode, addr))
	    abort ();
	}
      else
	addr = memory_address (mode, addr);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (memref, mode, addr)
     rtx memref;
     enum machine_mode mode;
     rtx addr;
{
  rtx new = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new);

  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
		     mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
		     (mmode == BLKmode ? BITS_PER_UNIT
		      : GET_MODE_ALIGNMENT (mmode)),
		     mmode);

  return new;
}

1812 /* Return a memory reference like MEMREF, but with its mode changed
1813 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1814 nonzero, the memory address is forced to be valid.
1815 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1816 and caller is responsible for adjusting MEMREF base register. */
1817
1818 rtx
1819 adjust_address_1 (memref, mode, offset, validate, adjust)
1820 rtx memref;
1821 enum machine_mode mode;
1822 HOST_WIDE_INT offset;
1823 int validate, adjust;
1824 {
1825 rtx addr = XEXP (memref, 0);
1826 rtx new;
1827 rtx memoffset = MEM_OFFSET (memref);
1828 rtx size = 0;
1829 unsigned int memalign = MEM_ALIGN (memref);
1830
1831 /* ??? Prefer to create garbage instead of creating shared rtl.
1832 This may happen even if offset is non-zero -- consider
1833 (plus (plus reg reg) const_int) -- so do this always. */
1834 addr = copy_rtx (addr);
1835
1836 if (adjust)
1837 {
1838 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1839 object, we can merge it into the LO_SUM. */
1840 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1841 && offset >= 0
1842 && (unsigned HOST_WIDE_INT) offset
1843 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1844 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1845 plus_constant (XEXP (addr, 1), offset));
1846 else
1847 addr = plus_constant (addr, offset);
1848 }
1849
1850 new = change_address_1 (memref, mode, addr, validate);
1851
1852 /* Compute the new values of the memory attributes due to this adjustment.
1853 We add the offsets and update the alignment. */
1854 if (memoffset)
1855 memoffset = GEN_INT (offset + INTVAL (memoffset));
1856
1857 /* Compute the new alignment by taking the MIN of the alignment and the
1858 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1862 1859 is zero. */
1860 if (offset != 0)
1861 memalign = MIN (memalign,
1862 (unsigned int) (offset & -offset) * BITS_PER_UNIT);
1863
1864 /* We can compute the size in a number of ways. */
1865 if (GET_MODE (new) != BLKmode)
1866 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1867 else if (MEM_SIZE (memref))
1868 size = plus_constant (MEM_SIZE (memref), -offset);
1869
1870 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1871 memoffset, size, memalign, GET_MODE (new));
1872
1873 /* At some point, we should validate that this offset is within the object,
1874 if all the appropriate values are known. */
1875 return new;
1876 }
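/* Most callers reach this through the adjust_address and
   adjust_address_nv macros, which fix VALIDATE and ADJUST.  A sketch of
   splitting a DImode memory operand into word-sized halves (hypothetical
   operand names):

	rtx lo = adjust_address (operands[1], SImode, 0);
	rtx hi = adjust_address (operands[1], SImode, 4);

   Both halves keep the original MEM_EXPR, with the offsets and the
   alignment updated as above.  */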
1877
1878 /* Return a memory reference like MEMREF, but with its mode changed
1879 to MODE and its address changed to ADDR, which is assumed to be
1883 1880 MEMREF offset by OFFSET bytes. If VALIDATE is
1881 nonzero, the memory address is forced to be valid. */
1882
1883 rtx
1884 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
1885 rtx memref;
1886 enum machine_mode mode;
1887 rtx addr;
1888 HOST_WIDE_INT offset;
1889 int validate;
1890 {
1891 memref = change_address_1 (memref, VOIDmode, addr, validate);
1892 return adjust_address_1 (memref, mode, offset, validate, 0);
1893 }
1894
1895 /* Return a memory reference like MEMREF, but whose address is changed by
1896 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1897 known to be in OFFSET (possibly 1). */
1898
1899 rtx
1900 offset_address (memref, offset, pow2)
1901 rtx memref;
1902 rtx offset;
1903 HOST_WIDE_INT pow2;
1904 {
1905 rtx new, addr = XEXP (memref, 0);
1906
1907 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1908
1909 /* At this point we don't know _why_ the address is invalid. It
1913 1910 could have secondary memory references, multiplies or anything.
1911
1912 However, if we did go and rearrange things, we can wind up not
1913 being able to recognize the magic around pic_offset_table_rtx.
1914 This stuff is fragile, and is yet another example of why it is
1915 bad to expose PIC machinery too early. */
1916 if (! memory_address_p (GET_MODE (memref), new)
1917 && GET_CODE (addr) == PLUS
1918 && XEXP (addr, 0) == pic_offset_table_rtx)
1919 {
1920 addr = force_reg (GET_MODE (addr), addr);
1921 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1922 }
1923
1924 update_temp_slot_address (XEXP (memref, 0), new);
1925 new = change_address_1 (memref, VOIDmode, new, 1);
1926
1927 /* Update the alignment to reflect the offset. Reset the offset, which
1928 we don't know. */
1929 MEM_ATTRS (new)
1930 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1931 MIN (MEM_ALIGN (memref),
1932 (unsigned int) pow2 * BITS_PER_UNIT),
1933 GET_MODE (new));
1934 return new;
1935 }
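/* A hypothetical use: indexing into an array of ints with a register
   index that has already been scaled by the element size,

	rtx elt = offset_address (array_mem, scaled_idx, 4);

   where passing POW2 == 4 records that 4 divides the runtime offset,
   letting the result keep 32-bit alignment.  */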
1936
1937 /* Return a memory reference like MEMREF, but with its address changed to
1938 ADDR. The caller is asserting that the actual piece of memory pointed
1939 to is the same, just the form of the address is being changed, such as
1940 by putting something into a register. */
1941
1942 rtx
1943 replace_equiv_address (memref, addr)
1944 rtx memref;
1945 rtx addr;
1946 {
1947 /* change_address_1 copies the memory attribute structure without change
1948 and that's exactly what we want here. */
1949 update_temp_slot_address (XEXP (memref, 0), addr);
1950 return change_address_1 (memref, VOIDmode, addr, 1);
1951 }
1952
1953 /* Likewise, but the reference is not required to be valid. */
1954
1955 rtx
1956 replace_equiv_address_nv (memref, addr)
1957 rtx memref;
1958 rtx addr;
1959 {
1960 return change_address_1 (memref, VOIDmode, addr, 0);
1961 }
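/* A sketch of the usual pattern (hypothetical names): after copying a
   troublesome address into a fresh register,

	addr_reg = copy_to_reg (XEXP (mem, 0));
	mem = replace_equiv_address (mem, addr_reg);

   all memory attributes survive, because only the form of the address
   has changed, not the location it denotes.  */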
1962
1963 /* Return a memory reference like MEMREF, but with its mode widened to
1964 MODE and offset by OFFSET. This would be used by targets that e.g.
1965 cannot issue QImode memory operations and have to use SImode memory
1966 operations plus masking logic. */
1967
1968 rtx
1969 widen_memory_access (memref, mode, offset)
1970 rtx memref;
1971 enum machine_mode mode;
1972 HOST_WIDE_INT offset;
1973 {
1974 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1975 tree expr = MEM_EXPR (new);
1976 rtx memoffset = MEM_OFFSET (new);
1977 unsigned int size = GET_MODE_SIZE (mode);
1978
1979 /* If we don't know what offset we were at within the expression, then
1980 we can't know if we've overstepped the bounds. */
1981 if (! memoffset && offset != 0)
1982 expr = NULL_TREE;
1983
1984 while (expr)
1985 {
1986 if (TREE_CODE (expr) == COMPONENT_REF)
1987 {
1988 tree field = TREE_OPERAND (expr, 1);
1989
1990 if (! DECL_SIZE_UNIT (field))
1991 {
1992 expr = NULL_TREE;
1993 break;
1994 }
1995
1999 1996 /* Is the field at least as large as the access? If so, we are done;
2000 1997 otherwise strip back to the containing structure. */
1998 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
1999 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2000 && INTVAL (memoffset) >= 0)
2001 break;
2002
2003 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2004 {
2005 expr = NULL_TREE;
2006 break;
2007 }
2008
2009 expr = TREE_OPERAND (expr, 0);
2010 memoffset = (GEN_INT (INTVAL (memoffset)
2011 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2012 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2013 / BITS_PER_UNIT)));
2014 }
2015 /* Similarly for the decl. */
2016 else if (DECL_P (expr)
2017 && DECL_SIZE_UNIT (expr)
2018 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2019 && (! memoffset || INTVAL (memoffset) >= 0))
2020 break;
2021 else
2022 {
2023 /* The widened memory access overflows the expression, which means
2024 that it could alias another expression. Zap it. */
2025 expr = NULL_TREE;
2026 break;
2027 }
2028 }
2029
2030 if (! expr)
2031 memoffset = NULL_RTX;
2032
2033 /* The widened memory may alias other stuff, so zap the alias set. */
2034 /* ??? Maybe use get_alias_set on any remaining expression. */
2035
2036 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2037 MEM_ALIGN (new), mode);
2038
2039 return new;
2040 }
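/* For instance, a target without byte loads might widen a QImode
   reference to a full word and mask afterwards (hypothetical sketch):

	rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The attribute surgery above keeps MEM_EXPR and MEM_OFFSET honest when
   the wider access would step outside the underlying object.  */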
2041 \f
2042 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2043
2044 rtx
2045 gen_label_rtx ()
2046 {
2047 rtx label;
2048
2049 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
2050 NULL_RTX, label_num++, NULL, NULL);
2051
2052 LABEL_NUSES (label) = 0;
2053 LABEL_ALTERNATE_NAME (label) = NULL;
2054 return label;
2055 }
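/* A typical emit sequence pairs this with emit_label and a branch; in a
   hypothetical expander (gen_jump comes from the machine description):

	rtx label = gen_label_rtx ();
	emit_jump_insn (gen_jump (label));
	emit_barrier ();
	...
	emit_label (label);
*/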
2056 \f
2057 /* For procedure integration. */
2058
2059 /* Install new pointers to the first and last insns in the chain.
2060 Also, set cur_insn_uid to one higher than the last in use.
2061 Used for an inline-procedure after copying the insn chain. */
2062
2063 void
2064 set_new_first_and_last_insn (first, last)
2065 rtx first, last;
2066 {
2067 rtx insn;
2068
2069 first_insn = first;
2070 last_insn = last;
2071 cur_insn_uid = 0;
2072
2073 for (insn = first; insn; insn = NEXT_INSN (insn))
2074 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2075
2076 cur_insn_uid++;
2077 }
2078
2079 /* Set the range of label numbers found in the current function.
2080 This is used when belatedly compiling an inline function. */
2081
2082 void
2083 set_new_first_and_last_label_num (first, last)
2084 int first, last;
2085 {
2086 base_label_num = label_num;
2087 first_label_num = first;
2088 last_label_num = last;
2089 }
2090
2091 /* Set the last label number found in the current function.
2092 This is used when belatedly compiling an inline function. */
2093
2094 void
2095 set_new_last_label_num (last)
2096 int last;
2097 {
2098 base_label_num = label_num;
2099 last_label_num = last;
2100 }
2101 \f
2102 /* Restore all variables describing the current status from the structure *P.
2103 This is used after a nested function. */
2104
2105 void
2106 restore_emit_status (p)
2107 struct function *p ATTRIBUTE_UNUSED;
2108 {
2109 last_label_num = 0;
2110 clear_emit_caches ();
2111 }
2112
2113 /* Clear out all parts of the state in F that can safely be discarded
2114 after the function has been compiled, to let garbage collection
2115 reclaim the memory. */
2116
2117 void
2118 free_emit_status (f)
2119 struct function *f;
2120 {
2121 free (f->emit->x_regno_reg_rtx);
2122 free (f->emit->regno_pointer_align);
2123 free (f->emit->regno_decl);
2124 free (f->emit);
2125 f->emit = NULL;
2126 }
2127 \f
2128 /* Go through all the RTL insn bodies and copy any invalid shared
2129 structure. This routine should only be called once. */
2130
2131 void
2132 unshare_all_rtl (fndecl, insn)
2133 tree fndecl;
2134 rtx insn;
2135 {
2136 tree decl;
2137
2138 /* Make sure that virtual parameters are not shared. */
2139 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2140 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2141
2142 /* Make sure that virtual stack slots are not shared. */
2143 unshare_all_decls (DECL_INITIAL (fndecl));
2144
2145 /* Unshare just about everything else. */
2146 unshare_all_rtl_1 (insn);
2147
2148 /* Make sure the addresses of stack slots found outside the insn chain
2149 (such as, in DECL_RTL of a variable) are not shared
2150 with the insn chain.
2151
2152 This special care is necessary when the stack slot MEM does not
2153 actually appear in the insn chain. If it does appear, its address
2154 is unshared from all else at that point. */
2155 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2156 }
2157
2158 /* Go through all the RTL insn bodies and copy any invalid shared
2159 structure, again. This is a fairly expensive thing to do so it
2160 should be done sparingly. */
2161
2162 void
2163 unshare_all_rtl_again (insn)
2164 rtx insn;
2165 {
2166 rtx p;
2167 tree decl;
2168
2169 for (p = insn; p; p = NEXT_INSN (p))
2170 if (INSN_P (p))
2171 {
2172 reset_used_flags (PATTERN (p));
2173 reset_used_flags (REG_NOTES (p));
2174 reset_used_flags (LOG_LINKS (p));
2175 }
2176
2177 /* Make sure that virtual stack slots are not shared. */
2178 reset_used_decls (DECL_INITIAL (cfun->decl));
2179
2180 /* Make sure that virtual parameters are not shared. */
2181 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2182 reset_used_flags (DECL_RTL (decl));
2183
2184 reset_used_flags (stack_slot_list);
2185
2186 unshare_all_rtl (cfun->decl, insn);
2187 }
2188
2189 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2190 Assumes the mark bits are cleared at entry. */
2191
2192 static void
2193 unshare_all_rtl_1 (insn)
2194 rtx insn;
2195 {
2196 for (; insn; insn = NEXT_INSN (insn))
2197 if (INSN_P (insn))
2198 {
2199 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2200 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2201 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2202 }
2203 }
2204
2205 /* Go through all virtual stack slots of a function and copy any
2206 shared structure. */
2207 static void
2208 unshare_all_decls (blk)
2209 tree blk;
2210 {
2211 tree t;
2212
2213 /* Copy shared decls. */
2214 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2215 if (DECL_RTL_SET_P (t))
2216 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2217
2218 /* Now process sub-blocks. */
2219 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2220 unshare_all_decls (t);
2221 }
2222
2223 /* Go through all virtual stack slots of a function and mark them as
2224 not shared. */
2225 static void
2226 reset_used_decls (blk)
2227 tree blk;
2228 {
2229 tree t;
2230
2231 /* Mark decls. */
2232 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2233 if (DECL_RTL_SET_P (t))
2234 reset_used_flags (DECL_RTL (t));
2235
2236 /* Now process sub-blocks. */
2237 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2238 reset_used_decls (t);
2239 }
2240
2241 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2242 placed in the result directly, rather than being copied. MAY_SHARE is
2246 2243 either a MEM or an EXPR_LIST of MEMs. */
2244
2245 rtx
2246 copy_most_rtx (orig, may_share)
2247 rtx orig;
2248 rtx may_share;
2249 {
2250 rtx copy;
2251 int i, j;
2252 RTX_CODE code;
2253 const char *format_ptr;
2254
2255 if (orig == may_share
2256 || (GET_CODE (may_share) == EXPR_LIST
2257 && in_expr_list_p (may_share, orig)))
2258 return orig;
2259
2260 code = GET_CODE (orig);
2261
2262 switch (code)
2263 {
2264 case REG:
2265 case QUEUED:
2266 case CONST_INT:
2267 case CONST_DOUBLE:
2268 case CONST_VECTOR:
2269 case SYMBOL_REF:
2270 case CODE_LABEL:
2271 case PC:
2272 case CC0:
2273 return orig;
2274 default:
2275 break;
2276 }
2277
2278 copy = rtx_alloc (code);
2279 PUT_MODE (copy, GET_MODE (orig));
2280 copy->in_struct = orig->in_struct;
2281 copy->volatil = orig->volatil;
2282 copy->unchanging = orig->unchanging;
2283 copy->integrated = orig->integrated;
2284 copy->frame_related = orig->frame_related;
2285
2286 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2287
2288 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2289 {
2290 switch (*format_ptr++)
2291 {
2292 case 'e':
2293 XEXP (copy, i) = XEXP (orig, i);
2294 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2295 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2296 break;
2297
2298 case 'u':
2299 XEXP (copy, i) = XEXP (orig, i);
2300 break;
2301
2302 case 'E':
2303 case 'V':
2304 XVEC (copy, i) = XVEC (orig, i);
2305 if (XVEC (orig, i) != NULL)
2306 {
2307 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2308 for (j = 0; j < XVECLEN (copy, i); j++)
2309 XVECEXP (copy, i, j)
2310 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2311 }
2312 break;
2313
2314 case 'w':
2315 XWINT (copy, i) = XWINT (orig, i);
2316 break;
2317
2318 case 'n':
2319 case 'i':
2320 XINT (copy, i) = XINT (orig, i);
2321 break;
2322
2323 case 't':
2324 XTREE (copy, i) = XTREE (orig, i);
2325 break;
2326
2327 case 's':
2328 case 'S':
2329 XSTR (copy, i) = XSTR (orig, i);
2330 break;
2331
2332 case '0':
2333 /* Copy this through the wide int field; that's safest. */
2334 X0WINT (copy, i) = X0WINT (orig, i);
2335 break;
2336
2337 default:
2338 abort ();
2339 }
2340 }
2341 return copy;
2342 }
2343
2344 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2345 Recursively does the same for subexpressions. */
2346
2347 rtx
2348 copy_rtx_if_shared (orig)
2349 rtx orig;
2350 {
2351 rtx x = orig;
2352 int i;
2353 enum rtx_code code;
2354 const char *format_ptr;
2355 int copied = 0;
2356
2357 if (x == 0)
2358 return 0;
2359
2360 code = GET_CODE (x);
2361
2362 /* These types may be freely shared. */
2363
2364 switch (code)
2365 {
2366 case REG:
2367 case QUEUED:
2368 case CONST_INT:
2369 case CONST_DOUBLE:
2370 case CONST_VECTOR:
2371 case SYMBOL_REF:
2372 case CODE_LABEL:
2373 case PC:
2374 case CC0:
2375 case SCRATCH:
2379 2376 /* A SCRATCH must be shared because each one represents a distinct value. */
2377 return x;
2378
2379 case CONST:
2380 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2381 a LABEL_REF, it isn't sharable. */
2382 if (GET_CODE (XEXP (x, 0)) == PLUS
2383 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2384 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2385 return x;
2386 break;
2387
2388 case INSN:
2389 case JUMP_INSN:
2390 case CALL_INSN:
2391 case NOTE:
2392 case BARRIER:
2393 /* The chain of insns is not being copied. */
2394 return x;
2395
2396 case MEM:
2397 /* A MEM is allowed to be shared if its address is constant.
2398
2399 We used to allow sharing of MEMs which referenced
2400 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2401 that can lose. instantiate_virtual_regs will not unshare
2402 the MEMs, and combine may change the structure of the address
2403 because it looks safe and profitable in one context, but
2404 in some other context it creates unrecognizable RTL. */
2405 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2406 return x;
2407
2408 break;
2409
2410 default:
2411 break;
2412 }
2413
2414 /* This rtx may not be shared. If it has already been seen,
2415 replace it with a copy of itself. */
2416
2417 if (x->used)
2418 {
2419 rtx copy;
2420
2421 copy = rtx_alloc (code);
2422 memcpy (copy, x,
2423 (sizeof (*copy) - sizeof (copy->fld)
2424 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2425 x = copy;
2426 copied = 1;
2427 }
2428 x->used = 1;
2429
2430 /* Now scan the subexpressions recursively.
2431 We can store any replaced subexpressions directly into X
2432 since we know X is not shared! Any vectors in X
2433 must be copied if X was copied. */
2434
2435 format_ptr = GET_RTX_FORMAT (code);
2436
2437 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2438 {
2439 switch (*format_ptr++)
2440 {
2441 case 'e':
2442 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2443 break;
2444
2445 case 'E':
2446 if (XVEC (x, i) != NULL)
2447 {
2448 int j;
2449 int len = XVECLEN (x, i);
2450
2451 if (copied && len > 0)
2452 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2453 for (j = 0; j < len; j++)
2454 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2455 }
2456 break;
2457 }
2458 }
2459 return x;
2460 }
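/* The usual protocol, as unshare_all_rtl_again above illustrates, is a
   reset pass followed by a copy pass over the very same rtl:

	reset_used_flags (x);
	x = copy_rtx_if_shared (x);

   so that the second sighting of any node during the copy pass produces
   a fresh copy.  */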
2461
2462 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2463 to look for shared sub-parts. */
2464
2465 void
2466 reset_used_flags (x)
2467 rtx x;
2468 {
2469 int i, j;
2470 enum rtx_code code;
2471 const char *format_ptr;
2472
2473 if (x == 0)
2474 return;
2475
2476 code = GET_CODE (x);
2477
2478 /* These types may be freely shared so we needn't do any resetting
2479 for them. */
2480
2481 switch (code)
2482 {
2483 case REG:
2484 case QUEUED:
2485 case CONST_INT:
2486 case CONST_DOUBLE:
2487 case CONST_VECTOR:
2488 case SYMBOL_REF:
2489 case CODE_LABEL:
2490 case PC:
2491 case CC0:
2492 return;
2493
2494 case INSN:
2495 case JUMP_INSN:
2496 case CALL_INSN:
2497 case NOTE:
2498 case LABEL_REF:
2499 case BARRIER:
2500 /* The chain of insns is not being copied. */
2501 return;
2502
2503 default:
2504 break;
2505 }
2506
2507 x->used = 0;
2508
2509 format_ptr = GET_RTX_FORMAT (code);
2510 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2511 {
2512 switch (*format_ptr++)
2513 {
2514 case 'e':
2515 reset_used_flags (XEXP (x, i));
2516 break;
2517
2518 case 'E':
2519 for (j = 0; j < XVECLEN (x, i); j++)
2520 reset_used_flags (XVECEXP (x, i, j));
2521 break;
2522 }
2523 }
2524 }
2525 \f
2526 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2527 Return X or the rtx for the pseudo reg the value of X was copied into.
2528 OTHER must be valid as a SET_DEST. */
2529
2530 rtx
2531 make_safe_from (x, other)
2532 rtx x, other;
2533 {
2534 while (1)
2535 switch (GET_CODE (other))
2536 {
2537 case SUBREG:
2538 other = SUBREG_REG (other);
2539 break;
2540 case STRICT_LOW_PART:
2541 case SIGN_EXTEND:
2542 case ZERO_EXTEND:
2543 other = XEXP (other, 0);
2544 break;
2545 default:
2546 goto done;
2547 }
2548 done:
2549 if ((GET_CODE (other) == MEM
2550 && ! CONSTANT_P (x)
2551 && GET_CODE (x) != REG
2552 && GET_CODE (x) != SUBREG)
2553 || (GET_CODE (other) == REG
2554 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2555 || reg_mentioned_p (other, x))))
2556 {
2557 rtx temp = gen_reg_rtx (GET_MODE (x));
2558 emit_move_insn (temp, x);
2559 return temp;
2560 }
2561 return x;
2562 }
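/* A hypothetical use: when TARGET will be assigned before VALUE is
   consumed, copy VALUE out of harm's way first,

	value = make_safe_from (value, target);

   so that a later store into TARGET cannot clobber the operand; the
   overlap tests above decide whether a fresh pseudo is really needed.  */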
2563 \f
2564 /* Emission of insns (adding them to the doubly-linked list). */
2565
2566 /* Return the first insn of the current sequence or current function. */
2567
2568 rtx
2569 get_insns ()
2570 {
2571 return first_insn;
2572 }
2573
2574 /* Specify a new insn as the first in the chain. */
2575
2576 void
2577 set_first_insn (insn)
2578 rtx insn;
2579 {
2580 if (PREV_INSN (insn) != 0)
2581 abort ();
2582 first_insn = insn;
2583 }
2584
2585 /* Return the last insn emitted in current sequence or current function. */
2586
2587 rtx
2588 get_last_insn ()
2589 {
2590 return last_insn;
2591 }
2592
2593 /* Specify a new insn as the last in the chain. */
2594
2595 void
2596 set_last_insn (insn)
2597 rtx insn;
2598 {
2599 if (NEXT_INSN (insn) != 0)
2600 abort ();
2601 last_insn = insn;
2602 }
2603
2604 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2605
2606 rtx
2607 get_last_insn_anywhere ()
2608 {
2609 struct sequence_stack *stack;
2610 if (last_insn)
2611 return last_insn;
2612 for (stack = seq_stack; stack; stack = stack->next)
2613 if (stack->last != 0)
2614 return stack->last;
2615 return 0;
2616 }
2617
2618 /* Return a number larger than any instruction's uid in this function. */
2619
2620 int
2621 get_max_uid ()
2622 {
2623 return cur_insn_uid;
2624 }
2625
2626 /* Renumber instructions so that no instruction UIDs are wasted. */
2627
2628 void
2629 renumber_insns (stream)
2630 FILE *stream;
2631 {
2632 rtx insn;
2633
2634 /* If we're not supposed to renumber instructions, don't. */
2635 if (!flag_renumber_insns)
2636 return;
2637
2638 /* If there aren't that many instructions, then it's not really
2639 worth renumbering them. */
2640 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2641 return;
2642
2643 cur_insn_uid = 1;
2644
2645 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2646 {
2647 if (stream)
2648 fprintf (stream, "Renumbering insn %d to %d\n",
2649 INSN_UID (insn), cur_insn_uid);
2650 INSN_UID (insn) = cur_insn_uid++;
2651 }
2652 }
2653 \f
2654 /* Return the next insn. If it is a SEQUENCE, return the first insn
2655 of the sequence. */
2656
2657 rtx
2658 next_insn (insn)
2659 rtx insn;
2660 {
2661 if (insn)
2662 {
2663 insn = NEXT_INSN (insn);
2664 if (insn && GET_CODE (insn) == INSN
2665 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2666 insn = XVECEXP (PATTERN (insn), 0, 0);
2667 }
2668
2669 return insn;
2670 }
2671
2672 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2673 of the sequence. */
2674
2675 rtx
2676 previous_insn (insn)
2677 rtx insn;
2678 {
2679 if (insn)
2680 {
2681 insn = PREV_INSN (insn);
2682 if (insn && GET_CODE (insn) == INSN
2683 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2684 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2685 }
2686
2687 return insn;
2688 }
2689
2690 /* Return the next insn after INSN that is not a NOTE. This routine does not
2691 look inside SEQUENCEs. */
2692
2693 rtx
2694 next_nonnote_insn (insn)
2695 rtx insn;
2696 {
2697 while (insn)
2698 {
2699 insn = NEXT_INSN (insn);
2700 if (insn == 0 || GET_CODE (insn) != NOTE)
2701 break;
2702 }
2703
2704 return insn;
2705 }
2706
2707 /* Return the previous insn before INSN that is not a NOTE. This routine does
2708 not look inside SEQUENCEs. */
2709
2710 rtx
2711 prev_nonnote_insn (insn)
2712 rtx insn;
2713 {
2714 while (insn)
2715 {
2716 insn = PREV_INSN (insn);
2717 if (insn == 0 || GET_CODE (insn) != NOTE)
2718 break;
2719 }
2720
2721 return insn;
2722 }
2723
2724 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2725 or 0, if there is none. This routine does not look inside
2726 SEQUENCEs. */
2727
2728 rtx
2729 next_real_insn (insn)
2730 rtx insn;
2731 {
2732 while (insn)
2733 {
2734 insn = NEXT_INSN (insn);
2735 if (insn == 0 || GET_CODE (insn) == INSN
2736 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2737 break;
2738 }
2739
2740 return insn;
2741 }
2742
2743 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2744 or 0, if there is none. This routine does not look inside
2745 SEQUENCEs. */
2746
2747 rtx
2748 prev_real_insn (insn)
2749 rtx insn;
2750 {
2751 while (insn)
2752 {
2753 insn = PREV_INSN (insn);
2754 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2755 || GET_CODE (insn) == JUMP_INSN)
2756 break;
2757 }
2758
2759 return insn;
2760 }
2761
2765 2762 /* Return nonzero if INSN really does something: INSN is a CALL_INSN or
2766 2763 JUMP_INSN, or an INSN whose pattern is not a bare USE or CLOBBER
2767 2764 (before reload has completed, USEs and CLOBBERs count as active too). */
2765
2766 int
2767 active_insn_p (insn)
2768 rtx insn;
2769 {
2770 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2771 || (GET_CODE (insn) == INSN
2772 && (! reload_completed
2773 || (GET_CODE (PATTERN (insn)) != USE
2774 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2775 }
2776
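/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn.  */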
2777 rtx
2778 next_active_insn (insn)
2779 rtx insn;
2780 {
2781 while (insn)
2782 {
2783 insn = NEXT_INSN (insn);
2784 if (insn == 0 || active_insn_p (insn))
2785 break;
2786 }
2787
2788 return insn;
2789 }
2790
2791 /* Find the last insn before INSN that really does something. This routine
2792 does not look inside SEQUENCEs. Until reload has completed, this is the
2793 same as prev_real_insn. */
2794
2795 rtx
2796 prev_active_insn (insn)
2797 rtx insn;
2798 {
2799 while (insn)
2800 {
2801 insn = PREV_INSN (insn);
2802 if (insn == 0 || active_insn_p (insn))
2803 break;
2804 }
2805
2806 return insn;
2807 }
2808
2809 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2810
2811 rtx
2812 next_label (insn)
2813 rtx insn;
2814 {
2815 while (insn)
2816 {
2817 insn = NEXT_INSN (insn);
2818 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2819 break;
2820 }
2821
2822 return insn;
2823 }
2824
2825 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2826
2827 rtx
2828 prev_label (insn)
2829 rtx insn;
2830 {
2831 while (insn)
2832 {
2833 insn = PREV_INSN (insn);
2834 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2835 break;
2836 }
2837
2838 return insn;
2839 }
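/* All of the walkers above share one shape: follow NEXT_INSN or
   PREV_INSN links and stop at the first insn that satisfies the
   predicate.  A scan such as

	for (insn = next_real_insn (start); insn;
	     insn = next_real_insn (insn))
	  ...

   therefore visits each real insn after START exactly once.  */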
2840 \f
2841 #ifdef HAVE_cc0
2842 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2843 and REG_CC_USER notes so we can find it. */
2844
2845 void
2846 link_cc0_insns (insn)
2847 rtx insn;
2848 {
2849 rtx user = next_nonnote_insn (insn);
2850
2851 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2852 user = XVECEXP (PATTERN (user), 0, 0);
2853
2854 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2855 REG_NOTES (user));
2856 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2857 }
2858
2859 /* Return the next insn that uses CC0 after INSN, which is assumed to
2860 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2861 applied to the result of this function should yield INSN).
2862
2863 Normally, this is simply the next insn. However, if a REG_CC_USER note
2864 is present, it contains the insn that uses CC0.
2865
2866 Return 0 if we can't find the insn. */
2867
2868 rtx
2869 next_cc0_user (insn)
2870 rtx insn;
2871 {
2872 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
2873
2874 if (note)
2875 return XEXP (note, 0);
2876
2877 insn = next_nonnote_insn (insn);
2878 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2879 insn = XVECEXP (PATTERN (insn), 0, 0);
2880
2881 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2882 return insn;
2883
2884 return 0;
2885 }
2886
2887 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2888 note, it is the previous insn. */
2889
2890 rtx
2891 prev_cc0_setter (insn)
2892 rtx insn;
2893 {
2894 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2895
2896 if (note)
2897 return XEXP (note, 0);
2898
2899 insn = prev_nonnote_insn (insn);
2900 if (! sets_cc0_p (PATTERN (insn)))
2901 abort ();
2902
2903 return insn;
2904 }
2905 #endif
2906
2910 2907 /* Increment the label use counts for all labels present in rtx X. */
2911 2908
2912 2909 static void
2913 2910 mark_label_nuses (x)
2911 rtx x;
2912 {
2913 enum rtx_code code;
2914 int i, j;
2915 const char *fmt;
2916
2917 code = GET_CODE (x);
2918 if (code == LABEL_REF)
2919 LABEL_NUSES (XEXP (x, 0))++;
2920
2921 fmt = GET_RTX_FORMAT (code);
2922 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2923 {
2924 if (fmt[i] == 'e')
2925 mark_label_nuses (XEXP (x, i));
2926 else if (fmt[i] == 'E')
2927 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2928 mark_label_nuses (XVECEXP (x, i, j));
2929 }
2930 }
2931
2932 \f
2933 /* Try splitting insns that can be split for better scheduling.
2937 2934 PAT is the pattern to try to split.
2935 TRIAL is the insn providing PAT.
2936 LAST is non-zero if we should return the last insn of the sequence produced.
2937
2938 If this routine succeeds in splitting, it returns the first or last
2939 replacement insn depending on the value of LAST. Otherwise, it
2940 returns TRIAL. If the insn to be returned can be split, it will be. */
2941
2942 rtx
2943 try_split (pat, trial, last)
2944 rtx pat, trial;
2945 int last;
2946 {
2947 rtx before = PREV_INSN (trial);
2948 rtx after = NEXT_INSN (trial);
2949 int has_barrier = 0;
2950 rtx tem;
2951 rtx note, seq;
2952 int probability;
2953
2954 if (any_condjump_p (trial)
2955 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
2956 split_branch_probability = INTVAL (XEXP (note, 0));
2957 probability = split_branch_probability;
2958
2959 seq = split_insns (pat, trial);
2960
2961 split_branch_probability = -1;
2962
2963 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2964 We may need to handle this specially. */
2965 if (after && GET_CODE (after) == BARRIER)
2966 {
2967 has_barrier = 1;
2968 after = NEXT_INSN (after);
2969 }
2970
2971 if (seq)
2972 {
2973 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2977 2974 The latter case normally arises only when the insn is split so that
2978 2975 the result, in turn, can be split again (SFmode on the 29k is an example). */
2976 if (GET_CODE (seq) == SEQUENCE)
2977 {
2978 int i, njumps = 0;
2979
2980 /* Avoid infinite loop if any insn of the result matches
2981 the original pattern. */
2982 for (i = 0; i < XVECLEN (seq, 0); i++)
2983 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
2984 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
2985 return trial;
2986
2987 /* Mark labels. */
2988 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2989 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
2990 {
2991 rtx insn = XVECEXP (seq, 0, i);
2992 mark_jump_label (PATTERN (insn),
2993 XVECEXP (seq, 0, i), 0);
2994 njumps++;
2995 if (probability != -1
2996 && any_condjump_p (insn)
2997 && !find_reg_note (insn, REG_BR_PROB, 0))
2998 {
2999 /* We can preserve the REG_BR_PROB notes only if exactly
3000 one jump is created, otherwise the machine description
3001 is responsible for this step using
3002 split_branch_probability variable. */
3003 if (njumps != 1)
3004 abort ();
3005 REG_NOTES (insn)
3006 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3007 GEN_INT (probability),
3008 REG_NOTES (insn));
3009 }
3010 }
3011
3012 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3013 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3014 if (GET_CODE (trial) == CALL_INSN)
3015 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3016 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
3017 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
3018 = CALL_INSN_FUNCTION_USAGE (trial);
3019
3020 /* Copy notes, particularly those related to the CFG. */
3021 for (note = REG_NOTES (trial); note ; note = XEXP (note, 1))
3022 {
3023 switch (REG_NOTE_KIND (note))
3024 {
3025 case REG_EH_REGION:
3026 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3027 {
3028 rtx insn = XVECEXP (seq, 0, i);
3029 if (GET_CODE (insn) == CALL_INSN
3030 || (flag_non_call_exceptions
3031 && may_trap_p (PATTERN (insn))))
3032 REG_NOTES (insn)
3033 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3034 XEXP (note, 0),
3035 REG_NOTES (insn));
3036 }
3037 break;
3038
3039 case REG_NORETURN:
3040 case REG_SETJMP:
3041 case REG_ALWAYS_RETURN:
3042 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3043 {
3044 rtx insn = XVECEXP (seq, 0, i);
3045 if (GET_CODE (insn) == CALL_INSN)
3046 REG_NOTES (insn)
3047 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3048 XEXP (note, 0),
3049 REG_NOTES (insn));
3050 }
3051 break;
3052
3053 case REG_NON_LOCAL_GOTO:
3054 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3055 {
3056 rtx insn = XVECEXP (seq, 0, i);
3057 if (GET_CODE (insn) == JUMP_INSN)
3058 REG_NOTES (insn)
3059 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3060 XEXP (note, 0),
3061 REG_NOTES (insn));
3062 }
3063 break;
3064
3065 default:
3066 break;
3067 }
3068 }
3069
3070 /* If there are LABELS inside the split insns increment the
3071 usage count so we don't delete the label. */
3072 if (GET_CODE (trial) == INSN)
3073 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3074 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
3075 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
3076
3077 tem = emit_insn_after (seq, trial);
3078
3079 delete_related_insns (trial);
3080 if (has_barrier)
3081 emit_barrier_after (tem);
3082
3083 /* Recursively call try_split for each new insn created; by the
3084 time control returns here that insn will be fully split, so
3085 set LAST and continue from the insn after the one returned.
3086 We can't use next_active_insn here since AFTER may be a note.
3090 3087 Ignore deleted insns, which can occur if not optimizing. */
3088 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3089 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3090 tem = try_split (PATTERN (tem), tem, 1);
3091 }
3092 /* Avoid infinite loop if the result matches the original pattern. */
3093 else if (rtx_equal_p (seq, pat))
3094 return trial;
3095 else
3096 {
3097 PATTERN (trial) = seq;
3098 INSN_CODE (trial) = -1;
3099 try_split (seq, trial, last);
3100 }
3101
3102 /* Return either the first or the last insn, depending on which was
3103 requested. */
3104 return last
3105 ? (after ? PREV_INSN (after) : last_insn)
3106 : NEXT_INSN (before);
3107 }
3108
3109 return trial;
3110 }
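/* Callers such as the instruction splitting pass drive this with
   something like (a sketch, not a quotation):

	insn = try_split (PATTERN (insn), insn, 1);

   and compare the result against the original insn to learn whether a
   split actually took place.  */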
3111 \f
3112 /* Make and return an INSN rtx, initializing all its slots.
3116 3113 Store PATTERN in the pattern slot. */
3114
3115 rtx
3116 make_insn_raw (pattern)
3117 rtx pattern;
3118 {
3119 rtx insn;
3120
3121 insn = rtx_alloc (INSN);
3122
3123 INSN_UID (insn) = cur_insn_uid++;
3124 PATTERN (insn) = pattern;
3125 INSN_CODE (insn) = -1;
3126 LOG_LINKS (insn) = NULL;
3127 REG_NOTES (insn) = NULL;
3128
3129 #ifdef ENABLE_RTL_CHECKING
3130 if (insn
3131 && INSN_P (insn)
3132 && (returnjump_p (insn)
3133 || (GET_CODE (insn) == SET
3134 && SET_DEST (insn) == pc_rtx)))
3135 {
3136 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3137 debug_rtx (insn);
3138 }
3139 #endif
3140
3141 return insn;
3142 }
3143
3147 3144 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3145
3146 static rtx
3147 make_jump_insn_raw (pattern)
3148 rtx pattern;
3149 {
3150 rtx insn;
3151
3152 insn = rtx_alloc (JUMP_INSN);
3153 INSN_UID (insn) = cur_insn_uid++;
3154
3155 PATTERN (insn) = pattern;
3156 INSN_CODE (insn) = -1;
3157 LOG_LINKS (insn) = NULL;
3158 REG_NOTES (insn) = NULL;
3159 JUMP_LABEL (insn) = NULL;
3160
3161 return insn;
3162 }
3163
3167 3164 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3165
3166 static rtx
3167 make_call_insn_raw (pattern)
3168 rtx pattern;
3169 {
3170 rtx insn;
3171
3172 insn = rtx_alloc (CALL_INSN);
3173 INSN_UID (insn) = cur_insn_uid++;
3174
3175 PATTERN (insn) = pattern;
3176 INSN_CODE (insn) = -1;
3177 LOG_LINKS (insn) = NULL;
3178 REG_NOTES (insn) = NULL;
3179 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3180
3181 return insn;
3182 }
3183 \f
3184 /* Add INSN to the end of the doubly-linked list.
3185 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3186
3187 void
3188 add_insn (insn)
3189 rtx insn;
3190 {
3191 PREV_INSN (insn) = last_insn;
3192 NEXT_INSN (insn) = 0;
3193
3194 if (NULL != last_insn)
3195 NEXT_INSN (last_insn) = insn;
3196
3197 if (NULL == first_insn)
3198 first_insn = insn;
3199
3200 last_insn = insn;
3201 }
3202
3203 /* Add INSN into the doubly-linked list after insn AFTER. This and
3204 the next should be the only functions called to insert an insn once
3205 delay slots have been filled since only they know how to update a
3206 SEQUENCE. */
3207
3208 void
3209 add_insn_after (insn, after)
3210 rtx insn, after;
3211 {
3212 rtx next = NEXT_INSN (after);
3213 basic_block bb;
3214
3215 if (optimize && INSN_DELETED_P (after))
3216 abort ();
3217
3218 NEXT_INSN (insn) = next;
3219 PREV_INSN (insn) = after;
3220
3221 if (next)
3222 {
3223 PREV_INSN (next) = insn;
3224 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3225 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3226 }
3227 else if (last_insn == after)
3228 last_insn = insn;
3229 else
3230 {
3231 struct sequence_stack *stack = seq_stack;
3232 /* Scan all pending sequences too. */
3233 for (; stack; stack = stack->next)
3234 if (after == stack->last)
3235 {
3236 stack->last = insn;
3237 break;
3238 }
3239
3240 if (stack == 0)
3241 abort ();
3242 }
3243
3244 if (basic_block_for_insn
3245 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
3246 && (bb = BLOCK_FOR_INSN (after)))
3247 {
3248 set_block_for_insn (insn, bb);
3249 if (INSN_P (insn))
3250 bb->flags |= BB_DIRTY;
3254 3251 /* This should not happen, as the first insn in a BB is always
3255 3252 either a NOTE or a LABEL. */
3253 if (bb->end == after
3254 /* Avoid clobbering of structure when creating new BB. */
3255 && GET_CODE (insn) != BARRIER
3256 && (GET_CODE (insn) != NOTE
3257 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3258 bb->end = insn;
3259 }
3260
3261 NEXT_INSN (after) = insn;
3262 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3263 {
3264 rtx sequence = PATTERN (after);
3265 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3266 }
3267 }
3268
3269 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3270 the previous should be the only functions called to insert an insn once
3271 delay slots have been filled since only they know how to update a
3272 SEQUENCE. */
3273
3274 void
3275 add_insn_before (insn, before)
3276 rtx insn, before;
3277 {
3278 rtx prev = PREV_INSN (before);
3279 basic_block bb;
3280
3281 if (optimize && INSN_DELETED_P (before))
3282 abort ();
3283
3284 PREV_INSN (insn) = prev;
3285 NEXT_INSN (insn) = before;
3286
3287 if (prev)
3288 {
3289 NEXT_INSN (prev) = insn;
3290 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3291 {
3292 rtx sequence = PATTERN (prev);
3293 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3294 }
3295 }
3296 else if (first_insn == before)
3297 first_insn = insn;
3298 else
3299 {
3300 struct sequence_stack *stack = seq_stack;
3301 /* Scan all pending sequences too. */
3302 for (; stack; stack = stack->next)
3303 if (before == stack->first)
3304 {
3305 stack->first = insn;
3306 break;
3307 }
3308
3309 if (stack == 0)
3310 abort ();
3311 }
3312
3313 if (basic_block_for_insn
3314 && (unsigned int)INSN_UID (before) < basic_block_for_insn->num_elements
3315 && (bb = BLOCK_FOR_INSN (before)))
3316 {
3317 set_block_for_insn (insn, bb);
3318 if (INSN_P (insn))
3319 bb->flags |= BB_DIRTY;
3323 3320 /* This should not happen, as the first insn in a BB is always
3324 3321 either a NOTE or a LABEL. */
3322 if (bb->head == insn
3323 /* Avoid clobbering of structure when creating new BB. */
3324 && GET_CODE (insn) != BARRIER
3325 && (GET_CODE (insn) != NOTE
3326 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3327 abort ();
3328 }
3329
3330 PREV_INSN (before) = insn;
3331 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3332 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3333 }
3334
3335 /* Remove an insn from its doubly-linked list. This function knows how
3336 to handle sequences. */
3337 void
3338 remove_insn (insn)
3339 rtx insn;
3340 {
3341 rtx next = NEXT_INSN (insn);
3342 rtx prev = PREV_INSN (insn);
3343 basic_block bb;
3344
3345 if (prev)
3346 {
3347 NEXT_INSN (prev) = next;
3348 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3349 {
3350 rtx sequence = PATTERN (prev);
3351 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3352 }
3353 }
3354 else if (first_insn == insn)
3355 first_insn = next;
3356 else
3357 {
3358 struct sequence_stack *stack = seq_stack;
3359 /* Scan all pending sequences too. */
3360 for (; stack; stack = stack->next)
3361 if (insn == stack->first)
3362 {
3363 stack->first = next;
3364 break;
3365 }
3366
3367 if (stack == 0)
3368 abort ();
3369 }
3370
3371 if (next)
3372 {
3373 PREV_INSN (next) = prev;
3374 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3375 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3376 }
3377 else if (last_insn == insn)
3378 last_insn = prev;
3379 else
3380 {
3381 struct sequence_stack *stack = seq_stack;
3382 /* Scan all pending sequences too. */
3383 for (; stack; stack = stack->next)
3384 if (insn == stack->last)
3385 {
3386 stack->last = prev;
3387 break;
3388 }
3389
3390 if (stack == 0)
3391 abort ();
3392 }
3393 if (basic_block_for_insn
3394 && (unsigned int)INSN_UID (insn) < basic_block_for_insn->num_elements
3395 && (bb = BLOCK_FOR_INSN (insn)))
3396 {
3397 if (INSN_P (insn))
3398 bb->flags |= BB_DIRTY;
3399 if (bb->head == insn)
3400 {
3404 3401 /* Never ever delete the basic block note without deleting the whole
3405 3402 basic block. */
3403 if (GET_CODE (insn) == NOTE)
3404 abort ();
3405 bb->head = next;
3406 }
3407 if (bb->end == insn)
3408 bb->end = prev;
3409 }
3410 }
3411
3412 /* Delete all insns made since FROM.
3413 FROM becomes the new last instruction. */
3414
3415 void
3416 delete_insns_since (from)
3417 rtx from;
3418 {
3419 if (from == 0)
3420 first_insn = 0;
3421 else
3422 NEXT_INSN (from) = 0;
3423 last_insn = from;
3424 }
3425
3429 3426 /* This function is deprecated; please use sequences instead.
3427
3428 Move a consecutive bunch of insns to a different place in the chain.
3429 The insns to be moved are those between FROM and TO.
3430 They are moved to a new position after the insn AFTER.
3431 AFTER must not be FROM or TO or any insn in between.
3432
3433 This function does not know about SEQUENCEs and hence should not be
3434 called after delay-slot filling has been done. */
3435
3436 void
3437 reorder_insns_nobb (from, to, after)
3438 rtx from, to, after;
3439 {
3440 /* Splice this bunch out of where it is now. */
3441 if (PREV_INSN (from))
3442 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3443 if (NEXT_INSN (to))
3444 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3445 if (last_insn == to)
3446 last_insn = PREV_INSN (from);
3447 if (first_insn == from)
3448 first_insn = NEXT_INSN (to);
3449
3450 /* Make the new neighbors point to it and it to them. */
3451 if (NEXT_INSN (after))
3452 PREV_INSN (NEXT_INSN (after)) = to;
3453
3454 NEXT_INSN (to) = NEXT_INSN (after);
3455 PREV_INSN (from) = after;
3456 NEXT_INSN (after) = from;
3457 if (after == last_insn)
3458 last_insn = to;
3459 }
3460
3461 /* Same as function above, but take care to update BB boundaries. */
3462 void
3463 reorder_insns (from, to, after)
3464 rtx from, to, after;
3465 {
3466 rtx prev = PREV_INSN (from);
3467 basic_block bb, bb2;
3468
3469 reorder_insns_nobb (from, to, after);
3470
3471 if (basic_block_for_insn
3472 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
3473 && (bb = BLOCK_FOR_INSN (after)))
3474 {
3475 rtx x;
3476 bb->flags |= BB_DIRTY;
3477
3478 if (basic_block_for_insn
3479 && (unsigned int)INSN_UID (from) < basic_block_for_insn->num_elements
3480 && (bb2 = BLOCK_FOR_INSN (from)))
3481 {
3482 if (bb2->end == to)
3483 bb2->end = prev;
3484 bb2->flags |= BB_DIRTY;
3485 }
3486
3487 if (bb->end == after)
3488 bb->end = to;
3489
3490 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3491 set_block_for_insn (x, bb);
3492 }
3493 }
3494
3495 /* Return the line note insn preceding INSN. */
3496
3497 static rtx
3498 find_line_note (insn)
3499 rtx insn;
3500 {
3501 if (no_line_numbers)
3502 return 0;
3503
3504 for (; insn; insn = PREV_INSN (insn))
3505 if (GET_CODE (insn) == NOTE
3506 && NOTE_LINE_NUMBER (insn) >= 0)
3507 break;
3508
3509 return insn;
3510 }
3511
3512 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3513 of the moved insns when debugging. This may insert a note between AFTER
3514 and FROM, and another one after TO. */
3515
3516 void
3517 reorder_insns_with_line_notes (from, to, after)
3518 rtx from, to, after;
3519 {
3520 rtx from_line = find_line_note (from);
3521 rtx after_line = find_line_note (after);
3522
3523 reorder_insns (from, to, after);
3524
3525 if (from_line == after_line)
3526 return;
3527
3528 if (from_line)
3529 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3530 NOTE_LINE_NUMBER (from_line),
3531 after);
3532 if (after_line)
3533 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3534 NOTE_LINE_NUMBER (after_line),
3535 to);
3536 }
3537
3538 /* Remove unnecessary notes from the instruction stream. */
3539
3540 void
3541 remove_unnecessary_notes ()
3542 {
3543 rtx block_stack = NULL_RTX;
3544 rtx eh_stack = NULL_RTX;
3545 rtx insn;
3546 rtx next;
3547 rtx tmp;
3548
3549 /* We must not remove the first instruction in the function because
3550 the compiler depends on the first instruction being a note. */
3551 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3552 {
3553 /* Remember what's next. */
3554 next = NEXT_INSN (insn);
3555
3556 /* We're only interested in notes. */
3557 if (GET_CODE (insn) != NOTE)
3558 continue;
3559
3560 switch (NOTE_LINE_NUMBER (insn))
3561 {
3562 case NOTE_INSN_DELETED:
3563 case NOTE_INSN_LOOP_END_TOP_COND:
3564 remove_insn (insn);
3565 break;
3566
3567 case NOTE_INSN_EH_REGION_BEG:
3568 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3569 break;
3570
3571 case NOTE_INSN_EH_REGION_END:
3572 /* Too many end notes. */
3573 if (eh_stack == NULL_RTX)
3574 abort ();
3575 /* Mismatched nesting. */
3576 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3577 abort ();
3578 tmp = eh_stack;
3579 eh_stack = XEXP (eh_stack, 1);
3580 free_INSN_LIST_node (tmp);
3581 break;
3582
3583 case NOTE_INSN_BLOCK_BEG:
3584 /* By now, all notes indicating lexical blocks should have
3585 NOTE_BLOCK filled in. */
3586 if (NOTE_BLOCK (insn) == NULL_TREE)
3587 abort ();
3588 block_stack = alloc_INSN_LIST (insn, block_stack);
3589 break;
3590
3591 case NOTE_INSN_BLOCK_END:
3592 /* Too many end notes. */
3593 if (block_stack == NULL_RTX)
3594 abort ();
3595 /* Mismatched nesting. */
3596 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3597 abort ();
3598 tmp = block_stack;
3599 block_stack = XEXP (block_stack, 1);
3600 free_INSN_LIST_node (tmp);
3601
3602 /* Scan back to see if there are any non-note instructions
3603 between INSN and the beginning of this block. If not,
3604 then there is no PC range in the generated code that will
3605 actually be in this block, so there's no point in
3606 remembering the existence of the block. */
3607 for (tmp = PREV_INSN (insn); tmp ; tmp = PREV_INSN (tmp))
3608 {
3609 /* This block contains a real instruction. Note that we
3610 don't include labels; if the only thing in the block
3611 is a label, then there are still no PC values that
3612 lie within the block. */
3613 if (INSN_P (tmp))
3614 break;
3615
3616 /* We're only interested in NOTEs. */
3617 if (GET_CODE (tmp) != NOTE)
3618 continue;
3619
3620 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3621 {
3622 /* We just verified that this BLOCK matches us with
3623 the block_stack check above. Never delete the
3624 BLOCK for the outermost scope of the function; we
3625 can refer to names from that scope even if the
3626 block notes are messed up. */
3627 if (! is_body_block (NOTE_BLOCK (insn))
3628 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3629 {
3630 remove_insn (tmp);
3631 remove_insn (insn);
3632 }
3633 break;
3634 }
3635 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3636 /* There's a nested block. We need to leave the
3637 current block in place since otherwise the debugger
3638 wouldn't be able to show symbols from our block in
3639 the nested block. */
3640 break;
3641 }
3642 }
3643 }
3644
3645 /* Too many begin notes. */
3646 if (block_stack || eh_stack)
3647 abort ();
3648 }
3649
3650 \f
3651 /* Emit an insn of given code and pattern
3652 at a specified place within the doubly-linked list. */
3653
3654 /* Make an instruction with body PATTERN
3655 and output it before the instruction BEFORE. */
3656
3657 rtx
3658 emit_insn_before (pattern, before)
3659 rtx pattern, before;
3660 {
3661 rtx insn = before;
3662
3663 if (GET_CODE (pattern) == SEQUENCE)
3664 {
3665 int i;
3666
3667 for (i = 0; i < XVECLEN (pattern, 0); i++)
3668 {
3669 insn = XVECEXP (pattern, 0, i);
3670 add_insn_before (insn, before);
3671 }
3672 }
3673 else
3674 {
3675 insn = make_insn_raw (pattern);
3676 add_insn_before (insn, before);
3677 }
3678
3679 return insn;
3680 }
3681
3682 /* Make an instruction with body PATTERN and code JUMP_INSN
3683 and output it before the instruction BEFORE. */
3684
3685 rtx
3686 emit_jump_insn_before (pattern, before)
3687 rtx pattern, before;
3688 {
3689 rtx insn;
3690
3691 if (GET_CODE (pattern) == SEQUENCE)
3692 insn = emit_insn_before (pattern, before);
3693 else
3694 {
3695 insn = make_jump_insn_raw (pattern);
3696 add_insn_before (insn, before);
3697 }
3698
3699 return insn;
3700 }
3701
3702 /* Make an instruction with body PATTERN and code CALL_INSN
3703 and output it before the instruction BEFORE. */
3704
3705 rtx
3706 emit_call_insn_before (pattern, before)
3707 rtx pattern, before;
3708 {
3709 rtx insn;
3710
3711 if (GET_CODE (pattern) == SEQUENCE)
3712 insn = emit_insn_before (pattern, before);
3713 else
3714 {
3715 insn = make_call_insn_raw (pattern);
3716 add_insn_before (insn, before);
3717 PUT_CODE (insn, CALL_INSN);
3718 }
3719
3720 return insn;
3721 }
3722
3723 /* Make an insn of code BARRIER
3724 and output it before the insn BEFORE. */
3725
3726 rtx
3727 emit_barrier_before (before)
3728 rtx before;
3729 {
3730 rtx insn = rtx_alloc (BARRIER);
3731
3732 INSN_UID (insn) = cur_insn_uid++;
3733
3734 add_insn_before (insn, before);
3735 return insn;
3736 }
3737
3738 /* Emit the label LABEL before the insn BEFORE. */
3739
3740 rtx
3741 emit_label_before (label, before)
3742 rtx label, before;
3743 {
3744 /* This can be called twice for the same label as a result of the
3745 confusion that follows a syntax error! So make it harmless. */
3746 if (INSN_UID (label) == 0)
3747 {
3748 INSN_UID (label) = cur_insn_uid++;
3749 add_insn_before (label, before);
3750 }
3751
3752 return label;
3753 }
3754
3755 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3756
3757 rtx
3758 emit_note_before (subtype, before)
3759 int subtype;
3760 rtx before;
3761 {
3762 rtx note = rtx_alloc (NOTE);
3763 INSN_UID (note) = cur_insn_uid++;
3764 NOTE_SOURCE_FILE (note) = 0;
3765 NOTE_LINE_NUMBER (note) = subtype;
3766
3767 add_insn_before (note, before);
3768 return note;
3769 }
3770 \f
3771 /* Make an insn of code INSN with body PATTERN
3772 and output it after the insn AFTER. */
3773
3774 rtx
3775 emit_insn_after (pattern, after)
3776 rtx pattern, after;
3777 {
3778 rtx insn = after;
3779
3780 if (GET_CODE (pattern) == SEQUENCE)
3781 {
3782 int i;
3783
3784 for (i = 0; i < XVECLEN (pattern, 0); i++)
3785 {
3786 insn = XVECEXP (pattern, 0, i);
3787 add_insn_after (insn, after);
3788 after = insn;
3789 }
3790 }
3791 else
3792 {
3793 insn = make_insn_raw (pattern);
3794 add_insn_after (insn, after);
3795 }
3796
3797 return insn;
3798 }
3799
3800 /* Similar to emit_insn_after, except that line notes are to be inserted so
3801 as to act as if this insn were at FROM. */
3802
3803 void
3804 emit_insn_after_with_line_notes (pattern, after, from)
3805 rtx pattern, after, from;
3806 {
3807 rtx from_line = find_line_note (from);
3808 rtx after_line = find_line_note (after);
3809 rtx insn = emit_insn_after (pattern, after);
3810
3811 if (from_line)
3812 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3813 NOTE_LINE_NUMBER (from_line),
3814 after);
3815
3816 if (after_line)
3817 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3818 NOTE_LINE_NUMBER (after_line),
3819 insn);
3820 }
3821
3822 /* Make an insn of code JUMP_INSN with body PATTERN
3823 and output it after the insn AFTER. */
3824
3825 rtx
3826 emit_jump_insn_after (pattern, after)
3827 rtx pattern, after;
3828 {
3829 rtx insn;
3830
3831 if (GET_CODE (pattern) == SEQUENCE)
3832 insn = emit_insn_after (pattern, after);
3833 else
3834 {
3835 insn = make_jump_insn_raw (pattern);
3836 add_insn_after (insn, after);
3837 }
3838
3839 return insn;
3840 }
3841
3842 /* Make an insn of code BARRIER
3843 and output it after the insn AFTER. */
3844
3845 rtx
3846 emit_barrier_after (after)
3847 rtx after;
3848 {
3849 rtx insn = rtx_alloc (BARRIER);
3850
3851 INSN_UID (insn) = cur_insn_uid++;
3852
3853 add_insn_after (insn, after);
3854 return insn;
3855 }
3856
3857 /* Emit the label LABEL after the insn AFTER. */
3858
3859 rtx
3860 emit_label_after (label, after)
3861 rtx label, after;
3862 {
3863 /* This can be called twice for the same label
3864 as a result of the confusion that follows a syntax error!
3865 So make it harmless. */
3866 if (INSN_UID (label) == 0)
3867 {
3868 INSN_UID (label) = cur_insn_uid++;
3869 add_insn_after (label, after);
3870 }
3871
3872 return label;
3873 }
3874
3875 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
3876
3877 rtx
3878 emit_note_after (subtype, after)
3879 int subtype;
3880 rtx after;
3881 {
3882 rtx note = rtx_alloc (NOTE);
3883 INSN_UID (note) = cur_insn_uid++;
3884 NOTE_SOURCE_FILE (note) = 0;
3885 NOTE_LINE_NUMBER (note) = subtype;
3886 add_insn_after (note, after);
3887 return note;
3888 }
3889
3890 /* Emit a line note for FILE and LINE after the insn AFTER. */
3891
3892 rtx
3893 emit_line_note_after (file, line, after)
3894 const char *file;
3895 int line;
3896 rtx after;
3897 {
3898 rtx note;
3899
3900 if (no_line_numbers && line > 0)
3901 {
3902 cur_insn_uid++;
3903 return 0;
3904 }
3905
3906 note = rtx_alloc (NOTE);
3907 INSN_UID (note) = cur_insn_uid++;
3908 NOTE_SOURCE_FILE (note) = file;
3909 NOTE_LINE_NUMBER (note) = line;
3910 add_insn_after (note, after);
3911 return note;
3912 }
3913 \f
3914 /* Make an insn of code INSN with pattern PATTERN
3915 and add it to the end of the doubly-linked list.
3916 If PATTERN is a SEQUENCE, take the elements of it
3917 and emit an insn for each element.
3918
3919 Returns the last insn emitted. */
3920
3921 rtx
3922 emit_insn (pattern)
3923 rtx pattern;
3924 {
3925 rtx insn = last_insn;
3926
3927 if (GET_CODE (pattern) == SEQUENCE)
3928 {
3929 int i;
3930
3931 for (i = 0; i < XVECLEN (pattern, 0); i++)
3932 {
3933 insn = XVECEXP (pattern, 0, i);
3934 add_insn (insn);
3935 }
3936 }
3937 else
3938 {
3939 insn = make_insn_raw (pattern);
3940 add_insn (insn);
3941 }
3942
3943 return insn;
3944 }
3945
3946 /* Emit the insns in a chain starting with INSN.
3947 Return the last insn emitted. */
3948
3949 rtx
3950 emit_insns (insn)
3951 rtx insn;
3952 {
3953 rtx last = 0;
3954
3955 while (insn)
3956 {
3957 rtx next = NEXT_INSN (insn);
3958 add_insn (insn);
3959 last = insn;
3960 insn = next;
3961 }
3962
3963 return last;
3964 }
3965
3966 /* Emit the insns in a chain starting with INSN and place them in front of
3967 the insn BEFORE. Return the last insn emitted. */
3968
3969 rtx
3970 emit_insns_before (insn, before)
3971 rtx insn;
3972 rtx before;
3973 {
3974 rtx last = 0;
3975
3976 while (insn)
3977 {
3978 rtx next = NEXT_INSN (insn);
3979 add_insn_before (insn, before);
3980 last = insn;
3981 insn = next;
3982 }
3983
3984 return last;
3985 }
3986
3987 /* Emit the insns in a chain starting with FIRST and place them in back of
3988 the insn AFTER. Return the last insn emitted. */
3989
3990 rtx
3991 emit_insns_after (first, after)
3992 rtx first;
3993 rtx after;
3994 {
3995 rtx last;
3996 rtx after_after;
3997 basic_block bb;
3998
3999 if (!after)
4000 abort ();
4001
4002 if (!first)
4003 return after;
4004
4005 if (basic_block_for_insn
4006 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
4007 && (bb = BLOCK_FOR_INSN (after)))
4008 {
4009 bb->flags |= BB_DIRTY;
4010 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4011 set_block_for_insn (last, bb);
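	  /* The loop body runs for every insn but the final one, so
	     give the final insn its block assignment here.  */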
4012 set_block_for_insn (last, bb);
4013 if (bb->end == after)
4014 bb->end = last;
4015 }
4016 else
4017 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4018 continue;
4019
4020 after_after = NEXT_INSN (after);
4021
4022 NEXT_INSN (after) = first;
4023 PREV_INSN (first) = after;
4024 NEXT_INSN (last) = after_after;
4025 if (after_after)
4026 PREV_INSN (after_after) = last;
4027
4028 if (after == last_insn)
4029 last_insn = last;
4030 return last;
4031 }
4032
4033 /* Make an insn of code JUMP_INSN with pattern PATTERN
4034 and add it to the end of the doubly-linked list. */
4035
4036 rtx
4037 emit_jump_insn (pattern)
4038 rtx pattern;
4039 {
4040 if (GET_CODE (pattern) == SEQUENCE)
4041 return emit_insn (pattern);
4042 else
4043 {
4044 rtx insn = make_jump_insn_raw (pattern);
4045 add_insn (insn);
4046 return insn;
4047 }
4048 }
4049
4050 /* Make an insn of code CALL_INSN with pattern PATTERN
4051 and add it to the end of the doubly-linked list. */
4052
4053 rtx
4054 emit_call_insn (pattern)
4055 rtx pattern;
4056 {
4057 if (GET_CODE (pattern) == SEQUENCE)
4058 return emit_insn (pattern);
4059 else
4060 {
4061 rtx insn = make_call_insn_raw (pattern);
4062 add_insn (insn);
4063 PUT_CODE (insn, CALL_INSN);
4064 return insn;
4065 }
4066 }
4067
4068 /* Add the label LABEL to the end of the doubly-linked list. */
4069
4070 rtx
4071 emit_label (label)
4072 rtx label;
4073 {
4074 /* This can be called twice for the same label
4075 as a result of the confusion that follows a syntax error!
4076 So make it harmless. */
4077 if (INSN_UID (label) == 0)
4078 {
4079 INSN_UID (label) = cur_insn_uid++;
4080 add_insn (label);
4081 }
4082 return label;
4083 }
4084
4085 /* Make an insn of code BARRIER
4086 and add it to the end of the doubly-linked list. */
4087
4088 rtx
4089 emit_barrier ()
4090 {
4091 rtx barrier = rtx_alloc (BARRIER);
4092 INSN_UID (barrier) = cur_insn_uid++;
4093 add_insn (barrier);
4094 return barrier;
4095 }
4096
4097 /* Make an insn of code NOTE
4098 with data-fields specified by FILE and LINE
4099 and add it to the end of the doubly-linked list,
4100 but only if line-numbers are desired for debugging info. */
4101
4102 rtx
4103 emit_line_note (file, line)
4104 const char *file;
4105 int line;
4106 {
4107 set_file_and_line_for_stmt (file, line);
4108
4109 #if 0
4110 if (no_line_numbers)
4111 return 0;
4112 #endif
4113
4114 return emit_note (file, line);
4115 }
4116
4117 /* Make an insn of code NOTE
4118 with data-fields specified by FILE and LINE
4119 and add it to the end of the doubly-linked list.
4120 If it is a line-number NOTE, omit it if it matches the previous one. */
4121
4122 rtx
4123 emit_note (file, line)
4124 const char *file;
4125 int line;
4126 {
4127 rtx note;
4128
4129 if (line > 0)
4130 {
4131 if (file && last_filename && !strcmp (file, last_filename)
4132 && line == last_linenum)
4133 return 0;
4134 last_filename = file;
4135 last_linenum = line;
4136 }
4137
4138 if (no_line_numbers && line > 0)
4139 {
4140 cur_insn_uid++;
4141 return 0;
4142 }
4143
4144 note = rtx_alloc (NOTE);
4145 INSN_UID (note) = cur_insn_uid++;
4146 NOTE_SOURCE_FILE (note) = file;
4147 NOTE_LINE_NUMBER (note) = line;
4148 add_insn (note);
4149 return note;
4150 }
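
/* Illustrative sketch: consecutive calls with an unchanged position
   collapse into one note; the second call returns 0:

     emit_note ("foo.c", 10);    emits a line-number NOTE
     emit_note ("foo.c", 10);    returns 0; nothing is emitted  */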
4151
4152 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4153
4154 rtx
4155 emit_line_note_force (file, line)
4156 const char *file;
4157 int line;
4158 {
4159 last_linenum = -1;
4160 return emit_line_note (file, line);
4161 }
4162
4163 /* Cause next statement to emit a line note even if the line number
4164 has not changed. This is used at the beginning of a function. */
4165
4166 void
4167 force_next_line_note ()
4168 {
4169 last_linenum = -1;
4170 }
4171
4172 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4173 note of this type already exists, remove it first. */
4174
4175 rtx
4176 set_unique_reg_note (insn, kind, datum)
4177 rtx insn;
4178 enum reg_note kind;
4179 rtx datum;
4180 {
4181 rtx note = find_reg_note (insn, kind, NULL_RTX);
4182
4183 switch (kind)
4184 {
4185 case REG_EQUAL:
4186 case REG_EQUIV:
4187 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4188 has multiple sets (some callers assume single_set
4189 means the insn only has one set, when in fact it
4190 means the insn only has one *useful* set). */
4191 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4192 {
4193 if (note)
4194 abort ();
4195 return NULL_RTX;
4196 }
4197
4198 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4199 It serves no useful purpose and breaks eliminate_regs. */
4200 if (GET_CODE (datum) == ASM_OPERANDS)
4201 return NULL_RTX;
4202 break;
4203
4204 default:
4205 break;
4206 }
4207
4208 if (note)
4209 {
4210 XEXP (note, 0) = datum;
4211 return note;
4212 }
4213
4214 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4215 return REG_NOTES (insn);
4216 }
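
/* Illustrative sketch: record that INSN's single set is known to
   equal the constant 42, replacing any earlier REG_EQUAL note:

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */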
4217 \f
4218 /* Return an indication of which type of insn should have X as a body.
4219 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4220
4221 enum rtx_code
4222 classify_insn (x)
4223 rtx x;
4224 {
4225 if (GET_CODE (x) == CODE_LABEL)
4226 return CODE_LABEL;
4227 if (GET_CODE (x) == CALL)
4228 return CALL_INSN;
4229 if (GET_CODE (x) == RETURN)
4230 return JUMP_INSN;
4231 if (GET_CODE (x) == SET)
4232 {
4233 if (SET_DEST (x) == pc_rtx)
4234 return JUMP_INSN;
4235 else if (GET_CODE (SET_SRC (x)) == CALL)
4236 return CALL_INSN;
4237 else
4238 return INSN;
4239 }
4240 if (GET_CODE (x) == PARALLEL)
4241 {
4242 int j;
4243 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4244 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4245 return CALL_INSN;
4246 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4247 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4248 return JUMP_INSN;
4249 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4250 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4251 return CALL_INSN;
4252 }
4253 return INSN;
4254 }
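
/* For example, (set (pc) (label_ref ...)) classifies as JUMP_INSN,
   while (set (reg ...) (call ...)) classifies as CALL_INSN.  */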
4255
4256 /* Emit the rtl pattern X as an appropriate kind of insn.
4257 If X is a label, it is simply added into the insn chain. */
4258
4259 rtx
4260 emit (x)
4261 rtx x;
4262 {
4263 enum rtx_code code = classify_insn (x);
4264
4265 if (code == CODE_LABEL)
4266 return emit_label (x);
4267 else if (code == INSN)
4268 return emit_insn (x);
4269 else if (code == JUMP_INSN)
4270 {
4271 rtx insn = emit_jump_insn (x);
4272 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4273 return emit_barrier ();
4274 return insn;
4275 }
4276 else if (code == CALL_INSN)
4277 return emit_call_insn (x);
4278 else
4279 abort ();
4280 }
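
/* Illustrative sketch: emit dispatches on the pattern's shape, so an
   unconditional jump also gets its trailing barrier:

     emit (gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_LABEL_REF (VOIDmode, label)));  */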
4281 \f
4282 /* Begin emitting insns to a sequence which can be packaged in an
4283 RTL_EXPR. If this sequence will contain something that might cause
4284 the compiler to pop arguments to function calls (because those
4285 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4286 details), use do_pending_stack_adjust before calling this function.
4287 That will ensure that the deferred pops are not accidentally
4288 emitted in the middle of this sequence. */
4289
4290 void
4291 start_sequence ()
4292 {
4293 struct sequence_stack *tem;
4294
4295 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
4296
4297 tem->next = seq_stack;
4298 tem->first = first_insn;
4299 tem->last = last_insn;
4300 tem->sequence_rtl_expr = seq_rtl_expr;
4301
4302 seq_stack = tem;
4303
4304 first_insn = 0;
4305 last_insn = 0;
4306 }
4307
4308 /* Similarly, but indicate that this sequence will be placed in T, an
4309 RTL_EXPR. See the documentation for start_sequence for more
4310 information about how to use this function. */
4311
4312 void
4313 start_sequence_for_rtl_expr (t)
4314 tree t;
4315 {
4316 start_sequence ();
4317
4318 seq_rtl_expr = t;
4319 }
4320
4321 /* Set up the insn chain starting with FIRST as the current sequence,
4322 saving the previously current one. See the documentation for
4323 start_sequence for more information about how to use this function. */
4324
4325 void
4326 push_to_sequence (first)
4327 rtx first;
4328 {
4329 rtx last;
4330
4331 start_sequence ();
4332
4333 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4334
4335 first_insn = first;
4336 last_insn = last;
4337 }
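
/* Illustrative sketch: append to an existing chain FIRST in place,
   then restore the previously current sequence:

     push_to_sequence (first);
     emit_insn (pattern);
     end_sequence ();  */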
4338
4339 /* Set up the insn chain, from FIRST through LAST, as the current sequence. */
4340
4341 void
4342 push_to_full_sequence (first, last)
4343 rtx first, last;
4344 {
4345 start_sequence ();
4346 first_insn = first;
4347 last_insn = last;
4348 /* We really should have the end of the insn chain here. */
4349 if (last && NEXT_INSN (last))
4350 abort ();
4351 }
4352
4353 /* Set up the outer-level insn chain
4354 as the current sequence, saving the previously current one. */
4355
4356 void
4357 push_topmost_sequence ()
4358 {
4359 struct sequence_stack *stack, *top = NULL;
4360
4361 start_sequence ();
4362
4363 for (stack = seq_stack; stack; stack = stack->next)
4364 top = stack;
4365
4366 first_insn = top->first;
4367 last_insn = top->last;
4368 seq_rtl_expr = top->sequence_rtl_expr;
4369 }
4370
4371 /* After emitting to the outer-level insn chain, update the outer-level
4372 insn chain, and restore the previous saved state. */
4373
4374 void
4375 pop_topmost_sequence ()
4376 {
4377 struct sequence_stack *stack, *top = NULL;
4378
4379 for (stack = seq_stack; stack; stack = stack->next)
4380 top = stack;
4381
4382 top->first = first_insn;
4383 top->last = last_insn;
4384 /* ??? Why don't we save seq_rtl_expr here? */
4385
4386 end_sequence ();
4387 }
4388
4389 /* After emitting to a sequence, restore previous saved state.
4390
4391 To get the contents of the sequence just made, you must call
4392 `gen_sequence' *before* calling here.
4393
4394 If the compiler might have deferred popping arguments while
4395 generating this sequence, and this sequence will not be immediately
4396 inserted into the instruction stream, use do_pending_stack_adjust
4397 before calling gen_sequence. That will ensure that the deferred
4398 pops are inserted into this sequence, and not into some random
4399 location in the instruction stream. See INHIBIT_DEFER_POP for more
4400 information about deferred popping of arguments. */
4401
4402 void
4403 end_sequence ()
4404 {
4405 struct sequence_stack *tem = seq_stack;
4406
4407 first_insn = tem->first;
4408 last_insn = tem->last;
4409 seq_rtl_expr = tem->sequence_rtl_expr;
4410 seq_stack = tem->next;
4411
4412 free (tem);
4413 }
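
/* Illustrative sketch of the usual lifecycle, per the comments above:

     rtx seq;
     start_sequence ();
     emit_insn (pattern);
     seq = gen_sequence ();
     end_sequence ();
     emit_insn (seq);  */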
4414
4415 /* This works like end_sequence, but records the old sequence in FIRST
4416 and LAST. */
4417
4418 void
4419 end_full_sequence (first, last)
4420 rtx *first, *last;
4421 {
4422 *first = first_insn;
4423 *last = last_insn;
4424 end_sequence ();
4425 }
4426
4427 /* Return 1 if currently emitting into a sequence. */
4428
4429 int
4430 in_sequence_p ()
4431 {
4432 return seq_stack != 0;
4433 }
4434
4435 /* Generate a SEQUENCE rtx containing the insns already emitted
4436 to the current sequence.
4437
4438 This is how the gen_... function from a DEFINE_EXPAND
4439 constructs the SEQUENCE that it returns. */
4440
4441 rtx
4442 gen_sequence ()
4443 {
4444 rtx result;
4445 rtx tem;
4446 int i;
4447 int len;
4448
4449 /* Count the insns in the chain. */
4450 len = 0;
4451 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
4452 len++;
4453
4454 /* If only one insn, return it rather than a SEQUENCE.
4455 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
4456 the case of an empty list.)
4457 We only return the pattern of an insn if its code is INSN and it
4458 has no notes. This ensures that no information gets lost. */
4459 if (len == 1
4460 && ! RTX_FRAME_RELATED_P (first_insn)
4461 && GET_CODE (first_insn) == INSN
4462 /* Don't throw away any reg notes. */
4463 && REG_NOTES (first_insn) == 0)
4464 return PATTERN (first_insn);
4465
4466 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
4467
4468 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
4469 XVECEXP (result, 0, i) = tem;
4470
4471 return result;
4472 }
4473 \f
4474 /* Put the various virtual registers into REGNO_REG_RTX. */
4475
4476 void
4477 init_virtual_regs (es)
4478 struct emit_status *es;
4479 {
4480 rtx *ptr = es->x_regno_reg_rtx;
4481 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4482 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4483 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4484 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4485 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4486 }
4487
4488 void
4489 clear_emit_caches ()
4490 {
4491 int i;
4492
4493 /* Clear the start_sequence/gen_sequence cache. */
4494 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
4495 sequence_result[i] = 0;
4496 free_insn = 0;
4497 }
4498 \f
4499 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4500 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4501 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4502 static int copy_insn_n_scratches;
4503
4504 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4505 copied an ASM_OPERANDS.
4506 In that case, it is the original input-operand vector. */
4507 static rtvec orig_asm_operands_vector;
4508
4509 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4510 copied an ASM_OPERANDS.
4511 In that case, it is the copied input-operand vector. */
4512 static rtvec copy_asm_operands_vector;
4513
4514 /* Likewise for the constraints vector. */
4515 static rtvec orig_asm_constraints_vector;
4516 static rtvec copy_asm_constraints_vector;
4517
4518 /* Recursively create a new copy of an rtx for copy_insn.
4519 This function differs from copy_rtx in that it handles SCRATCHes and
4520 ASM_OPERANDs properly.
4521 Normally, this function is not used directly; use copy_insn as front end.
4522 However, you could first copy an insn pattern with copy_insn and then use
4523 this function afterwards to properly copy any REG_NOTEs containing
4524 SCRATCHes. */
4525
4526 rtx
4527 copy_insn_1 (orig)
4528 rtx orig;
4529 {
4530 rtx copy;
4531 int i, j;
4532 RTX_CODE code;
4533 const char *format_ptr;
4534
4535 code = GET_CODE (orig);
4536
4537 switch (code)
4538 {
4539 case REG:
4540 case QUEUED:
4541 case CONST_INT:
4542 case CONST_DOUBLE:
4543 case CONST_VECTOR:
4544 case SYMBOL_REF:
4545 case CODE_LABEL:
4546 case PC:
4547 case CC0:
4548 case ADDRESSOF:
4549 return orig;
4550
4551 case SCRATCH:
4552 for (i = 0; i < copy_insn_n_scratches; i++)
4553 if (copy_insn_scratch_in[i] == orig)
4554 return copy_insn_scratch_out[i];
4555 break;
4556
4557 case CONST:
4558 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4559 a LABEL_REF, it isn't sharable. */
4560 if (GET_CODE (XEXP (orig, 0)) == PLUS
4561 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4562 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4563 return orig;
4564 break;
4565
4566 /* A MEM with a constant address is not sharable. The problem is that
4567 the constant address may need to be reloaded. If the mem is shared,
4568 then reloading one copy of this mem will cause all copies to appear
4569 to have been reloaded. */
4570
4571 default:
4572 break;
4573 }
4574
4575 copy = rtx_alloc (code);
4576
4577 /* Copy the various flags, and other information. We assume that
4578 all fields need copying, and then clear the fields that should
4579 not be copied. That is the sensible default behavior, and forces
4580 us to explicitly document why we are *not* copying a flag. */
4581 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
4582
4583 /* We do not copy the USED flag, which is used as a mark bit during
4584 walks over the RTL. */
4585 copy->used = 0;
4586
4587 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4588 if (GET_RTX_CLASS (code) == 'i')
4589 {
4590 copy->jump = 0;
4591 copy->call = 0;
4592 copy->frame_related = 0;
4593 }
4594
4595 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4596
4597 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4598 {
4599 copy->fld[i] = orig->fld[i];
4600 switch (*format_ptr++)
4601 {
4602 case 'e':
4603 if (XEXP (orig, i) != NULL)
4604 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4605 break;
4606
4607 case 'E':
4608 case 'V':
4609 if (XVEC (orig, i) == orig_asm_constraints_vector)
4610 XVEC (copy, i) = copy_asm_constraints_vector;
4611 else if (XVEC (orig, i) == orig_asm_operands_vector)
4612 XVEC (copy, i) = copy_asm_operands_vector;
4613 else if (XVEC (orig, i) != NULL)
4614 {
4615 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4616 for (j = 0; j < XVECLEN (copy, i); j++)
4617 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4618 }
4619 break;
4620
4621 case 't':
4622 case 'w':
4623 case 'i':
4624 case 's':
4625 case 'S':
4626 case 'u':
4627 case '0':
4628 /* These are left unchanged. */
4629 break;
4630
4631 default:
4632 abort ();
4633 }
4634 }
4635
4636 if (code == SCRATCH)
4637 {
4638 i = copy_insn_n_scratches++;
4639 if (i >= MAX_RECOG_OPERANDS)
4640 abort ();
4641 copy_insn_scratch_in[i] = orig;
4642 copy_insn_scratch_out[i] = copy;
4643 }
4644 else if (code == ASM_OPERANDS)
4645 {
4646 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4647 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4648 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4649 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4650 }
4651
4652 return copy;
4653 }
4654
4655 /* Create a new copy of an rtx.
4656 This function differs from copy_rtx in that it handles SCRATCHes and
4657 ASM_OPERANDs properly.
4658 INSN doesn't really have to be a full INSN; it could be just the
4659 pattern. */
4660 rtx
4661 copy_insn (insn)
4662 rtx insn;
4663 {
4664 copy_insn_n_scratches = 0;
4665 orig_asm_operands_vector = 0;
4666 orig_asm_constraints_vector = 0;
4667 copy_asm_operands_vector = 0;
4668 copy_asm_constraints_vector = 0;
4669 return copy_insn_1 (insn);
4670 }
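
/* Illustrative sketch: duplicate an insn's pattern before re-emitting
   it, so any SCRATCHes are renewed rather than shared:

     rtx pat = copy_insn (PATTERN (insn));  */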
4671
4672 /* Initialize data structures and variables in this file
4673 before generating rtl for each function. */
4674
4675 void
4676 init_emit ()
4677 {
4678 struct function *f = cfun;
4679
4680 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
4681 first_insn = NULL;
4682 last_insn = NULL;
4683 seq_rtl_expr = NULL;
4684 cur_insn_uid = 1;
4685 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4686 last_linenum = 0;
4687 last_filename = 0;
4688 first_label_num = label_num;
4689 last_label_num = 0;
4690 seq_stack = NULL;
4691
4692 clear_emit_caches ();
4693
4694 /* Init the tables that describe all the pseudo regs. */
4695
4696 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4697
4698 f->emit->regno_pointer_align
4699 = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
4700 sizeof (unsigned char));
4701
4702 regno_reg_rtx
4703 = (rtx *) xcalloc (f->emit->regno_pointer_align_length, sizeof (rtx));
4704
4705 f->emit->regno_decl
4706 = (tree *) xcalloc (f->emit->regno_pointer_align_length, sizeof (tree));
4707
4708 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
4709 init_virtual_regs (f->emit);
4710
4711 /* Indicate that the virtual registers and stack locations are
4712 all pointers. */
4713 REG_POINTER (stack_pointer_rtx) = 1;
4714 REG_POINTER (frame_pointer_rtx) = 1;
4715 REG_POINTER (hard_frame_pointer_rtx) = 1;
4716 REG_POINTER (arg_pointer_rtx) = 1;
4717
4718 REG_POINTER (virtual_incoming_args_rtx) = 1;
4719 REG_POINTER (virtual_stack_vars_rtx) = 1;
4720 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
4721 REG_POINTER (virtual_outgoing_args_rtx) = 1;
4722 REG_POINTER (virtual_cfa_rtx) = 1;
4723
4724 #ifdef STACK_BOUNDARY
4725 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
4726 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4727 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4728 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
4729
4730 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
4731 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
4732 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
4733 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
4734 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
4735 #endif
4736
4737 #ifdef INIT_EXPANDERS
4738 INIT_EXPANDERS;
4739 #endif
4740 }
4741
4742 /* Mark SS for GC. */
4743
4744 static void
4745 mark_sequence_stack (ss)
4746 struct sequence_stack *ss;
4747 {
4748 while (ss)
4749 {
4750 ggc_mark_rtx (ss->first);
4751 ggc_mark_tree (ss->sequence_rtl_expr);
4752 ss = ss->next;
4753 }
4754 }
4755
4756 /* Mark ES for GC. */
4757
4758 void
4759 mark_emit_status (es)
4760 struct emit_status *es;
4761 {
4762 rtx *r;
4763 tree *t;
4764 int i;
4765
4766 if (es == 0)
4767 return;
4768
4769 for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx,
4770 t = es->regno_decl;
4771 i > 0; --i, ++r, ++t)
4772 {
4773 ggc_mark_rtx (*r);
4774 ggc_mark_tree (*t);
4775 }
4776
4777 mark_sequence_stack (es->sequence_stack);
4778 ggc_mark_tree (es->sequence_rtl_expr);
4779 ggc_mark_rtx (es->x_first_insn);
4780 }
4781
4782 /* Generate the all-zero constant vector for vector mode MODE. */
4783
4784 static rtx
4785 gen_const_vector_0 (mode)
4786 enum machine_mode mode;
4787 {
4788 rtx tem;
4789 rtvec v;
4790 int units, i;
4791 enum machine_mode inner;
4792
4793 units = GET_MODE_NUNITS (mode);
4794 inner = GET_MODE_INNER (mode);
4795
4796 v = rtvec_alloc (units);
4797
4798 /* This function must be called after CONST0_RTX for the inner mode has been set. */
4799 if (!CONST0_RTX (inner))
4800 abort ();
4801
4802 for (i = 0; i < units; ++i)
4803 RTVEC_ELT (v, i) = CONST0_RTX (inner);
4804
4805 tem = gen_rtx_CONST_VECTOR (mode, v);
4806 return tem;
4807 }
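
/* For V4SImode, for instance, the result has the form
   (const_vector:V4SI [(const_int 0) (const_int 0)
                       (const_int 0) (const_int 0)]).  */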
4808
4809 /* Create some permanent unique rtl objects shared between all functions.
4810 LINE_NUMBERS is nonzero if line numbers are to be generated. */
4811
4812 void
4813 init_emit_once (line_numbers)
4814 int line_numbers;
4815 {
4816 int i;
4817 enum machine_mode mode;
4818 enum machine_mode double_mode;
4819
4820 /* Initialize the CONST_INT and memory attribute hash tables. */
4821 const_int_htab = htab_create (37, const_int_htab_hash,
4822 const_int_htab_eq, NULL);
4823 ggc_add_deletable_htab (const_int_htab, 0, 0);
4824
4825 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
4826 mem_attrs_htab_eq, NULL);
4827 ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);
4828
4829 no_line_numbers = ! line_numbers;
4830
4831 /* Compute the byte, word and double modes. */
4832
4833 byte_mode = VOIDmode;
4834 word_mode = VOIDmode;
4835 double_mode = VOIDmode;
4836
4837 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4838 mode = GET_MODE_WIDER_MODE (mode))
4839 {
4840 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
4841 && byte_mode == VOIDmode)
4842 byte_mode = mode;
4843
4844 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
4845 && word_mode == VOIDmode)
4846 word_mode = mode;
4847 }
4848
4849 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4850 mode = GET_MODE_WIDER_MODE (mode))
4851 {
4852 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
4853 && double_mode == VOIDmode)
4854 double_mode = mode;
4855 }
4856
4857 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
4858
4859 /* Assign register numbers to the globally defined register rtx.
4860 This must be done at runtime because the register number field
4861 is in a union and some compilers can't initialize unions. */
4862
4863 pc_rtx = gen_rtx (PC, VOIDmode);
4864 cc0_rtx = gen_rtx (CC0, VOIDmode);
4865 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
4866 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
4867 if (hard_frame_pointer_rtx == 0)
4868 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
4869 HARD_FRAME_POINTER_REGNUM);
4870 if (arg_pointer_rtx == 0)
4871 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
4872 virtual_incoming_args_rtx =
4873 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
4874 virtual_stack_vars_rtx =
4875 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
4876 virtual_stack_dynamic_rtx =
4877 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
4878 virtual_outgoing_args_rtx =
4879 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
4880 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
4881
4882 /* These rtx must be roots if GC is enabled. */
4883 ggc_add_rtx_root (global_rtl, GR_MAX);
4884
4885 #ifdef INIT_EXPANDERS
4886 /* This is to initialize {init|mark|free}_machine_status before the first
4887 call to push_function_context_to. This is needed by the Chill front
4888 end which calls push_function_context_to before the first call to
4889 init_function_start. */
4890 INIT_EXPANDERS;
4891 #endif
4892
4893 /* Create the unique rtx's for certain rtx codes and operand values. */
4894
4895 /* Don't use gen_rtx here since gen_rtx in this case
4896 tries to use these variables. */
4897 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
4898 const_int_rtx[i + MAX_SAVED_CONST_INT] =
4899 gen_rtx_raw_CONST_INT (VOIDmode, i);
4900 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
4901
4902 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
4903 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
4904 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
4905 else
4906 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
4907
4908 dconst0 = REAL_VALUE_ATOF ("0", double_mode);
4909 dconst1 = REAL_VALUE_ATOF ("1", double_mode);
4910 dconst2 = REAL_VALUE_ATOF ("2", double_mode);
4911 dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);
4912
4913 for (i = 0; i <= 2; i++)
4914 {
4915 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4916 mode = GET_MODE_WIDER_MODE (mode))
4917 {
4918 rtx tem = rtx_alloc (CONST_DOUBLE);
4919 union real_extract u;
4920
4921 /* Zero any holes in a structure. */
4922 memset ((char *) &u, 0, sizeof u);
4923 u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;
4924
4925 /* Avoid trailing garbage in the rtx. */
4926 if (sizeof (u) < sizeof (HOST_WIDE_INT))
4927 CONST_DOUBLE_LOW (tem) = 0;
4928 if (sizeof (u) < 2 * sizeof (HOST_WIDE_INT))
4929 CONST_DOUBLE_HIGH (tem) = 0;
4930
4931 memcpy (&CONST_DOUBLE_LOW (tem), &u, sizeof u);
4932 CONST_DOUBLE_CHAIN (tem) = NULL_RTX;
4933 PUT_MODE (tem, mode);
4934
4935 const_tiny_rtx[i][(int) mode] = tem;
4936 }
4937
4938 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
4939
4940 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4941 mode = GET_MODE_WIDER_MODE (mode))
4942 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
4943
4944 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
4945 mode != VOIDmode;
4946 mode = GET_MODE_WIDER_MODE (mode))
4947 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
4948 }
4949
4950 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
4951 mode != VOIDmode;
4952 mode = GET_MODE_WIDER_MODE (mode))
4953 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
4954
4955 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
4956 mode != VOIDmode;
4957 mode = GET_MODE_WIDER_MODE (mode))
4958 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
4959
4960 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
4961 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
4962 const_tiny_rtx[0][i] = const0_rtx;
4963
4964 const_tiny_rtx[0][(int) BImode] = const0_rtx;
4965 if (STORE_FLAG_VALUE == 1)
4966 const_tiny_rtx[1][(int) BImode] = const1_rtx;
4967
4968 /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
4969 `(rtx *) const_tiny_rtx'. The former has bounds that only cover
4970 `const_tiny_rtx[0]', whereas the latter has bounds that cover all. */
4971 ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
4972 ggc_add_rtx_root (&const_true_rtx, 1);
4973
4974 #ifdef RETURN_ADDRESS_POINTER_REGNUM
4975 return_address_pointer_rtx
4976 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
4977 #endif
4978
4979 #ifdef STRUCT_VALUE
4980 struct_value_rtx = STRUCT_VALUE;
4981 #else
4982 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
4983 #endif
4984
4985 #ifdef STRUCT_VALUE_INCOMING
4986 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
4987 #else
4988 #ifdef STRUCT_VALUE_INCOMING_REGNUM
4989 struct_value_incoming_rtx
4990 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
4991 #else
4992 struct_value_incoming_rtx = struct_value_rtx;
4993 #endif
4994 #endif
4995
4996 #ifdef STATIC_CHAIN_REGNUM
4997 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
4998
4999 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5000 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5001 static_chain_incoming_rtx
5002 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5003 else
5004 #endif
5005 static_chain_incoming_rtx = static_chain_rtx;
5006 #endif
5007
5008 #ifdef STATIC_CHAIN
5009 static_chain_rtx = STATIC_CHAIN;
5010
5011 #ifdef STATIC_CHAIN_INCOMING
5012 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5013 #else
5014 static_chain_incoming_rtx = static_chain_rtx;
5015 #endif
5016 #endif
5017
5018 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5019 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5020
5021 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
5022 ggc_add_rtx_root (&struct_value_rtx, 1);
5023 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
5024 ggc_add_rtx_root (&static_chain_rtx, 1);
5025 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
5026 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
5027 }
5028 \f
5029 /* Query and clear/restore no_line_numbers. This is used by the
5030 switch / case handling in stmt.c to give proper line numbers in
5031 warnings about unreachable code. */
5032
5033 int
5034 force_line_numbers ()
5035 {
5036 int old = no_line_numbers;
5037
5038 no_line_numbers = 0;
5039 if (old)
5040 force_next_line_note ();
5041 return old;
5042 }
5043
5044 void
5045 restore_line_number_status (old_value)
5046 int old_value;
5047 {
5048 no_line_numbers = old_value;
5049 }