/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

38#include "config.h"
670ee920 39#include "system.h"
01198c2f 40#include "toplev.h"
23b2ce53 41#include "rtl.h"
a25c7971 42#include "tree.h"
6baf1cc8 43#include "tm_p.h"
23b2ce53
RS
44#include "flags.h"
45#include "function.h"
46#include "expr.h"
47#include "regs.h"
aff48bca 48#include "hard-reg-set.h"
c13e8210 49#include "hashtab.h"
23b2ce53 50#include "insn-config.h"
e9a25f70 51#include "recog.h"
23b2ce53 52#include "real.h"
ca695ac9 53#include "obstack.h"
0dfa1860 54#include "bitmap.h"
a05924f9 55#include "basic-block.h"
87ff9c8e 56#include "ggc.h"
e1772ac0 57#include "debug.h"
ca695ac9 58
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these except perhaps the floating-point CONST_DOUBLEs
   are unique; no other rtx-object will be equal to any of these.  */

rtx global_rtl[GR_MAX];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are same (most) then these are the
   same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static htab_t mem_attrs_htab;

/* start_sequence and gen_sequence can make a lot of rtx expressions which are
   shortly thrown away.  We use two mechanisms to prevent this waste:

   For sizes up to 5 elements, we keep a SEQUENCE and its associated
   rtvec for use by gen_sequence.  One entry for each size is
   sufficient because most cases are calls to gen_sequence followed by
   immediately emitting the SEQUENCE.  Reuse is safe since emitting a
   sequence is destructive on the insn in it anyway and hence can't be
   redone.

   We do not bother to save this cached data over nested function calls.
   Instead, we just reinitialize them.  */

#define SEQUENCE_RESULT_SIZE 5

static rtx sequence_result[SEQUENCE_RESULT_SIZE];

/* During RTL generation, we also keep a list of free INSN rtl codes.  */
static rtx free_insn;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw PARAMS ((rtx));
static rtx make_call_insn_raw PARAMS ((rtx));
static rtx find_line_note PARAMS ((rtx));
static void mark_sequence_stack PARAMS ((struct sequence_stack *));
static void unshare_all_rtl_1 PARAMS ((rtx));
static void unshare_all_decls PARAMS ((tree));
static void reset_used_decls PARAMS ((tree));
static void mark_label_nuses PARAMS ((rtx));
static hashval_t const_int_htab_hash PARAMS ((const void *));
static int const_int_htab_eq PARAMS ((const void *,
				      const void *));
static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
static int mem_attrs_htab_eq PARAMS ((const void *,
				      const void *));
static void mem_attrs_mark PARAMS ((const void *));
static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
					 rtx, unsigned int));

/* Probability of the conditional branch currently proceeded by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((const struct rtx_def *) x);
}

/* Returns non-zero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (long) p->decl);
}

/* Returns non-zero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->decl == q->decl && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* This routine is called when we determine that we need a mem_attrs entry.
   It marks the associated decl and RTL as being used, if present.  */

static void
mem_attrs_mark (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  if (p->decl)
    ggc_mark_tree (p->decl);

  if (p->offset)
    ggc_mark_rtx (p->offset);

  if (p->size)
    ggc_mark_rtx (p->size);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  */

static mem_attrs *
get_mem_attrs (alias, decl, offset, size, align)
     HOST_WIDE_INT alias;
     tree decl;
     rtx offset;
     rtx size;
     unsigned int align;
{
  mem_attrs attrs;
  void **slot;

  attrs.alias = alias;
  attrs.decl = decl;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}

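/* For example, set_mem_alias_set below rebuilds a MEM's attribute block
   through get_mem_attrs, so MEMs with identical alias set, decl, offset,
   size and alignment end up sharing one mem_attrs structure instead of
   carrying separate copies.  */
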
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

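/* For instance, every call of gen_rtx_CONST_INT (VOIDmode, 0) yields the
   single cached (const_int 0) object, so CONST_INTs can be compared by
   pointer equality; values outside the saved range are shared through
   const_int_htab instead.  */
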
/* CONST_DOUBLEs need special handling because their length is known
   only at run-time.  */

rtx
gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2)
     enum machine_mode mode;
     rtx arg0;
     HOST_WIDE_INT arg1, arg2;
{
  rtx r = rtx_alloc (CONST_DOUBLE);
  int i;

  PUT_MODE (r, mode);
  XEXP (r, 0) = arg0;
  X0EXP (r, 1) = NULL_RTX;
  XWINT (r, 2) = arg1;
  XWINT (r, 3) = arg2;

  for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 3; --i)
    XWINT (r, i) = 0;

  return r;
}

rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM)
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM)
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**		gen_rtx (PLUS, QImode,
**		    gen_rtx (MEM, QImode,
**			gen_rtx (REG, SImode, 1)),
**		    gen_rtx (MEM, QImode,
**			gen_rtx (PLUS, SImode,
**			    gen_rtx (REG, SImode, 2),
**			    gen_rtx (REG, SImode, 3)))),
*/

/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...  */
  const char *fmt;	/* Current rtx's format...  */
  rtx rt_val;		/* RTX to return to caller...  */

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	rtx arg0 = va_arg (p, rtx);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg2 = va_arg (p, HOST_WIDE_INT);
	rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      int size = GET_MODE_UNIT_SIZE (mode);
      enum machine_mode partmode
	= mode_for_size (size * BITS_PER_UNIT,
			 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
			  ? MODE_FLOAT : MODE_INT),
			 0);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large enough
     to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      rtx *new1;
      char *new;
      new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
			       old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

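/* As an illustration of the complex-mode case above: with
   generating_concat_p nonzero, gen_reg_rtx (DCmode) does not allocate a
   single DCmode pseudo but returns (concat:DC (reg:DF) (reg:DF)) built
   from two fresh DFmode pseudos.  */
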
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

#ifndef REAL_ARITHMETIC
  /* If X is an integral constant but we want it in floating-point, it
     must be the case that we have a union of an integer and a floating-point
     value.  If the machine-parameters allow it, simulate that union here
     and return the result.  The two-word and single-word cases are
     different.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_SIZE (mode) == UNITS_PER_WORD
	   && GET_CODE (x) == CONST_INT
	   && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
    {
      union {HOST_WIDE_INT i; float d; } u;

      u.i = INTVAL (x);
      return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
    }
  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode
	   && (sizeof (double) * HOST_BITS_PER_CHAR
	       == 2 * HOST_BITS_PER_WIDE_INT))
    {
      union {HOST_WIDE_INT i[2]; double d; } u;
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
      else
	low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);

#ifdef HOST_WORDS_BIG_ENDIAN
      u.i[0] = high, u.i[1] = low;
#else
      u.i[0] = low, u.i[1] = high;
#endif

      return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
    }

  /* Similarly, if this is converting a floating-point value into a
     single-word integer.  Only do this if the host and target parameters are
     compatible.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && (GET_MODE_CLASS (mode) == MODE_INT
	       || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
    return constant_subword (x, (offset / UNITS_PER_WORD), GET_MODE (x));

  /* Similarly, if this is converting a floating-point value into a
     two-word integer, we can do this one word at a time and make an
     integer.  Only do this if the host and target parameters are
     compatible.  */

  else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	     && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	    || flag_pretend_float)
	   && (GET_MODE_CLASS (mode) == MODE_INT
	       || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
    {
      rtx lowpart, highpart;

      lowpart = constant_subword (x,
				  (offset / UNITS_PER_WORD) + WORDS_BIG_ENDIAN,
				  GET_MODE (x));
      highpart = constant_subword (x,
				   (offset / UNITS_PER_WORD) + (! WORDS_BIG_ENDIAN),
				   GET_MODE (x));
      if (lowpart && GET_CODE (lowpart) == CONST_INT
	  && highpart && GET_CODE (highpart) == CONST_INT)
	return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
    }
#else /* ifndef REAL_ARITHMETIC */

  /* When we have a FP emulator, we can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i;

      i = INTVAL (x);
      r = REAL_VALUE_FROM_TARGET_SINGLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT i[2];
      HOST_WIDE_INT low, high;

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[endian]);
	  i[1 - endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i);
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}

      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[endian], i[1 - endian], mode);
#else
      {
	int c;

	if (HOST_BITS_PER_WIDE_INT != 64)
	  abort ();

	for (c = 0; c < 4; c++)
	  i[c] &= ~ (0L);

	switch (GET_MODE_BITSIZE (GET_MODE (x)))
	  {
	  case 32:
	  case 64:
	    return immed_double_const (((unsigned long) i[endian]) |
				       (((HOST_WIDE_INT) i[1 - endian]) << 32),
				       0, mode);
	  case 96:
	  case 128:
	    return immed_double_const (((unsigned long) i[endian * 3]) |
				       (((HOST_WIDE_INT) i[1 + endian]) << 32),
				       ((unsigned long) i[2 - endian]) |
				       (((HOST_WIDE_INT) i[3 - endian * 3]) << 32),
				       mode);
	  default:
	    abort ();
	  }
      }
#endif
    }
#endif /* ifndef REAL_ARITHMETIC */

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

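/* For example, with 4-byte words, subreg_lowpart_offset (SImode, DImode)
   is 0 on a little-endian target and 4 on a fully big-endian one, since
   in the latter case the low-order SImode word lives at the higher
   address.  */
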
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

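/* Conversely, with 4-byte words, subreg_highpart_offset (SImode, DImode)
   is 4 on a little-endian target and 0 on a fully big-endian one; the two
   routines select opposite halves of the DImode value.  */
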
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
\f

/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

#ifdef REAL_ARITHMETIC
  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }
#else /* no REAL_ARITHMETIC */
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      /* The constant is stored in the host's word-ordering,
	 but we want to access it in the target's word-ordering.  Some
	 compilers don't like a conditional inside macro args, so we have two
	 copies of the return.  */
#ifdef HOST_WORDS_BIG_ENDIAN
      return GEN_INT (offset == WORDS_BIG_ENDIAN
		      ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
#else
      return GEN_INT (offset != WORDS_BIG_ENDIAN
		      ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
#endif
    }
#endif /* no REAL_ARITHMETIC */

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
#ifdef REAL_ARITHMETIC
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }
#else
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      double d;
      union {float f; HOST_WIDE_INT i; } u;

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      u.f = d;
      return GEN_INT (u.i);
    }
  if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
	&& HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
       || flag_pretend_float)
      && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
      && GET_CODE (op) == CONST_DOUBLE)
    {
      double d;
      union {double d; HOST_WIDE_INT i; } u;

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      u.d = d;
      return GEN_INT (u.i);
    }
#endif /* no REAL_ARITHMETIC */

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

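/* For instance, on a 32-bit target that is not WORDS_BIG_ENDIAN,
   operand_subword (op, 1, 1, DImode) extracts the word holding the
   high-order 32 bits of a DImode operand, validating the resulting
   address when OP is a MEM.  */
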
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
\f
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
\f
173b24b9
RK
1634
1635/* Given REF, a MEM, and T, either the type of X or the expression
1636 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1637 if we are making a new object of this type. */
1638
1639void
1640set_mem_attributes (ref, t, objectp)
1641 rtx ref;
1642 tree t;
1643 int objectp;
1644{
1645 tree type;
1646
1647 /* It can happen that type_for_mode was given a mode for which there
1648 is no language-level type. In which case it returns NULL, which
1649 we can see here. */
1650 if (t == NULL_TREE)
1651 return;
1652
1653 type = TYPE_P (t) ? t : TREE_TYPE (t);
1654
1655 /* Get the alias set from the expression or type (perhaps using a
1656 front-end routine) and then copy bits from the type. */
1657
1658 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY (type)
1659 here, because, in C and C++, the fact that a location is accessed
1660 through a const expression does not mean that the value there can
1661 never change. */
1662
1663 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1664 wrong answer, as it assumes that DECL_RTL already has the right alias
1665 info. Callers should not set DECL_RTL until after the call to
1666 set_mem_attributes. */
1667 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1668 abort ();
1669
1670 set_mem_alias_set (ref, get_alias_set (t));
1671
1672 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1673 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1674
1675 /* If we are making an object of this type, we know that it is a scalar if
1676 the type is not an aggregate. */
1677 if (objectp && ! AGGREGATE_TYPE_P (type))
1678 MEM_SCALAR_P (ref) = 1;
1679
1680 /* If T is a type, this is all we can do. Otherwise, we may be able
1681 to deduce some more information about the expression. */
1682 if (TYPE_P (t))
1683 return;
1684
1685 maybe_set_unchanging (ref, t);
1686 if (TREE_THIS_VOLATILE (t))
1687 MEM_VOLATILE_P (ref) = 1;
1688
1689 /* Now see if we can say more about whether it's an aggregate or
1690 scalar. If we already know it's an aggregate, don't bother. */
1691 if (MEM_IN_STRUCT_P (ref))
1692 return;
1693
1694 /* Now remove any NOPs: they don't change what the underlying object is.
1695 Likewise for SAVE_EXPR. */
1696 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1697 || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
1698 t = TREE_OPERAND (t, 0);
1699
1700 /* Since we already know the type isn't an aggregate, if this is a decl,
1701 it must be a scalar. Or if it is a reference into an aggregate,
1702 this is part of an aggregate. Otherwise we don't know. */
1703 if (DECL_P (t))
1704 MEM_SCALAR_P (ref) = 1;
1705 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1706 || TREE_CODE (t) == ARRAY_RANGE_REF
1707 || TREE_CODE (t) == BIT_FIELD_REF)
1708 MEM_IN_STRUCT_P (ref) = 1;
1709}
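
/* Illustrative sketch (hypothetical caller): give DECL a MEM home at
   address ADDR.  Note the ordering constraint documented above:
   set_mem_attributes must run before DECL_RTL is set.  */

static void
example_install_decl_rtl (decl, addr)
     tree decl;
     rtx addr;
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);

  set_mem_attributes (mem, decl, 1);
  SET_DECL_RTL (decl, mem);
}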
1710
1711/* Set the alias set of MEM to SET. */
1712
1713void
1714set_mem_alias_set (mem, set)
1715 rtx mem;
1716 HOST_WIDE_INT set;
1717{
1718 /* It would be nice to enable this check, but we can't quite yet. */
173b24b9
RK
1719#ifdef ENABLE_CHECKING
1720 /* If the new and old alias sets don't conflict, something is wrong. */
1721 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1722 abort ();
173b24b9
RK
1723#endif
1724
1725 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_DECL (mem), MEM_OFFSET (mem),
1726 MEM_SIZE (mem), MEM_ALIGN (mem));
1727}
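
/* Illustrative sketch (hypothetical caller): stamp a MEM with the alias
   set derived from a tree type instead of leaving it in alias set 0.  */

static void
example_alias_from_type (mem, type)
     rtx mem;
     tree type;
{
  set_mem_alias_set (mem, get_alias_set (type));
}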
1728\f
23b2ce53
RS
1729/* Return a memory reference like MEMREF, but with its mode changed
1730 to MODE and its address changed to ADDR.
1731 (VOIDmode means don't change the mode.
f1ec5147
RK
1732 NULL for ADDR means don't change the address.)
1733 VALIDATE is nonzero if the returned memory location is required to be
1734 valid. */
23b2ce53
RS
1735
1736rtx
f1ec5147 1737change_address_1 (memref, mode, addr, validate)
23b2ce53
RS
1738 rtx memref;
1739 enum machine_mode mode;
1740 rtx addr;
f1ec5147 1741 int validate;
23b2ce53
RS
1742{
1743 rtx new;
1744
1745 if (GET_CODE (memref) != MEM)
1746 abort ();
1747 if (mode == VOIDmode)
1748 mode = GET_MODE (memref);
1749 if (addr == 0)
1750 addr = XEXP (memref, 0);
1751
f1ec5147 1752 if (validate)
23b2ce53 1753 {
f1ec5147
RK
1754 if (reload_in_progress || reload_completed)
1755 {
1756 if (! memory_address_p (mode, addr))
1757 abort ();
1758 }
1759 else
1760 addr = memory_address (mode, addr);
23b2ce53 1761 }
750c9258 1762
9b04c6a8
RK
1763 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1764 return memref;
1765
3b80f6ca 1766 new = gen_rtx_MEM (mode, addr);
c6df88cb 1767 MEM_COPY_ATTRIBUTES (new, memref);
23b2ce53
RS
1768 return new;
1769}
792760b9 1770
f4ef873c
RK
1771/* Return a memory reference like MEMREF, but with its mode changed
1772 to MODE and its address offset by OFFSET bytes. */
1773
1774rtx
1775adjust_address (memref, mode, offset)
1776 rtx memref;
1777 enum machine_mode mode;
1778 HOST_WIDE_INT offset;
1779{
1780 /* For now, this is just a wrapper for change_address, but eventually
1781 will do memref tracking. */
823e3574
RK
1782 rtx addr = XEXP (memref, 0);
1783
c2f7bcc3
RH
1784 /* ??? Prefer to create garbage instead of creating shared rtl. */
1785 addr = copy_rtx (addr);
1786
62926f0b 1787 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
823e3574
RK
1788 object, we can merge it into the LO_SUM. */
1789 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
62926f0b 1790 && offset >= 0
b9f22704
JM
1791 && (unsigned HOST_WIDE_INT) offset
1792 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1650fcad 1793 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
823e3574
RK
1794 plus_constant (XEXP (addr, 1), offset));
1795 else
1796 addr = plus_constant (addr, offset);
1797
1798 return change_address (memref, mode, addr);
f4ef873c 1799}
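
/* Illustrative sketch (hypothetical caller): view the second word of a
   larger (typically BLKmode) MEM as a word_mode value.  */

static rtx
example_second_word (mem)
     rtx mem;
{
  return adjust_address (mem, word_mode, UNITS_PER_WORD);
}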
792760b9 1800
f1ec5147
RK
1801/* Likewise, but the reference is not required to be valid. */
1802
1803rtx
1804adjust_address_nv (memref, mode, offset)
1805 rtx memref;
1806 enum machine_mode mode;
1807 HOST_WIDE_INT offset;
1808{
1809 /* For now, this is just a wrapper for change_address, but eventually
1810 will do memref tracking. */
823e3574
RK
1811 rtx addr = XEXP (memref, 0);
1812
1813	  /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1814 object, we can merge it into the LO_SUM. */
1815 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
62926f0b 1816 && offset >= 0
b9f22704
JM
1817 && (unsigned HOST_WIDE_INT) offset
1818 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
823e3574
RK
1819 addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0),
1820 plus_constant (XEXP (addr, 1), offset));
1821 else
1822 addr = plus_constant (addr, offset);
1823
1824 return change_address_1 (memref, mode, addr, 0);
f1ec5147
RK
1825}
1826
792760b9
RK
1827/* Return a memory reference like MEMREF, but with its address changed to
1828 ADDR. The caller is asserting that the actual piece of memory pointed
1829 to is the same, just the form of the address is being changed, such as
1830 by putting something into a register. */
1831
1832rtx
1833replace_equiv_address (memref, addr)
1834 rtx memref;
1835 rtx addr;
1836{
1837 /* For now, this is just a wrapper for change_address, but eventually
1838 will do memref tracking. */
1839 return change_address (memref, VOIDmode, addr);
1840}
f1ec5147
RK
1841/* Likewise, but the reference is not required to be valid. */
1842
1843rtx
1844replace_equiv_address_nv (memref, addr)
1845 rtx memref;
1846 rtx addr;
1847{
1848 /* For now, this is just a wrapper for change_address, but eventually
1849 will do memref tracking. */
1850 return change_address_1 (memref, VOIDmode, addr, 0);
1851}
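
/* Illustrative sketch (hypothetical caller): the address of MEM is valid
   but complex, so copy it into a pseudo without changing which piece of
   memory is referenced.  */

static rtx
example_simplify_mem_address (mem)
     rtx mem;
{
  rtx reg = force_reg (Pmode, XEXP (mem, 0));

  return replace_equiv_address (mem, reg);
}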
23b2ce53
RS
1852\f
1853/* Return a newly created CODE_LABEL rtx with a unique label number. */
1854
1855rtx
1856gen_label_rtx ()
1857{
b3694847 1858 rtx label;
ca695ac9 1859
b93a436e 1860 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
9714cf43 1861 NULL_RTX, label_num++, NULL, NULL);
ca695ac9 1862
23b2ce53 1863 LABEL_NUSES (label) = 0;
8cd0faaf 1864 LABEL_ALTERNATE_NAME (label) = NULL;
23b2ce53
RS
1865 return label;
1866}
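
/* Illustrative sketch (hypothetical caller): create a fresh label and
   add it at the end of the current insn chain; emit_label is defined
   later in this file.  */

static rtx
example_new_label ()
{
  return emit_label (gen_label_rtx ());
}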
1867\f
1868/* For procedure integration. */
1869
23b2ce53 1870/* Install new pointers to the first and last insns in the chain.
86fe05e0 1871 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
1872 Used for an inline-procedure after copying the insn chain. */
1873
1874void
1875set_new_first_and_last_insn (first, last)
1876 rtx first, last;
1877{
86fe05e0
RK
1878 rtx insn;
1879
23b2ce53
RS
1880 first_insn = first;
1881 last_insn = last;
86fe05e0
RK
1882 cur_insn_uid = 0;
1883
1884 for (insn = first; insn; insn = NEXT_INSN (insn))
1885 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
1886
1887 cur_insn_uid++;
23b2ce53
RS
1888}
1889
1890/* Set the range of label numbers found in the current function.
1891 This is used when belatedly compiling an inline function. */
1892
1893void
1894set_new_first_and_last_label_num (first, last)
1895 int first, last;
1896{
1897 base_label_num = label_num;
1898 first_label_num = first;
1899 last_label_num = last;
1900}
49ad7cfa
BS
1901
1902/* Set the last label number found in the current function.
1903 This is used when belatedly compiling an inline function. */
23b2ce53
RS
1904
1905void
49ad7cfa
BS
1906set_new_last_label_num (last)
1907 int last;
23b2ce53 1908{
49ad7cfa
BS
1909 base_label_num = label_num;
1910 last_label_num = last;
23b2ce53 1911}
49ad7cfa 1912\f
23b2ce53
RS
1913/* Restore all variables describing the current status from the structure *P.
1914 This is used after a nested function. */
1915
1916void
1917restore_emit_status (p)
272df862 1918 struct function *p ATTRIBUTE_UNUSED;
23b2ce53 1919{
457a2d9c 1920 last_label_num = 0;
49ad7cfa 1921 clear_emit_caches ();
23b2ce53 1922}
e2ecd91c 1923
21cd906e 1924/* Clear out all parts of the state in F that can safely be discarded
e2ecd91c 1925 after the function has been compiled, to let garbage collection
0a8a198c 1926 reclaim the memory. */
21cd906e 1927
e2ecd91c 1928void
0a8a198c 1929free_emit_status (f)
e2ecd91c
BS
1930 struct function *f;
1931{
1932 free (f->emit->x_regno_reg_rtx);
e2ecd91c 1933 free (f->emit->regno_pointer_align);
fa51b01b
RH
1934 free (f->emit);
1935 f->emit = NULL;
e2ecd91c 1936}
23b2ce53 1937\f
750c9258 1938/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 1939 structure. This routine should only be called once. */
23b2ce53
RS
1940
1941void
d1b81779
GK
1942unshare_all_rtl (fndecl, insn)
1943 tree fndecl;
1944 rtx insn;
23b2ce53 1945{
d1b81779 1946 tree decl;
23b2ce53 1947
d1b81779
GK
1948 /* Make sure that virtual parameters are not shared. */
1949 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
19e7881c 1950 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
d1b81779 1951
5c6df058
AO
1952 /* Make sure that virtual stack slots are not shared. */
1953 unshare_all_decls (DECL_INITIAL (fndecl));
1954
d1b81779
GK
1955 /* Unshare just about everything else. */
1956 unshare_all_rtl_1 (insn);
750c9258 1957
23b2ce53
RS
1958 /* Make sure the addresses of stack slots found outside the insn chain
1959 (such as, in DECL_RTL of a variable) are not shared
1960 with the insn chain.
1961
1962 This special care is necessary when the stack slot MEM does not
1963 actually appear in the insn chain. If it does appear, its address
1964 is unshared from all else at that point. */
242b0ce6 1965 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
23b2ce53
RS
1966}
1967
750c9258 1968/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
1969 structure, again. This is a fairly expensive thing to do so it
1970 should be done sparingly. */
1971
1972void
1973unshare_all_rtl_again (insn)
1974 rtx insn;
1975{
1976 rtx p;
624c87aa
RE
1977 tree decl;
1978
d1b81779 1979 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 1980 if (INSN_P (p))
d1b81779
GK
1981 {
1982 reset_used_flags (PATTERN (p));
1983 reset_used_flags (REG_NOTES (p));
1984 reset_used_flags (LOG_LINKS (p));
1985 }
624c87aa 1986
2d4aecb3
AO
1987 /* Make sure that virtual stack slots are not shared. */
1988 reset_used_decls (DECL_INITIAL (cfun->decl));
1989
624c87aa
RE
1990 /* Make sure that virtual parameters are not shared. */
1991 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
1992 reset_used_flags (DECL_RTL (decl));
1993
1994 reset_used_flags (stack_slot_list);
1995
1996 unshare_all_rtl (cfun->decl, insn);
d1b81779
GK
1997}
1998
1999/* Go through all the RTL insn bodies and copy any invalid shared structure.
2000 Assumes the mark bits are cleared at entry. */
2001
2002static void
2003unshare_all_rtl_1 (insn)
2004 rtx insn;
2005{
2006 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2007 if (INSN_P (insn))
d1b81779
GK
2008 {
2009 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2010 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2011 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2012 }
2013}
2014
5c6df058
AO
2015/* Go through all virtual stack slots of a function and copy any
2016 shared structure. */
2017static void
2018unshare_all_decls (blk)
2019 tree blk;
2020{
2021 tree t;
2022
2023 /* Copy shared decls. */
2024 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c
MM
2025 if (DECL_RTL_SET_P (t))
2026 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
5c6df058
AO
2027
2028 /* Now process sub-blocks. */
2029 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2030 unshare_all_decls (t);
2031}
2032
2d4aecb3 2033/* Go through all virtual stack slots of a function and mark them as
30f7a378 2034 not shared. */
2d4aecb3
AO
2035static void
2036reset_used_decls (blk)
2037 tree blk;
2038{
2039 tree t;
2040
2041 /* Mark decls. */
2042 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c
MM
2043 if (DECL_RTL_SET_P (t))
2044 reset_used_flags (DECL_RTL (t));
2d4aecb3
AO
2045
2046 /* Now process sub-blocks. */
2047 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2048 reset_used_decls (t);
2049}
2050
23b2ce53
RS
2051/* Mark ORIG as in use, and return a copy of it if it was already in use.
2052 Recursively does the same for subexpressions. */
2053
2054rtx
2055copy_rtx_if_shared (orig)
2056 rtx orig;
2057{
b3694847
SS
2058 rtx x = orig;
2059 int i;
2060 enum rtx_code code;
2061 const char *format_ptr;
23b2ce53
RS
2062 int copied = 0;
2063
2064 if (x == 0)
2065 return 0;
2066
2067 code = GET_CODE (x);
2068
2069 /* These types may be freely shared. */
2070
2071 switch (code)
2072 {
2073 case REG:
2074 case QUEUED:
2075 case CONST_INT:
2076 case CONST_DOUBLE:
2077 case SYMBOL_REF:
2078 case CODE_LABEL:
2079 case PC:
2080 case CC0:
2081 case SCRATCH:
0f41302f 2082      /* SCRATCH rtxs must be shared because they represent distinct values.  */
23b2ce53
RS
2083 return x;
2084
b851ea09
RK
2085 case CONST:
2086 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2087 a LABEL_REF, it isn't sharable. */
2088 if (GET_CODE (XEXP (x, 0)) == PLUS
2089 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2090 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2091 return x;
2092 break;
2093
23b2ce53
RS
2094 case INSN:
2095 case JUMP_INSN:
2096 case CALL_INSN:
2097 case NOTE:
23b2ce53
RS
2098 case BARRIER:
2099 /* The chain of insns is not being copied. */
2100 return x;
2101
2102 case MEM:
83512665
JL
2103 /* A MEM is allowed to be shared if its address is constant.
2104
750c9258 2105 We used to allow sharing of MEMs which referenced
83512665
JL
2106 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2107 that can lose. instantiate_virtual_regs will not unshare
2108 the MEMs, and combine may change the structure of the address
2109 because it looks safe and profitable in one context, but
2110 in some other context it creates unrecognizable RTL. */
2111 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
23b2ce53
RS
2112 return x;
2113
e9a25f70
JL
2114 break;
2115
2116 default:
2117 break;
23b2ce53
RS
2118 }
2119
2120 /* This rtx may not be shared. If it has already been seen,
2121 replace it with a copy of itself. */
2122
2123 if (x->used)
2124 {
b3694847 2125 rtx copy;
23b2ce53
RS
2126
2127 copy = rtx_alloc (code);
4e135bdd 2128 memcpy (copy, x,
4c9a05bc
RK
2129 (sizeof (*copy) - sizeof (copy->fld)
2130 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
23b2ce53
RS
2131 x = copy;
2132 copied = 1;
2133 }
2134 x->used = 1;
2135
2136 /* Now scan the subexpressions recursively.
2137 We can store any replaced subexpressions directly into X
2138 since we know X is not shared! Any vectors in X
2139 must be copied if X was copied. */
2140
2141 format_ptr = GET_RTX_FORMAT (code);
2142
2143 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2144 {
2145 switch (*format_ptr++)
2146 {
2147 case 'e':
2148 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2149 break;
2150
2151 case 'E':
2152 if (XVEC (x, i) != NULL)
2153 {
b3694847 2154 int j;
f0722107 2155 int len = XVECLEN (x, i);
23b2ce53 2156
f0722107 2157 if (copied && len > 0)
8f985ec4 2158 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
f0722107
RS
2159 for (j = 0; j < len; j++)
2160 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
23b2ce53
RS
2161 }
2162 break;
2163 }
2164 }
2165 return x;
2166}
2167
2168/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2169 to look for shared sub-parts. */
2170
2171void
2172reset_used_flags (x)
2173 rtx x;
2174{
b3694847
SS
2175 int i, j;
2176 enum rtx_code code;
2177 const char *format_ptr;
23b2ce53
RS
2178
2179 if (x == 0)
2180 return;
2181
2182 code = GET_CODE (x);
2183
9faa82d8 2184 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
2185 for them. */
2186
2187 switch (code)
2188 {
2189 case REG:
2190 case QUEUED:
2191 case CONST_INT:
2192 case CONST_DOUBLE:
2193 case SYMBOL_REF:
2194 case CODE_LABEL:
2195 case PC:
2196 case CC0:
2197 return;
2198
2199 case INSN:
2200 case JUMP_INSN:
2201 case CALL_INSN:
2202 case NOTE:
2203 case LABEL_REF:
2204 case BARRIER:
2205 /* The chain of insns is not being copied. */
2206 return;
750c9258 2207
e9a25f70
JL
2208 default:
2209 break;
23b2ce53
RS
2210 }
2211
2212 x->used = 0;
2213
2214 format_ptr = GET_RTX_FORMAT (code);
2215 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2216 {
2217 switch (*format_ptr++)
2218 {
2219 case 'e':
2220 reset_used_flags (XEXP (x, i));
2221 break;
2222
2223 case 'E':
2224 for (j = 0; j < XVECLEN (x, i); j++)
2225 reset_used_flags (XVECEXP (x, i, j));
2226 break;
2227 }
2228 }
2229}
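
/* Illustrative sketch (hypothetical helper): unshare a single expression
   in isolation by clearing the mark bits and then copying any sub-rtx
   that is reached more than once.  The insn-chain walkers above apply
   the same two passes in bulk.  */

static rtx
example_unshare_one_rtx (x)
     rtx x;
{
  reset_used_flags (x);
  return copy_rtx_if_shared (x);
}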
2230\f
2231/* Copy X if necessary so that it won't be altered by changes in OTHER.
2232 Return X or the rtx for the pseudo reg the value of X was copied into.
2233 OTHER must be valid as a SET_DEST. */
2234
2235rtx
2236make_safe_from (x, other)
2237 rtx x, other;
2238{
2239 while (1)
2240 switch (GET_CODE (other))
2241 {
2242 case SUBREG:
2243 other = SUBREG_REG (other);
2244 break;
2245 case STRICT_LOW_PART:
2246 case SIGN_EXTEND:
2247 case ZERO_EXTEND:
2248 other = XEXP (other, 0);
2249 break;
2250 default:
2251 goto done;
2252 }
2253 done:
2254 if ((GET_CODE (other) == MEM
2255 && ! CONSTANT_P (x)
2256 && GET_CODE (x) != REG
2257 && GET_CODE (x) != SUBREG)
2258 || (GET_CODE (other) == REG
2259 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2260 || reg_mentioned_p (other, x))))
2261 {
2262 rtx temp = gen_reg_rtx (GET_MODE (x));
2263 emit_move_insn (temp, x);
2264 return temp;
2265 }
2266 return x;
2267}
2268\f
2269/* Emission of insns (adding them to the doubly-linked list). */
2270
2271/* Return the first insn of the current sequence or current function. */
2272
2273rtx
2274get_insns ()
2275{
2276 return first_insn;
2277}
2278
2279/* Return the last insn emitted in current sequence or current function. */
2280
2281rtx
2282get_last_insn ()
2283{
2284 return last_insn;
2285}
2286
2287/* Specify a new insn as the last in the chain. */
2288
2289void
2290set_last_insn (insn)
2291 rtx insn;
2292{
2293 if (NEXT_INSN (insn) != 0)
2294 abort ();
2295 last_insn = insn;
2296}
2297
2298/* Return the last insn emitted, even if it is in a sequence now pushed. */
2299
2300rtx
2301get_last_insn_anywhere ()
2302{
2303 struct sequence_stack *stack;
2304 if (last_insn)
2305 return last_insn;
49ad7cfa 2306 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
2307 if (stack->last != 0)
2308 return stack->last;
2309 return 0;
2310}
2311
2312/* Return a number larger than any instruction's uid in this function. */
2313
2314int
2315get_max_uid ()
2316{
2317 return cur_insn_uid;
2318}
aeeeda03 2319
673b5311
MM
2320/* Renumber instructions so that no instruction UIDs are wasted. */
2321
aeeeda03 2322void
673b5311
MM
2323renumber_insns (stream)
2324 FILE *stream;
aeeeda03
MM
2325{
2326 rtx insn;
aeeeda03 2327
673b5311
MM
2328 /* If we're not supposed to renumber instructions, don't. */
2329 if (!flag_renumber_insns)
2330 return;
2331
aeeeda03
MM
2332 /* If there aren't that many instructions, then it's not really
2333 worth renumbering them. */
673b5311 2334 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
aeeeda03
MM
2335 return;
2336
2337 cur_insn_uid = 1;
2338
2339 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
673b5311
MM
2340 {
2341 if (stream)
750c9258 2342 fprintf (stream, "Renumbering insn %d to %d\n",
673b5311
MM
2343 INSN_UID (insn), cur_insn_uid);
2344 INSN_UID (insn) = cur_insn_uid++;
2345 }
aeeeda03 2346}
23b2ce53
RS
2347\f
2348/* Return the next insn. If it is a SEQUENCE, return the first insn
2349 of the sequence. */
2350
2351rtx
2352next_insn (insn)
2353 rtx insn;
2354{
2355 if (insn)
2356 {
2357 insn = NEXT_INSN (insn);
2358 if (insn && GET_CODE (insn) == INSN
2359 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2360 insn = XVECEXP (PATTERN (insn), 0, 0);
2361 }
2362
2363 return insn;
2364}
2365
2366/* Return the previous insn. If it is a SEQUENCE, return the last insn
2367 of the sequence. */
2368
2369rtx
2370previous_insn (insn)
2371 rtx insn;
2372{
2373 if (insn)
2374 {
2375 insn = PREV_INSN (insn);
2376 if (insn && GET_CODE (insn) == INSN
2377 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2378 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2379 }
2380
2381 return insn;
2382}
2383
2384/* Return the next insn after INSN that is not a NOTE. This routine does not
2385 look inside SEQUENCEs. */
2386
2387rtx
2388next_nonnote_insn (insn)
2389 rtx insn;
2390{
2391 while (insn)
2392 {
2393 insn = NEXT_INSN (insn);
2394 if (insn == 0 || GET_CODE (insn) != NOTE)
2395 break;
2396 }
2397
2398 return insn;
2399}
2400
2401/* Return the previous insn before INSN that is not a NOTE. This routine does
2402 not look inside SEQUENCEs. */
2403
2404rtx
2405prev_nonnote_insn (insn)
2406 rtx insn;
2407{
2408 while (insn)
2409 {
2410 insn = PREV_INSN (insn);
2411 if (insn == 0 || GET_CODE (insn) != NOTE)
2412 break;
2413 }
2414
2415 return insn;
2416}
2417
2418/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2419 or 0, if there is none. This routine does not look inside
0f41302f 2420 SEQUENCEs. */
23b2ce53
RS
2421
2422rtx
2423next_real_insn (insn)
2424 rtx insn;
2425{
2426 while (insn)
2427 {
2428 insn = NEXT_INSN (insn);
2429 if (insn == 0 || GET_CODE (insn) == INSN
2430 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2431 break;
2432 }
2433
2434 return insn;
2435}
2436
2437/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2438 or 0, if there is none. This routine does not look inside
2439 SEQUENCEs. */
2440
2441rtx
2442prev_real_insn (insn)
2443 rtx insn;
2444{
2445 while (insn)
2446 {
2447 insn = PREV_INSN (insn);
2448 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2449 || GET_CODE (insn) == JUMP_INSN)
2450 break;
2451 }
2452
2453 return insn;
2454}
2455
2456/* Return nonzero if INSN really does something: it is a CALL_INSN, a
2457   JUMP_INSN, or an INSN that is not just a USE or CLOBBER once reload
2458   has completed.  next_active_insn, below, finds the next such insn. */
2459
69732dcb
RH
2460int
2461active_insn_p (insn)
2462 rtx insn;
2463{
2464 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2465 || (GET_CODE (insn) == INSN
2466 && (! reload_completed
2467 || (GET_CODE (PATTERN (insn)) != USE
2468 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2469}
2470
23b2ce53
RS
2471rtx
2472next_active_insn (insn)
2473 rtx insn;
2474{
2475 while (insn)
2476 {
2477 insn = NEXT_INSN (insn);
69732dcb 2478 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
2479 break;
2480 }
2481
2482 return insn;
2483}
2484
2485/* Find the last insn before INSN that really does something. This routine
2486 does not look inside SEQUENCEs. Until reload has completed, this is the
2487 same as prev_real_insn. */
2488
2489rtx
2490prev_active_insn (insn)
2491 rtx insn;
2492{
2493 while (insn)
2494 {
2495 insn = PREV_INSN (insn);
69732dcb 2496 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
2497 break;
2498 }
2499
2500 return insn;
2501}
2502
2503/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2504
2505rtx
2506next_label (insn)
2507 rtx insn;
2508{
2509 while (insn)
2510 {
2511 insn = NEXT_INSN (insn);
2512 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2513 break;
2514 }
2515
2516 return insn;
2517}
2518
2519/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2520
2521rtx
2522prev_label (insn)
2523 rtx insn;
2524{
2525 while (insn)
2526 {
2527 insn = PREV_INSN (insn);
2528 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2529 break;
2530 }
2531
2532 return insn;
2533}
2534\f
2535#ifdef HAVE_cc0
c572e5ba
JVA
2536/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2537 and REG_CC_USER notes so we can find it. */
2538
2539void
2540link_cc0_insns (insn)
2541 rtx insn;
2542{
2543 rtx user = next_nonnote_insn (insn);
2544
2545 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2546 user = XVECEXP (PATTERN (user), 0, 0);
2547
c5c76735
JL
2548 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2549 REG_NOTES (user));
3b80f6ca 2550 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
c572e5ba
JVA
2551}
2552
23b2ce53
RS
2553/* Return the next insn that uses CC0 after INSN, which is assumed to
2554 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2555 applied to the result of this function should yield INSN).
2556
2557 Normally, this is simply the next insn. However, if a REG_CC_USER note
2558 is present, it contains the insn that uses CC0.
2559
2560 Return 0 if we can't find the insn. */
2561
2562rtx
2563next_cc0_user (insn)
2564 rtx insn;
2565{
906c4e36 2566 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
23b2ce53
RS
2567
2568 if (note)
2569 return XEXP (note, 0);
2570
2571 insn = next_nonnote_insn (insn);
2572 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2573 insn = XVECEXP (PATTERN (insn), 0, 0);
2574
2c3c49de 2575 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
23b2ce53
RS
2576 return insn;
2577
2578 return 0;
2579}
2580
2581/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2582 note, it is the previous insn. */
2583
2584rtx
2585prev_cc0_setter (insn)
2586 rtx insn;
2587{
906c4e36 2588 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
2589
2590 if (note)
2591 return XEXP (note, 0);
2592
2593 insn = prev_nonnote_insn (insn);
2594 if (! sets_cc0_p (PATTERN (insn)))
2595 abort ();
2596
2597 return insn;
2598}
2599#endif
e5bef2e4
HB
2600
2601/* Increment the label uses for all labels present in rtx. */
2602
2603static void
2604mark_label_nuses (x)
2605 rtx x;
2606{
b3694847
SS
2607 enum rtx_code code;
2608 int i, j;
2609 const char *fmt;
e5bef2e4
HB
2610
2611 code = GET_CODE (x);
2612 if (code == LABEL_REF)
2613 LABEL_NUSES (XEXP (x, 0))++;
2614
2615 fmt = GET_RTX_FORMAT (code);
2616 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2617 {
2618 if (fmt[i] == 'e')
2619 mark_label_nuses (XEXP (x, i));
2620 else if (fmt[i] == 'E')
2621 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2622 mark_label_nuses (XVECEXP (x, i, j));
2623 }
2624}
2625
23b2ce53
RS
2626\f
2627/* Try splitting insns that can be split for better scheduling.
2628 PAT is the pattern which might split.
2629 TRIAL is the insn providing PAT.
11147ebe 2630 LAST is non-zero if we should return the last insn of the sequence produced.
23b2ce53
RS
2631
2632 If this routine succeeds in splitting, it returns the first or last
11147ebe 2633 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
2634 returns TRIAL. If the insn to be returned can be split, it will be. */
2635
2636rtx
11147ebe 2637try_split (pat, trial, last)
23b2ce53 2638 rtx pat, trial;
11147ebe 2639 int last;
23b2ce53
RS
2640{
2641 rtx before = PREV_INSN (trial);
2642 rtx after = NEXT_INSN (trial);
23b2ce53
RS
2643 int has_barrier = 0;
2644 rtx tem;
6b24c259
JH
2645 rtx note, seq;
2646 int probability;
2647
2648 if (any_condjump_p (trial)
2649 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
2650 split_branch_probability = INTVAL (XEXP (note, 0));
2651 probability = split_branch_probability;
2652
2653 seq = split_insns (pat, trial);
2654
2655 split_branch_probability = -1;
23b2ce53
RS
2656
2657 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2658 We may need to handle this specially. */
2659 if (after && GET_CODE (after) == BARRIER)
2660 {
2661 has_barrier = 1;
2662 after = NEXT_INSN (after);
2663 }
2664
2665 if (seq)
2666 {
2667 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2668	 The latter case normally arises only when the single insn is itself
2669	 expected to be split again in turn (SFmode on the 29k is an example).  */
2670 if (GET_CODE (seq) == SEQUENCE)
2671 {
6b24c259 2672 int i, njumps = 0;
750c9258
AJ
2673
2674 /* Avoid infinite loop if any insn of the result matches
4b5e8abe
CP
2675 the original pattern. */
2676 for (i = 0; i < XVECLEN (seq, 0); i++)
750c9258 2677 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
4b5e8abe 2678 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
750c9258 2679 return trial;
4b5e8abe 2680
90a74703
JH
2681 /* Mark labels. */
2682 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2683 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
6b24c259
JH
2684 {
2685 rtx insn = XVECEXP (seq, 0, i);
2686 mark_jump_label (PATTERN (insn),
2687 XVECEXP (seq, 0, i), 0);
2688 njumps++;
2689 if (probability != -1
2690 && any_condjump_p (insn)
2691 && !find_reg_note (insn, REG_BR_PROB, 0))
2692 {
2693 /* We can preserve the REG_BR_PROB notes only if exactly
2694		     one jump is created, otherwise the machine description
2695		     is responsible for this step using the
2696		     split_branch_probability variable.  */
2697 if (njumps != 1)
2698 abort ();
2699 REG_NOTES (insn)
2700 = gen_rtx_EXPR_LIST (REG_BR_PROB,
2701 GEN_INT (probability),
2702 REG_NOTES (insn));
2703 }
2704 }
216183ce 2705
2d01e445
AO
2706 /* If we are splitting a CALL_INSN, look for the CALL_INSN
2707 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
2708 if (GET_CODE (trial) == CALL_INSN)
2709 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2710 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
2711 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
2712 = CALL_INSN_FUNCTION_USAGE (trial);
2713
216183ce
RH
2714 /* Copy notes, particularly those related to the CFG. */
2715 for (note = REG_NOTES (trial); note ; note = XEXP (note, 1))
2716 {
2717 switch (REG_NOTE_KIND (note))
2718 {
2719 case REG_EH_REGION:
2720 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2721 {
2722 rtx insn = XVECEXP (seq, 0, i);
2723 if (GET_CODE (insn) == CALL_INSN
2724 || (flag_non_call_exceptions
2725 && may_trap_p (PATTERN (insn))))
2726 REG_NOTES (insn)
2727 = gen_rtx_EXPR_LIST (REG_EH_REGION,
2728 XEXP (note, 0),
2729 REG_NOTES (insn));
2730 }
2731 break;
2732
2733 case REG_NORETURN:
2734 case REG_SETJMP:
2735 case REG_ALWAYS_RETURN:
2736 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2737 {
2738 rtx insn = XVECEXP (seq, 0, i);
2739 if (GET_CODE (insn) == CALL_INSN)
2740 REG_NOTES (insn)
2741 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
2742 XEXP (note, 0),
2743 REG_NOTES (insn));
2744 }
2745 break;
2746
2747 case REG_NON_LOCAL_GOTO:
2748 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2749 {
2750 rtx insn = XVECEXP (seq, 0, i);
2751 if (GET_CODE (insn) == JUMP_INSN)
2752 REG_NOTES (insn)
2753 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
2754 XEXP (note, 0),
2755 REG_NOTES (insn));
2756 }
2757 break;
2758
2759 default:
2760 break;
2761 }
2762 }
d6e95df8 2763
e5bef2e4
HB
2764 /* If there are LABELS inside the split insns increment the
2765 usage count so we don't delete the label. */
2766 if (GET_CODE (trial) == INSN)
2767 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2768 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
2769 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
2770
3c030e88 2771 tem = emit_insn_after (seq, trial);
23b2ce53 2772
53c17031 2773 delete_related_insns (trial);
23b2ce53
RS
2774 if (has_barrier)
2775 emit_barrier_after (tem);
11147ebe
RK
2776
2777 /* Recursively call try_split for each new insn created; by the
2778 time control returns here that insn will be fully split, so
2779 set LAST and continue from the insn after the one returned.
f4a3cd05 2780 We can't use next_active_insn here since AFTER may be a note.
23886015 2781	 Ignore deleted insns, which can occur if not optimizing.  */
2c3c49de
RB
2782 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
2783 if (! INSN_DELETED_P (tem) && INSN_P (tem))
f4a3cd05 2784 tem = try_split (PATTERN (tem), tem, 1);
23b2ce53
RS
2785 }
2786 /* Avoid infinite loop if the result matches the original pattern. */
2787 else if (rtx_equal_p (seq, pat))
2788 return trial;
2789 else
2790 {
2791 PATTERN (trial) = seq;
2792 INSN_CODE (trial) = -1;
11147ebe 2793 try_split (seq, trial, last);
23b2ce53
RS
2794 }
2795
11147ebe
RK
2796 /* Return either the first or the last insn, depending on which was
2797 requested. */
750c9258 2798 return last
6b24c259
JH
2799 ? (after ? PREV_INSN (after) : last_insn)
2800 : NEXT_INSN (before);
23b2ce53
RS
2801 }
2802
2803 return trial;
2804}
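
/* Illustrative sketch (hypothetical caller): split INSN in place, as the
   scheduler and final do, asking for the last insn of whatever sequence
   replaces it.  */

static rtx
example_split (insn)
     rtx insn;
{
  return try_split (PATTERN (insn), insn, 1);
}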
2805\f
2806/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 2807 Store PATTERN in the pattern slots. */
23b2ce53
RS
2808
2809rtx
4b1f5e8c 2810make_insn_raw (pattern)
23b2ce53 2811 rtx pattern;
23b2ce53 2812{
b3694847 2813 rtx insn;
23b2ce53 2814
1f8f4a0b 2815 insn = rtx_alloc (INSN);
23b2ce53 2816
43127294 2817 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
2818 PATTERN (insn) = pattern;
2819 INSN_CODE (insn) = -1;
1632afca
RS
2820 LOG_LINKS (insn) = NULL;
2821 REG_NOTES (insn) = NULL;
23b2ce53 2822
47984720
NC
2823#ifdef ENABLE_RTL_CHECKING
2824 if (insn
2c3c49de 2825 && INSN_P (insn)
47984720
NC
2826 && (returnjump_p (insn)
2827 || (GET_CODE (insn) == SET
2828 && SET_DEST (insn) == pc_rtx)))
2829 {
2830 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
2831 debug_rtx (insn);
2832 }
2833#endif
750c9258 2834
23b2ce53
RS
2835 return insn;
2836}
2837
2838/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
2839
2840static rtx
4b1f5e8c 2841make_jump_insn_raw (pattern)
23b2ce53 2842 rtx pattern;
23b2ce53 2843{
b3694847 2844 rtx insn;
23b2ce53 2845
4b1f5e8c 2846 insn = rtx_alloc (JUMP_INSN);
1632afca 2847 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
2848
2849 PATTERN (insn) = pattern;
2850 INSN_CODE (insn) = -1;
1632afca
RS
2851 LOG_LINKS (insn) = NULL;
2852 REG_NOTES (insn) = NULL;
2853 JUMP_LABEL (insn) = NULL;
23b2ce53
RS
2854
2855 return insn;
2856}
aff507f4
RK
2857
2858/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
2859
2860static rtx
2861make_call_insn_raw (pattern)
2862 rtx pattern;
2863{
b3694847 2864 rtx insn;
aff507f4
RK
2865
2866 insn = rtx_alloc (CALL_INSN);
2867 INSN_UID (insn) = cur_insn_uid++;
2868
2869 PATTERN (insn) = pattern;
2870 INSN_CODE (insn) = -1;
2871 LOG_LINKS (insn) = NULL;
2872 REG_NOTES (insn) = NULL;
2873 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
2874
2875 return insn;
2876}
23b2ce53
RS
2877\f
2878/* Add INSN to the end of the doubly-linked list.
2879 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
2880
2881void
2882add_insn (insn)
b3694847 2883 rtx insn;
23b2ce53
RS
2884{
2885 PREV_INSN (insn) = last_insn;
2886 NEXT_INSN (insn) = 0;
2887
2888 if (NULL != last_insn)
2889 NEXT_INSN (last_insn) = insn;
2890
2891 if (NULL == first_insn)
2892 first_insn = insn;
2893
2894 last_insn = insn;
2895}
2896
a0ae8e8d
RK
2897/* Add INSN into the doubly-linked list after insn AFTER. This and
2898 the next should be the only functions called to insert an insn once
ba213285 2899 delay slots have been filled since only they know how to update a
a0ae8e8d 2900 SEQUENCE. */
23b2ce53
RS
2901
2902void
2903add_insn_after (insn, after)
2904 rtx insn, after;
2905{
2906 rtx next = NEXT_INSN (after);
3c030e88 2907 basic_block bb;
23b2ce53 2908
6782074d 2909 if (optimize && INSN_DELETED_P (after))
ba213285
RK
2910 abort ();
2911
23b2ce53
RS
2912 NEXT_INSN (insn) = next;
2913 PREV_INSN (insn) = after;
2914
2915 if (next)
2916 {
2917 PREV_INSN (next) = insn;
2918 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2919 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
2920 }
2921 else if (last_insn == after)
2922 last_insn = insn;
2923 else
2924 {
49ad7cfa 2925 struct sequence_stack *stack = seq_stack;
23b2ce53
RS
2926 /* Scan all pending sequences too. */
2927 for (; stack; stack = stack->next)
2928 if (after == stack->last)
fef0509b
RK
2929 {
2930 stack->last = insn;
2931 break;
2932 }
a0ae8e8d
RK
2933
2934 if (stack == 0)
2935 abort ();
23b2ce53
RS
2936 }
2937
3c030e88
JH
2938 if (basic_block_for_insn
2939 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
2940 && (bb = BLOCK_FOR_INSN (after)))
2941 {
2942 set_block_for_insn (insn, bb);
2943      /* Should not happen, as the first insn in the BB is always
2944	 either a NOTE or a LABEL.  */
2945 if (bb->end == after
2946 /* Avoid clobbering of structure when creating new BB. */
2947 && GET_CODE (insn) != BARRIER
2948 && (GET_CODE (insn) != NOTE
2949 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
2950 bb->end = insn;
2951 }
2952
23b2ce53
RS
2953 NEXT_INSN (after) = insn;
2954 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
2955 {
2956 rtx sequence = PATTERN (after);
2957 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2958 }
2959}
2960
a0ae8e8d
RK
2961/* Add INSN into the doubly-linked list before insn BEFORE. This and
2962 the previous should be the only functions called to insert an insn once
ba213285 2963 delay slots have been filled since only they know how to update a
a0ae8e8d
RK
2964 SEQUENCE. */
2965
2966void
2967add_insn_before (insn, before)
2968 rtx insn, before;
2969{
2970 rtx prev = PREV_INSN (before);
3c030e88 2971 basic_block bb;
a0ae8e8d 2972
6782074d 2973 if (optimize && INSN_DELETED_P (before))
ba213285
RK
2974 abort ();
2975
a0ae8e8d
RK
2976 PREV_INSN (insn) = prev;
2977 NEXT_INSN (insn) = before;
2978
2979 if (prev)
2980 {
2981 NEXT_INSN (prev) = insn;
2982 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2983 {
2984 rtx sequence = PATTERN (prev);
2985 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2986 }
2987 }
2988 else if (first_insn == before)
2989 first_insn = insn;
2990 else
2991 {
49ad7cfa 2992 struct sequence_stack *stack = seq_stack;
a0ae8e8d
RK
2993 /* Scan all pending sequences too. */
2994 for (; stack; stack = stack->next)
2995 if (before == stack->first)
fef0509b
RK
2996 {
2997 stack->first = insn;
2998 break;
2999 }
a0ae8e8d
RK
3000
3001 if (stack == 0)
3002 abort ();
3003 }
3004
3c030e88
JH
3005 if (basic_block_for_insn
3006 && (unsigned int)INSN_UID (before) < basic_block_for_insn->num_elements
3007 && (bb = BLOCK_FOR_INSN (before)))
3008 {
3009 set_block_for_insn (insn, bb);
3010      /* Should not happen, as the first insn in the BB is always
3011	 either a NOTE or a LABEL.  */
3012 if (bb->head == insn
3013 /* Avoid clobbering of structure when creating new BB. */
3014 && GET_CODE (insn) != BARRIER
3015 && (GET_CODE (insn) != NOTE
3016 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3017 abort ();
3018 }
3019
a0ae8e8d
RK
3020 PREV_INSN (before) = insn;
3021 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3022 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3023}
3024
89e99eea
DB
3025/* Remove an insn from its doubly-linked list. This function knows how
3026 to handle sequences. */
3027void
3028remove_insn (insn)
3029 rtx insn;
3030{
3031 rtx next = NEXT_INSN (insn);
3032 rtx prev = PREV_INSN (insn);
53c17031
JH
3033 basic_block bb;
3034
89e99eea
DB
3035 if (prev)
3036 {
3037 NEXT_INSN (prev) = next;
3038 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3039 {
3040 rtx sequence = PATTERN (prev);
3041 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3042 }
3043 }
3044 else if (first_insn == insn)
3045 first_insn = next;
3046 else
3047 {
49ad7cfa 3048 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3049 /* Scan all pending sequences too. */
3050 for (; stack; stack = stack->next)
3051 if (insn == stack->first)
3052 {
3053 stack->first = next;
3054 break;
3055 }
3056
3057 if (stack == 0)
3058 abort ();
3059 }
3060
3061 if (next)
3062 {
3063 PREV_INSN (next) = prev;
3064 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3065 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3066 }
3067 else if (last_insn == insn)
3068 last_insn = prev;
3069 else
3070 {
49ad7cfa 3071 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3072 /* Scan all pending sequences too. */
3073 for (; stack; stack = stack->next)
3074 if (insn == stack->last)
3075 {
3076 stack->last = prev;
3077 break;
3078 }
3079
3080 if (stack == 0)
3081 abort ();
3082 }
53c17031
JH
3083 if (basic_block_for_insn
3084 && (unsigned int)INSN_UID (insn) < basic_block_for_insn->num_elements
3085 && (bb = BLOCK_FOR_INSN (insn)))
3086 {
3087 if (bb->head == insn)
3088 {
3089 /* Never ever delete the basic block note without deleting whole basic
3090 block. */
3091 if (GET_CODE (insn) == NOTE)
3092 abort ();
3093 bb->head = next;
3094 }
3095 if (bb->end == insn)
3096 bb->end = prev;
3097 }
89e99eea
DB
3098}
3099
23b2ce53
RS
3100/* Delete all insns made since FROM.
3101 FROM becomes the new last instruction. */
3102
3103void
3104delete_insns_since (from)
3105 rtx from;
3106{
3107 if (from == 0)
3108 first_insn = 0;
3109 else
3110 NEXT_INSN (from) = 0;
3111 last_insn = from;
3112}
3113
5dab5552
MS
3114/* This function is deprecated, please use sequences instead.
3115
3116 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
3117 The insns to be moved are those between FROM and TO.
3118 They are moved to a new position after the insn AFTER.
3119 AFTER must not be FROM or TO or any insn in between.
3120
3121 This function does not know about SEQUENCEs and hence should not be
3122 called after delay-slot filling has been done. */
3123
3124void
3c030e88 3125reorder_insns_nobb (from, to, after)
23b2ce53
RS
3126 rtx from, to, after;
3127{
3128 /* Splice this bunch out of where it is now. */
3129 if (PREV_INSN (from))
3130 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3131 if (NEXT_INSN (to))
3132 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3133 if (last_insn == to)
3134 last_insn = PREV_INSN (from);
3135 if (first_insn == from)
3136 first_insn = NEXT_INSN (to);
3137
3138 /* Make the new neighbors point to it and it to them. */
3139 if (NEXT_INSN (after))
3140 PREV_INSN (NEXT_INSN (after)) = to;
3141
3142 NEXT_INSN (to) = NEXT_INSN (after);
3143 PREV_INSN (from) = after;
3144 NEXT_INSN (after) = from;
3145 if (after == last_insn)
3146 last_insn = to;
3147}
3148
3c030e88
JH
3149/* Same as function above, but take care to update BB boundaries. */
3150void
3151reorder_insns (from, to, after)
3152 rtx from, to, after;
3153{
3154 rtx prev = PREV_INSN (from);
3155 basic_block bb, bb2;
3156
3157 reorder_insns_nobb (from, to, after);
3158
3159 if (basic_block_for_insn
3160 && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
3161 && (bb = BLOCK_FOR_INSN (after)))
3162 {
3163 rtx x;
3164
3165 if (basic_block_for_insn
3166 && (unsigned int)INSN_UID (from) < basic_block_for_insn->num_elements
3167 && (bb2 = BLOCK_FOR_INSN (from)))
3168 {
3169 if (bb2->end == to)
3170 bb2->end = prev;
3171 }
3172
3173 if (bb->end == after)
3174 bb->end = to;
3175
3176 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3177 set_block_for_insn (x, bb);
3178 }
3179}
3180
23b2ce53
RS
3181/* Return the line note insn preceding INSN. */
3182
3183static rtx
3184find_line_note (insn)
3185 rtx insn;
3186{
3187 if (no_line_numbers)
3188 return 0;
3189
3190 for (; insn; insn = PREV_INSN (insn))
3191 if (GET_CODE (insn) == NOTE
3192 && NOTE_LINE_NUMBER (insn) >= 0)
3193 break;
3194
3195 return insn;
3196}
3197
3198/* Like reorder_insns, but inserts line notes to preserve the line numbers
3199 of the moved insns when debugging. This may insert a note between AFTER
3200 and FROM, and another one after TO. */
3201
3202void
3203reorder_insns_with_line_notes (from, to, after)
3204 rtx from, to, after;
3205{
3206 rtx from_line = find_line_note (from);
3207 rtx after_line = find_line_note (after);
3208
3209 reorder_insns (from, to, after);
3210
3211 if (from_line == after_line)
3212 return;
3213
3214 if (from_line)
3215 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3216 NOTE_LINE_NUMBER (from_line),
3217 after);
3218 if (after_line)
3219 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3220 NOTE_LINE_NUMBER (after_line),
3221 to);
3222}
aeeeda03 3223
64b59a80 3224/* Remove unnecessary notes from the instruction stream. */
aeeeda03
MM
3225
3226void
64b59a80 3227remove_unnecessary_notes ()
aeeeda03 3228{
542d73ae
RH
3229 rtx block_stack = NULL_RTX;
3230 rtx eh_stack = NULL_RTX;
aeeeda03
MM
3231 rtx insn;
3232 rtx next;
542d73ae 3233 rtx tmp;
aeeeda03 3234
116eebd6
MM
3235 /* We must not remove the first instruction in the function because
3236 the compiler depends on the first instruction being a note. */
aeeeda03
MM
3237 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3238 {
3239 /* Remember what's next. */
3240 next = NEXT_INSN (insn);
3241
3242 /* We're only interested in notes. */
3243 if (GET_CODE (insn) != NOTE)
3244 continue;
3245
542d73ae 3246 switch (NOTE_LINE_NUMBER (insn))
18c038b9 3247 {
542d73ae
RH
3248 case NOTE_INSN_DELETED:
3249 remove_insn (insn);
3250 break;
3251
3252 case NOTE_INSN_EH_REGION_BEG:
3253 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3254 break;
3255
3256 case NOTE_INSN_EH_REGION_END:
3257 /* Too many end notes. */
3258 if (eh_stack == NULL_RTX)
3259 abort ();
3260 /* Mismatched nesting. */
3261 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3262 abort ();
3263 tmp = eh_stack;
3264 eh_stack = XEXP (eh_stack, 1);
3265 free_INSN_LIST_node (tmp);
3266 break;
3267
3268 case NOTE_INSN_BLOCK_BEG:
3269 /* By now, all notes indicating lexical blocks should have
3270 NOTE_BLOCK filled in. */
3271 if (NOTE_BLOCK (insn) == NULL_TREE)
3272 abort ();
3273 block_stack = alloc_INSN_LIST (insn, block_stack);
3274 break;
3275
3276 case NOTE_INSN_BLOCK_END:
3277 /* Too many end notes. */
3278 if (block_stack == NULL_RTX)
3279 abort ();
3280 /* Mismatched nesting. */
3281 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3282 abort ();
3283 tmp = block_stack;
3284 block_stack = XEXP (block_stack, 1);
3285 free_INSN_LIST_node (tmp);
3286
18c038b9
MM
3287 /* Scan back to see if there are any non-note instructions
3288 between INSN and the beginning of this block. If not,
3289 then there is no PC range in the generated code that will
3290 actually be in this block, so there's no point in
3291 remembering the existence of the block. */
542d73ae 3292 for (tmp = PREV_INSN (insn); tmp ; tmp = PREV_INSN (tmp))
18c038b9
MM
3293 {
3294 /* This block contains a real instruction. Note that we
3295 don't include labels; if the only thing in the block
3296 is a label, then there are still no PC values that
3297 lie within the block. */
542d73ae 3298 if (INSN_P (tmp))
18c038b9
MM
3299 break;
3300
3301 /* We're only interested in NOTEs. */
542d73ae 3302 if (GET_CODE (tmp) != NOTE)
18c038b9
MM
3303 continue;
3304
542d73ae 3305 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
18c038b9 3306 {
e1772ac0
NB
3307 /* We just verified that this BLOCK matches us with
3308 the block_stack check above. Never delete the
3309 BLOCK for the outermost scope of the function; we
3310 can refer to names from that scope even if the
3311 block notes are messed up. */
3312 if (! is_body_block (NOTE_BLOCK (insn))
3313 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
deb5e280 3314 {
542d73ae 3315 remove_insn (tmp);
deb5e280
JM
3316 remove_insn (insn);
3317 }
18c038b9
MM
3318 break;
3319 }
542d73ae 3320 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
18c038b9
MM
3321 /* There's a nested block. We need to leave the
3322 current block in place since otherwise the debugger
3323 wouldn't be able to show symbols from our block in
3324 the nested block. */
3325 break;
3326 }
3327 }
aeeeda03 3328 }
542d73ae
RH
3329
3330 /* Too many begin notes. */
3331 if (block_stack || eh_stack)
3332 abort ();
aeeeda03
MM
3333}
3334
23b2ce53
RS
3335\f
3336/* Emit an insn of given code and pattern
3337 at a specified place within the doubly-linked list. */
3338
3339/* Make an instruction with body PATTERN
3340 and output it before the instruction BEFORE. */
3341
3342rtx
3343emit_insn_before (pattern, before)
b3694847 3344 rtx pattern, before;
23b2ce53 3345{
b3694847 3346 rtx insn = before;
23b2ce53
RS
3347
3348 if (GET_CODE (pattern) == SEQUENCE)
3349 {
b3694847 3350 int i;
23b2ce53
RS
3351
3352 for (i = 0; i < XVECLEN (pattern, 0); i++)
3353 {
3354 insn = XVECEXP (pattern, 0, i);
a0ae8e8d 3355 add_insn_before (insn, before);
23b2ce53 3356 }
23b2ce53
RS
3357 }
3358 else
3359 {
4b1f5e8c 3360 insn = make_insn_raw (pattern);
a0ae8e8d 3361 add_insn_before (insn, before);
23b2ce53
RS
3362 }
3363
3364 return insn;
3365}
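
/* Illustrative sketch (hypothetical caller): emit a register-to-register
   copy immediately before BEFORE.  */

static rtx
example_emit_copy_before (dest, src, before)
     rtx dest, src, before;
{
  return emit_insn_before (gen_rtx_SET (VOIDmode, dest, src), before);
}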
3366
3367/* Make an instruction with body PATTERN and code JUMP_INSN
3368 and output it before the instruction BEFORE. */
3369
3370rtx
3371emit_jump_insn_before (pattern, before)
b3694847 3372 rtx pattern, before;
23b2ce53 3373{
b3694847 3374 rtx insn;
23b2ce53
RS
3375
3376 if (GET_CODE (pattern) == SEQUENCE)
3377 insn = emit_insn_before (pattern, before);
3378 else
3379 {
85cf32bc 3380 insn = make_jump_insn_raw (pattern);
a0ae8e8d 3381 add_insn_before (insn, before);
23b2ce53
RS
3382 }
3383
3384 return insn;
3385}
3386
3387/* Make an instruction with body PATTERN and code CALL_INSN
3388 and output it before the instruction BEFORE. */
3389
3390rtx
3391emit_call_insn_before (pattern, before)
b3694847 3392 rtx pattern, before;
23b2ce53 3393{
b3694847 3394 rtx insn;
aff507f4
RK
3395
3396 if (GET_CODE (pattern) == SEQUENCE)
3397 insn = emit_insn_before (pattern, before);
3398 else
3399 {
3400 insn = make_call_insn_raw (pattern);
a0ae8e8d 3401 add_insn_before (insn, before);
aff507f4
RK
3402 PUT_CODE (insn, CALL_INSN);
3403 }
3404
23b2ce53
RS
3405 return insn;
3406}
3407
3408/* Make an insn of code BARRIER
e881bb1b 3409 and output it before the insn BEFORE. */
23b2ce53
RS
3410
3411rtx
3412emit_barrier_before (before)
b3694847 3413 rtx before;
23b2ce53 3414{
b3694847 3415 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
3416
3417 INSN_UID (insn) = cur_insn_uid++;
3418
a0ae8e8d 3419 add_insn_before (insn, before);
23b2ce53
RS
3420 return insn;
3421}
3422
e881bb1b
RH
3423/* Emit the label LABEL before the insn BEFORE. */
3424
3425rtx
3426emit_label_before (label, before)
3427 rtx label, before;
3428{
3429 /* This can be called twice for the same label as a result of the
3430 confusion that follows a syntax error! So make it harmless. */
3431 if (INSN_UID (label) == 0)
3432 {
3433 INSN_UID (label) = cur_insn_uid++;
3434 add_insn_before (label, before);
3435 }
3436
3437 return label;
3438}
3439
23b2ce53
RS
3440/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3441
3442rtx
3443emit_note_before (subtype, before)
3444 int subtype;
3445 rtx before;
3446{
b3694847 3447 rtx note = rtx_alloc (NOTE);
23b2ce53
RS
3448 INSN_UID (note) = cur_insn_uid++;
3449 NOTE_SOURCE_FILE (note) = 0;
3450 NOTE_LINE_NUMBER (note) = subtype;
3451
a0ae8e8d 3452 add_insn_before (note, before);
23b2ce53
RS
3453 return note;
3454}
3455\f
3456/* Make an insn of code INSN with body PATTERN
3457 and output it after the insn AFTER. */
3458
3459rtx
3460emit_insn_after (pattern, after)
b3694847 3461 rtx pattern, after;
23b2ce53 3462{
b3694847 3463 rtx insn = after;
23b2ce53
RS
3464
3465 if (GET_CODE (pattern) == SEQUENCE)
3466 {
b3694847 3467 int i;
23b2ce53
RS
3468
3469 for (i = 0; i < XVECLEN (pattern, 0); i++)
3470 {
3471 insn = XVECEXP (pattern, 0, i);
3472 add_insn_after (insn, after);
3473 after = insn;
3474 }
23b2ce53
RS
3475 }
3476 else
3477 {
4b1f5e8c 3478 insn = make_insn_raw (pattern);
23b2ce53
RS
3479 add_insn_after (insn, after);
3480 }
3481
3482 return insn;
3483}
3484
255680cf
RK
3485/* Similar to emit_insn_after, except that line notes are to be inserted so
3486 as to act as if this insn were at FROM. */
3487
3488void
3489emit_insn_after_with_line_notes (pattern, after, from)
3490 rtx pattern, after, from;
3491{
3492 rtx from_line = find_line_note (from);
3493 rtx after_line = find_line_note (after);
3494 rtx insn = emit_insn_after (pattern, after);
3495
3496 if (from_line)
3497 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3498 NOTE_LINE_NUMBER (from_line),
3499 after);
3500
3501 if (after_line)
3502 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3503 NOTE_LINE_NUMBER (after_line),
3504 insn);
3505}
3506
23b2ce53
RS
3507/* Make an insn of code JUMP_INSN with body PATTERN
3508 and output it after the insn AFTER. */
3509
3510rtx
3511emit_jump_insn_after (pattern, after)
b3694847 3512 rtx pattern, after;
23b2ce53 3513{
b3694847 3514 rtx insn;
23b2ce53
RS
3515
3516 if (GET_CODE (pattern) == SEQUENCE)
3517 insn = emit_insn_after (pattern, after);
3518 else
3519 {
85cf32bc 3520 insn = make_jump_insn_raw (pattern);
23b2ce53
RS
3521 add_insn_after (insn, after);
3522 }
3523
3524 return insn;
3525}
3526
3527/* Make an insn of code BARRIER
3528 and output it after the insn AFTER. */
3529
3530rtx
3531emit_barrier_after (after)
b3694847 3532 rtx after;
23b2ce53 3533{
b3694847 3534 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
3535
3536 INSN_UID (insn) = cur_insn_uid++;
3537
3538 add_insn_after (insn, after);
3539 return insn;
3540}
3541
3542/* Emit the label LABEL after the insn AFTER. */
3543
3544rtx
3545emit_label_after (label, after)
3546 rtx label, after;
3547{
3548 /* This can be called twice for the same label
3549 as a result of the confusion that follows a syntax error!
3550 So make it harmless. */
3551 if (INSN_UID (label) == 0)
3552 {
3553 INSN_UID (label) = cur_insn_uid++;
3554 add_insn_after (label, after);
3555 }
3556
3557 return label;
3558}
3559
3560/* Emit a note of subtype SUBTYPE after the insn AFTER. */
3561
3562rtx
3563emit_note_after (subtype, after)
3564 int subtype;
3565 rtx after;
3566{
b3694847 3567 rtx note = rtx_alloc (NOTE);
23b2ce53
RS
3568 INSN_UID (note) = cur_insn_uid++;
3569 NOTE_SOURCE_FILE (note) = 0;
3570 NOTE_LINE_NUMBER (note) = subtype;
3571 add_insn_after (note, after);
3572 return note;
3573}
3574
3575/* Emit a line note for FILE and LINE after the insn AFTER. */
3576
3577rtx
3578emit_line_note_after (file, line, after)
3cce094d 3579 const char *file;
23b2ce53
RS
3580 int line;
3581 rtx after;
3582{
b3694847 3583 rtx note;
23b2ce53
RS
3584
3585 if (no_line_numbers && line > 0)
3586 {
3587 cur_insn_uid++;
3588 return 0;
3589 }
3590
3591 note = rtx_alloc (NOTE);
3592 INSN_UID (note) = cur_insn_uid++;
3593 NOTE_SOURCE_FILE (note) = file;
3594 NOTE_LINE_NUMBER (note) = line;
3595 add_insn_after (note, after);
3596 return note;
3597}
3598\f
3599/* Make an insn of code INSN with pattern PATTERN
3600 and add it to the end of the doubly-linked list.
3601 If PATTERN is a SEQUENCE, take the elements of it
3602 and emit an insn for each element.
3603
3604 Returns the last insn emitted. */
3605
3606rtx
3607emit_insn (pattern)
3608 rtx pattern;
3609{
3610 rtx insn = last_insn;
3611
3612 if (GET_CODE (pattern) == SEQUENCE)
3613 {
b3694847 3614 int i;
23b2ce53
RS
3615
3616 for (i = 0; i < XVECLEN (pattern, 0); i++)
3617 {
3618 insn = XVECEXP (pattern, 0, i);
3619 add_insn (insn);
3620 }
23b2ce53
RS
3621 }
3622 else
3623 {
4b1f5e8c 3624 insn = make_insn_raw (pattern);
23b2ce53
RS
3625 add_insn (insn);
3626 }
3627
3628 return insn;
3629}
3630
3631/* Emit the insns in a chain starting with INSN.
3632 Return the last insn emitted. */
3633
3634rtx
3635emit_insns (insn)
3636 rtx insn;
3637{
3638 rtx last = 0;
3639
3640 while (insn)
3641 {
3642 rtx next = NEXT_INSN (insn);
3643 add_insn (insn);
3644 last = insn;
3645 insn = next;
3646 }
3647
3648 return last;
3649}
3650
3651/* Emit the insns in a chain starting with INSN and place them in front of
3652 the insn BEFORE. Return the last insn emitted. */
3653
3654rtx
3655emit_insns_before (insn, before)
3656 rtx insn;
3657 rtx before;
3658{
3659 rtx last = 0;
3660
3661 while (insn)
3662 {
3663 rtx next = NEXT_INSN (insn);
a0ae8e8d 3664 add_insn_before (insn, before);
23b2ce53
RS
3665 last = insn;
3666 insn = next;
3667 }
3668
3669 return last;
3670}
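
/* Illustrative sketch (hypothetical caller): build a small chain of insns
   inside a pushed sequence, then splice it in front of BEFORE.
   start_sequence and end_sequence are defined later in this file.  */

static rtx
example_prepend_copy (dest, src, before)
     rtx dest, src, before;
{
  rtx seq;

  start_sequence ();
  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
  seq = get_insns ();
  end_sequence ();

  return emit_insns_before (seq, before);
}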
3671
e0a5c5eb
RS
3672/* Emit the insns in a chain starting with FIRST and place them in back of
3673 the insn AFTER. Return the last insn emitted. */
3674
3675rtx
3676emit_insns_after (first, after)
b3694847
SS
3677 rtx first;
3678 rtx after;
e0a5c5eb 3679{
b3694847
SS
3680 rtx last;
3681 rtx after_after;
3c030e88 3682 basic_block bb;
e0a5c5eb
RS
3683
3684 if (!after)
3685 abort ();
3686
3687 if (!first)
ca6c03ca 3688 return after;
e0a5c5eb 3689
3c030e88
JH
3690 if (basic_block_for_insn
 3691 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3692 && (bb = BLOCK_FOR_INSN (after)))
3693 {
3694 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3695 set_block_for_insn (last, bb);
 3696 set_block_for_insn (last, bb);	/* The loop above stops at the final insn; give it a block too.  */
3697 if (bb->end == after)
3698 bb->end = last;
3699 }
3700 else
3701 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3702 continue;
e0a5c5eb
RS
3703
3704 after_after = NEXT_INSN (after);
3705
3706 NEXT_INSN (after) = first;
3707 PREV_INSN (first) = after;
3708 NEXT_INSN (last) = after_after;
3709 if (after_after)
3710 PREV_INSN (after_after) = last;
3711
c4d990db
RS
3712 if (after == last_insn)
3713 last_insn = last;
e0a5c5eb
RS
3714 return last;
3715}
3716
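/* An illustrative sketch, not from the original source: the usual way a
   detached chain reaches emit_insns_after.  The insns are built inside a
   nested sequence, detached with get_insns, and then spliced in after AFTER.
   The helper name is hypothetical; emit_move_insn and get_insns are assumed
   from elsewhere in the compiler.  */

static rtx
emit_move_after (target, src, after)
     rtx target, src, after;
{
  rtx chain;

  start_sequence ();
  emit_move_insn (target, src);
  chain = get_insns ();
  end_sequence ();

  return emit_insns_after (chain, after);
}
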
23b2ce53
RS
3717/* Make an insn of code JUMP_INSN with pattern PATTERN
3718 and add it to the end of the doubly-linked list. */
3719
3720rtx
3721emit_jump_insn (pattern)
3722 rtx pattern;
3723{
3724 if (GET_CODE (pattern) == SEQUENCE)
3725 return emit_insn (pattern);
3726 else
3727 {
b3694847 3728 rtx insn = make_jump_insn_raw (pattern);
23b2ce53
RS
3729 add_insn (insn);
3730 return insn;
3731 }
3732}
3733
3734/* Make an insn of code CALL_INSN with pattern PATTERN
3735 and add it to the end of the doubly-linked list. */
3736
3737rtx
3738emit_call_insn (pattern)
3739 rtx pattern;
3740{
3741 if (GET_CODE (pattern) == SEQUENCE)
3742 return emit_insn (pattern);
3743 else
3744 {
b3694847 3745 rtx insn = make_call_insn_raw (pattern);
23b2ce53
RS
3746 add_insn (insn);
3747 PUT_CODE (insn, CALL_INSN);
3748 return insn;
3749 }
3750}
3751
3752/* Add the label LABEL to the end of the doubly-linked list. */
3753
3754rtx
3755emit_label (label)
3756 rtx label;
3757{
3758 /* This can be called twice for the same label
3759 as a result of the confusion that follows a syntax error!
3760 So make it harmless. */
3761 if (INSN_UID (label) == 0)
3762 {
3763 INSN_UID (label) = cur_insn_uid++;
3764 add_insn (label);
3765 }
3766 return label;
3767}
3768
3769/* Make an insn of code BARRIER
3770 and add it to the end of the doubly-linked list. */
3771
3772rtx
3773emit_barrier ()
3774{
b3694847 3775 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
3776 INSN_UID (barrier) = cur_insn_uid++;
3777 add_insn (barrier);
3778 return barrier;
3779}
3780
3781/* Make an insn of code NOTE
3782 with data-fields specified by FILE and LINE
3783 and add it to the end of the doubly-linked list,
3784 but only if line-numbers are desired for debugging info. */
3785
3786rtx
3787emit_line_note (file, line)
3cce094d 3788 const char *file;
23b2ce53
RS
3789 int line;
3790{
3f1d071b 3791 set_file_and_line_for_stmt (file, line);
23b2ce53
RS
3792
3793#if 0
3794 if (no_line_numbers)
3795 return 0;
3796#endif
3797
3798 return emit_note (file, line);
3799}
3800
3801/* Make an insn of code NOTE
3802 with data-fields specified by FILE and LINE
3803 and add it to the end of the doubly-linked list.
3804 If it is a line-number NOTE, omit it if it matches the previous one. */
3805
3806rtx
3807emit_note (file, line)
3cce094d 3808 const char *file;
23b2ce53
RS
3809 int line;
3810{
b3694847 3811 rtx note;
23b2ce53
RS
3812
3813 if (line > 0)
3814 {
3815 if (file && last_filename && !strcmp (file, last_filename)
3816 && line == last_linenum)
3817 return 0;
3818 last_filename = file;
3819 last_linenum = line;
3820 }
3821
3822 if (no_line_numbers && line > 0)
3823 {
3824 cur_insn_uid++;
3825 return 0;
3826 }
3827
3828 note = rtx_alloc (NOTE);
3829 INSN_UID (note) = cur_insn_uid++;
3830 NOTE_SOURCE_FILE (note) = file;
3831 NOTE_LINE_NUMBER (note) = line;
3832 add_insn (note);
3833 return note;
3834}
3835
fe77a034 3836/* Emit a line note for FILE and LINE, and don't omit it even if it matches the previous note. */
23b2ce53
RS
3837
3838rtx
3839emit_line_note_force (file, line)
3cce094d 3840 const char *file;
23b2ce53
RS
3841 int line;
3842{
3843 last_linenum = -1;
3844 return emit_line_note (file, line);
3845}
3846
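/* An illustrative sketch, not from the original source: emit_note suppresses
   consecutive line notes for the same FILE and LINE (when LINE > 0), and
   emit_line_note_force clears that cache so the next note is emitted
   regardless.  The helper name is hypothetical.  */

static void
note_line_twice (file, line)
     const char *file;
     int line;
{
  emit_line_note (file, line);       /* Emits a NOTE (unless suppressed).  */
  emit_line_note (file, line);       /* Returns 0: same FILE/LINE as before.  */
  emit_line_note_force (file, line); /* Emitted again; the duplicate check
					was reset.  */
}
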
3847/* Cause next statement to emit a line note even if the line number
3848 has not changed. This is used at the beginning of a function. */
3849
3850void
3851force_next_line_note ()
3852{
3853 last_linenum = -1;
3854}
87b47c85
AM
3855
3856/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 3857 note of this type already exists, remove it first. */
87b47c85 3858
750c9258 3859void
87b47c85
AM
3860set_unique_reg_note (insn, kind, datum)
3861 rtx insn;
3862 enum reg_note kind;
3863 rtx datum;
3864{
3865 rtx note = find_reg_note (insn, kind, NULL_RTX);
3866
3867 /* First remove the note if there already is one. */
750c9258 3868 if (note)
87b47c85
AM
3869 remove_note (insn, note);
3870
3871 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
3872}
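
/* An illustrative sketch, not from the original source: a common use of
   set_unique_reg_note.  Record that INSN is known to compute the constant
   VALUE; a later call with REG_EQUAL replaces this note rather than adding a
   second one.  The helper name is hypothetical.  */

static void
record_constant_result (insn, value)
     rtx insn;
     HOST_WIDE_INT value;
{
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));
}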
23b2ce53
RS
3873\f
3874/* Return an indication of which type of insn should have X as a body.
3875 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
3876
3877enum rtx_code
3878classify_insn (x)
3879 rtx x;
3880{
3881 if (GET_CODE (x) == CODE_LABEL)
3882 return CODE_LABEL;
3883 if (GET_CODE (x) == CALL)
3884 return CALL_INSN;
3885 if (GET_CODE (x) == RETURN)
3886 return JUMP_INSN;
3887 if (GET_CODE (x) == SET)
3888 {
3889 if (SET_DEST (x) == pc_rtx)
3890 return JUMP_INSN;
3891 else if (GET_CODE (SET_SRC (x)) == CALL)
3892 return CALL_INSN;
3893 else
3894 return INSN;
3895 }
3896 if (GET_CODE (x) == PARALLEL)
3897 {
b3694847 3898 int j;
23b2ce53
RS
3899 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
3900 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
3901 return CALL_INSN;
3902 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3903 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
3904 return JUMP_INSN;
3905 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3906 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
3907 return CALL_INSN;
3908 }
3909 return INSN;
3910}
3911
3912/* Emit the rtl pattern X as an appropriate kind of insn.
3913 If X is a label, it is simply added into the insn chain. */
3914
3915rtx
3916emit (x)
3917 rtx x;
3918{
3919 enum rtx_code code = classify_insn (x);
3920
3921 if (code == CODE_LABEL)
3922 return emit_label (x);
3923 else if (code == INSN)
3924 return emit_insn (x);
3925 else if (code == JUMP_INSN)
3926 {
b3694847 3927 rtx insn = emit_jump_insn (x);
7f1c097d 3928 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
23b2ce53
RS
3929 return emit_barrier ();
3930 return insn;
3931 }
3932 else if (code == CALL_INSN)
3933 return emit_call_insn (x);
3934 else
3935 abort ();
3936}
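
/* An illustrative sketch, not from the original source: emit dispatches on
   classify_insn.  An ordinary SET becomes a plain INSN, while a SET of pc
   becomes a JUMP_INSN, and emit appends a BARRIER when that jump is
   unconditional.  The helper name is hypothetical; REG and LABEL are assumed
   to come from gen_reg_rtx and gen_label_rtx.  */

static void
emit_classified (reg, label)
     rtx reg, label;
{
  emit (gen_rtx_SET (VOIDmode, reg, const0_rtx));
  emit (gen_rtx_SET (VOIDmode, pc_rtx,
		     gen_rtx_LABEL_REF (VOIDmode, label)));
}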
3937\f
5c7a310f
MM
3938/* Begin emitting insns to a sequence which can be packaged in an
3939 RTL_EXPR. If this sequence will contain something that might cause
3940 the compiler to pop arguments to function calls (because those
3941 pops have previously been deferred; see INHIBIT_DEFER_POP for more
3942 details), use do_pending_stack_adjust before calling this function.
3943 That will ensure that the deferred pops are not accidentally
4eb00163 3944 emitted in the middle of this sequence. */
23b2ce53
RS
3945
3946void
3947start_sequence ()
3948{
3949 struct sequence_stack *tem;
3950
a3770a81 3951 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
23b2ce53 3952
49ad7cfa 3953 tem->next = seq_stack;
23b2ce53
RS
3954 tem->first = first_insn;
3955 tem->last = last_insn;
591ccf92 3956 tem->sequence_rtl_expr = seq_rtl_expr;
23b2ce53 3957
49ad7cfa 3958 seq_stack = tem;
23b2ce53
RS
3959
3960 first_insn = 0;
3961 last_insn = 0;
3962}
3963
591ccf92
MM
3964/* Similarly, but indicate that this sequence will be placed in T, an
3965 RTL_EXPR. See the documentation for start_sequence for more
3966 information about how to use this function. */
3967
3968void
3969start_sequence_for_rtl_expr (t)
3970 tree t;
3971{
3972 start_sequence ();
3973
3974 seq_rtl_expr = t;
3975}
3976
5c7a310f
MM
3977/* Set up the insn chain starting with FIRST as the current sequence,
3978 saving the previously current one. See the documentation for
3979 start_sequence for more information about how to use this function. */
23b2ce53
RS
3980
3981void
3982push_to_sequence (first)
3983 rtx first;
3984{
3985 rtx last;
3986
3987 start_sequence ();
3988
3989 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
3990
3991 first_insn = first;
3992 last_insn = last;
3993}
3994
c14f7160
ML
3995/* Set up the insn chain, starting with FIRST and ending with LAST, as the current sequence. */
3996
3997void
3998push_to_full_sequence (first, last)
3999 rtx first, last;
4000{
4001 start_sequence ();
4002 first_insn = first;
4003 last_insn = last;
4004 /* We really should have the end of the insn chain here. */
4005 if (last && NEXT_INSN (last))
4006 abort ();
4007}
4008
f15ae3a1
TW
4009/* Set up the outer-level insn chain
4010 as the current sequence, saving the previously current one. */
4011
4012void
4013push_topmost_sequence ()
4014{
aefdd5ab 4015 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
4016
4017 start_sequence ();
4018
49ad7cfa 4019 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
4020 top = stack;
4021
4022 first_insn = top->first;
4023 last_insn = top->last;
591ccf92 4024 seq_rtl_expr = top->sequence_rtl_expr;
f15ae3a1
TW
4025}
4026
4027/* After emitting to the outer-level insn chain, update the outer-level
4028 insn chain, and restore the previous saved state. */
4029
4030void
4031pop_topmost_sequence ()
4032{
aefdd5ab 4033 struct sequence_stack *stack, *top = NULL;
f15ae3a1 4034
49ad7cfa 4035 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
4036 top = stack;
4037
4038 top->first = first_insn;
4039 top->last = last_insn;
591ccf92 4040 /* ??? Why don't we save seq_rtl_expr here? */
f15ae3a1
TW
4041
4042 end_sequence ();
4043}
4044
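/* An illustrative sketch, not from the original source: push_topmost_sequence
   and pop_topmost_sequence let an insn be placed at the end of the function's
   outermost chain even while a nested sequence is being emitted.  The helper
   name is hypothetical.  */

static rtx
emit_insn_at_top_level (pattern)
     rtx pattern;
{
  rtx insn;

  push_topmost_sequence ();
  insn = emit_insn (pattern);
  pop_topmost_sequence ();
  return insn;
}
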
23b2ce53
RS
4045/* After emitting to a sequence, restore previous saved state.
4046
5c7a310f 4047 To get the contents of the sequence just made, you must call
750c9258 4048 `gen_sequence' *before* calling here.
5c7a310f
MM
4049
4050 If the compiler might have deferred popping arguments while
4051 generating this sequence, and this sequence will not be immediately
4052 inserted into the instruction stream, use do_pending_stack_adjust
4053 before calling gen_sequence. That will ensure that the deferred
4054 pops are inserted into this sequence, and not into some random
4055 location in the instruction stream. See INHIBIT_DEFER_POP for more
4056 information about deferred popping of arguments. */
23b2ce53
RS
4057
4058void
4059end_sequence ()
4060{
49ad7cfa 4061 struct sequence_stack *tem = seq_stack;
23b2ce53
RS
4062
4063 first_insn = tem->first;
4064 last_insn = tem->last;
591ccf92 4065 seq_rtl_expr = tem->sequence_rtl_expr;
49ad7cfa 4066 seq_stack = tem->next;
23b2ce53 4067
a3770a81 4068 free (tem);
23b2ce53
RS
4069}
4070
c14f7160
ML
4071/* This works like end_sequence, but records the old sequence in FIRST
4072 and LAST. */
4073
4074void
4075end_full_sequence (first, last)
4076 rtx *first, *last;
4077{
4078 *first = first_insn;
4079 *last = last_insn;
 4080 end_sequence ();
4081}
4082
23b2ce53
RS
4083/* Return 1 if currently emitting into a sequence. */
4084
4085int
4086in_sequence_p ()
4087{
49ad7cfa 4088 return seq_stack != 0;
23b2ce53
RS
4089}
4090
4091/* Generate a SEQUENCE rtx containing the insns already emitted
4092 to the current sequence.
4093
4094 This is how the gen_... function from a DEFINE_EXPAND
4095 constructs the SEQUENCE that it returns. */
4096
4097rtx
4098gen_sequence ()
4099{
4100 rtx result;
4101 rtx tem;
23b2ce53
RS
4102 int i;
4103 int len;
4104
4105 /* Count the insns in the chain. */
4106 len = 0;
4107 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
4108 len++;
4109
ee265800 4110 /* If only one insn, return it rather than a SEQUENCE.
23b2ce53 4111 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
750c9258 4112 the case of an empty list.)
ee265800
AH
4113 We only return the pattern of an insn if its code is INSN and it
4114 has no notes. This ensures that no information gets lost. */
23b2ce53 4115 if (len == 1
ca55abae 4116 && ! RTX_FRAME_RELATED_P (first_insn)
ee265800 4117 && GET_CODE (first_insn) == INSN
30f7a378 4118 /* Don't throw away any reg notes. */
ee265800 4119 && REG_NOTES (first_insn) == 0)
1f8f4a0b 4120 return PATTERN (first_insn);
23b2ce53 4121
1f8f4a0b 4122 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
23b2ce53
RS
4123
4124 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
4125 XVECEXP (result, 0, i) = tem;
4126
4127 return result;
4128}
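
/* An illustrative sketch, not from the original source: the canonical
   start_sequence / gen_sequence / end_sequence idiom described above.
   Everything emitted between start_sequence and end_sequence lands in a
   private chain; gen_sequence must be called before end_sequence to capture
   it.  The helper name is hypothetical; emit_move_insn is assumed from
   expr.c.  */

static rtx
gen_move_sequence (target, src)
     rtx target, src;
{
  rtx seq;

  start_sequence ();
  emit_move_insn (target, src);
  seq = gen_sequence ();
  end_sequence ();

  /* SEQ is either a single pattern or a SEQUENCE rtx; emit_insn copes with
     both.  */
  return seq;
}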
4129\f
59ec66dc
MM
4130/* Put the various virtual registers into REGNO_REG_RTX. */
4131
4132void
49ad7cfa
BS
4133init_virtual_regs (es)
4134 struct emit_status *es;
59ec66dc 4135{
49ad7cfa
BS
4136 rtx *ptr = es->x_regno_reg_rtx;
4137 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4138 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4139 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4140 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4141 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4142}
4143
4144void
4145clear_emit_caches ()
4146{
4147 int i;
4148
4149 /* Clear the start_sequence/gen_sequence cache. */
49ad7cfa
BS
4150 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
4151 sequence_result[i] = 0;
4152 free_insn = 0;
59ec66dc 4153}
da43a810
BS
4154\f
4155/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4156static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4157static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4158static int copy_insn_n_scratches;
4159
4160/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4161 copied an ASM_OPERANDS.
4162 In that case, it is the original input-operand vector. */
4163static rtvec orig_asm_operands_vector;
4164
4165/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4166 copied an ASM_OPERANDS.
4167 In that case, it is the copied input-operand vector. */
4168static rtvec copy_asm_operands_vector;
4169
4170/* Likewise for the constraints vector. */
4171static rtvec orig_asm_constraints_vector;
4172static rtvec copy_asm_constraints_vector;
4173
4174/* Recursively create a new copy of an rtx for copy_insn.
4175 This function differs from copy_rtx in that it handles SCRATCHes and
4176 ASM_OPERANDs properly.
4177 Normally, this function is not used directly; use copy_insn as front end.
4178 However, you could first copy an insn pattern with copy_insn and then use
4179 this function afterwards to properly copy any REG_NOTEs containing
4180 SCRATCHes. */
4181
4182rtx
4183copy_insn_1 (orig)
b3694847 4184 rtx orig;
da43a810 4185{
b3694847
SS
4186 rtx copy;
4187 int i, j;
4188 RTX_CODE code;
4189 const char *format_ptr;
da43a810
BS
4190
4191 code = GET_CODE (orig);
4192
4193 switch (code)
4194 {
4195 case REG:
4196 case QUEUED:
4197 case CONST_INT:
4198 case CONST_DOUBLE:
4199 case SYMBOL_REF:
4200 case CODE_LABEL:
4201 case PC:
4202 case CC0:
4203 case ADDRESSOF:
4204 return orig;
4205
4206 case SCRATCH:
4207 for (i = 0; i < copy_insn_n_scratches; i++)
4208 if (copy_insn_scratch_in[i] == orig)
4209 return copy_insn_scratch_out[i];
4210 break;
4211
4212 case CONST:
4213 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4214 a LABEL_REF, it isn't sharable. */
4215 if (GET_CODE (XEXP (orig, 0)) == PLUS
4216 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4217 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4218 return orig;
4219 break;
750c9258 4220
da43a810
BS
4221 /* A MEM with a constant address is not sharable. The problem is that
4222 the constant address may need to be reloaded. If the mem is shared,
4223 then reloading one copy of this mem will cause all copies to appear
4224 to have been reloaded. */
4225
4226 default:
4227 break;
4228 }
4229
4230 copy = rtx_alloc (code);
4231
4232 /* Copy the various flags, and other information. We assume that
4233 all fields need copying, and then clear the fields that should
4234 not be copied. That is the sensible default behavior, and forces
4235 us to explicitly document why we are *not* copying a flag. */
4236 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
4237
4238 /* We do not copy the USED flag, which is used as a mark bit during
4239 walks over the RTL. */
4240 copy->used = 0;
4241
4242 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4243 if (GET_RTX_CLASS (code) == 'i')
4244 {
4245 copy->jump = 0;
4246 copy->call = 0;
4247 copy->frame_related = 0;
4248 }
750c9258 4249
da43a810
BS
4250 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4251
4252 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4253 {
e63db8f6 4254 copy->fld[i] = orig->fld[i];
da43a810
BS
4255 switch (*format_ptr++)
4256 {
4257 case 'e':
da43a810
BS
4258 if (XEXP (orig, i) != NULL)
4259 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4260 break;
4261
da43a810
BS
4262 case 'E':
4263 case 'V':
da43a810
BS
4264 if (XVEC (orig, i) == orig_asm_constraints_vector)
4265 XVEC (copy, i) = copy_asm_constraints_vector;
4266 else if (XVEC (orig, i) == orig_asm_operands_vector)
4267 XVEC (copy, i) = copy_asm_operands_vector;
4268 else if (XVEC (orig, i) != NULL)
4269 {
4270 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4271 for (j = 0; j < XVECLEN (copy, i); j++)
4272 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4273 }
4274 break;
4275
da43a810 4276 case 't':
da43a810 4277 case 'w':
da43a810 4278 case 'i':
da43a810
BS
4279 case 's':
4280 case 'S':
e63db8f6
BS
4281 case 'u':
4282 case '0':
4283 /* These are left unchanged. */
da43a810
BS
4284 break;
4285
4286 default:
4287 abort ();
4288 }
4289 }
4290
4291 if (code == SCRATCH)
4292 {
4293 i = copy_insn_n_scratches++;
4294 if (i >= MAX_RECOG_OPERANDS)
4295 abort ();
4296 copy_insn_scratch_in[i] = orig;
4297 copy_insn_scratch_out[i] = copy;
4298 }
4299 else if (code == ASM_OPERANDS)
4300 {
6462bb43
AO
4301 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4302 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4303 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4304 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
da43a810
BS
4305 }
4306
4307 return copy;
4308}
4309
4310/* Create a new copy of an rtx.
4311 This function differs from copy_rtx in that it handles SCRATCHes and
4312 ASM_OPERANDs properly.
4313 INSN doesn't really have to be a full INSN; it could be just the
4314 pattern. */
4315rtx
4316copy_insn (insn)
4317 rtx insn;
4318{
4319 copy_insn_n_scratches = 0;
4320 orig_asm_operands_vector = 0;
4321 orig_asm_constraints_vector = 0;
4322 copy_asm_operands_vector = 0;
4323 copy_asm_constraints_vector = 0;
4324 return copy_insn_1 (insn);
4325}
59ec66dc 4326
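/* An illustrative sketch, not from the original source: duplicate an insn's
   pattern and its REG_NOTES.  Reusing copy_insn_1 for the notes, as the
   comment above copy_insn_1 suggests, keeps a SCRATCH that is shared between
   the pattern and a note shared in the copy as well.  The helper name is
   hypothetical.  */

static void
copy_pattern_and_notes (insn, pat_copy, notes_copy)
     rtx insn;
     rtx *pat_copy, *notes_copy;
{
  *pat_copy = copy_insn (PATTERN (insn));
  *notes_copy = copy_insn_1 (REG_NOTES (insn));
}
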
23b2ce53
RS
4327/* Initialize data structures and variables in this file
4328 before generating rtl for each function. */
4329
4330void
4331init_emit ()
4332{
01d939e8 4333 struct function *f = cfun;
23b2ce53 4334
49ad7cfa 4335 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
23b2ce53
RS
4336 first_insn = NULL;
4337 last_insn = NULL;
591ccf92 4338 seq_rtl_expr = NULL;
23b2ce53
RS
4339 cur_insn_uid = 1;
4340 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4341 last_linenum = 0;
4342 last_filename = 0;
4343 first_label_num = label_num;
4344 last_label_num = 0;
49ad7cfa 4345 seq_stack = NULL;
23b2ce53 4346
49ad7cfa 4347 clear_emit_caches ();
23b2ce53
RS
4348
4349 /* Init the tables that describe all the pseudo regs. */
4350
3502dc9c 4351 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
23b2ce53 4352
49ad7cfa 4353 f->emit->regno_pointer_align
3502dc9c 4354 = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
f9e158c3 4355 sizeof (unsigned char));
86fe05e0 4356
750c9258 4357 regno_reg_rtx
3502dc9c 4358 = (rtx *) xcalloc (f->emit->regno_pointer_align_length,
a3770a81 4359 sizeof (rtx));
23b2ce53
RS
4360
4361 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
49ad7cfa 4362 init_virtual_regs (f->emit);
740ab4a2
RK
4363
4364 /* Indicate that the virtual registers and stack locations are
4365 all pointers. */
3502dc9c
JDA
4366 REG_POINTER (stack_pointer_rtx) = 1;
4367 REG_POINTER (frame_pointer_rtx) = 1;
4368 REG_POINTER (hard_frame_pointer_rtx) = 1;
4369 REG_POINTER (arg_pointer_rtx) = 1;
740ab4a2 4370
3502dc9c
JDA
4371 REG_POINTER (virtual_incoming_args_rtx) = 1;
4372 REG_POINTER (virtual_stack_vars_rtx) = 1;
4373 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
4374 REG_POINTER (virtual_outgoing_args_rtx) = 1;
4375 REG_POINTER (virtual_cfa_rtx) = 1;
5e82e7bd 4376
86fe05e0 4377#ifdef STACK_BOUNDARY
bdb429a5
RK
4378 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
4379 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4380 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4381 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
4382
4383 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
4384 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
4385 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
4386 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
4387 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
86fe05e0
RK
4388#endif
4389
5e82e7bd
JVA
4390#ifdef INIT_EXPANDERS
4391 INIT_EXPANDERS;
4392#endif
23b2ce53
RS
4393}
4394
87ff9c8e
RH
4395/* Mark SS for GC. */
4396
4397static void
4398mark_sequence_stack (ss)
4399 struct sequence_stack *ss;
4400{
4401 while (ss)
4402 {
4403 ggc_mark_rtx (ss->first);
591ccf92 4404 ggc_mark_tree (ss->sequence_rtl_expr);
87ff9c8e
RH
4405 ss = ss->next;
4406 }
4407}
4408
4409/* Mark ES for GC. */
4410
4411void
fa51b01b 4412mark_emit_status (es)
87ff9c8e
RH
4413 struct emit_status *es;
4414{
4415 rtx *r;
4416 int i;
4417
4418 if (es == 0)
4419 return;
4420
3502dc9c 4421 for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx;
87ff9c8e
RH
4422 i > 0; --i, ++r)
4423 ggc_mark_rtx (*r);
4424
4425 mark_sequence_stack (es->sequence_stack);
591ccf92 4426 ggc_mark_tree (es->sequence_rtl_expr);
87ff9c8e
RH
4427 ggc_mark_rtx (es->x_first_insn);
4428}
4429
23b2ce53
RS
4430/* Create some permanent unique rtl objects shared between all functions.
4431 LINE_NUMBERS is nonzero if line numbers are to be generated. */
4432
4433void
4434init_emit_once (line_numbers)
4435 int line_numbers;
4436{
4437 int i;
4438 enum machine_mode mode;
9ec36da5 4439 enum machine_mode double_mode;
23b2ce53 4440
173b24b9 4441 /* Initialize the CONST_INT and memory attribute hash tables. */
750c9258 4442 const_int_htab = htab_create (37, const_int_htab_hash,
67673f5c 4443 const_int_htab_eq, NULL);
173b24b9
RK
4444 ggc_add_deletable_htab (const_int_htab, 0, 0);
4445
4446 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
4447 mem_attrs_htab_eq, NULL);
4448 ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);
67673f5c 4449
23b2ce53
RS
4450 no_line_numbers = ! line_numbers;
4451
43fa6302
AS
 4452 /* Compute the byte, word, and double modes. */
4453
4454 byte_mode = VOIDmode;
4455 word_mode = VOIDmode;
4456 double_mode = VOIDmode;
4457
4458 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4459 mode = GET_MODE_WIDER_MODE (mode))
4460 {
4461 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
4462 && byte_mode == VOIDmode)
4463 byte_mode = mode;
4464
4465 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
4466 && word_mode == VOIDmode)
4467 word_mode = mode;
4468 }
4469
43fa6302
AS
4470 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4471 mode = GET_MODE_WIDER_MODE (mode))
4472 {
4473 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
4474 && double_mode == VOIDmode)
4475 double_mode = mode;
4476 }
4477
4478 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
4479
5da077de
AS
4480 /* Assign register numbers to the globally defined register rtx.
4481 This must be done at runtime because the register number field
4482 is in a union and some compilers can't initialize unions. */
4483
4484 pc_rtx = gen_rtx (PC, VOIDmode);
4485 cc0_rtx = gen_rtx (CC0, VOIDmode);
08394eef
BS
4486 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
4487 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5da077de 4488 if (hard_frame_pointer_rtx == 0)
750c9258 4489 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
08394eef 4490 HARD_FRAME_POINTER_REGNUM);
5da077de 4491 if (arg_pointer_rtx == 0)
08394eef 4492 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
750c9258 4493 virtual_incoming_args_rtx =
08394eef 4494 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
750c9258 4495 virtual_stack_vars_rtx =
08394eef 4496 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
750c9258 4497 virtual_stack_dynamic_rtx =
08394eef 4498 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
750c9258
AJ
4499 virtual_outgoing_args_rtx =
4500 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
08394eef 4501 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5da077de
AS
4502
4503 /* These rtx must be roots if GC is enabled. */
1f8f4a0b 4504 ggc_add_rtx_root (global_rtl, GR_MAX);
5da077de 4505
5da077de 4506#ifdef INIT_EXPANDERS
414c4dc4
NC
4507 /* This is to initialize {init|mark|free}_machine_status before the first
4508 call to push_function_context_to. This is needed by the Chill front
 4509 end, which calls push_function_context_to before the first call to
5da077de
AS
4510 init_function_start. */
4511 INIT_EXPANDERS;
4512#endif
4513
23b2ce53
RS
4514 /* Create the unique rtx's for certain rtx codes and operand values. */
4515
c5c76735
JL
4516 /* Don't use gen_rtx here since gen_rtx in this case
4517 tries to use these variables. */
23b2ce53 4518 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
750c9258 4519 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5da077de 4520 gen_rtx_raw_CONST_INT (VOIDmode, i);
1f8f4a0b 4521 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
23b2ce53 4522
68d75312
JC
4523 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
4524 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5da077de 4525 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
68d75312 4526 else
3b80f6ca 4527 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
23b2ce53 4528
9ec36da5
JL
4529 dconst0 = REAL_VALUE_ATOF ("0", double_mode);
4530 dconst1 = REAL_VALUE_ATOF ("1", double_mode);
4531 dconst2 = REAL_VALUE_ATOF ("2", double_mode);
4532 dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);
23b2ce53
RS
4533
4534 for (i = 0; i <= 2; i++)
4535 {
4536 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4537 mode = GET_MODE_WIDER_MODE (mode))
4538 {
4539 rtx tem = rtx_alloc (CONST_DOUBLE);
4540 union real_extract u;
4541
d425c5b0
RH
4542 /* Zero any holes in a structure. */
4543 memset ((char *) &u, 0, sizeof u);
23b2ce53
RS
4544 u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;
4545
d425c5b0
RH
4546 /* Avoid trailing garbage in the rtx. */
4547 if (sizeof (u) < sizeof (HOST_WIDE_INT))
4548 CONST_DOUBLE_LOW (tem) = 0;
4549 if (sizeof (u) < 2 * sizeof (HOST_WIDE_INT))
4550 CONST_DOUBLE_HIGH (tem) = 0;
4551
4e135bdd 4552 memcpy (&CONST_DOUBLE_LOW (tem), &u, sizeof u);
23b2ce53 4553 CONST_DOUBLE_MEM (tem) = cc0_rtx;
f8a83ee3 4554 CONST_DOUBLE_CHAIN (tem) = NULL_RTX;
23b2ce53
RS
4555 PUT_MODE (tem, mode);
4556
4557 const_tiny_rtx[i][(int) mode] = tem;
4558 }
4559
906c4e36 4560 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
23b2ce53
RS
4561
4562 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4563 mode = GET_MODE_WIDER_MODE (mode))
906c4e36 4564 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
33d3e559
RS
4565
4566 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
4567 mode != VOIDmode;
4568 mode = GET_MODE_WIDER_MODE (mode))
4569 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
23b2ce53
RS
4570 }
4571
dbbbbf3b
JDA
4572 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
4573 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
4574 const_tiny_rtx[0][i] = const0_rtx;
23b2ce53 4575
f0417c82
RH
4576 const_tiny_rtx[0][(int) BImode] = const0_rtx;
4577 if (STORE_FLAG_VALUE == 1)
4578 const_tiny_rtx[1][(int) BImode] = const1_rtx;
4579
b6a1cbae
GM
4580 /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
4581 `(rtx *) const_tiny_rtx'. The former has bounds that only cover
4582 `const_tiny_rtx[0]', whereas the latter has bounds that cover all. */
4583 ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
21cd906e 4584 ggc_add_rtx_root (&const_true_rtx, 1);
a7e1e2ac
AO
4585
4586#ifdef RETURN_ADDRESS_POINTER_REGNUM
4587 return_address_pointer_rtx
08394eef 4588 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
a7e1e2ac
AO
4589#endif
4590
4591#ifdef STRUCT_VALUE
4592 struct_value_rtx = STRUCT_VALUE;
4593#else
4594 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
4595#endif
4596
4597#ifdef STRUCT_VALUE_INCOMING
4598 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
4599#else
4600#ifdef STRUCT_VALUE_INCOMING_REGNUM
4601 struct_value_incoming_rtx
4602 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
4603#else
4604 struct_value_incoming_rtx = struct_value_rtx;
4605#endif
4606#endif
4607
4608#ifdef STATIC_CHAIN_REGNUM
4609 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
4610
4611#ifdef STATIC_CHAIN_INCOMING_REGNUM
4612 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
4613 static_chain_incoming_rtx
4614 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
4615 else
4616#endif
4617 static_chain_incoming_rtx = static_chain_rtx;
4618#endif
4619
4620#ifdef STATIC_CHAIN
4621 static_chain_rtx = STATIC_CHAIN;
4622
4623#ifdef STATIC_CHAIN_INCOMING
4624 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
4625#else
4626 static_chain_incoming_rtx = static_chain_rtx;
4627#endif
4628#endif
4629
848e0190
JH
4630 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
4631 pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
a7e1e2ac 4632
d7db6646
RH
4633 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
4634 ggc_add_rtx_root (&struct_value_rtx, 1);
4635 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
4636 ggc_add_rtx_root (&static_chain_rtx, 1);
4637 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
4638 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
23b2ce53 4639}
a11759a3
JR
4640\f
4641/* Query and clear/restore no_line_numbers. This is used by the
4642 switch / case handling in stmt.c to give proper line numbers in
4643 warnings about unreachable code. */
4644
4645int
4646force_line_numbers ()
4647{
4648 int old = no_line_numbers;
4649
4650 no_line_numbers = 0;
4651 if (old)
4652 force_next_line_note ();
4653 return old;
4654}
4655
4656void
4657restore_line_number_status (old_value)
4658 int old_value;
4659{
4660 no_line_numbers = old_value;
4661}
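
/* An illustrative sketch, not from the original source: the save/restore
   idiom these two entry points support, as used by the switch/case handling
   in stmt.c.  The helper name is hypothetical.  */

static void
with_line_numbers_forced ()
{
  int old = force_line_numbers ();

  /* ... emit code whose line notes must not be suppressed ...  */

  restore_line_number_status (old);
}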