15bbde2b 1/* Emit RTL for the GNU C-Compiler expander.
ddca3e9d 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
5377f687 3 1999, 2000, 2001 Free Software Foundation, Inc.
15bbde2b 4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING. If not, write to
8d62a21c 19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
15bbde2b 21
22
23/* Middle-to-low level generation of rtx code and insns.
24
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
28
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
31
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
37
38#include "config.h"
405711de 39#include "system.h"
d3b64f2d 40#include "toplev.h"
15bbde2b 41#include "rtl.h"
3fd7e17f 42#include "tree.h"
7953c610 43#include "tm_p.h"
15bbde2b 44#include "flags.h"
45#include "function.h"
46#include "expr.h"
47#include "regs.h"
c6b6c51f 48#include "hard-reg-set.h"
73f5c1e3 49#include "hashtab.h"
15bbde2b 50#include "insn-config.h"
0dbd1c74 51#include "recog.h"
15bbde2b 52#include "real.h"
649d8da6 53#include "obstack.h"
a3426c4c 54#include "bitmap.h"
f3d96a58 55#include "basic-block.h"
a7b0c170 56#include "ggc.h"
649d8da6 57
399d45d3 58/* Commonly used modes. */
59
a92771b8 60enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
61enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9e042f31 62enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
a92771b8 63enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
399d45d3 64
15bbde2b 65
66/* This is *not* reset after each function. It gives each CODE_LABEL
67 in the entire compilation a unique label number. */
68
69static int label_num = 1;
70
15bbde2b 71/* Highest label number in current function.
72 Zero means use the value of label_num instead.
73 This is nonzero only when belatedly compiling an inline function. */
74
75static int last_label_num;
76
77/* Value label_num had when set_new_first_and_last_label_number was called.
78 If label_num has not changed since then, last_label_num is valid. */
79
80static int base_label_num;
81
82/* Nonzero means do not generate NOTEs for source line numbers. */
83
84static int no_line_numbers;
85
86/* Commonly used rtx's, so that we only need space for one copy.
87 These are initialized once for the entire compilation.
88 All of these except perhaps the floating-point CONST_DOUBLEs
89 are unique; no other rtx-object will be equal to any of these. */
90
57c097d5 91rtx global_rtl[GR_MAX];
15bbde2b 92
93/* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
95 record a copy of const[012]_rtx. */
96
97rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
98
1a60f06a 99rtx const_true_rtx;
100
15bbde2b 101REAL_VALUE_TYPE dconst0;
102REAL_VALUE_TYPE dconst1;
103REAL_VALUE_TYPE dconst2;
104REAL_VALUE_TYPE dconstm1;
105
106/* All references to the following fixed hard registers go through
107 these unique rtl objects. On machines where the frame-pointer and
108 arg-pointer are the same register, they use the same unique object.
109
110 After register allocation, other rtl objects which used to be pseudo-regs
111 may be clobbered to refer to the frame-pointer register.
112 But references that were originally to the frame-pointer can be
113 distinguished from the others because they contain frame_pointer_rtx.
114
90c25824 115 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
116 tricky: until register elimination has taken place hard_frame_pointer_rtx
d823ba47 117 should be used if it is being set, and frame_pointer_rtx otherwise. After
90c25824 118 register elimination hard_frame_pointer_rtx should always be used.
 119 On machines where the two registers are the same (which is most of
 120 them), frame_pointer_rtx and hard_frame_pointer_rtx are the same rtx.
121
15bbde2b 122 In an inline procedure, the stack and frame pointer rtxs may not be
123 used for anything else. */
15bbde2b 124rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
125rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
126rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
127rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
128rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
129
4b0c5859 130/* This is used to implement __builtin_return_address for some machines.
131 See for instance the MIPS port. */
132rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
133
15bbde2b 134/* We make one copy of (const_int C) where C is in
135 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
136 to save space during the compilation and simplify comparisons of
137 integers. */
138
57c097d5 139rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
15bbde2b 140
73f5c1e3 141/* A hash table storing CONST_INTs whose absolute value is greater
142 than MAX_SAVED_CONST_INT. */
143
144static htab_t const_int_htab;
145
15bbde2b 146/* start_sequence and gen_sequence can make a lot of rtx expressions which are
147 shortly thrown away. We use two mechanisms to prevent this waste:
148
1bfd55c5 149 For sizes up to 5 elements, we keep a SEQUENCE and its associated
150 rtvec for use by gen_sequence. One entry for each size is
151 sufficient because most cases are calls to gen_sequence followed by
152 immediately emitting the SEQUENCE. Reuse is safe since emitting a
153 sequence is destructive on the insn in it anyway and hence can't be
154 redone.
15bbde2b 155
156 We do not bother to save this cached data over nested function calls.
157 Instead, we just reinitialize them. */
158
159#define SEQUENCE_RESULT_SIZE 5
160
15bbde2b 161static rtx sequence_result[SEQUENCE_RESULT_SIZE];
162
a92771b8 163/* During RTL generation, we also keep a list of free INSN rtl codes. */
575333f9 164static rtx free_insn;
165
08513b52 166#define first_insn (cfun->emit->x_first_insn)
167#define last_insn (cfun->emit->x_last_insn)
168#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
169#define last_linenum (cfun->emit->x_last_linenum)
170#define last_filename (cfun->emit->x_last_filename)
171#define first_label_num (cfun->emit->x_first_label_num)
15bbde2b 172
621f6678 173static rtx make_jump_insn_raw PARAMS ((rtx));
174static rtx make_call_insn_raw PARAMS ((rtx));
175static rtx find_line_note PARAMS ((rtx));
176static void mark_sequence_stack PARAMS ((struct sequence_stack *));
2d96a59a 177static void unshare_all_rtl_1 PARAMS ((rtx));
72ec8878 178static void unshare_all_decls PARAMS ((tree));
01dc9f0c 179static void reset_used_decls PARAMS ((tree));
344dc2fa 180static void mark_label_nuses PARAMS ((rtx));
73f5c1e3 181static hashval_t const_int_htab_hash PARAMS ((const void *));
182static int const_int_htab_eq PARAMS ((const void *,
183 const void *));
184static int rtx_htab_mark_1 PARAMS ((void **, void *));
185static void rtx_htab_mark PARAMS ((void *));
186
649d8da6 187\f
73f5c1e3 188/* Returns a hash code for X (which is really a CONST_INT). */
189
190static hashval_t
191const_int_htab_hash (x)
192 const void *x;
193{
5e431971 194 return (hashval_t) INTVAL ((const struct rtx_def *) x);
73f5c1e3 195}
196
197/* Returns non-zero if the value represented by X (which is really a
198 CONST_INT) is the same as that given by Y (which is really a
199 HOST_WIDE_INT *). */
200
201static int
202const_int_htab_eq (x, y)
203 const void *x;
204 const void *y;
205{
5e431971 206 return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
73f5c1e3 207}
208
209/* Mark the hash-table element X (which is really a pointer to an
210 rtx). */
211
212static int
213rtx_htab_mark_1 (x, data)
214 void **x;
215 void *data ATTRIBUTE_UNUSED;
216{
217 ggc_mark_rtx (*x);
218 return 1;
219}
220
221/* Mark all the elements of HTAB (which is really an htab_t full of
222 rtxs). */
223
224static void
225rtx_htab_mark (htab)
226 void *htab;
227{
228 htab_traverse (*((htab_t *) htab), rtx_htab_mark_1, NULL);
229}
230
22cf44bc 231/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
232 don't attempt to share with the various global pieces of rtl (such as
233 frame_pointer_rtx). */
234
235rtx
236gen_raw_REG (mode, regno)
237 enum machine_mode mode;
238 int regno;
239{
240 rtx x = gen_rtx_raw_REG (mode, regno);
241 ORIGINAL_REGNO (x) = regno;
242 return x;
243}
244
7014838c 245/* There are some RTL codes that require special attention; the generation
246 functions do the raw handling. If you add to this list, modify
247 special_rtx in gengenrtl.c as well. */
248
3ad7bb1c 249rtx
250gen_rtx_CONST_INT (mode, arg)
73f5c1e3 251 enum machine_mode mode ATTRIBUTE_UNUSED;
3ad7bb1c 252 HOST_WIDE_INT arg;
253{
73f5c1e3 254 void **slot;
255
3ad7bb1c 256 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
57c097d5 257 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3ad7bb1c 258
259#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
260 if (const_true_rtx && arg == STORE_FLAG_VALUE)
261 return const_true_rtx;
262#endif
263
73f5c1e3 264 /* Look up the CONST_INT in the hash table. */
2b3dbc20 265 slot = htab_find_slot_with_hash (const_int_htab, &arg,
266 (hashval_t) arg, INSERT);
7f2875d3 267 if (*slot == 0)
d7c47c0e 268 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
73f5c1e3 269
270 return (rtx) *slot;
3ad7bb1c 271}
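/* Illustrative sketch, not part of the original file: how the CONST_INT
   cache above behaves.  Values in [-MAX_SAVED_CONST_INT,
   MAX_SAVED_CONST_INT] come straight from the preallocated
   const_int_rtx array; larger values go through const_int_htab, so
   repeated requests still yield one shared rtx.  */
#if 0
static void
const_int_sharing_example ()
{
  rtx small_a = GEN_INT (1);
  rtx small_b = GEN_INT (1);
  rtx big_a = GEN_INT (123456);
  rtx big_b = GEN_INT (123456);

  if (small_a != small_b || small_a != const1_rtx)
    abort ();		/* Small values are the preallocated copies.  */
  if (big_a != big_b)
    abort ();		/* Large values are shared via the hash table.  */
}
#endif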
272
7f2875d3 273/* CONST_DOUBLEs need special handling because their length is known
67f2a2eb 274 only at run-time. */
7f2875d3 275
67f2a2eb 276rtx
277gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2)
278 enum machine_mode mode;
279 rtx arg0;
280 HOST_WIDE_INT arg1, arg2;
281{
282 rtx r = rtx_alloc (CONST_DOUBLE);
4e929432 283 int i;
284
67f2a2eb 285 PUT_MODE (r, mode);
286 XEXP (r, 0) = arg0;
25999090 287 X0EXP (r, 1) = NULL_RTX;
4e929432 288 XWINT (r, 2) = arg1;
289 XWINT (r, 3) = arg2;
290
291 for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 3; --i)
292 XWINT (r, i) = 0;
293
67f2a2eb 294 return r;
295}
296
3ad7bb1c 297rtx
298gen_rtx_REG (mode, regno)
299 enum machine_mode mode;
300 int regno;
301{
302 /* In case the MD file explicitly references the frame pointer, have
303 all such references point to the same frame pointer. This is
304 used during frame pointer elimination to distinguish the explicit
305 references to these registers from pseudos that happened to be
306 assigned to them.
307
308 If we have eliminated the frame pointer or arg pointer, we will
309 be using it as a normal register, for example as a spill
310 register. In such cases, we might be accessing it in a mode that
311 is not Pmode and therefore cannot use the pre-allocated rtx.
312
313 Also don't do this when we are making new REGs in reload, since
314 we don't want to get confused with the real pointers. */
315
316 if (mode == Pmode && !reload_in_progress)
317 {
e8b59353 318 if (regno == FRAME_POINTER_REGNUM)
3ad7bb1c 319 return frame_pointer_rtx;
320#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
e8b59353 321 if (regno == HARD_FRAME_POINTER_REGNUM)
3ad7bb1c 322 return hard_frame_pointer_rtx;
323#endif
324#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
e8b59353 325 if (regno == ARG_POINTER_REGNUM)
3ad7bb1c 326 return arg_pointer_rtx;
327#endif
328#ifdef RETURN_ADDRESS_POINTER_REGNUM
e8b59353 329 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3ad7bb1c 330 return return_address_pointer_rtx;
331#endif
e8b59353 332 if (regno == STACK_POINTER_REGNUM)
3ad7bb1c 333 return stack_pointer_rtx;
334 }
335
22cf44bc 336 return gen_raw_REG (mode, regno);
3ad7bb1c 337}
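/* Illustrative sketch, not part of the original file: during normal RTL
   generation (not in reload), Pmode references to the fixed pointer
   registers come back as the shared global rtx objects; anything else
   is built fresh by gen_raw_REG.  */
#if 0
static void
reg_sharing_example ()
{
  rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);

  if (sp != stack_pointer_rtx || fp != frame_pointer_rtx)
    abort ();		/* Shared with the unique global copies.  */

  /* A register that is not one of the fixed pointers, or that is asked
     for in a mode other than Pmode, gets a brand new rtx each time.  */
}
#endif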
338
b5ba9f3a 339rtx
340gen_rtx_MEM (mode, addr)
341 enum machine_mode mode;
342 rtx addr;
343{
344 rtx rt = gen_rtx_raw_MEM (mode, addr);
345
346 /* This field is not cleared by the mere allocation of the rtx, so
347 we clear it here. */
348 MEM_ALIAS_SET (rt) = 0;
349
350 return rt;
351}
701e46d0 352
353rtx
354gen_rtx_SUBREG (mode, reg, offset)
355 enum machine_mode mode;
356 rtx reg;
357 int offset;
358{
359 /* This is the most common failure type.
360 Catch it early so we can see who does it. */
361 if ((offset % GET_MODE_SIZE (mode)) != 0)
362 abort ();
363
364 /* This check isn't usable right now because combine will
365 throw arbitrary crap like a CALL into a SUBREG in
366 gen_lowpart_for_combine so we must just eat it. */
367#if 0
368 /* Check for this too. */
369 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
370 abort ();
371#endif
372 return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
373}
374
 375/* Generate a SUBREG representing the least-significant part of REG
 376 if MODE is smaller than the mode of REG; otherwise generate a
 377 paradoxical SUBREG. */
378rtx
379gen_lowpart_SUBREG (mode, reg)
380 enum machine_mode mode;
381 rtx reg;
382{
383 enum machine_mode inmode;
701e46d0 384
385 inmode = GET_MODE (reg);
386 if (inmode == VOIDmode)
387 inmode = mode;
81802af6 388 return gen_rtx_SUBREG (mode, reg,
389 subreg_lowpart_offset (mode, inmode));
701e46d0 390}
7014838c 391\f
15bbde2b 392/* rtx gen_rtx (code, mode, [element1, ..., elementn])
393**
394** This routine generates an RTX of the size specified by
395** <code>, which is an RTX code. The RTX structure is initialized
396** from the arguments <element1> through <elementn>, which are
397** interpreted according to the specific RTX type's format. The
398** special machine mode associated with the rtx (if any) is specified
399** in <mode>.
400**
fc92fa61 401** gen_rtx can be invoked in a way which resembles the lisp-like
15bbde2b 402** rtx it will generate. For example, the following rtx structure:
403**
404** (plus:QI (mem:QI (reg:SI 1))
 405** (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
406**
407** ...would be generated by the following C code:
408**
d823ba47 409** gen_rtx (PLUS, QImode,
15bbde2b 410** gen_rtx (MEM, QImode,
411** gen_rtx (REG, SImode, 1)),
412** gen_rtx (MEM, QImode,
413** gen_rtx (PLUS, SImode,
414** gen_rtx (REG, SImode, 2),
415** gen_rtx (REG, SImode, 3)))),
416*/
417
418/*VARARGS2*/
419rtx
621f6678 420gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
15bbde2b 421{
7bca031c 422#ifndef ANSI_PROTOTYPES
15bbde2b 423 enum rtx_code code;
424 enum machine_mode mode;
e5fcd76a 425#endif
426 va_list p;
15bbde2b 427 register int i; /* Array indices... */
d2ca078f 428 register const char *fmt; /* Current rtx's format... */
15bbde2b 429 register rtx rt_val; /* RTX to return to caller... */
430
e5fcd76a 431 VA_START (p, mode);
432
7bca031c 433#ifndef ANSI_PROTOTYPES
15bbde2b 434 code = va_arg (p, enum rtx_code);
435 mode = va_arg (p, enum machine_mode);
e5fcd76a 436#endif
15bbde2b 437
67f2a2eb 438 switch (code)
15bbde2b 439 {
67f2a2eb 440 case CONST_INT:
441 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
442 break;
443
444 case CONST_DOUBLE:
445 {
446 rtx arg0 = va_arg (p, rtx);
447 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
448 HOST_WIDE_INT arg2 = va_arg (p, HOST_WIDE_INT);
449 rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2);
450 }
451 break;
452
453 case REG:
454 rt_val = gen_rtx_REG (mode, va_arg (p, int));
455 break;
456
457 case MEM:
458 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
459 break;
460
461 default:
15bbde2b 462 rt_val = rtx_alloc (code); /* Allocate the storage space. */
463 rt_val->mode = mode; /* Store the machine mode... */
464
465 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
466 for (i = 0; i < GET_RTX_LENGTH (code); i++)
467 {
468 switch (*fmt++)
469 {
470 case '0': /* Unused field. */
471 break;
472
473 case 'i': /* An integer? */
474 XINT (rt_val, i) = va_arg (p, int);
475 break;
476
b572011e 477 case 'w': /* A wide integer? */
478 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
479 break;
480
15bbde2b 481 case 's': /* A string? */
482 XSTR (rt_val, i) = va_arg (p, char *);
483 break;
484
485 case 'e': /* An expression? */
486 case 'u': /* An insn? Same except when printing. */
487 XEXP (rt_val, i) = va_arg (p, rtx);
488 break;
489
490 case 'E': /* An RTX vector? */
491 XVEC (rt_val, i) = va_arg (p, rtvec);
492 break;
493
a3426c4c 494 case 'b': /* A bitmap? */
495 XBITMAP (rt_val, i) = va_arg (p, bitmap);
496 break;
497
498 case 't': /* A tree? */
499 XTREE (rt_val, i) = va_arg (p, tree);
500 break;
501
15bbde2b 502 default:
fc92fa61 503 abort ();
15bbde2b 504 }
505 }
67f2a2eb 506 break;
15bbde2b 507 }
67f2a2eb 508
15bbde2b 509 va_end (p);
67f2a2eb 510 return rt_val;
15bbde2b 511}
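/* Illustrative sketch, not part of the original file: the variadic
   gen_rtx call and the generated gen_rtx_FOO helpers build the same
   structure; the switch above only special-cases the codes that need
   sharing or extra initialization (CONST_INT, CONST_DOUBLE, REG, MEM).  */
#if 0
static void
gen_rtx_equivalence_example ()
{
  rtx a = gen_rtx (MEM, QImode, gen_rtx (REG, SImode, 1));
  rtx b = gen_rtx_MEM (QImode, gen_rtx_REG (SImode, 1));

  /* Structurally identical, though not necessarily the same object.  */
  if (! rtx_equal_p (a, b))
    abort ();
}
#endif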
512
513/* gen_rtvec (n, [rt1, ..., rtn])
514**
515** This routine creates an rtvec and stores within it the
516** pointers to rtx's which are its arguments.
517*/
518
519/*VARARGS1*/
520rtvec
621f6678 521gen_rtvec VPARAMS ((int n, ...))
15bbde2b 522{
7bca031c 523#ifndef ANSI_PROTOTYPES
e5fcd76a 524 int n;
525#endif
526 int i;
15bbde2b 527 va_list p;
528 rtx *vector;
529
e5fcd76a 530 VA_START (p, n);
531
7bca031c 532#ifndef ANSI_PROTOTYPES
15bbde2b 533 n = va_arg (p, int);
e5fcd76a 534#endif
15bbde2b 535
536 if (n == 0)
537 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
538
539 vector = (rtx *) alloca (n * sizeof (rtx));
e5fcd76a 540
15bbde2b 541 for (i = 0; i < n; i++)
542 vector[i] = va_arg (p, rtx);
543 va_end (p);
544
545 return gen_rtvec_v (n, vector);
546}
547
548rtvec
549gen_rtvec_v (n, argp)
550 int n;
551 rtx *argp;
552{
553 register int i;
554 register rtvec rt_val;
555
556 if (n == 0)
557 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
558
559 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
560
561 for (i = 0; i < n; i++)
a4070a91 562 rt_val->elem[i] = *argp++;
15bbde2b 563
564 return rt_val;
565}
fbaa9422 566
15bbde2b 567\f
568/* Generate a REG rtx for a new pseudo register of mode MODE.
569 This pseudo is assigned the next sequential register number. */
570
571rtx
572gen_reg_rtx (mode)
573 enum machine_mode mode;
574{
08513b52 575 struct function *f = cfun;
15bbde2b 576 register rtx val;
577
9e519b97 578 /* Don't let anything called after initial flow analysis create new
579 registers. */
580 if (no_new_pseudos)
15bbde2b 581 abort ();
582
316bc009 583 if (generating_concat_p
584 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
585 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
76c37538 586 {
587 /* For complex modes, don't make a single pseudo.
588 Instead, make a CONCAT of two pseudos.
589 This allows noncontiguous allocation of the real and imaginary parts,
590 which makes much better code. Besides, allocating DCmode
591 pseudos overstrains reload on some machines like the 386. */
592 rtx realpart, imagpart;
593 int size = GET_MODE_UNIT_SIZE (mode);
594 enum machine_mode partmode
595 = mode_for_size (size * BITS_PER_UNIT,
596 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
597 ? MODE_FLOAT : MODE_INT),
598 0);
599
600 realpart = gen_reg_rtx (partmode);
601 imagpart = gen_reg_rtx (partmode);
3ad7bb1c 602 return gen_rtx_CONCAT (mode, realpart, imagpart);
76c37538 603 }
604
e61a0a7f 605 /* Make sure regno_pointer_align and regno_reg_rtx are large enough
606 to have an element for this pseudo reg number. */
15bbde2b 607
e61a0a7f 608 if (reg_rtx_no == f->emit->regno_pointer_align_length)
15bbde2b 609 {
e61a0a7f 610 int old_size = f->emit->regno_pointer_align_length;
15bbde2b 611 rtx *new1;
26df1c5e 612 char *new;
26df1c5e 613 new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
0a893c29 614 memset (new + old_size, 0, old_size);
4491f79f 615 f->emit->regno_pointer_align = (unsigned char *) new;
0a893c29 616
26df1c5e 617 new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
618 old_size * 2 * sizeof (rtx));
0a893c29 619 memset (new1 + old_size, 0, old_size * sizeof (rtx));
15bbde2b 620 regno_reg_rtx = new1;
621
e61a0a7f 622 f->emit->regno_pointer_align_length = old_size * 2;
15bbde2b 623 }
624
22cf44bc 625 val = gen_raw_REG (mode, reg_rtx_no);
15bbde2b 626 regno_reg_rtx[reg_rtx_no++] = val;
627 return val;
628}
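/* Illustrative sketch, not part of the original file: while
   generating_concat_p is set, asking for a complex-mode pseudo yields a
   CONCAT of two part-mode pseudos, so the real and imaginary parts can
   be allocated independently.  */
#if 0
static void
gen_reg_rtx_example ()
{
  rtx c = gen_reg_rtx (DCmode);		/* complex double float */
  rtx r = gen_reg_rtx (DFmode);		/* ordinary pseudo */

  if (GET_CODE (c) != CONCAT
      || GET_MODE (XEXP (c, 0)) != DFmode
      || GET_MODE (XEXP (c, 1)) != DFmode)
    abort ();
  if (GET_CODE (r) != REG || REGNO (r) < FIRST_PSEUDO_REGISTER)
    abort ();
}
#endif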
629
de8ecfb5 630/* Identify REG (which may be a CONCAT) as a user register. */
631
632void
633mark_user_reg (reg)
634 rtx reg;
635{
636 if (GET_CODE (reg) == CONCAT)
637 {
638 REG_USERVAR_P (XEXP (reg, 0)) = 1;
639 REG_USERVAR_P (XEXP (reg, 1)) = 1;
640 }
641 else if (GET_CODE (reg) == REG)
642 REG_USERVAR_P (reg) = 1;
643 else
644 abort ();
645}
646
d4c332ff 647/* Identify REG as a probable pointer register and show its alignment
648 as ALIGN, if nonzero. */
15bbde2b 649
650void
d4c332ff 651mark_reg_pointer (reg, align)
15bbde2b 652 rtx reg;
d4c332ff 653 int align;
15bbde2b 654{
e61a0a7f 655 if (! REG_POINTER (reg))
612409a6 656 {
e61a0a7f 657 REG_POINTER (reg) = 1;
d4c332ff 658
612409a6 659 if (align)
660 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
661 }
662 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
 663 /* We can no longer be sure just how aligned this pointer is. */
d4c332ff 664 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
15bbde2b 665}
666
667/* Return 1 plus largest pseudo reg number used in the current function. */
668
669int
670max_reg_num ()
671{
672 return reg_rtx_no;
673}
674
675/* Return 1 + the largest label number used so far in the current function. */
676
677int
678max_label_num ()
679{
680 if (last_label_num && label_num == base_label_num)
681 return last_label_num;
682 return label_num;
683}
684
685/* Return first label number used in this function (if any were used). */
686
687int
688get_first_label_num ()
689{
690 return first_label_num;
691}
692\f
701e46d0 693/* Return the final regno of X, which is a SUBREG of a hard
694 register. */
695int
696subreg_hard_regno (x, check_mode)
697 register rtx x;
698 int check_mode;
699{
700 enum machine_mode mode = GET_MODE (x);
701 unsigned int byte_offset, base_regno, final_regno;
702 rtx reg = SUBREG_REG (x);
703
704 /* This is where we attempt to catch illegal subregs
705 created by the compiler. */
706 if (GET_CODE (x) != SUBREG
707 || GET_CODE (reg) != REG)
708 abort ();
709 base_regno = REGNO (reg);
710 if (base_regno >= FIRST_PSEUDO_REGISTER)
711 abort ();
475fa9bd 712 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
701e46d0 713 abort ();
714
715 /* Catch non-congruent offsets too. */
716 byte_offset = SUBREG_BYTE (x);
717 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
718 abort ();
719
720 final_regno = subreg_regno (x);
721
722 return final_regno;
723}
724
15bbde2b 725/* Return a value representing some low-order bits of X, where the number
726 of low-order bits is given by MODE. Note that no conversion is done
d823ba47 727 between floating-point and fixed-point values, rather, the bit
15bbde2b 728 representation is returned.
729
730 This function handles the cases in common between gen_lowpart, below,
731 and two variants in cse.c and combine.c. These are the cases that can
732 be safely handled at all points in the compilation.
733
734 If this is not a case we can handle, return 0. */
735
736rtx
737gen_lowpart_common (mode, x)
738 enum machine_mode mode;
739 register rtx x;
740{
701e46d0 741 int msize = GET_MODE_SIZE (mode);
742 int xsize = GET_MODE_SIZE (GET_MODE (x));
743 int offset = 0;
15bbde2b 744
745 if (GET_MODE (x) == mode)
746 return x;
747
748 /* MODE must occupy no more words than the mode of X. */
749 if (GET_MODE (x) != VOIDmode
701e46d0 750 && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
751 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
15bbde2b 752 return 0;
753
81802af6 754 offset = subreg_lowpart_offset (mode, GET_MODE (x));
15bbde2b 755
756 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
64115b39 757 && (GET_MODE_CLASS (mode) == MODE_INT
758 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
15bbde2b 759 {
760 /* If we are getting the low-order part of something that has been
761 sign- or zero-extended, we can either just use the object being
762 extended or make a narrower extension. If we want an even smaller
763 piece than the size of the object being extended, call ourselves
764 recursively.
765
766 This case is used mostly by combine and cse. */
767
768 if (GET_MODE (XEXP (x, 0)) == mode)
769 return XEXP (x, 0);
770 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
771 return gen_lowpart_common (mode, XEXP (x, 0));
772 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
3ad7bb1c 773 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
15bbde2b 774 }
3c27c2b2 775 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
776 || GET_CODE (x) == CONCAT)
81802af6 777 return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
15bbde2b 778 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
779 from the low-order part of the constant. */
64115b39 780 else if ((GET_MODE_CLASS (mode) == MODE_INT
781 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
782 && GET_MODE (x) == VOIDmode
15bbde2b 783 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
99e0010b 784 {
785 /* If MODE is twice the host word size, X is already the desired
786 representation. Otherwise, if MODE is wider than a word, we can't
b3b27b2a 787 do this. If MODE is exactly a word, return just one CONST_INT. */
99e0010b 788
d347deeb 789 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
99e0010b 790 return x;
b572011e 791 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
99e0010b 792 return 0;
b572011e 793 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
99e0010b 794 return (GET_CODE (x) == CONST_INT ? x
b572011e 795 : GEN_INT (CONST_DOUBLE_LOW (x)));
99e0010b 796 else
797 {
a0d52dee 798 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
b572011e 799 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
800 : CONST_DOUBLE_LOW (x));
99e0010b 801
a0d52dee 802 /* Sign extend to HOST_WIDE_INT. */
b3b27b2a 803 val = trunc_int_for_mode (val, mode);
99e0010b 804
805 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
b572011e 806 : GEN_INT (val));
99e0010b 807 }
808 }
15bbde2b 809
c8971cad 810#ifndef REAL_ARITHMETIC
4a307dd5 811 /* If X is an integral constant but we want it in floating-point, it
812 must be the case that we have a union of an integer and a floating-point
813 value. If the machine-parameters allow it, simulate that union here
d823ba47 814 and return the result. The two-word and single-word cases are
414a3664 815 different. */
4a307dd5 816
f51cb679 817 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
b572011e 818 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
f51cb679 819 || flag_pretend_float)
4a307dd5 820 && GET_MODE_CLASS (mode) == MODE_FLOAT
414a3664 821 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
822 && GET_CODE (x) == CONST_INT
b572011e 823 && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
414a3664 824 {
b572011e 825 union {HOST_WIDE_INT i; float d; } u;
414a3664 826
827 u.i = INTVAL (x);
715907c0 828 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
414a3664 829 }
414a3664 830 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
b572011e 831 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
414a3664 832 || flag_pretend_float)
833 && GET_MODE_CLASS (mode) == MODE_FLOAT
834 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
4a307dd5 835 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
836 && GET_MODE (x) == VOIDmode
b572011e 837 && (sizeof (double) * HOST_BITS_PER_CHAR
838 == 2 * HOST_BITS_PER_WIDE_INT))
4a307dd5 839 {
b572011e 840 union {HOST_WIDE_INT i[2]; double d; } u;
841 HOST_WIDE_INT low, high;
4a307dd5 842
843 if (GET_CODE (x) == CONST_INT)
b572011e 844 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
4a307dd5 845 else
846 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
847
848#ifdef HOST_WORDS_BIG_ENDIAN
849 u.i[0] = high, u.i[1] = low;
850#else
851 u.i[0] = low, u.i[1] = high;
852#endif
853
715907c0 854 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
4a307dd5 855 }
e21bed9e 856
f51cb679 857 /* Similarly, if this is converting a floating-point value into a
 858 single-word integer. Only do this if the host and target parameters are
859 compatible. */
860
861 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
b572011e 862 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
f51cb679 863 || flag_pretend_float)
64115b39 864 && (GET_MODE_CLASS (mode) == MODE_INT
865 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
f51cb679 866 && GET_CODE (x) == CONST_DOUBLE
867 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
868 && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
701e46d0 869 return constant_subword (x, (offset / UNITS_PER_WORD), GET_MODE (x));
f51cb679 870
4a307dd5 871 /* Similarly, if this is converting a floating-point value into a
872 two-word integer, we can do this one word at a time and make an
 873 integer. Only do this if the host and target parameters are
874 compatible. */
875
f51cb679 876 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
b572011e 877 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
f51cb679 878 || flag_pretend_float)
64115b39 879 && (GET_MODE_CLASS (mode) == MODE_INT
6ff0332a 880 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
4a307dd5 881 && GET_CODE (x) == CONST_DOUBLE
882 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
883 && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
884 {
701e46d0 885 rtx lowpart, highpart;
886
887 lowpart = constant_subword (x,
888 (offset / UNITS_PER_WORD) + WORDS_BIG_ENDIAN,
889 GET_MODE (x));
890 highpart = constant_subword (x,
891 (offset / UNITS_PER_WORD) + (! WORDS_BIG_ENDIAN),
892 GET_MODE (x));
4a307dd5 893 if (lowpart && GET_CODE (lowpart) == CONST_INT
894 && highpart && GET_CODE (highpart) == CONST_INT)
895 return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
896 }
c8971cad 897#else /* ifndef REAL_ARITHMETIC */
898
899 /* When we have a FP emulator, we can handle all conversions between
900 FP and integer operands. This simplifies reload because it
901 doesn't have to deal with constructs like (subreg:DI
902 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
7547817f 903 /* Single-precision floats are always 32-bits and double-precision
904 floats are always 64-bits. */
c8971cad 905
3c27c2b2 906 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
7547817f 907 && GET_MODE_BITSIZE (mode) == 32
c8971cad 908 && GET_CODE (x) == CONST_INT)
3c27c2b2 909 {
c8971cad 910 REAL_VALUE_TYPE r;
911 HOST_WIDE_INT i;
912
913 i = INTVAL (x);
914 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
915 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
3c27c2b2 916 }
917 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
7547817f 918 && GET_MODE_BITSIZE (mode) == 64
c8971cad 919 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
920 && GET_MODE (x) == VOIDmode)
921 {
922 REAL_VALUE_TYPE r;
923 HOST_WIDE_INT i[2];
924 HOST_WIDE_INT low, high;
925
926 if (GET_CODE (x) == CONST_INT)
927 {
928 low = INTVAL (x);
929 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
930 }
931 else
932 {
d823ba47 933 low = CONST_DOUBLE_LOW (x);
c8971cad 934 high = CONST_DOUBLE_HIGH (x);
935 }
936
937 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
938 target machine. */
939 if (WORDS_BIG_ENDIAN)
940 i[0] = high, i[1] = low;
941 else
942 i[0] = low, i[1] = high;
943
944 r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
945 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
946 }
947 else if ((GET_MODE_CLASS (mode) == MODE_INT
948 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
949 && GET_CODE (x) == CONST_DOUBLE
950 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
951 {
952 REAL_VALUE_TYPE r;
953 long i[4]; /* Only the low 32 bits of each 'long' are used. */
954 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
955
956 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7547817f 957 switch (GET_MODE_BITSIZE (GET_MODE (x)))
c8971cad 958 {
7547817f 959 case 32:
c8971cad 960 REAL_VALUE_TO_TARGET_SINGLE (r, i[endian]);
a774762c 961 i[1 - endian] = 0;
c8971cad 962 break;
7547817f 963 case 64:
c8971cad 964 REAL_VALUE_TO_TARGET_DOUBLE (r, i);
965 break;
7547817f 966 case 96:
3f980cf7 967 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
968 i[3-3*endian] = 0;
3c27c2b2 969 break;
7547817f 970 case 128:
c8971cad 971 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
972 break;
973 default:
a774762c 974 abort ();
c8971cad 975 }
976
977 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
978 and return it. */
979#if HOST_BITS_PER_WIDE_INT == 32
a774762c 980 return immed_double_const (i[endian], i[1 - endian], mode);
c8971cad 981#else
83e32a86 982 {
983 int c;
984
985 if (HOST_BITS_PER_WIDE_INT != 64)
a774762c 986 abort ();
987
83e32a86 988 for (c = 0; c < 4; c++)
a774762c 989 i[c] &= ~ (0L);
3f980cf7 990
7547817f 991 switch (GET_MODE_BITSIZE (GET_MODE (x)))
3f980cf7 992 {
7547817f 993 case 32:
994 case 64:
3f980cf7 995 return immed_double_const (((unsigned long) i[endian]) |
996 (((HOST_WIDE_INT) i[1-endian]) << 32),
997 0, mode);
7547817f 998 case 96:
999 case 128:
3f980cf7 1000 return immed_double_const (((unsigned long) i[endian*3]) |
1001 (((HOST_WIDE_INT) i[1+endian]) << 32),
1002 ((unsigned long) i[2-endian]) |
1003 (((HOST_WIDE_INT) i[3-endian*3]) << 32),
1004 mode);
3c27c2b2 1005 default:
1006 abort ();
3f980cf7 1007 }
83e32a86 1008 }
c8971cad 1009#endif
1010 }
1011#endif /* ifndef REAL_ARITHMETIC */
4a307dd5 1012
15bbde2b 1013 /* Otherwise, we can't do this. */
1014 return 0;
1015}
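/* Illustrative sketch, not part of the original file: for integer
   constants the "low part" is just the value truncated and
   sign-extended to the narrower mode, so no SUBREG is built at all.  */
#if 0
static void
gen_lowpart_common_example ()
{
  rtx x = gen_lowpart_common (QImode, GEN_INT (0x1234));
  rtx y = gen_lowpart_common (QImode, GEN_INT (0x1280));

  if (x != GEN_INT (0x34))
    abort ();		/* Low byte of 0x1234.  */
  if (y != GEN_INT (-128))
    abort ();		/* 0x80 sign-extends to -128 in QImode.  */
}
#endif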
1016\f
568f439b 1017/* Return the real part (which has mode MODE) of a complex value X.
1018 This always comes at the low address in memory. */
1019
1020rtx
1021gen_realpart (mode, x)
1022 enum machine_mode mode;
1023 register rtx x;
1024{
81802af6 1025 if (WORDS_BIG_ENDIAN
1026 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1027 && REG_P (x)
1028 && REGNO (x) < FIRST_PSEUDO_REGISTER)
f060a027 1029 internal_error
1030 ("Can't access real part of complex value in hard register");
0864dc1b 1031 else if (WORDS_BIG_ENDIAN)
568f439b 1032 return gen_highpart (mode, x);
1033 else
1034 return gen_lowpart (mode, x);
1035}
1036
1037/* Return the imaginary part (which has mode MODE) of a complex value X.
1038 This always comes at the high address in memory. */
1039
1040rtx
1041gen_imagpart (mode, x)
1042 enum machine_mode mode;
1043 register rtx x;
1044{
81802af6 1045 if (WORDS_BIG_ENDIAN)
568f439b 1046 return gen_lowpart (mode, x);
701e46d0 1047 else if (! WORDS_BIG_ENDIAN
ea9a92b6 1048 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1049 && REG_P (x)
1050 && REGNO (x) < FIRST_PSEUDO_REGISTER)
f060a027 1051 internal_error
1052 ("can't access imaginary part of complex value in hard register");
568f439b 1053 else
1054 return gen_highpart (mode, x);
1055}
48c70a46 1056
1057/* Return 1 iff X, assumed to be a SUBREG,
1058 refers to the real part of the complex value in its containing reg.
1059 Complex values are always stored with the real part in the first word,
1060 regardless of WORDS_BIG_ENDIAN. */
1061
1062int
1063subreg_realpart_p (x)
1064 rtx x;
1065{
1066 if (GET_CODE (x) != SUBREG)
1067 abort ();
1068
701e46d0 1069 return ((unsigned int) SUBREG_BYTE (x)
02e7a332 1070 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
48c70a46 1071}
568f439b 1072\f
15bbde2b 1073/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1074 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1075 least-significant part of X.
1076 MODE specifies how big a part of X to return;
1077 it usually should not be larger than a word.
1078 If X is a MEM whose address is a QUEUED, the value may be so also. */
1079
1080rtx
1081gen_lowpart (mode, x)
1082 enum machine_mode mode;
1083 register rtx x;
1084{
1085 rtx result = gen_lowpart_common (mode, x);
1086
1087 if (result)
1088 return result;
365c9063 1089 else if (GET_CODE (x) == REG)
1090 {
1091 /* Must be a hard reg that's not valid in MODE. */
1092 result = gen_lowpart_common (mode, copy_to_reg (x));
1093 if (result == 0)
1094 abort ();
314d6ec8 1095 return result;
365c9063 1096 }
15bbde2b 1097 else if (GET_CODE (x) == MEM)
1098 {
1099 /* The only additional case we can do is MEM. */
1100 register int offset = 0;
1101 if (WORDS_BIG_ENDIAN)
1102 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1103 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1104
1105 if (BYTES_BIG_ENDIAN)
1106 /* Adjust the address so that the address-after-the-data
1107 is unchanged. */
1108 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1109 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1110
e513d163 1111 return adjust_address (x, mode, offset);
15bbde2b 1112 }
0dbd1c74 1113 else if (GET_CODE (x) == ADDRESSOF)
1114 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
15bbde2b 1115 else
1116 abort ();
1117}
1118
d823ba47 1119/* Like `gen_lowpart', but refer to the most significant part.
d56d0ca2 1120 This is used to access the imaginary part of a complex number. */
1121
1122rtx
1123gen_highpart (mode, x)
1124 enum machine_mode mode;
1125 register rtx x;
1126{
701e46d0 1127 unsigned int msize = GET_MODE_SIZE (mode);
81802af6 1128 rtx result;
701e46d0 1129
d56d0ca2 1130 /* This case loses if X is a subreg. To catch bugs early,
1131 complain if an invalid MODE is used even in other cases. */
701e46d0 1132 if (msize > UNITS_PER_WORD
1133 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
d56d0ca2 1134 abort ();
701e46d0 1135
81802af6 1136 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1137 subreg_highpart_offset (mode, GET_MODE (x)));
a8c36ab2 1138
1139 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1140 the target if we have a MEM. gen_highpart must return a valid operand,
1141 emitting code if necessary to do so. */
1142 if (GET_CODE (result) == MEM)
1143 result = validize_mem (result);
1144
81802af6 1145 if (!result)
1146 abort ();
1147 return result;
1148}
1149/* Return offset in bytes to get OUTERMODE low part
1150 of the value in mode INNERMODE stored in memory in target format. */
10ef59ac 1151
81802af6 1152unsigned int
1153subreg_lowpart_offset (outermode, innermode)
1154 enum machine_mode outermode, innermode;
1155{
1156 unsigned int offset = 0;
1157 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
10ef59ac 1158
81802af6 1159 if (difference > 0)
d56d0ca2 1160 {
81802af6 1161 if (WORDS_BIG_ENDIAN)
1162 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1163 if (BYTES_BIG_ENDIAN)
1164 offset += difference % UNITS_PER_WORD;
d56d0ca2 1165 }
701e46d0 1166
81802af6 1167 return offset;
d56d0ca2 1168}
64ab453f 1169
81802af6 1170/* Return offset in bytes to get OUTERMODE high part
1171 of the value in mode INNERMODE stored in memory in target format. */
1172unsigned int
1173subreg_highpart_offset (outermode, innermode)
64ab453f 1174 enum machine_mode outermode, innermode;
64ab453f 1175{
1176 unsigned int offset = 0;
1177 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1178
81802af6 1179 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1180 abort ();
1181
64ab453f 1182 if (difference > 0)
1183 {
81802af6 1184 if (! WORDS_BIG_ENDIAN)
64ab453f 1185 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
81802af6 1186 if (! BYTES_BIG_ENDIAN)
64ab453f 1187 offset += difference % UNITS_PER_WORD;
1188 }
1189
81802af6 1190 return offset;
64ab453f 1191}
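/* Illustrative sketch, not part of the original file: a worked example of
   the two offset helpers, assuming 4-byte words so that SImode inside
   DImode spans exactly two words.  Only the byte address of the low part
   changes with endianness; which bits it holds does not.  */
#if 0
static void
subreg_offset_example ()
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);

  /* difference = 8 - 4 = 4 bytes, i.e. exactly one word.  */
  if (WORDS_BIG_ENDIAN)
    {
      if (lo != 4 || hi != 0)
	abort ();
    }
  else
    {
      if (lo != 0 || hi != 4)
	abort ();
    }
}
#endif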
d56d0ca2 1192
15bbde2b 1193/* Return 1 iff X, assumed to be a SUBREG,
1194 refers to the least significant part of its containing reg.
1195 If X is not a SUBREG, always return 1 (it is its own low part!). */
1196
1197int
1198subreg_lowpart_p (x)
1199 rtx x;
1200{
1201 if (GET_CODE (x) != SUBREG)
1202 return 1;
7e14c1bf 1203 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1204 return 0;
15bbde2b 1205
81802af6 1206 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1207 == SUBREG_BYTE (x));
15bbde2b 1208}
1209\f
15bbde2b 1210
701e46d0 1211/* Helper routine for all the constant cases of operand_subword.
1212 Some places invoke this directly. */
15bbde2b 1213
1214rtx
701e46d0 1215constant_subword (op, offset, mode)
15bbde2b 1216 rtx op;
701e46d0 1217 int offset;
15bbde2b 1218 enum machine_mode mode;
1219{
b572011e 1220 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
701e46d0 1221 HOST_WIDE_INT val;
15bbde2b 1222
1223 /* If OP is already an integer word, return it. */
1224 if (GET_MODE_CLASS (mode) == MODE_INT
1225 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1226 return op;
1227
fc92fa61 1228#ifdef REAL_ARITHMETIC
915c336f 1229 /* The output is some bits, the width of the target machine's word.
1230 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1231 host can't. */
0477aa9f 1232 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
fc92fa61 1233 && GET_MODE_CLASS (mode) == MODE_FLOAT
48509d90 1234 && GET_MODE_BITSIZE (mode) == 64
fc92fa61 1235 && GET_CODE (op) == CONST_DOUBLE)
1236 {
0477aa9f 1237 long k[2];
fc92fa61 1238 REAL_VALUE_TYPE rv;
1239
1240 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1241 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
48509d90 1242
0477aa9f 1243 /* We handle 32-bit and >= 64-bit words here. Note that the order in
48509d90 1244 which the words are written depends on the word endianness.
48509d90 1245 ??? This is a potential portability problem and should
cd1091f4 1246 be fixed at some point.
1247
 1248 We must exercise caution with the sign bit. By definition there
1249 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1250 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1251 So we explicitly mask and sign-extend as necessary. */
0477aa9f 1252 if (BITS_PER_WORD == 32)
cd1091f4 1253 {
701e46d0 1254 val = k[offset];
cd1091f4 1255 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1256 return GEN_INT (val);
1257 }
1258#if HOST_BITS_PER_WIDE_INT >= 64
701e46d0 1259 else if (BITS_PER_WORD >= 64 && offset == 0)
cd1091f4 1260 {
1261 val = k[! WORDS_BIG_ENDIAN];
1262 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1263 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1264 return GEN_INT (val);
1265 }
0477aa9f 1266#endif
de8de626 1267 else if (BITS_PER_WORD == 16)
1268 {
701e46d0 1269 val = k[offset >> 1];
1270 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
cd1091f4 1271 val >>= 16;
bfc60c50 1272 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
cd1091f4 1273 return GEN_INT (val);
de8de626 1274 }
48509d90 1275 else
1276 abort ();
fc92fa61 1277 }
61f41e95 1278 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1279 && GET_MODE_CLASS (mode) == MODE_FLOAT
1280 && GET_MODE_BITSIZE (mode) > 64
1281 && GET_CODE (op) == CONST_DOUBLE)
915c336f 1282 {
1283 long k[4];
1284 REAL_VALUE_TYPE rv;
61f41e95 1285
915c336f 1286 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1287 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
61f41e95 1288
915c336f 1289 if (BITS_PER_WORD == 32)
1290 {
701e46d0 1291 val = k[offset];
915c336f 1292 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1293 return GEN_INT (val);
1294 }
1295#if HOST_BITS_PER_WIDE_INT >= 64
701e46d0 1296 else if (BITS_PER_WORD >= 64 && offset <= 1)
915c336f 1297 {
701e46d0 1298 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
915c336f 1299 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
701e46d0 1300 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
915c336f 1301 return GEN_INT (val);
1302 }
1303#endif
1304 else
1305 abort ();
1306 }
fc92fa61 1307#else /* no REAL_ARITHMETIC */
15bbde2b 1308 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
b572011e 1309 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
15bbde2b 1310 || flag_pretend_float)
1311 && GET_MODE_CLASS (mode) == MODE_FLOAT
1312 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
1313 && GET_CODE (op) == CONST_DOUBLE)
35bb04fc 1314 {
1315 /* The constant is stored in the host's word-ordering,
1316 but we want to access it in the target's word-ordering. Some
1317 compilers don't like a conditional inside macro args, so we have two
1318 copies of the return. */
06683233 1319#ifdef HOST_WORDS_BIG_ENDIAN
701e46d0 1320 return GEN_INT (offset == WORDS_BIG_ENDIAN
35bb04fc 1321 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
06683233 1322#else
701e46d0 1323 return GEN_INT (offset != WORDS_BIG_ENDIAN
35bb04fc 1324 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
06683233 1325#endif
35bb04fc 1326 }
fc92fa61 1327#endif /* no REAL_ARITHMETIC */
15bbde2b 1328
1329 /* Single word float is a little harder, since single- and double-word
1330 values often do not have the same high-order bits. We have already
1331 verified that we want the only defined word of the single-word value. */
fc92fa61 1332#ifdef REAL_ARITHMETIC
0477aa9f 1333 if (GET_MODE_CLASS (mode) == MODE_FLOAT
48509d90 1334 && GET_MODE_BITSIZE (mode) == 32
fc92fa61 1335 && GET_CODE (op) == CONST_DOUBLE)
1336 {
0477aa9f 1337 long l;
fc92fa61 1338 REAL_VALUE_TYPE rv;
1339
1340 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1341 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
587a9fdf 1342
cd1091f4 1343 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1344 val = l;
1345 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
eb294a2f 1346
587a9fdf 1347 if (BITS_PER_WORD == 16)
1348 {
701e46d0 1349 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
cd1091f4 1350 val >>= 16;
bfc60c50 1351 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
587a9fdf 1352 }
cd1091f4 1353
1354 return GEN_INT (val);
fc92fa61 1355 }
1356#else
15bbde2b 1357 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
b572011e 1358 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
15bbde2b 1359 || flag_pretend_float)
dc611f66 1360 && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
15bbde2b 1361 && GET_MODE_CLASS (mode) == MODE_FLOAT
1362 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1363 && GET_CODE (op) == CONST_DOUBLE)
1364 {
1365 double d;
b572011e 1366 union {float f; HOST_WIDE_INT i; } u;
15bbde2b 1367
1368 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1369
1370 u.f = d;
b572011e 1371 return GEN_INT (u.i);
15bbde2b 1372 }
dc611f66 1373 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1374 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1375 || flag_pretend_float)
1376 && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
1377 && GET_MODE_CLASS (mode) == MODE_FLOAT
1378 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1379 && GET_CODE (op) == CONST_DOUBLE)
1380 {
1381 double d;
1382 union {double d; HOST_WIDE_INT i; } u;
1383
1384 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1385
1386 u.d = d;
1387 return GEN_INT (u.i);
1388 }
fc92fa61 1389#endif /* no REAL_ARITHMETIC */
d823ba47 1390
15bbde2b 1391 /* The only remaining cases that we can handle are integers.
1392 Convert to proper endianness now since these cases need it.
d823ba47 1393 At this point, offset == 0 means the low-order word.
15bbde2b 1394
870288ea 1395 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1396 in general. However, if OP is (const_int 0), we can just return
1397 it for any word. */
1398
1399 if (op == const0_rtx)
1400 return op;
15bbde2b 1401
1402 if (GET_MODE_CLASS (mode) != MODE_INT
870288ea 1403 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
5c1fd70d 1404 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
15bbde2b 1405 return 0;
1406
1407 if (WORDS_BIG_ENDIAN)
701e46d0 1408 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
15bbde2b 1409
1410 /* Find out which word on the host machine this value is in and get
1411 it from the constant. */
701e46d0 1412 val = (offset / size_ratio == 0
15bbde2b 1413 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1414 : (GET_CODE (op) == CONST_INT
1415 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1416
5418f2a8 1417 /* Get the value we want into the low bits of val. */
b572011e 1418 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
701e46d0 1419 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
5418f2a8 1420
b2345915 1421 val = trunc_int_for_mode (val, word_mode);
15bbde2b 1422
b572011e 1423 return GEN_INT (val);
15bbde2b 1424}
1425
701e46d0 1426/* Return subword OFFSET of operand OP.
1427 The word number, OFFSET, is interpreted as the word number starting
1428 at the low-order address. OFFSET 0 is the low-order word if not
1429 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1430
1431 If we cannot extract the required word, we return zero. Otherwise,
1432 an rtx corresponding to the requested word will be returned.
1433
1434 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1435 reload has completed, a valid address will always be returned. After
1436 reload, if a valid address cannot be returned, we return zero.
1437
1438 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1439 it is the responsibility of the caller.
1440
1441 MODE is the mode of OP in case it is a CONST_INT.
1442
1443 ??? This is still rather broken for some cases. The problem for the
 1444 moment is that none of the callers provide a 'goal mode' for us to
 1445 work with. This exists because all callers were written
84e81e84 1446 in a word-based SUBREG world.
 1447 Most uses of this function can now be replaced by calls to
 1448 simplify_subreg.
1449 */
701e46d0 1450
1451rtx
1452operand_subword (op, offset, validate_address, mode)
1453 rtx op;
1454 unsigned int offset;
1455 int validate_address;
1456 enum machine_mode mode;
1457{
1458 if (mode == VOIDmode)
1459 mode = GET_MODE (op);
1460
1461 if (mode == VOIDmode)
1462 abort ();
1463
1464 /* If OP is narrower than a word, fail. */
1465 if (mode != BLKmode
1466 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1467 return 0;
1468
1469 /* If we want a word outside OP, return zero. */
1470 if (mode != BLKmode
1471 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1472 return const0_rtx;
1473
701e46d0 1474 /* Form a new MEM at the requested address. */
1475 if (GET_CODE (op) == MEM)
1476 {
1477 rtx addr = plus_constant (XEXP (op, 0), (offset * UNITS_PER_WORD));
1478 rtx new;
1479
1480 if (validate_address)
1481 {
1482 if (reload_completed)
1483 {
1484 if (! strict_memory_address_p (word_mode, addr))
1485 return 0;
1486 }
1487 else
1488 addr = memory_address (word_mode, addr);
1489 }
1490
1491 new = gen_rtx_MEM (word_mode, addr);
1492 MEM_COPY_ATTRIBUTES (new, op);
1493 return new;
1494 }
1495
84e81e84 1496 /* Rest can be handled by simplify_subreg. */
1497 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
701e46d0 1498}
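/* Illustrative sketch, not part of the original file: splitting a
   double-word pseudo into its word-sized pieces, assuming a 32-bit
   word_mode.  For a MEM operand the function would instead build a new
   MEM at the offset address.  */
#if 0
static void
operand_subword_example ()
{
  rtx d = gen_reg_rtx (DImode);
  rtx w0 = operand_subword (d, 0, 1, DImode);	/* (subreg:SI (reg:DI d) 0) */
  rtx w1 = operand_subword (d, 1, 1, DImode);	/* (subreg:SI (reg:DI d) 4) */

  if (GET_CODE (w0) != SUBREG || GET_CODE (w1) != SUBREG)
    abort ();
}
#endif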
1499
15bbde2b 1500/* Similar to `operand_subword', but never return 0. If we can't extract
1501 the required subword, put OP into a register and try again. If that fails,
d823ba47 1502 abort. We always validate the address in this case.
15bbde2b 1503
1504 MODE is the mode of OP, in case it is CONST_INT. */
1505
1506rtx
701e46d0 1507operand_subword_force (op, offset, mode)
15bbde2b 1508 rtx op;
701e46d0 1509 unsigned int offset;
15bbde2b 1510 enum machine_mode mode;
1511{
701e46d0 1512 rtx result = operand_subword (op, offset, 1, mode);
15bbde2b 1513
1514 if (result)
1515 return result;
1516
1517 if (mode != BLKmode && mode != VOIDmode)
ac825d29 1518 {
1519 /* If this is a register which can not be accessed by words, copy it
1520 to a pseudo register. */
1521 if (GET_CODE (op) == REG)
1522 op = copy_to_reg (op);
1523 else
1524 op = force_reg (mode, op);
1525 }
15bbde2b 1526
701e46d0 1527 result = operand_subword (op, offset, 1, mode);
15bbde2b 1528 if (result == 0)
1529 abort ();
1530
1531 return result;
1532}
1533\f
1534/* Given a compare instruction, swap the operands.
1535 A test instruction is changed into a compare of 0 against the operand. */
1536
1537void
1538reverse_comparison (insn)
1539 rtx insn;
1540{
1541 rtx body = PATTERN (insn);
1542 rtx comp;
1543
1544 if (GET_CODE (body) == SET)
1545 comp = SET_SRC (body);
1546 else
1547 comp = SET_SRC (XVECEXP (body, 0, 0));
1548
1549 if (GET_CODE (comp) == COMPARE)
1550 {
1551 rtx op0 = XEXP (comp, 0);
1552 rtx op1 = XEXP (comp, 1);
1553 XEXP (comp, 0) = op1;
1554 XEXP (comp, 1) = op0;
1555 }
1556 else
1557 {
7014838c 1558 rtx new = gen_rtx_COMPARE (VOIDmode,
1559 CONST0_RTX (GET_MODE (comp)), comp);
15bbde2b 1560 if (GET_CODE (body) == SET)
1561 SET_SRC (body) = new;
1562 else
1563 SET_SRC (XVECEXP (body, 0, 0)) = new;
1564 }
1565}
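/* Illustrative sketch, not part of the original file: the effect of
   reverse_comparison on the two pattern shapes, in RTL notation.

     compare:  (set (cc0) (compare (reg:SI 1) (reg:SI 2)))
           ->  (set (cc0) (compare (reg:SI 2) (reg:SI 1)))

     test:     (set (cc0) (reg:SI 1))
           ->  (set (cc0) (compare (const_int 0) (reg:SI 1)))  */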
1566\f
1567/* Return a memory reference like MEMREF, but with its mode changed
1568 to MODE and its address changed to ADDR.
1569 (VOIDmode means don't change the mode.
1570 NULL for ADDR means don't change the address.) */
1571
1572rtx
1573change_address (memref, mode, addr)
1574 rtx memref;
1575 enum machine_mode mode;
1576 rtx addr;
1577{
1578 rtx new;
1579
1580 if (GET_CODE (memref) != MEM)
1581 abort ();
1582 if (mode == VOIDmode)
1583 mode = GET_MODE (memref);
1584 if (addr == 0)
1585 addr = XEXP (memref, 0);
1586
1587 /* If reload is in progress or has completed, ADDR must be valid.
1588 Otherwise, we can call memory_address to make it valid. */
1589 if (reload_completed || reload_in_progress)
1590 {
1591 if (! memory_address_p (mode, addr))
1592 abort ();
1593 }
1594 else
1595 addr = memory_address (mode, addr);
d823ba47 1596
e8976cd7 1597 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1598 return memref;
1599
3ad7bb1c 1600 new = gen_rtx_MEM (mode, addr);
6a0934dd 1601 MEM_COPY_ATTRIBUTES (new, memref);
15bbde2b 1602 return new;
1603}
e513d163 1604/* Return a memory reference like MEMREF, but with its mode changed
1605 to MODE and its address offset by OFFSET bytes. */
1606
1607rtx
1608adjust_address (memref, mode, offset)
1609 rtx memref;
1610 enum machine_mode mode;
1611 HOST_WIDE_INT offset;
1612{
1613 /* For now, this is just a wrapper for change_address, but eventually
1614 will do memref tracking. */
1615 return
1616 change_address (memref, mode, plus_constant (XEXP (memref, 0), offset));
1617}
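/* Illustrative sketch, not part of the original file: adjust_address is
   currently a thin wrapper that folds the byte offset into the address
   and changes the mode, e.g. picking the second word out of a
   double-word memory reference (byte offset 4 with 4-byte words).  */
#if 0
static rtx
adjust_address_example (mem)
     rtx mem;				/* some (mem:DI addr) */
{
  /* Equivalent to change_address (mem, SImode,
     plus_constant (XEXP (mem, 0), 4)).  */
  return adjust_address (mem, SImode, 4);
}
#endif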
15bbde2b 1618\f
1619/* Return a newly created CODE_LABEL rtx with a unique label number. */
1620
1621rtx
1622gen_label_rtx ()
1623{
649d8da6 1624 register rtx label;
1625
b54842d8 1626 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
4679ade3 1627 NULL_RTX, label_num++, NULL, NULL);
649d8da6 1628
15bbde2b 1629 LABEL_NUSES (label) = 0;
bfee5366 1630 LABEL_ALTERNATE_NAME (label) = NULL;
15bbde2b 1631 return label;
1632}
1633\f
1634/* For procedure integration. */
1635
15bbde2b 1636/* Install new pointers to the first and last insns in the chain.
d4c332ff 1637 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 1638 Used for an inline-procedure after copying the insn chain. */
1639
1640void
1641set_new_first_and_last_insn (first, last)
1642 rtx first, last;
1643{
d4c332ff 1644 rtx insn;
1645
15bbde2b 1646 first_insn = first;
1647 last_insn = last;
d4c332ff 1648 cur_insn_uid = 0;
1649
1650 for (insn = first; insn; insn = NEXT_INSN (insn))
1651 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
1652
1653 cur_insn_uid++;
15bbde2b 1654}
1655
1656/* Set the range of label numbers found in the current function.
1657 This is used when belatedly compiling an inline function. */
1658
1659void
1660set_new_first_and_last_label_num (first, last)
1661 int first, last;
1662{
1663 base_label_num = label_num;
1664 first_label_num = first;
1665 last_label_num = last;
1666}
0a893c29 1667
1668/* Set the last label number found in the current function.
1669 This is used when belatedly compiling an inline function. */
15bbde2b 1670
1671void
0a893c29 1672set_new_last_label_num (last)
1673 int last;
15bbde2b 1674{
0a893c29 1675 base_label_num = label_num;
1676 last_label_num = last;
15bbde2b 1677}
0a893c29 1678\f
15bbde2b 1679/* Restore all variables describing the current status from the structure *P.
1680 This is used after a nested function. */
1681
1682void
1683restore_emit_status (p)
c5b89159 1684 struct function *p ATTRIBUTE_UNUSED;
15bbde2b 1685{
bb9d6298 1686 last_label_num = 0;
0a893c29 1687 clear_emit_caches ();
15bbde2b 1688}
26df1c5e 1689
c788feb1 1690/* Clear out all parts of the state in F that can safely be discarded
26df1c5e 1691 after the function has been compiled, to let garbage collection
a57bcb3b 1692 reclaim the memory. */
c788feb1 1693
26df1c5e 1694void
a57bcb3b 1695free_emit_status (f)
26df1c5e 1696 struct function *f;
1697{
1698 free (f->emit->x_regno_reg_rtx);
26df1c5e 1699 free (f->emit->regno_pointer_align);
3c3bb268 1700 free (f->emit);
1701 f->emit = NULL;
26df1c5e 1702}
15bbde2b 1703\f
d823ba47 1704/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 1705 structure. This routine should only be called once. */
15bbde2b 1706
1707void
2d96a59a 1708unshare_all_rtl (fndecl, insn)
1709 tree fndecl;
1710 rtx insn;
15bbde2b 1711{
2d96a59a 1712 tree decl;
15bbde2b 1713
2d96a59a 1714 /* Make sure that virtual parameters are not shared. */
1715 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
0e8e37b2 1716 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2d96a59a 1717
72ec8878 1718 /* Make sure that virtual stack slots are not shared. */
1719 unshare_all_decls (DECL_INITIAL (fndecl));
1720
2d96a59a 1721 /* Unshare just about everything else. */
1722 unshare_all_rtl_1 (insn);
d823ba47 1723
15bbde2b 1724 /* Make sure the addresses of stack slots found outside the insn chain
1725 (such as, in DECL_RTL of a variable) are not shared
1726 with the insn chain.
1727
1728 This special care is necessary when the stack slot MEM does not
1729 actually appear in the insn chain. If it does appear, its address
1730 is unshared from all else at that point. */
45733446 1731 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
15bbde2b 1732}
1733
d823ba47 1734/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 1735 structure, again. This is a fairly expensive thing to do so it
1736 should be done sparingly. */
1737
1738void
1739unshare_all_rtl_again (insn)
1740 rtx insn;
1741{
1742 rtx p;
5244079b 1743 tree decl;
1744
2d96a59a 1745 for (p = insn; p; p = NEXT_INSN (p))
9204e736 1746 if (INSN_P (p))
2d96a59a 1747 {
1748 reset_used_flags (PATTERN (p));
1749 reset_used_flags (REG_NOTES (p));
1750 reset_used_flags (LOG_LINKS (p));
1751 }
5244079b 1752
01dc9f0c 1753 /* Make sure that virtual stack slots are not shared. */
1754 reset_used_decls (DECL_INITIAL (cfun->decl));
1755
5244079b 1756 /* Make sure that virtual parameters are not shared. */
1757 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
1758 reset_used_flags (DECL_RTL (decl));
1759
1760 reset_used_flags (stack_slot_list);
1761
1762 unshare_all_rtl (cfun->decl, insn);
2d96a59a 1763}
1764
1765/* Go through all the RTL insn bodies and copy any invalid shared structure.
1766 Assumes the mark bits are cleared at entry. */
1767
1768static void
1769unshare_all_rtl_1 (insn)
1770 rtx insn;
1771{
1772 for (; insn; insn = NEXT_INSN (insn))
9204e736 1773 if (INSN_P (insn))
2d96a59a 1774 {
1775 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
1776 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
1777 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
1778 }
1779}
1780
72ec8878 1781/* Go through all virtual stack slots of a function and copy any
1782 shared structure. */
1783static void
1784unshare_all_decls (blk)
1785 tree blk;
1786{
1787 tree t;
1788
1789 /* Copy shared decls. */
1790 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
0e8e37b2 1791 if (DECL_RTL_SET_P (t))
1792 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
72ec8878 1793
1794 /* Now process sub-blocks. */
1795 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
1796 unshare_all_decls (t);
1797}
1798
01dc9f0c 1799/* Go through all virtual stack slots of a function and mark them as
1800 not shared. */
1801static void
1802reset_used_decls (blk)
1803 tree blk;
1804{
1805 tree t;
1806
1807 /* Mark decls. */
1808 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
0e8e37b2 1809 if (DECL_RTL_SET_P (t))
1810 reset_used_flags (DECL_RTL (t));
01dc9f0c 1811
1812 /* Now process sub-blocks. */
1813 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
1814 reset_used_decls (t);
1815}
1816
15bbde2b 1817/* Mark ORIG as in use, and return a copy of it if it was already in use.
1818 Recursively does the same for subexpressions. */
1819
1820rtx
1821copy_rtx_if_shared (orig)
1822 rtx orig;
1823{
1824 register rtx x = orig;
1825 register int i;
1826 register enum rtx_code code;
d2ca078f 1827 register const char *format_ptr;
15bbde2b 1828 int copied = 0;
1829
1830 if (x == 0)
1831 return 0;
1832
1833 code = GET_CODE (x);
1834
1835 /* These types may be freely shared. */
1836
1837 switch (code)
1838 {
1839 case REG:
1840 case QUEUED:
1841 case CONST_INT:
1842 case CONST_DOUBLE:
1843 case SYMBOL_REF:
1844 case CODE_LABEL:
1845 case PC:
1846 case CC0:
1847 case SCRATCH:
a92771b8 1848      /* SCRATCH must be shared because each one represents a distinct value.  */
15bbde2b 1849 return x;
1850
f63d12e3 1851 case CONST:
1852 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
1853 a LABEL_REF, it isn't sharable. */
1854 if (GET_CODE (XEXP (x, 0)) == PLUS
1855 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1856 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1857 return x;
1858 break;
1859
15bbde2b 1860 case INSN:
1861 case JUMP_INSN:
1862 case CALL_INSN:
1863 case NOTE:
15bbde2b 1864 case BARRIER:
1865 /* The chain of insns is not being copied. */
1866 return x;
1867
1868 case MEM:
baf55c37 1869 /* A MEM is allowed to be shared if its address is constant.
1870
d823ba47 1871 We used to allow sharing of MEMs which referenced
baf55c37 1872 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
1873 that can lose. instantiate_virtual_regs will not unshare
1874 the MEMs, and combine may change the structure of the address
1875 because it looks safe and profitable in one context, but
1876 in some other context it creates unrecognizable RTL. */
1877 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
15bbde2b 1878 return x;
1879
0dbd1c74 1880 break;
1881
1882 default:
1883 break;
15bbde2b 1884 }
1885
1886 /* This rtx may not be shared. If it has already been seen,
1887 replace it with a copy of itself. */
1888
1889 if (x->used)
1890 {
1891 register rtx copy;
1892
1893 copy = rtx_alloc (code);
b1b63592 1894 memcpy (copy, x,
748e6d74 1895 (sizeof (*copy) - sizeof (copy->fld)
1896 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
15bbde2b 1897 x = copy;
1898 copied = 1;
1899 }
1900 x->used = 1;
1901
1902 /* Now scan the subexpressions recursively.
1903 We can store any replaced subexpressions directly into X
1904 since we know X is not shared! Any vectors in X
1905 must be copied if X was copied. */
1906
1907 format_ptr = GET_RTX_FORMAT (code);
1908
1909 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1910 {
1911 switch (*format_ptr++)
1912 {
1913 case 'e':
1914 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
1915 break;
1916
1917 case 'E':
1918 if (XVEC (x, i) != NULL)
1919 {
1920 register int j;
ffe0869b 1921 int len = XVECLEN (x, i);
15bbde2b 1922
ffe0869b 1923 if (copied && len > 0)
a4070a91 1924 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
ffe0869b 1925 for (j = 0; j < len; j++)
1926 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
15bbde2b 1927 }
1928 break;
1929 }
1930 }
1931 return x;
1932}
1933
1934/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1935 to look for shared sub-parts. */
1936
1937void
1938reset_used_flags (x)
1939 rtx x;
1940{
1941 register int i, j;
1942 register enum rtx_code code;
d2ca078f 1943 register const char *format_ptr;
15bbde2b 1944
1945 if (x == 0)
1946 return;
1947
1948 code = GET_CODE (x);
1949
c3418f42 1950 /* These types may be freely shared so we needn't do any resetting
15bbde2b 1951 for them. */
1952
1953 switch (code)
1954 {
1955 case REG:
1956 case QUEUED:
1957 case CONST_INT:
1958 case CONST_DOUBLE:
1959 case SYMBOL_REF:
1960 case CODE_LABEL:
1961 case PC:
1962 case CC0:
1963 return;
1964
1965 case INSN:
1966 case JUMP_INSN:
1967 case CALL_INSN:
1968 case NOTE:
1969 case LABEL_REF:
1970 case BARRIER:
1971 /* The chain of insns is not being copied. */
1972 return;
d823ba47 1973
0dbd1c74 1974 default:
1975 break;
15bbde2b 1976 }
1977
1978 x->used = 0;
1979
1980 format_ptr = GET_RTX_FORMAT (code);
1981 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1982 {
1983 switch (*format_ptr++)
1984 {
1985 case 'e':
1986 reset_used_flags (XEXP (x, i));
1987 break;
1988
1989 case 'E':
1990 for (j = 0; j < XVECLEN (x, i); j++)
1991 reset_used_flags (XVECEXP (x, i, j));
1992 break;
1993 }
1994 }
1995}
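
/* Illustrative sketch of the unsharing protocol used by unshare_all_rtl_again
   above: clear the used bits first, then let copy_rtx_if_shared replace
   anything that is reached twice.  The helper name is hypothetical.  */
#if 0
static void
example_unshare_one_insn (insn)
     rtx insn;
{
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));

  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
  REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
}
#endif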
1996\f
1997/* Copy X if necessary so that it won't be altered by changes in OTHER.
1998 Return X or the rtx for the pseudo reg the value of X was copied into.
1999 OTHER must be valid as a SET_DEST. */
2000
2001rtx
2002make_safe_from (x, other)
2003 rtx x, other;
2004{
2005 while (1)
2006 switch (GET_CODE (other))
2007 {
2008 case SUBREG:
2009 other = SUBREG_REG (other);
2010 break;
2011 case STRICT_LOW_PART:
2012 case SIGN_EXTEND:
2013 case ZERO_EXTEND:
2014 other = XEXP (other, 0);
2015 break;
2016 default:
2017 goto done;
2018 }
2019 done:
2020 if ((GET_CODE (other) == MEM
2021 && ! CONSTANT_P (x)
2022 && GET_CODE (x) != REG
2023 && GET_CODE (x) != SUBREG)
2024 || (GET_CODE (other) == REG
2025 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2026 || reg_mentioned_p (other, x))))
2027 {
2028 rtx temp = gen_reg_rtx (GET_MODE (x));
2029 emit_move_insn (temp, x);
2030 return temp;
2031 }
2032 return x;
2033}
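
/* Illustrative sketch for make_safe_from: protect a value that is still
   needed after a store into DEST.  The helper name is hypothetical.  */
#if 0
static rtx
example_store_then_use (dest, x)
     rtx dest, x;
{
  /* If the store into DEST could clobber X (e.g. DEST is a MEM and X is
     neither a register nor a constant), X is first copied into a fresh
     pseudo and that pseudo is returned instead.  */
  x = make_safe_from (x, dest);
  emit_move_insn (dest, const0_rtx);
  return x;                          /* Still holds the pre-store value.  */
}
#endif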
2034\f
2035/* Emission of insns (adding them to the doubly-linked list). */
2036
2037/* Return the first insn of the current sequence or current function. */
2038
2039rtx
2040get_insns ()
2041{
2042 return first_insn;
2043}
2044
2045/* Return the last insn emitted in current sequence or current function. */
2046
2047rtx
2048get_last_insn ()
2049{
2050 return last_insn;
2051}
2052
2053/* Specify a new insn as the last in the chain. */
2054
2055void
2056set_last_insn (insn)
2057 rtx insn;
2058{
2059 if (NEXT_INSN (insn) != 0)
2060 abort ();
2061 last_insn = insn;
2062}
2063
2064/* Return the last insn emitted, even if it is in a sequence now pushed. */
2065
2066rtx
2067get_last_insn_anywhere ()
2068{
2069 struct sequence_stack *stack;
2070 if (last_insn)
2071 return last_insn;
0a893c29 2072 for (stack = seq_stack; stack; stack = stack->next)
15bbde2b 2073 if (stack->last != 0)
2074 return stack->last;
2075 return 0;
2076}
2077
2078/* Return a number larger than any instruction's uid in this function. */
2079
2080int
2081get_max_uid ()
2082{
2083 return cur_insn_uid;
2084}
90b89d2c 2085
214d02d0 2086/* Renumber instructions so that no instruction UIDs are wasted. */
2087
90b89d2c 2088void
214d02d0 2089renumber_insns (stream)
2090 FILE *stream;
90b89d2c 2091{
2092 rtx insn;
90b89d2c 2093
214d02d0 2094 /* If we're not supposed to renumber instructions, don't. */
2095 if (!flag_renumber_insns)
2096 return;
2097
90b89d2c 2098 /* If there aren't that many instructions, then it's not really
2099 worth renumbering them. */
214d02d0 2100 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
90b89d2c 2101 return;
2102
2103 cur_insn_uid = 1;
2104
2105 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
214d02d0 2106 {
2107 if (stream)
d823ba47 2108 fprintf (stream, "Renumbering insn %d to %d\n",
214d02d0 2109 INSN_UID (insn), cur_insn_uid);
2110 INSN_UID (insn) = cur_insn_uid++;
2111 }
90b89d2c 2112}
15bbde2b 2113\f
2114/* Return the next insn. If it is a SEQUENCE, return the first insn
2115 of the sequence. */
2116
2117rtx
2118next_insn (insn)
2119 rtx insn;
2120{
2121 if (insn)
2122 {
2123 insn = NEXT_INSN (insn);
2124 if (insn && GET_CODE (insn) == INSN
2125 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2126 insn = XVECEXP (PATTERN (insn), 0, 0);
2127 }
2128
2129 return insn;
2130}
2131
2132/* Return the previous insn. If it is a SEQUENCE, return the last insn
2133 of the sequence. */
2134
2135rtx
2136previous_insn (insn)
2137 rtx insn;
2138{
2139 if (insn)
2140 {
2141 insn = PREV_INSN (insn);
2142 if (insn && GET_CODE (insn) == INSN
2143 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2144 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2145 }
2146
2147 return insn;
2148}
2149
2150/* Return the next insn after INSN that is not a NOTE. This routine does not
2151 look inside SEQUENCEs. */
2152
2153rtx
2154next_nonnote_insn (insn)
2155 rtx insn;
2156{
2157 while (insn)
2158 {
2159 insn = NEXT_INSN (insn);
2160 if (insn == 0 || GET_CODE (insn) != NOTE)
2161 break;
2162 }
2163
2164 return insn;
2165}
2166
2167/* Return the previous insn before INSN that is not a NOTE. This routine does
2168 not look inside SEQUENCEs. */
2169
2170rtx
2171prev_nonnote_insn (insn)
2172 rtx insn;
2173{
2174 while (insn)
2175 {
2176 insn = PREV_INSN (insn);
2177 if (insn == 0 || GET_CODE (insn) != NOTE)
2178 break;
2179 }
2180
2181 return insn;
2182}
2183
2184/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2185 or 0, if there is none. This routine does not look inside
a92771b8 2186 SEQUENCEs. */
15bbde2b 2187
2188rtx
2189next_real_insn (insn)
2190 rtx insn;
2191{
2192 while (insn)
2193 {
2194 insn = NEXT_INSN (insn);
2195 if (insn == 0 || GET_CODE (insn) == INSN
2196 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2197 break;
2198 }
2199
2200 return insn;
2201}
2202
2203/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2204 or 0, if there is none. This routine does not look inside
2205 SEQUENCEs. */
2206
2207rtx
2208prev_real_insn (insn)
2209 rtx insn;
2210{
2211 while (insn)
2212 {
2213 insn = PREV_INSN (insn);
2214 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2215 || GET_CODE (insn) == JUMP_INSN)
2216 break;
2217 }
2218
2219 return insn;
2220}
2221
2222/* Return nonzero if INSN really does something; this is the predicate used
 2223   by next_active_insn and prev_active_insn below.  Until reload has
 2224   completed, an active insn is the same as a real insn (see next_real_insn).  */
2225
2215ca0d 2226int
2227active_insn_p (insn)
2228 rtx insn;
2229{
2230 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2231 || (GET_CODE (insn) == INSN
2232 && (! reload_completed
2233 || (GET_CODE (PATTERN (insn)) != USE
2234 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2235}
2236
15bbde2b 2237rtx
2238next_active_insn (insn)
2239 rtx insn;
2240{
2241 while (insn)
2242 {
2243 insn = NEXT_INSN (insn);
2215ca0d 2244 if (insn == 0 || active_insn_p (insn))
15bbde2b 2245 break;
2246 }
2247
2248 return insn;
2249}
2250
2251/* Find the last insn before INSN that really does something. This routine
2252 does not look inside SEQUENCEs. Until reload has completed, this is the
2253 same as prev_real_insn. */
2254
2255rtx
2256prev_active_insn (insn)
2257 rtx insn;
2258{
2259 while (insn)
2260 {
2261 insn = PREV_INSN (insn);
2215ca0d 2262 if (insn == 0 || active_insn_p (insn))
15bbde2b 2263 break;
2264 }
2265
2266 return insn;
2267}
2268
2269/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2270
2271rtx
2272next_label (insn)
2273 rtx insn;
2274{
2275 while (insn)
2276 {
2277 insn = NEXT_INSN (insn);
2278 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2279 break;
2280 }
2281
2282 return insn;
2283}
2284
2285/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2286
2287rtx
2288prev_label (insn)
2289 rtx insn;
2290{
2291 while (insn)
2292 {
2293 insn = PREV_INSN (insn);
2294 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2295 break;
2296 }
2297
2298 return insn;
2299}
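
/* Illustrative sketch for the walkers above: count the INSN, CALL_INSN and
   JUMP_INSN instructions from FROM to the end of the chain.  The helper
   name is hypothetical.  */
#if 0
static int
example_count_real_insns (from)
     rtx from;
{
  int count = 0;
  rtx insn;

  for (insn = next_real_insn (from); insn; insn = next_real_insn (insn))
    count++;

  return count;
}
#endif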
2300\f
2301#ifdef HAVE_cc0
b15e0bba 2302/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2303 and REG_CC_USER notes so we can find it. */
2304
2305void
2306link_cc0_insns (insn)
2307 rtx insn;
2308{
2309 rtx user = next_nonnote_insn (insn);
2310
2311 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2312 user = XVECEXP (PATTERN (user), 0, 0);
2313
7014838c 2314 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
2315 REG_NOTES (user));
3ad7bb1c 2316 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
b15e0bba 2317}
2318
15bbde2b 2319/* Return the next insn that uses CC0 after INSN, which is assumed to
2320 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2321 applied to the result of this function should yield INSN).
2322
2323 Normally, this is simply the next insn. However, if a REG_CC_USER note
2324 is present, it contains the insn that uses CC0.
2325
2326 Return 0 if we can't find the insn. */
2327
2328rtx
2329next_cc0_user (insn)
2330 rtx insn;
2331{
b572011e 2332 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 2333
2334 if (note)
2335 return XEXP (note, 0);
2336
2337 insn = next_nonnote_insn (insn);
2338 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2339 insn = XVECEXP (PATTERN (insn), 0, 0);
2340
9204e736 2341 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
15bbde2b 2342 return insn;
2343
2344 return 0;
2345}
2346
2347/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2348 note, it is the previous insn. */
2349
2350rtx
2351prev_cc0_setter (insn)
2352 rtx insn;
2353{
b572011e 2354 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 2355
2356 if (note)
2357 return XEXP (note, 0);
2358
2359 insn = prev_nonnote_insn (insn);
2360 if (! sets_cc0_p (PATTERN (insn)))
2361 abort ();
2362
2363 return insn;
2364}
2365#endif
344dc2fa 2366
2367/* Increment the label uses for all labels present in rtx. */
2368
2369static void
2370mark_label_nuses (x)
2371 rtx x;
2372{
2373 register enum rtx_code code;
2374 register int i, j;
2375 register const char *fmt;
2376
2377 code = GET_CODE (x);
2378 if (code == LABEL_REF)
2379 LABEL_NUSES (XEXP (x, 0))++;
2380
2381 fmt = GET_RTX_FORMAT (code);
2382 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2383 {
2384 if (fmt[i] == 'e')
2385 mark_label_nuses (XEXP (x, i));
2386 else if (fmt[i] == 'E')
2387 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2388 mark_label_nuses (XVECEXP (x, i, j));
2389 }
2390}
2391
15bbde2b 2392\f
2393/* Try splitting insns that can be split for better scheduling.
2394 PAT is the pattern which might split.
2395 TRIAL is the insn providing PAT.
0e69a50a 2396 LAST is non-zero if we should return the last insn of the sequence produced.
15bbde2b 2397
2398 If this routine succeeds in splitting, it returns the first or last
0e69a50a 2399 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 2400 returns TRIAL. If the insn to be returned can be split, it will be. */
2401
2402rtx
0e69a50a 2403try_split (pat, trial, last)
15bbde2b 2404 rtx pat, trial;
0e69a50a 2405 int last;
15bbde2b 2406{
2407 rtx before = PREV_INSN (trial);
2408 rtx after = NEXT_INSN (trial);
2409 rtx seq = split_insns (pat, trial);
2410 int has_barrier = 0;
2411 rtx tem;
2412
2413 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2414 We may need to handle this specially. */
2415 if (after && GET_CODE (after) == BARRIER)
2416 {
2417 has_barrier = 1;
2418 after = NEXT_INSN (after);
2419 }
2420
2421 if (seq)
2422 {
2423 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2424	 The latter case normally arises only when the split is being done so
 2425	 that the result can, in turn, be split (SFmode on the 29k is an example).  */
2426 if (GET_CODE (seq) == SEQUENCE)
2427 {
5262c253 2428 int i;
5bb27a4b 2429 rtx eh_note;
d823ba47 2430
2431 /* Avoid infinite loop if any insn of the result matches
5262c253 2432 the original pattern. */
2433 for (i = 0; i < XVECLEN (seq, 0); i++)
d823ba47 2434 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
5262c253 2435 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
d823ba47 2436 return trial;
5262c253 2437
5377f687 2438 /* Mark labels. */
2439 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2440 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
2441 mark_jump_label (PATTERN (XVECEXP (seq, 0, i)),
2442 XVECEXP (seq, 0, i), 0, 0);
15bbde2b 2443
1b3a64a8 2444 /* If we are splitting a CALL_INSN, look for the CALL_INSN
2445 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
2446 if (GET_CODE (trial) == CALL_INSN)
2447 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2448 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
2449 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
2450 = CALL_INSN_FUNCTION_USAGE (trial);
2451
5bb27a4b 2452 /* Copy EH notes. */
2453 if ((eh_note = find_reg_note (trial, REG_EH_REGION, NULL_RTX)))
2454 for (i = 0; i < XVECLEN (seq, 0); i++)
2455 {
2456 rtx insn = XVECEXP (seq, 0, i);
2457 if (GET_CODE (insn) == CALL_INSN
d823ba47 2458 || (flag_non_call_exceptions
5bb27a4b 2459 && may_trap_p (PATTERN (insn))))
d823ba47 2460 REG_NOTES (insn)
5bb27a4b 2461 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
2462 REG_NOTES (insn));
2463 }
2464
344dc2fa 2465 /* If there are LABELS inside the split insns increment the
2466 usage count so we don't delete the label. */
2467 if (GET_CODE (trial) == INSN)
2468 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2469 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
2470 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
2471
15bbde2b 2472 tem = emit_insn_after (seq, before);
2473
2474 delete_insn (trial);
2475 if (has_barrier)
2476 emit_barrier_after (tem);
0e69a50a 2477
2478 /* Recursively call try_split for each new insn created; by the
2479 time control returns here that insn will be fully split, so
2480 set LAST and continue from the insn after the one returned.
06343e77 2481 We can't use next_active_insn here since AFTER may be a note.
ab6683d6 2482	 Ignore deleted insns, which can occur if not optimizing.  */
9204e736 2483 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
2484 if (! INSN_DELETED_P (tem) && INSN_P (tem))
06343e77 2485 tem = try_split (PATTERN (tem), tem, 1);
15bbde2b 2486 }
2487 /* Avoid infinite loop if the result matches the original pattern. */
2488 else if (rtx_equal_p (seq, pat))
2489 return trial;
2490 else
2491 {
2492 PATTERN (trial) = seq;
2493 INSN_CODE (trial) = -1;
0e69a50a 2494 try_split (seq, trial, last);
15bbde2b 2495 }
2496
0e69a50a 2497 /* Return either the first or the last insn, depending on which was
2498 requested. */
d823ba47 2499 return last
2500 ? (after ? prev_active_insn (after) : last_insn)
068a89d7 2501 : next_active_insn (before);
15bbde2b 2502 }
2503
2504 return trial;
2505}
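
/* Illustrative sketch for try_split: ask for a machine-dependent split of a
   single insn and return the last insn that replaced it, or the insn itself
   if no split was possible.  The helper name is hypothetical.  */
#if 0
static rtx
example_split_one (insn)
     rtx insn;
{
  return try_split (PATTERN (insn), insn, 1);
}
#endif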
2506\f
2507/* Make and return an INSN rtx, initializing all its slots.
6a84e367 2508   Store PATTERN in the pattern slot.  */
15bbde2b 2509
2510rtx
6a84e367 2511make_insn_raw (pattern)
15bbde2b 2512 rtx pattern;
15bbde2b 2513{
2514 register rtx insn;
2515
d7c47c0e 2516 insn = rtx_alloc (INSN);
15bbde2b 2517
575333f9 2518 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 2519 PATTERN (insn) = pattern;
2520 INSN_CODE (insn) = -1;
fc92fa61 2521 LOG_LINKS (insn) = NULL;
2522 REG_NOTES (insn) = NULL;
15bbde2b 2523
fe7f701d 2524#ifdef ENABLE_RTL_CHECKING
2525 if (insn
9204e736 2526 && INSN_P (insn)
fe7f701d 2527 && (returnjump_p (insn)
2528 || (GET_CODE (insn) == SET
2529 && SET_DEST (insn) == pc_rtx)))
2530 {
2531 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
2532 debug_rtx (insn);
2533 }
2534#endif
d823ba47 2535
15bbde2b 2536 return insn;
2537}
2538
2539/* Like `make_insn_raw' but make a JUMP_INSN instead of an INSN.  */
2540
2541static rtx
6a84e367 2542make_jump_insn_raw (pattern)
15bbde2b 2543 rtx pattern;
15bbde2b 2544{
2545 register rtx insn;
2546
6a84e367 2547 insn = rtx_alloc (JUMP_INSN);
fc92fa61 2548 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 2549
2550 PATTERN (insn) = pattern;
2551 INSN_CODE (insn) = -1;
fc92fa61 2552 LOG_LINKS (insn) = NULL;
2553 REG_NOTES (insn) = NULL;
2554 JUMP_LABEL (insn) = NULL;
15bbde2b 2555
2556 return insn;
2557}
6e911104 2558
2559/* Like `make_insn_raw' but make a CALL_INSN instead of an INSN.  */
2560
2561static rtx
2562make_call_insn_raw (pattern)
2563 rtx pattern;
2564{
2565 register rtx insn;
2566
2567 insn = rtx_alloc (CALL_INSN);
2568 INSN_UID (insn) = cur_insn_uid++;
2569
2570 PATTERN (insn) = pattern;
2571 INSN_CODE (insn) = -1;
2572 LOG_LINKS (insn) = NULL;
2573 REG_NOTES (insn) = NULL;
2574 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
2575
2576 return insn;
2577}
15bbde2b 2578\f
2579/* Add INSN to the end of the doubly-linked list.
2580 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
2581
2582void
2583add_insn (insn)
2584 register rtx insn;
2585{
2586 PREV_INSN (insn) = last_insn;
2587 NEXT_INSN (insn) = 0;
2588
2589 if (NULL != last_insn)
2590 NEXT_INSN (last_insn) = insn;
2591
2592 if (NULL == first_insn)
2593 first_insn = insn;
2594
2595 last_insn = insn;
2596}
2597
312de84d 2598/* Add INSN into the doubly-linked list after insn AFTER. This and
2599 the next should be the only functions called to insert an insn once
f65c10c0 2600 delay slots have been filled since only they know how to update a
312de84d 2601 SEQUENCE. */
15bbde2b 2602
2603void
2604add_insn_after (insn, after)
2605 rtx insn, after;
2606{
2607 rtx next = NEXT_INSN (after);
2608
9ea33026 2609 if (optimize && INSN_DELETED_P (after))
f65c10c0 2610 abort ();
2611
15bbde2b 2612 NEXT_INSN (insn) = next;
2613 PREV_INSN (insn) = after;
2614
2615 if (next)
2616 {
2617 PREV_INSN (next) = insn;
2618 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2619 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
2620 }
2621 else if (last_insn == after)
2622 last_insn = insn;
2623 else
2624 {
0a893c29 2625 struct sequence_stack *stack = seq_stack;
15bbde2b 2626 /* Scan all pending sequences too. */
2627 for (; stack; stack = stack->next)
2628 if (after == stack->last)
398f4855 2629 {
2630 stack->last = insn;
2631 break;
2632 }
312de84d 2633
2634 if (stack == 0)
2635 abort ();
15bbde2b 2636 }
2637
2638 NEXT_INSN (after) = insn;
2639 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
2640 {
2641 rtx sequence = PATTERN (after);
2642 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2643 }
2644}
2645
312de84d 2646/* Add INSN into the doubly-linked list before insn BEFORE. This and
2647 the previous should be the only functions called to insert an insn once
f65c10c0 2648 delay slots have been filled since only they know how to update a
312de84d 2649 SEQUENCE. */
2650
2651void
2652add_insn_before (insn, before)
2653 rtx insn, before;
2654{
2655 rtx prev = PREV_INSN (before);
2656
9ea33026 2657 if (optimize && INSN_DELETED_P (before))
f65c10c0 2658 abort ();
2659
312de84d 2660 PREV_INSN (insn) = prev;
2661 NEXT_INSN (insn) = before;
2662
2663 if (prev)
2664 {
2665 NEXT_INSN (prev) = insn;
2666 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2667 {
2668 rtx sequence = PATTERN (prev);
2669 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2670 }
2671 }
2672 else if (first_insn == before)
2673 first_insn = insn;
2674 else
2675 {
0a893c29 2676 struct sequence_stack *stack = seq_stack;
312de84d 2677 /* Scan all pending sequences too. */
2678 for (; stack; stack = stack->next)
2679 if (before == stack->first)
398f4855 2680 {
2681 stack->first = insn;
2682 break;
2683 }
312de84d 2684
2685 if (stack == 0)
2686 abort ();
2687 }
2688
2689 PREV_INSN (before) = insn;
2690 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
2691 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
2692}
2693
7ddcf2bf 2694/* Remove an insn from its doubly-linked list. This function knows how
2695 to handle sequences. */
2696void
2697remove_insn (insn)
2698 rtx insn;
2699{
2700 rtx next = NEXT_INSN (insn);
2701 rtx prev = PREV_INSN (insn);
2702 if (prev)
2703 {
2704 NEXT_INSN (prev) = next;
2705 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2706 {
2707 rtx sequence = PATTERN (prev);
2708 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
2709 }
2710 }
2711 else if (first_insn == insn)
2712 first_insn = next;
2713 else
2714 {
0a893c29 2715 struct sequence_stack *stack = seq_stack;
7ddcf2bf 2716 /* Scan all pending sequences too. */
2717 for (; stack; stack = stack->next)
2718 if (insn == stack->first)
2719 {
2720 stack->first = next;
2721 break;
2722 }
2723
2724 if (stack == 0)
2725 abort ();
2726 }
2727
2728 if (next)
2729 {
2730 PREV_INSN (next) = prev;
2731 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2732 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2733 }
2734 else if (last_insn == insn)
2735 last_insn = prev;
2736 else
2737 {
0a893c29 2738 struct sequence_stack *stack = seq_stack;
7ddcf2bf 2739 /* Scan all pending sequences too. */
2740 for (; stack; stack = stack->next)
2741 if (insn == stack->last)
2742 {
2743 stack->last = prev;
2744 break;
2745 }
2746
2747 if (stack == 0)
2748 abort ();
2749 }
2750}
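
/* Illustrative sketch for remove_insn/add_insn_after: splice one insn out of
   the chain and re-link it after another insn; both routines keep any
   enclosing SEQUENCE consistent.  The helper name is hypothetical.  */
#if 0
static void
example_move_insn_after (insn, after)
     rtx insn, after;
{
  remove_insn (insn);
  add_insn_after (insn, after);
}
#endif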
2751
15bbde2b 2752/* Delete all insns made since FROM.
2753 FROM becomes the new last instruction. */
2754
2755void
2756delete_insns_since (from)
2757 rtx from;
2758{
2759 if (from == 0)
2760 first_insn = 0;
2761 else
2762 NEXT_INSN (from) = 0;
2763 last_insn = from;
2764}
2765
34e2ddcd 2766/* This function is deprecated; please use sequences instead.
2767
2768 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 2769 The insns to be moved are those between FROM and TO.
2770 They are moved to a new position after the insn AFTER.
2771 AFTER must not be FROM or TO or any insn in between.
2772
2773 This function does not know about SEQUENCEs and hence should not be
2774 called after delay-slot filling has been done. */
2775
2776void
2777reorder_insns (from, to, after)
2778 rtx from, to, after;
2779{
2780 /* Splice this bunch out of where it is now. */
2781 if (PREV_INSN (from))
2782 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
2783 if (NEXT_INSN (to))
2784 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
2785 if (last_insn == to)
2786 last_insn = PREV_INSN (from);
2787 if (first_insn == from)
2788 first_insn = NEXT_INSN (to);
2789
2790 /* Make the new neighbors point to it and it to them. */
2791 if (NEXT_INSN (after))
2792 PREV_INSN (NEXT_INSN (after)) = to;
2793
2794 NEXT_INSN (to) = NEXT_INSN (after);
2795 PREV_INSN (from) = after;
2796 NEXT_INSN (after) = from;
2797 if (after == last_insn)
2798 last_insn = to;
2799}
2800
2801/* Return the line note insn at or before INSN (the note in effect at INSN).  */
2802
2803static rtx
2804find_line_note (insn)
2805 rtx insn;
2806{
2807 if (no_line_numbers)
2808 return 0;
2809
2810 for (; insn; insn = PREV_INSN (insn))
2811 if (GET_CODE (insn) == NOTE
2812 && NOTE_LINE_NUMBER (insn) >= 0)
2813 break;
2814
2815 return insn;
2816}
2817
2818/* Like reorder_insns, but inserts line notes to preserve the line numbers
2819 of the moved insns when debugging. This may insert a note between AFTER
2820 and FROM, and another one after TO. */
2821
2822void
2823reorder_insns_with_line_notes (from, to, after)
2824 rtx from, to, after;
2825{
2826 rtx from_line = find_line_note (from);
2827 rtx after_line = find_line_note (after);
2828
2829 reorder_insns (from, to, after);
2830
2831 if (from_line == after_line)
2832 return;
2833
2834 if (from_line)
2835 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2836 NOTE_LINE_NUMBER (from_line),
2837 after);
2838 if (after_line)
2839 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2840 NOTE_LINE_NUMBER (after_line),
2841 to);
2842}
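
/* Illustrative sketch for reorder_insns: hoist a single insn so that it
   immediately follows AFTER.  The helper name is hypothetical; as noted
   above, this must not be used once delay slots have been filled.  */
#if 0
static void
example_hoist_insn (insn, after)
     rtx insn, after;
{
  reorder_insns (insn, insn, after);
}
#endif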
90b89d2c 2843
0a78547b 2844/* Remove unnecessary notes from the instruction stream. */
90b89d2c 2845
2846void
0a78547b 2847remove_unnecessary_notes ()
90b89d2c 2848{
92cfc4a8 2849 rtx block_stack = NULL_RTX;
2850 rtx eh_stack = NULL_RTX;
90b89d2c 2851 rtx insn;
2852 rtx next;
92cfc4a8 2853 rtx tmp;
90b89d2c 2854
f1ab82be 2855 /* We must not remove the first instruction in the function because
2856 the compiler depends on the first instruction being a note. */
90b89d2c 2857 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
2858 {
2859 /* Remember what's next. */
2860 next = NEXT_INSN (insn);
2861
2862 /* We're only interested in notes. */
2863 if (GET_CODE (insn) != NOTE)
2864 continue;
2865
92cfc4a8 2866 switch (NOTE_LINE_NUMBER (insn))
5846cb0f 2867 {
92cfc4a8 2868 case NOTE_INSN_DELETED:
2869 remove_insn (insn);
2870 break;
2871
2872 case NOTE_INSN_EH_REGION_BEG:
2873 eh_stack = alloc_INSN_LIST (insn, eh_stack);
2874 break;
2875
2876 case NOTE_INSN_EH_REGION_END:
2877 /* Too many end notes. */
2878 if (eh_stack == NULL_RTX)
2879 abort ();
2880 /* Mismatched nesting. */
2881 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
2882 abort ();
2883 tmp = eh_stack;
2884 eh_stack = XEXP (eh_stack, 1);
2885 free_INSN_LIST_node (tmp);
2886 break;
2887
2888 case NOTE_INSN_BLOCK_BEG:
2889 /* By now, all notes indicating lexical blocks should have
2890 NOTE_BLOCK filled in. */
2891 if (NOTE_BLOCK (insn) == NULL_TREE)
2892 abort ();
2893 block_stack = alloc_INSN_LIST (insn, block_stack);
2894 break;
2895
2896 case NOTE_INSN_BLOCK_END:
2897 /* Too many end notes. */
2898 if (block_stack == NULL_RTX)
2899 abort ();
2900 /* Mismatched nesting. */
2901 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
2902 abort ();
2903 tmp = block_stack;
2904 block_stack = XEXP (block_stack, 1);
2905 free_INSN_LIST_node (tmp);
2906
5846cb0f 2907 /* Scan back to see if there are any non-note instructions
2908 between INSN and the beginning of this block. If not,
2909 then there is no PC range in the generated code that will
2910 actually be in this block, so there's no point in
2911 remembering the existence of the block. */
92cfc4a8 2912 for (tmp = PREV_INSN (insn); tmp ; tmp = PREV_INSN (tmp))
5846cb0f 2913 {
2914 /* This block contains a real instruction. Note that we
2915 don't include labels; if the only thing in the block
2916 is a label, then there are still no PC values that
2917 lie within the block. */
92cfc4a8 2918 if (INSN_P (tmp))
5846cb0f 2919 break;
2920
2921 /* We're only interested in NOTEs. */
92cfc4a8 2922 if (GET_CODE (tmp) != NOTE)
5846cb0f 2923 continue;
2924
92cfc4a8 2925 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
5846cb0f 2926 {
92cfc4a8 2927 /* We just verified that this BLOCK matches us
2928 with the block_stack check above. */
0a78547b 2929 if (debug_ignore_block (NOTE_BLOCK (insn)))
e33dbbdf 2930 {
92cfc4a8 2931 remove_insn (tmp);
e33dbbdf 2932 remove_insn (insn);
2933 }
5846cb0f 2934 break;
2935 }
92cfc4a8 2936 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
5846cb0f 2937 /* There's a nested block. We need to leave the
2938 current block in place since otherwise the debugger
2939 wouldn't be able to show symbols from our block in
2940 the nested block. */
2941 break;
2942 }
2943 }
90b89d2c 2944 }
92cfc4a8 2945
2946 /* Too many begin notes. */
2947 if (block_stack || eh_stack)
2948 abort ();
90b89d2c 2949}
2950
15bbde2b 2951\f
2952/* Emit an insn of given code and pattern
2953 at a specified place within the doubly-linked list. */
2954
2955/* Make an instruction with body PATTERN
2956 and output it before the instruction BEFORE. */
2957
2958rtx
2959emit_insn_before (pattern, before)
2960 register rtx pattern, before;
2961{
2962 register rtx insn = before;
2963
2964 if (GET_CODE (pattern) == SEQUENCE)
2965 {
2966 register int i;
2967
2968 for (i = 0; i < XVECLEN (pattern, 0); i++)
2969 {
2970 insn = XVECEXP (pattern, 0, i);
312de84d 2971 add_insn_before (insn, before);
15bbde2b 2972 }
15bbde2b 2973 }
2974 else
2975 {
6a84e367 2976 insn = make_insn_raw (pattern);
312de84d 2977 add_insn_before (insn, before);
15bbde2b 2978 }
2979
2980 return insn;
2981}
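
/* Illustrative sketch for emit_insn_before: materialize a register-clearing
   SET just ahead of an existing insn.  The helper name is hypothetical.  */
#if 0
static rtx
example_clear_before (reg, before)
     rtx reg, before;
{
  return emit_insn_before (gen_rtx_SET (VOIDmode, reg, const0_rtx), before);
}
#endif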
2982
f3d96a58 2983/* Similar to emit_insn_before, but update basic block boundaries as well. */
2984
2985rtx
2986emit_block_insn_before (pattern, before, block)
2987 rtx pattern, before;
2988 basic_block block;
2989{
2990 rtx prev = PREV_INSN (before);
2991 rtx r = emit_insn_before (pattern, before);
2992 if (block && block->head == before)
2993 block->head = NEXT_INSN (prev);
2994 return r;
2995}
2996
15bbde2b 2997/* Make an instruction with body PATTERN and code JUMP_INSN
2998 and output it before the instruction BEFORE. */
2999
3000rtx
3001emit_jump_insn_before (pattern, before)
3002 register rtx pattern, before;
3003{
3004 register rtx insn;
3005
3006 if (GET_CODE (pattern) == SEQUENCE)
3007 insn = emit_insn_before (pattern, before);
3008 else
3009 {
8f060628 3010 insn = make_jump_insn_raw (pattern);
312de84d 3011 add_insn_before (insn, before);
15bbde2b 3012 }
3013
3014 return insn;
3015}
3016
3017/* Make an instruction with body PATTERN and code CALL_INSN
3018 and output it before the instruction BEFORE. */
3019
3020rtx
3021emit_call_insn_before (pattern, before)
3022 register rtx pattern, before;
3023{
6e911104 3024 register rtx insn;
3025
3026 if (GET_CODE (pattern) == SEQUENCE)
3027 insn = emit_insn_before (pattern, before);
3028 else
3029 {
3030 insn = make_call_insn_raw (pattern);
312de84d 3031 add_insn_before (insn, before);
6e911104 3032 PUT_CODE (insn, CALL_INSN);
3033 }
3034
15bbde2b 3035 return insn;
3036}
3037
3038/* Make an insn of code BARRIER
71caadc0 3039 and output it before the insn BEFORE. */
15bbde2b 3040
3041rtx
3042emit_barrier_before (before)
3043 register rtx before;
3044{
3045 register rtx insn = rtx_alloc (BARRIER);
3046
3047 INSN_UID (insn) = cur_insn_uid++;
3048
312de84d 3049 add_insn_before (insn, before);
15bbde2b 3050 return insn;
3051}
3052
71caadc0 3053/* Emit the label LABEL before the insn BEFORE. */
3054
3055rtx
3056emit_label_before (label, before)
3057 rtx label, before;
3058{
3059 /* This can be called twice for the same label as a result of the
3060 confusion that follows a syntax error! So make it harmless. */
3061 if (INSN_UID (label) == 0)
3062 {
3063 INSN_UID (label) = cur_insn_uid++;
3064 add_insn_before (label, before);
3065 }
3066
3067 return label;
3068}
3069
15bbde2b 3070/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3071
3072rtx
3073emit_note_before (subtype, before)
3074 int subtype;
3075 rtx before;
3076{
3077 register rtx note = rtx_alloc (NOTE);
3078 INSN_UID (note) = cur_insn_uid++;
3079 NOTE_SOURCE_FILE (note) = 0;
3080 NOTE_LINE_NUMBER (note) = subtype;
3081
312de84d 3082 add_insn_before (note, before);
15bbde2b 3083 return note;
3084}
3085\f
3086/* Make an insn of code INSN with body PATTERN
3087 and output it after the insn AFTER. */
3088
3089rtx
3090emit_insn_after (pattern, after)
3091 register rtx pattern, after;
3092{
3093 register rtx insn = after;
3094
3095 if (GET_CODE (pattern) == SEQUENCE)
3096 {
3097 register int i;
3098
3099 for (i = 0; i < XVECLEN (pattern, 0); i++)
3100 {
3101 insn = XVECEXP (pattern, 0, i);
3102 add_insn_after (insn, after);
3103 after = insn;
3104 }
15bbde2b 3105 }
3106 else
3107 {
6a84e367 3108 insn = make_insn_raw (pattern);
15bbde2b 3109 add_insn_after (insn, after);
3110 }
3111
3112 return insn;
3113}
3114
1bea98fb 3115/* Similar to emit_insn_after, except that line notes are to be inserted so
3116 as to act as if this insn were at FROM. */
3117
3118void
3119emit_insn_after_with_line_notes (pattern, after, from)
3120 rtx pattern, after, from;
3121{
3122 rtx from_line = find_line_note (from);
3123 rtx after_line = find_line_note (after);
3124 rtx insn = emit_insn_after (pattern, after);
3125
3126 if (from_line)
3127 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3128 NOTE_LINE_NUMBER (from_line),
3129 after);
3130
3131 if (after_line)
3132 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3133 NOTE_LINE_NUMBER (after_line),
3134 insn);
3135}
3136
f3d96a58 3137/* Similar to emit_insn_after, but update basic block boundaries as well. */
3138
3139rtx
3140emit_block_insn_after (pattern, after, block)
3141 rtx pattern, after;
3142 basic_block block;
3143{
3144 rtx r = emit_insn_after (pattern, after);
3145 if (block && block->end == after)
3146 block->end = r;
3147 return r;
3148}
3149
15bbde2b 3150/* Make an insn of code JUMP_INSN with body PATTERN
3151 and output it after the insn AFTER. */
3152
3153rtx
3154emit_jump_insn_after (pattern, after)
3155 register rtx pattern, after;
3156{
3157 register rtx insn;
3158
3159 if (GET_CODE (pattern) == SEQUENCE)
3160 insn = emit_insn_after (pattern, after);
3161 else
3162 {
8f060628 3163 insn = make_jump_insn_raw (pattern);
15bbde2b 3164 add_insn_after (insn, after);
3165 }
3166
3167 return insn;
3168}
3169
3170/* Make an insn of code BARRIER
3171 and output it after the insn AFTER. */
3172
3173rtx
3174emit_barrier_after (after)
3175 register rtx after;
3176{
3177 register rtx insn = rtx_alloc (BARRIER);
3178
3179 INSN_UID (insn) = cur_insn_uid++;
3180
3181 add_insn_after (insn, after);
3182 return insn;
3183}
3184
3185/* Emit the label LABEL after the insn AFTER. */
3186
3187rtx
3188emit_label_after (label, after)
3189 rtx label, after;
3190{
3191 /* This can be called twice for the same label
3192 as a result of the confusion that follows a syntax error!
3193 So make it harmless. */
3194 if (INSN_UID (label) == 0)
3195 {
3196 INSN_UID (label) = cur_insn_uid++;
3197 add_insn_after (label, after);
3198 }
3199
3200 return label;
3201}
3202
3203/* Emit a note of subtype SUBTYPE after the insn AFTER. */
3204
3205rtx
3206emit_note_after (subtype, after)
3207 int subtype;
3208 rtx after;
3209{
3210 register rtx note = rtx_alloc (NOTE);
3211 INSN_UID (note) = cur_insn_uid++;
3212 NOTE_SOURCE_FILE (note) = 0;
3213 NOTE_LINE_NUMBER (note) = subtype;
3214 add_insn_after (note, after);
3215 return note;
3216}
3217
3218/* Emit a line note for FILE and LINE after the insn AFTER. */
3219
3220rtx
3221emit_line_note_after (file, line, after)
9a356c3c 3222 const char *file;
15bbde2b 3223 int line;
3224 rtx after;
3225{
3226 register rtx note;
3227
3228 if (no_line_numbers && line > 0)
3229 {
3230 cur_insn_uid++;
3231 return 0;
3232 }
3233
3234 note = rtx_alloc (NOTE);
3235 INSN_UID (note) = cur_insn_uid++;
3236 NOTE_SOURCE_FILE (note) = file;
3237 NOTE_LINE_NUMBER (note) = line;
3238 add_insn_after (note, after);
3239 return note;
3240}
3241\f
3242/* Make an insn of code INSN with pattern PATTERN
3243 and add it to the end of the doubly-linked list.
3244 If PATTERN is a SEQUENCE, take the elements of it
3245 and emit an insn for each element.
3246
3247 Returns the last insn emitted. */
3248
3249rtx
3250emit_insn (pattern)
3251 rtx pattern;
3252{
3253 rtx insn = last_insn;
3254
3255 if (GET_CODE (pattern) == SEQUENCE)
3256 {
3257 register int i;
3258
3259 for (i = 0; i < XVECLEN (pattern, 0); i++)
3260 {
3261 insn = XVECEXP (pattern, 0, i);
3262 add_insn (insn);
3263 }
15bbde2b 3264 }
3265 else
3266 {
6a84e367 3267 insn = make_insn_raw (pattern);
15bbde2b 3268 add_insn (insn);
3269 }
3270
3271 return insn;
3272}
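
/* Illustrative sketch for emit_insn: append a register-to-register copy to
   the end of the current chain (or current sequence).  The helper name is
   hypothetical; emit_move_insn in expr.c is the usual higher-level entry
   point for moves.  */
#if 0
static rtx
example_emit_copy (dest, src)
     rtx dest, src;
{
  return emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
#endif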
3273
3274/* Emit the insns in a chain starting with INSN.
3275 Return the last insn emitted. */
3276
3277rtx
3278emit_insns (insn)
3279 rtx insn;
3280{
3281 rtx last = 0;
3282
3283 while (insn)
3284 {
3285 rtx next = NEXT_INSN (insn);
3286 add_insn (insn);
3287 last = insn;
3288 insn = next;
3289 }
3290
3291 return last;
3292}
3293
3294/* Emit the insns in a chain starting with INSN and place them in front of
3295 the insn BEFORE. Return the last insn emitted. */
3296
3297rtx
3298emit_insns_before (insn, before)
3299 rtx insn;
3300 rtx before;
3301{
3302 rtx last = 0;
3303
3304 while (insn)
3305 {
3306 rtx next = NEXT_INSN (insn);
312de84d 3307 add_insn_before (insn, before);
15bbde2b 3308 last = insn;
3309 insn = next;
3310 }
3311
3312 return last;
3313}
3314
b36b07d8 3315/* Emit the insns in a chain starting with FIRST and place them in back of
3316 the insn AFTER. Return the last insn emitted. */
3317
3318rtx
3319emit_insns_after (first, after)
3320 register rtx first;
3321 register rtx after;
3322{
3323 register rtx last;
3324 register rtx after_after;
3325
3326 if (!after)
3327 abort ();
3328
3329 if (!first)
3330 return first;
3331
3332 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3333 continue;
3334
3335 after_after = NEXT_INSN (after);
3336
3337 NEXT_INSN (after) = first;
3338 PREV_INSN (first) = after;
3339 NEXT_INSN (last) = after_after;
3340 if (after_after)
3341 PREV_INSN (after_after) = last;
3342
0c905390 3343 if (after == last_insn)
3344 last_insn = last;
b36b07d8 3345 return last;
3346}
3347
15bbde2b 3348/* Make an insn of code JUMP_INSN with pattern PATTERN
3349 and add it to the end of the doubly-linked list. */
3350
3351rtx
3352emit_jump_insn (pattern)
3353 rtx pattern;
3354{
3355 if (GET_CODE (pattern) == SEQUENCE)
3356 return emit_insn (pattern);
3357 else
3358 {
8f060628 3359 register rtx insn = make_jump_insn_raw (pattern);
15bbde2b 3360 add_insn (insn);
3361 return insn;
3362 }
3363}
3364
3365/* Make an insn of code CALL_INSN with pattern PATTERN
3366 and add it to the end of the doubly-linked list. */
3367
3368rtx
3369emit_call_insn (pattern)
3370 rtx pattern;
3371{
3372 if (GET_CODE (pattern) == SEQUENCE)
3373 return emit_insn (pattern);
3374 else
3375 {
6e911104 3376 register rtx insn = make_call_insn_raw (pattern);
15bbde2b 3377 add_insn (insn);
3378 PUT_CODE (insn, CALL_INSN);
3379 return insn;
3380 }
3381}
3382
3383/* Add the label LABEL to the end of the doubly-linked list. */
3384
3385rtx
3386emit_label (label)
3387 rtx label;
3388{
3389 /* This can be called twice for the same label
3390 as a result of the confusion that follows a syntax error!
3391 So make it harmless. */
3392 if (INSN_UID (label) == 0)
3393 {
3394 INSN_UID (label) = cur_insn_uid++;
3395 add_insn (label);
3396 }
3397 return label;
3398}
3399
3400/* Make an insn of code BARRIER
3401 and add it to the end of the doubly-linked list. */
3402
3403rtx
3404emit_barrier ()
3405{
3406 register rtx barrier = rtx_alloc (BARRIER);
3407 INSN_UID (barrier) = cur_insn_uid++;
3408 add_insn (barrier);
3409 return barrier;
3410}
3411
3412/* Make an insn of code NOTE
3413 with data-fields specified by FILE and LINE
3414 and add it to the end of the doubly-linked list,
3415 but only if line-numbers are desired for debugging info. */
3416
3417rtx
3418emit_line_note (file, line)
9a356c3c 3419 const char *file;
15bbde2b 3420 int line;
3421{
21b005ed 3422 set_file_and_line_for_stmt (file, line);
15bbde2b 3423
3424#if 0
3425 if (no_line_numbers)
3426 return 0;
3427#endif
3428
3429 return emit_note (file, line);
3430}
3431
3432/* Make an insn of code NOTE
3433 with data-fields specified by FILE and LINE
3434 and add it to the end of the doubly-linked list.
3435 If it is a line-number NOTE, omit it if it matches the previous one. */
3436
3437rtx
3438emit_note (file, line)
9a356c3c 3439 const char *file;
15bbde2b 3440 int line;
3441{
3442 register rtx note;
3443
3444 if (line > 0)
3445 {
3446 if (file && last_filename && !strcmp (file, last_filename)
3447 && line == last_linenum)
3448 return 0;
3449 last_filename = file;
3450 last_linenum = line;
3451 }
3452
3453 if (no_line_numbers && line > 0)
3454 {
3455 cur_insn_uid++;
3456 return 0;
3457 }
3458
3459 note = rtx_alloc (NOTE);
3460 INSN_UID (note) = cur_insn_uid++;
3461 NOTE_SOURCE_FILE (note) = file;
3462 NOTE_LINE_NUMBER (note) = line;
3463 add_insn (note);
3464 return note;
3465}
3466
8d54063f 3467/* Like emit_line_note, but don't omit the note even if LINE matches the
   previous one.  */
15bbde2b 3468
3469rtx
3470emit_line_note_force (file, line)
9a356c3c 3471 const char *file;
15bbde2b 3472 int line;
3473{
3474 last_linenum = -1;
3475 return emit_line_note (file, line);
3476}
3477
3478/* Cause next statement to emit a line note even if the line number
3479 has not changed. This is used at the beginning of a function. */
3480
3481void
3482force_next_line_note ()
3483{
3484 last_linenum = -1;
3485}
f1934a33 3486
3487/* Place a note of KIND on insn INSN with DATUM as the datum. If a
3488 note of this type already exists, remove it first. */
3489
d823ba47 3490void
f1934a33 3491set_unique_reg_note (insn, kind, datum)
3492 rtx insn;
3493 enum reg_note kind;
3494 rtx datum;
3495{
3496 rtx note = find_reg_note (insn, kind, NULL_RTX);
3497
3498 /* First remove the note if there already is one. */
d823ba47 3499 if (note)
f1934a33 3500 remove_note (insn, note);
3501
3502 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
3503}
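
/* Illustrative sketch for set_unique_reg_note: record that INSN computes a
   known constant, replacing any REG_EQUAL note already attached.  The
   helper name and the constant are hypothetical.  */
#if 0
static void
example_note_constant_result (insn)
     rtx insn;
{
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
}
#endif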
15bbde2b 3504\f
3505/* Return an indication of which type of insn should have X as a body.
3506 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
3507
3508enum rtx_code
3509classify_insn (x)
3510 rtx x;
3511{
3512 if (GET_CODE (x) == CODE_LABEL)
3513 return CODE_LABEL;
3514 if (GET_CODE (x) == CALL)
3515 return CALL_INSN;
3516 if (GET_CODE (x) == RETURN)
3517 return JUMP_INSN;
3518 if (GET_CODE (x) == SET)
3519 {
3520 if (SET_DEST (x) == pc_rtx)
3521 return JUMP_INSN;
3522 else if (GET_CODE (SET_SRC (x)) == CALL)
3523 return CALL_INSN;
3524 else
3525 return INSN;
3526 }
3527 if (GET_CODE (x) == PARALLEL)
3528 {
3529 register int j;
3530 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
3531 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
3532 return CALL_INSN;
3533 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3534 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
3535 return JUMP_INSN;
3536 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3537 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
3538 return CALL_INSN;
3539 }
3540 return INSN;
3541}
3542
3543/* Emit the rtl pattern X as an appropriate kind of insn.
3544 If X is a label, it is simply added into the insn chain. */
3545
3546rtx
3547emit (x)
3548 rtx x;
3549{
3550 enum rtx_code code = classify_insn (x);
3551
3552 if (code == CODE_LABEL)
3553 return emit_label (x);
3554 else if (code == INSN)
3555 return emit_insn (x);
3556 else if (code == JUMP_INSN)
3557 {
3558 register rtx insn = emit_jump_insn (x);
b2816317 3559 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
15bbde2b 3560 return emit_barrier ();
3561 return insn;
3562 }
3563 else if (code == CALL_INSN)
3564 return emit_call_insn (x);
3565 else
3566 abort ();
3567}
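
/* Illustrative sketch for classify_insn/emit: emit () picks the insn code
   from the pattern itself; a USE is classified as an ordinary INSN, so this
   is equivalent to emit_insn ().  The helper name is hypothetical.  */
#if 0
static rtx
example_emit_use (reg)
     rtx reg;
{
  return emit (gen_rtx_USE (VOIDmode, reg));
}
#endif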
3568\f
b49854c6 3569/* Begin emitting insns to a sequence which can be packaged in an
3570 RTL_EXPR. If this sequence will contain something that might cause
3571 the compiler to pop arguments to function calls (because those
3572 pops have previously been deferred; see INHIBIT_DEFER_POP for more
3573 details), use do_pending_stack_adjust before calling this function.
3574 That will ensure that the deferred pops are not accidentally
9588521d 3575 emitted in the middle of this sequence. */
15bbde2b 3576
3577void
3578start_sequence ()
3579{
3580 struct sequence_stack *tem;
3581
1bfd55c5 3582 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
15bbde2b 3583
0a893c29 3584 tem->next = seq_stack;
15bbde2b 3585 tem->first = first_insn;
3586 tem->last = last_insn;
961819fb 3587 tem->sequence_rtl_expr = seq_rtl_expr;
15bbde2b 3588
0a893c29 3589 seq_stack = tem;
15bbde2b 3590
3591 first_insn = 0;
3592 last_insn = 0;
3593}
3594
961819fb 3595/* Similarly, but indicate that this sequence will be placed in T, an
3596 RTL_EXPR. See the documentation for start_sequence for more
3597 information about how to use this function. */
3598
3599void
3600start_sequence_for_rtl_expr (t)
3601 tree t;
3602{
3603 start_sequence ();
3604
3605 seq_rtl_expr = t;
3606}
3607
b49854c6 3608/* Set up the insn chain starting with FIRST as the current sequence,
3609 saving the previously current one. See the documentation for
3610 start_sequence for more information about how to use this function. */
15bbde2b 3611
3612void
3613push_to_sequence (first)
3614 rtx first;
3615{
3616 rtx last;
3617
3618 start_sequence ();
3619
3620 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
3621
3622 first_insn = first;
3623 last_insn = last;
3624}
3625
78147e84 3626/* Set up the insn chain from FIRST to LAST as the current sequence.  */
3627
3628void
3629push_to_full_sequence (first, last)
3630 rtx first, last;
3631{
3632 start_sequence ();
3633 first_insn = first;
3634 last_insn = last;
3635 /* We really should have the end of the insn chain here. */
3636 if (last && NEXT_INSN (last))
3637 abort ();
3638}
3639
ab74c92f 3640/* Set up the outer-level insn chain
3641 as the current sequence, saving the previously current one. */
3642
3643void
3644push_topmost_sequence ()
3645{
2041cfd9 3646 struct sequence_stack *stack, *top = NULL;
ab74c92f 3647
3648 start_sequence ();
3649
0a893c29 3650 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 3651 top = stack;
3652
3653 first_insn = top->first;
3654 last_insn = top->last;
961819fb 3655 seq_rtl_expr = top->sequence_rtl_expr;
ab74c92f 3656}
3657
3658/* After emitting to the outer-level insn chain, update the outer-level
3659 insn chain, and restore the previous saved state. */
3660
3661void
3662pop_topmost_sequence ()
3663{
2041cfd9 3664 struct sequence_stack *stack, *top = NULL;
ab74c92f 3665
0a893c29 3666 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 3667 top = stack;
3668
3669 top->first = first_insn;
3670 top->last = last_insn;
961819fb 3671 /* ??? Why don't we save seq_rtl_expr here? */
ab74c92f 3672
3673 end_sequence ();
3674}
3675
15bbde2b 3676/* After emitting to a sequence, restore previous saved state.
3677
b49854c6 3678 To get the contents of the sequence just made, you must call
d823ba47 3679 `gen_sequence' *before* calling here.
b49854c6 3680
3681 If the compiler might have deferred popping arguments while
3682 generating this sequence, and this sequence will not be immediately
3683 inserted into the instruction stream, use do_pending_stack_adjust
3684 before calling gen_sequence. That will ensure that the deferred
3685 pops are inserted into this sequence, and not into some random
3686 location in the instruction stream. See INHIBIT_DEFER_POP for more
3687 information about deferred popping of arguments. */
15bbde2b 3688
3689void
3690end_sequence ()
3691{
0a893c29 3692 struct sequence_stack *tem = seq_stack;
15bbde2b 3693
3694 first_insn = tem->first;
3695 last_insn = tem->last;
961819fb 3696 seq_rtl_expr = tem->sequence_rtl_expr;
0a893c29 3697 seq_stack = tem->next;
15bbde2b 3698
1bfd55c5 3699 free (tem);
15bbde2b 3700}
3701
78147e84 3702/* This works like end_sequence, but records the old sequence in FIRST
3703 and LAST. */
3704
3705void
3706end_full_sequence (first, last)
3707 rtx *first, *last;
3708{
3709 *first = first_insn;
3710 *last = last_insn;
3711  end_sequence ();
3712}
3713
15bbde2b 3714/* Return 1 if currently emitting into a sequence. */
3715
3716int
3717in_sequence_p ()
3718{
0a893c29 3719 return seq_stack != 0;
15bbde2b 3720}
3721
3722/* Generate a SEQUENCE rtx containing the insns already emitted
3723 to the current sequence.
3724
3725 This is how the gen_... function from a DEFINE_EXPAND
3726 constructs the SEQUENCE that it returns. */
3727
3728rtx
3729gen_sequence ()
3730{
3731 rtx result;
3732 rtx tem;
15bbde2b 3733 int i;
3734 int len;
3735
3736 /* Count the insns in the chain. */
3737 len = 0;
3738 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
3739 len++;
3740
88bbcaa6 3741 /* If only one insn, return it rather than a SEQUENCE.
15bbde2b 3742 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
d823ba47 3743 the case of an empty list.)
88bbcaa6 3744 We only return the pattern of an insn if its code is INSN and it
3745 has no notes. This ensures that no information gets lost. */
15bbde2b 3746 if (len == 1
447a9eb9 3747 && ! RTX_FRAME_RELATED_P (first_insn)
88bbcaa6 3748 && GET_CODE (first_insn) == INSN
3749 /* Don't throw away any reg notes. */
3750 && REG_NOTES (first_insn) == 0)
d7c47c0e 3751 return PATTERN (first_insn);
15bbde2b 3752
d7c47c0e 3753 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
15bbde2b 3754
3755 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
3756 XVECEXP (result, 0, i) = tem;
3757
3758 return result;
3759}
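
/* Illustrative sketch of the sequence machinery: collect a few insns on a
   detached chain, package them with gen_sequence, and hand the package back
   for emission elsewhere.  The helper name is hypothetical; DEST and SRC are
   assumed to be pseudo registers of the same mode.  */
#if 0
static rtx
example_build_sequence (dest, src)
     rtx dest, src;
{
  rtx seq;

  start_sequence ();
  emit_move_insn (dest, const0_rtx);
  emit_move_insn (dest, src);
  seq = gen_sequence ();             /* Must precede end_sequence.  */
  end_sequence ();

  return seq;                        /* Suitable for emit_insn (seq), etc.  */
}
#endif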
3760\f
02ebfa52 3761/* Put the various virtual registers into REGNO_REG_RTX. */
3762
3763void
0a893c29 3764init_virtual_regs (es)
3765 struct emit_status *es;
02ebfa52 3766{
0a893c29 3767 rtx *ptr = es->x_regno_reg_rtx;
3768 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
3769 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
3770 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
3771 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
3772 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
3773}
3774
3775void
3776clear_emit_caches ()
3777{
3778 int i;
3779
3780 /* Clear the start_sequence/gen_sequence cache. */
0a893c29 3781 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
3782 sequence_result[i] = 0;
3783 free_insn = 0;
02ebfa52 3784}
928d57e3 3785\f
3786/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
3787static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
3788static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
3789static int copy_insn_n_scratches;
3790
3791/* When an insn is being copied by copy_insn_1, this is nonzero if we have
3792 copied an ASM_OPERANDS.
3793 In that case, it is the original input-operand vector. */
3794static rtvec orig_asm_operands_vector;
3795
3796/* When an insn is being copied by copy_insn_1, this is nonzero if we have
3797 copied an ASM_OPERANDS.
3798 In that case, it is the copied input-operand vector. */
3799static rtvec copy_asm_operands_vector;
3800
3801/* Likewise for the constraints vector. */
3802static rtvec orig_asm_constraints_vector;
3803static rtvec copy_asm_constraints_vector;
3804
3805/* Recursively create a new copy of an rtx for copy_insn.
3806 This function differs from copy_rtx in that it handles SCRATCHes and
3807 ASM_OPERANDs properly.
3808 Normally, this function is not used directly; use copy_insn as front end.
3809 However, you could first copy an insn pattern with copy_insn and then use
3810 this function afterwards to properly copy any REG_NOTEs containing
3811 SCRATCHes. */
3812
3813rtx
3814copy_insn_1 (orig)
3815 register rtx orig;
3816{
3817 register rtx copy;
3818 register int i, j;
3819 register RTX_CODE code;
90bf951c 3820 register const char *format_ptr;
928d57e3 3821
3822 code = GET_CODE (orig);
3823
3824 switch (code)
3825 {
3826 case REG:
3827 case QUEUED:
3828 case CONST_INT:
3829 case CONST_DOUBLE:
3830 case SYMBOL_REF:
3831 case CODE_LABEL:
3832 case PC:
3833 case CC0:
3834 case ADDRESSOF:
3835 return orig;
3836
3837 case SCRATCH:
3838 for (i = 0; i < copy_insn_n_scratches; i++)
3839 if (copy_insn_scratch_in[i] == orig)
3840 return copy_insn_scratch_out[i];
3841 break;
3842
3843 case CONST:
3844 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3845 a LABEL_REF, it isn't sharable. */
3846 if (GET_CODE (XEXP (orig, 0)) == PLUS
3847 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3848 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3849 return orig;
3850 break;
d823ba47 3851
928d57e3 3852 /* A MEM with a constant address is not sharable. The problem is that
3853 the constant address may need to be reloaded. If the mem is shared,
3854 then reloading one copy of this mem will cause all copies to appear
3855 to have been reloaded. */
3856
3857 default:
3858 break;
3859 }
3860
3861 copy = rtx_alloc (code);
3862
3863 /* Copy the various flags, and other information. We assume that
3864 all fields need copying, and then clear the fields that should
3865 not be copied. That is the sensible default behavior, and forces
3866 us to explicitly document why we are *not* copying a flag. */
3867 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
3868
3869 /* We do not copy the USED flag, which is used as a mark bit during
3870 walks over the RTL. */
3871 copy->used = 0;
3872
3873 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
3874 if (GET_RTX_CLASS (code) == 'i')
3875 {
3876 copy->jump = 0;
3877 copy->call = 0;
3878 copy->frame_related = 0;
3879 }
d823ba47 3880
928d57e3 3881 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3882
3883 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3884 {
d925550d 3885 copy->fld[i] = orig->fld[i];
928d57e3 3886 switch (*format_ptr++)
3887 {
3888 case 'e':
928d57e3 3889 if (XEXP (orig, i) != NULL)
3890 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
3891 break;
3892
928d57e3 3893 case 'E':
3894 case 'V':
928d57e3 3895 if (XVEC (orig, i) == orig_asm_constraints_vector)
3896 XVEC (copy, i) = copy_asm_constraints_vector;
3897 else if (XVEC (orig, i) == orig_asm_operands_vector)
3898 XVEC (copy, i) = copy_asm_operands_vector;
3899 else if (XVEC (orig, i) != NULL)
3900 {
3901 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3902 for (j = 0; j < XVECLEN (copy, i); j++)
3903 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
3904 }
3905 break;
3906
928d57e3 3907 case 't':
928d57e3 3908 case 'w':
928d57e3 3909 case 'i':
928d57e3 3910 case 's':
3911 case 'S':
d925550d 3912 case 'u':
3913 case '0':
3914 /* These are left unchanged. */
928d57e3 3915 break;
3916
3917 default:
3918 abort ();
3919 }
3920 }
3921
3922 if (code == SCRATCH)
3923 {
3924 i = copy_insn_n_scratches++;
3925 if (i >= MAX_RECOG_OPERANDS)
3926 abort ();
3927 copy_insn_scratch_in[i] = orig;
3928 copy_insn_scratch_out[i] = copy;
3929 }
3930 else if (code == ASM_OPERANDS)
3931 {
d91f2122 3932 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
3933 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
3934 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
3935 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
928d57e3 3936 }
3937
3938 return copy;
3939}
3940
3941/* Create a new copy of an rtx.
3942 This function differs from copy_rtx in that it handles SCRATCHes and
3943 ASM_OPERANDs properly.
3944 INSN doesn't really have to be a full INSN; it could be just the
3945 pattern. */
3946rtx
3947copy_insn (insn)
3948 rtx insn;
3949{
3950 copy_insn_n_scratches = 0;
3951 orig_asm_operands_vector = 0;
3952 orig_asm_constraints_vector = 0;
3953 copy_asm_operands_vector = 0;
3954 copy_asm_constraints_vector = 0;
3955 return copy_insn_1 (insn);
3956}
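
/* Illustrative sketch only, not part of the compiler proper: the two-step
   copy described above, where a pattern is copied with copy_insn and the
   REG_NOTES are then copied with copy_insn_1 so that any SCRATCHes stay
   shared between pattern and notes.  How the result is emitted here is
   hypothetical.  */
#if 0
static rtx
example_duplicate_insn (insn)
     rtx insn;
{
  rtx new_insn = emit_insn (copy_insn (PATTERN (insn)));

  if (REG_NOTES (insn))
    REG_NOTES (new_insn) = copy_insn_1 (REG_NOTES (insn));

  return new_insn;
}
#endif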
02ebfa52 3957
15bbde2b 3958/* Initialize data structures and variables in this file
3959 before generating rtl for each function. */
3960
3961void
3962init_emit ()
3963{
08513b52 3964 struct function *f = cfun;
15bbde2b 3965
0a893c29 3966 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
15bbde2b 3967 first_insn = NULL;
3968 last_insn = NULL;
961819fb 3969 seq_rtl_expr = NULL;
15bbde2b 3970 cur_insn_uid = 1;
3971 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
3972 last_linenum = 0;
3973 last_filename = 0;
3974 first_label_num = label_num;
3975 last_label_num = 0;
0a893c29 3976 seq_stack = NULL;
15bbde2b 3977
0a893c29 3978 clear_emit_caches ();
15bbde2b 3979
3980 /* Init the tables that describe all the pseudo regs. */
3981
e61a0a7f 3982 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
15bbde2b 3983
0a893c29 3984 f->emit->regno_pointer_align
e61a0a7f 3985 = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
4491f79f 3986 sizeof (unsigned char));
d4c332ff 3987
d823ba47 3988 regno_reg_rtx
e61a0a7f 3989    = (rtx *) xcalloc (f->emit->regno_pointer_align_length,
1bfd55c5 3990 sizeof (rtx));
15bbde2b 3991
3992 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
0a893c29 3993 init_virtual_regs (f->emit);
888e0d33 3994
3995 /* Indicate that the virtual registers and stack locations are
3996 all pointers. */
e61a0a7f 3997 REG_POINTER (stack_pointer_rtx) = 1;
3998 REG_POINTER (frame_pointer_rtx) = 1;
3999 REG_POINTER (hard_frame_pointer_rtx) = 1;
4000 REG_POINTER (arg_pointer_rtx) = 1;
888e0d33 4001
e61a0a7f 4002 REG_POINTER (virtual_incoming_args_rtx) = 1;
4003 REG_POINTER (virtual_stack_vars_rtx) = 1;
4004 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
4005 REG_POINTER (virtual_outgoing_args_rtx) = 1;
4006 REG_POINTER (virtual_cfa_rtx) = 1;
89525da0 4007
d4c332ff 4008#ifdef STACK_BOUNDARY
80909c64 4009 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
4010 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4011 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4012 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
4013
4014 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
4015 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
4016 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
4017 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
4018 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
d4c332ff 4019#endif
4020
89525da0 4021#ifdef INIT_EXPANDERS
4022 INIT_EXPANDERS;
4023#endif
15bbde2b 4024}
4025
a7b0c170 4026/* Mark SS for GC. */
4027
4028static void
4029mark_sequence_stack (ss)
4030 struct sequence_stack *ss;
4031{
4032 while (ss)
4033 {
4034 ggc_mark_rtx (ss->first);
961819fb 4035 ggc_mark_tree (ss->sequence_rtl_expr);
a7b0c170 4036 ss = ss->next;
4037 }
4038}
4039
4040/* Mark ES for GC. */
4041
4042void
3c3bb268 4043mark_emit_status (es)
a7b0c170 4044 struct emit_status *es;
4045{
4046 rtx *r;
4047 int i;
4048
4049 if (es == 0)
4050 return;
4051
e61a0a7f 4052 for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx;
a7b0c170 4053 i > 0; --i, ++r)
4054 ggc_mark_rtx (*r);
4055
4056 mark_sequence_stack (es->sequence_stack);
961819fb 4057 ggc_mark_tree (es->sequence_rtl_expr);
a7b0c170 4058 ggc_mark_rtx (es->x_first_insn);
4059}
4060
15bbde2b 4061/* Create some permanent unique rtl objects shared between all functions.
4062 LINE_NUMBERS is nonzero if line numbers are to be generated. */
4063
4064void
4065init_emit_once (line_numbers)
4066 int line_numbers;
4067{
4068 int i;
4069 enum machine_mode mode;
9e042f31 4070 enum machine_mode double_mode;
15bbde2b 4071
77695070 4072 /* Initialize the CONST_INT hash table. */
d823ba47 4073 const_int_htab = htab_create (37, const_int_htab_hash,
77695070 4074 const_int_htab_eq, NULL);
d823ba47 4075 ggc_add_root (&const_int_htab, 1, sizeof (const_int_htab),
77695070 4076 rtx_htab_mark);
4077
15bbde2b 4078 no_line_numbers = ! line_numbers;
4079
71d7daa2 4080  /* Compute the byte, word and double modes.  */
4081
4082 byte_mode = VOIDmode;
4083 word_mode = VOIDmode;
4084 double_mode = VOIDmode;
4085
4086 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4087 mode = GET_MODE_WIDER_MODE (mode))
4088 {
4089 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
4090 && byte_mode == VOIDmode)
4091 byte_mode = mode;
4092
4093 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
4094 && word_mode == VOIDmode)
4095 word_mode = mode;
4096 }
4097
71d7daa2 4098 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4099 mode = GET_MODE_WIDER_MODE (mode))
4100 {
4101 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
4102 && double_mode == VOIDmode)
4103 double_mode = mode;
4104 }
4105
4106 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
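  /* Worked example (illustrative): on a typical 32-bit target with 8-bit
     units, 32-bit words, a 64-bit DOUBLE_TYPE_SIZE and 32-bit pointers,
     the loops above give byte_mode == QImode, word_mode == SImode,
     double_mode == DFmode and ptr_mode == SImode.  */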
4107
57c097d5 4108 /* Assign register numbers to the globally defined register rtx.
4109 This must be done at runtime because the register number field
4110 is in a union and some compilers can't initialize unions. */
4111
4112 pc_rtx = gen_rtx (PC, VOIDmode);
4113 cc0_rtx = gen_rtx (CC0, VOIDmode);
22cf44bc 4114 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
4115 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
57c097d5 4116 if (hard_frame_pointer_rtx == 0)
d823ba47 4117 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
22cf44bc 4118 HARD_FRAME_POINTER_REGNUM);
57c097d5 4119 if (arg_pointer_rtx == 0)
22cf44bc 4120 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
d823ba47 4121 virtual_incoming_args_rtx =
22cf44bc 4122 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
d823ba47 4123 virtual_stack_vars_rtx =
22cf44bc 4124 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
d823ba47 4125 virtual_stack_dynamic_rtx =
22cf44bc 4126 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
d823ba47 4127 virtual_outgoing_args_rtx =
4128 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
22cf44bc 4129 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
57c097d5 4130
4131 /* These rtx must be roots if GC is enabled. */
d7c47c0e 4132 ggc_add_rtx_root (global_rtl, GR_MAX);
57c097d5 4133
57c097d5 4134#ifdef INIT_EXPANDERS
ab5beff9 4135 /* This is to initialize {init|mark|free}_machine_status before the first
4136 call to push_function_context_to. This is needed by the Chill front
4137     end, which calls push_function_context_to before the first call to
57c097d5 4138 init_function_start. */
4139 INIT_EXPANDERS;
4140#endif
4141
15bbde2b 4142 /* Create the unique rtx's for certain rtx codes and operand values. */
4143
7014838c 4144 /* Don't use gen_rtx here since gen_rtx in this case
4145 tries to use these variables. */
15bbde2b 4146 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
d823ba47 4147 const_int_rtx[i + MAX_SAVED_CONST_INT] =
57c097d5 4148 gen_rtx_raw_CONST_INT (VOIDmode, i);
d7c47c0e 4149 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
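  /* One consequence worth noting: because these small constants are shared,
     later calls such as GEN_INT (0) or GEN_INT (1) hand back the cached
     nodes, so they compare equal by pointer to const0_rtx and const1_rtx.  */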
15bbde2b 4150
1a60f06a 4151 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
4152 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
57c097d5 4153 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
1a60f06a 4154 else
3ad7bb1c 4155 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
15bbde2b 4156
9e042f31 4157 dconst0 = REAL_VALUE_ATOF ("0", double_mode);
4158 dconst1 = REAL_VALUE_ATOF ("1", double_mode);
4159 dconst2 = REAL_VALUE_ATOF ("2", double_mode);
4160 dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);
15bbde2b 4161
4162 for (i = 0; i <= 2; i++)
4163 {
4164 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
4165 mode = GET_MODE_WIDER_MODE (mode))
4166 {
4167 rtx tem = rtx_alloc (CONST_DOUBLE);
4168 union real_extract u;
4169
fd781f88 4170 /* Zero any holes in a structure. */
4171 memset ((char *) &u, 0, sizeof u);
15bbde2b 4172 u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;
4173
fd781f88 4174 /* Avoid trailing garbage in the rtx. */
4175 if (sizeof (u) < sizeof (HOST_WIDE_INT))
4176 CONST_DOUBLE_LOW (tem) = 0;
4177 if (sizeof (u) < 2 * sizeof (HOST_WIDE_INT))
4178 CONST_DOUBLE_HIGH (tem) = 0;
4179
b1b63592 4180 memcpy (&CONST_DOUBLE_LOW (tem), &u, sizeof u);
15bbde2b 4181 CONST_DOUBLE_MEM (tem) = cc0_rtx;
791ceafe 4182 CONST_DOUBLE_CHAIN (tem) = NULL_RTX;
15bbde2b 4183 PUT_MODE (tem, mode);
4184
4185 const_tiny_rtx[i][(int) mode] = tem;
4186 }
4187
b572011e 4188 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
15bbde2b 4189
4190 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4191 mode = GET_MODE_WIDER_MODE (mode))
b572011e 4192 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
7540dcc4 4193
4194 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
4195 mode != VOIDmode;
4196 mode = GET_MODE_WIDER_MODE (mode))
4197 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
15bbde2b 4198 }
4199
0fd4500a 4200 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
4201 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
4202 const_tiny_rtx[0][i] = const0_rtx;
15bbde2b 4203
065336b4 4204 const_tiny_rtx[0][(int) BImode] = const0_rtx;
4205 if (STORE_FLAG_VALUE == 1)
4206 const_tiny_rtx[1][(int) BImode] = const1_rtx;
4207
3098b2d3 4208 /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
4209 `(rtx *) const_tiny_rtx'. The former has bounds that only cover
4210 `const_tiny_rtx[0]', whereas the latter has bounds that cover all. */
4211 ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
c788feb1 4212 ggc_add_rtx_root (&const_true_rtx, 1);
f4bffa58 4213
4214#ifdef RETURN_ADDRESS_POINTER_REGNUM
4215 return_address_pointer_rtx
22cf44bc 4216 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
f4bffa58 4217#endif
4218
4219#ifdef STRUCT_VALUE
4220 struct_value_rtx = STRUCT_VALUE;
4221#else
4222 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
4223#endif
4224
4225#ifdef STRUCT_VALUE_INCOMING
4226 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
4227#else
4228#ifdef STRUCT_VALUE_INCOMING_REGNUM
4229 struct_value_incoming_rtx
4230 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
4231#else
4232 struct_value_incoming_rtx = struct_value_rtx;
4233#endif
4234#endif
4235
4236#ifdef STATIC_CHAIN_REGNUM
4237 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
4238
4239#ifdef STATIC_CHAIN_INCOMING_REGNUM
4240 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
4241 static_chain_incoming_rtx
4242 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
4243 else
4244#endif
4245 static_chain_incoming_rtx = static_chain_rtx;
4246#endif
4247
4248#ifdef STATIC_CHAIN
4249 static_chain_rtx = STATIC_CHAIN;
4250
4251#ifdef STATIC_CHAIN_INCOMING
4252 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
4253#else
4254 static_chain_incoming_rtx = static_chain_rtx;
4255#endif
4256#endif
4257
14e6e4d4 4258 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
4259 pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
f4bffa58 4260
521dd524 4261 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
4262 ggc_add_rtx_root (&struct_value_rtx, 1);
4263 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
4264 ggc_add_rtx_root (&static_chain_rtx, 1);
4265 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
4266 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
15bbde2b 4267}
ac6c481d 4268\f
4269/* Query and clear/restore no_line_numbers.  This is used by the
4270 switch / case handling in stmt.c to give proper line numbers in
4271 warnings about unreachable code. */
4272
4273int
4274force_line_numbers ()
4275{
4276 int old = no_line_numbers;
4277
4278 no_line_numbers = 0;
4279 if (old)
4280 force_next_line_note ();
4281 return old;
4282}
4283
4284void
4285restore_line_number_status (old_value)
4286 int old_value;
4287{
4288 no_line_numbers = old_value;
4289}
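
/* Illustrative sketch only, not part of the compiler proper: the
   save/restore pattern described above, as a caller such as the switch /
   case code in stmt.c might use it.  The body between the two calls is
   hypothetical.  */
#if 0
static void
example_expand_with_line_numbers ()
{
  int saved = force_line_numbers ();	/* Re-enable line notes.  */

  /* ... expand code that should carry accurate line numbers ...  */

  restore_line_number_status (saved);	/* Put the old setting back.  */
}
#endif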