1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include <stdio.h>
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "flags.h"
29 #include "insn-config.h"
30 #include "insn-flags.h"
31 #include "expr.h"
32 #include "output.h"
33 #include "recog.h"
34 #include "integrate.h"
35 #include "real.h"
36 #include "except.h"
37 #include "function.h"
38 #include "bytecode.h"
39
40 #include "obstack.h"
41 #define obstack_chunk_alloc xmalloc
42 #define obstack_chunk_free free
43
44 extern struct obstack *function_maybepermanent_obstack;
45
46 extern tree pushdecl ();
47 extern tree poplevel ();
48
49 /* Round VALUE up to the next highest integer that meets the
50 alignment, which must be a power of two. */
51 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
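/* Illustrative example (an editorial addition, not in the original source):
   CEIL_ROUND (13, 8) == 16 and CEIL_ROUND (16, 8) == 16.  The bit mask
   is only valid because ALIGN is a power of two.  */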
52
53 /* Default max number of insns a function can have and still be inline.
54 This is overridden on RISC machines. */
55 #ifndef INTEGRATE_THRESHOLD
56 #define INTEGRATE_THRESHOLD(DECL) \
57 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
58 #endif
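/* For illustration (editorial): with the default threshold above, a
   two-argument function may contain up to 8 * (8 + 2) = 80 insns and
   still be considered small enough to inline.  */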
59 \f
60 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
61 static void finish_inline PROTO((tree, rtx));
62 static void adjust_copied_decl_tree PROTO((tree));
63 static tree copy_decl_list PROTO((tree));
64 static tree copy_decl_tree PROTO((tree));
65 static void copy_decl_rtls PROTO((tree));
66 static void save_constants PROTO((rtx *));
67 static void note_modified_parmregs PROTO((rtx, rtx));
68 static rtx copy_for_inline PROTO((rtx));
69 static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
70 static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
71 static void save_constants_in_decl_trees PROTO ((tree));
72 static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
73 static void restore_constants PROTO((rtx *));
74 static void set_block_origin_self PROTO((tree));
75 static void set_decl_origin_self PROTO((tree));
76 static void set_block_abstract_flags PROTO((tree, int));
77
78 void set_decl_abstract_flags PROTO((tree, int));
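/* (Editorial note: PROTO expands to its parenthesized argument list
   when an ANSI C compiler is in use and to an empty list otherwise,
   so the declarations above act as prototypes where supported.)  */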
79 \f
80 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
81 is safe and reasonable to integrate into other functions.
82 Nonzero means value is a warning message with a single %s
83 for the function's name. */
84
85 char *
86 function_cannot_inline_p (fndecl)
87 register tree fndecl;
88 {
89 register rtx insn;
90 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
91 int max_insns = INTEGRATE_THRESHOLD (fndecl);
92 register int ninsns = 0;
93 register tree parms;
94 rtx result;
95
96 /* No inlines with varargs. `grokdeclarator' gives a warning
97 message about that if `inline' is specified. This code
98 is put in to catch the volunteers. */
99 if ((last && TREE_VALUE (last) != void_type_node)
100 || current_function_varargs)
101 return "varargs function cannot be inline";
102
103 if (current_function_calls_alloca)
104 return "function using alloca cannot be inline";
105
106 if (current_function_contains_functions)
107 return "function with nested functions cannot be inline";
108
109 /* If it's not even close, don't even look. */
110 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
111 return "function too large to be inline";
112
113 #if 0
114 /* Don't inline functions which do not specify a function prototype and
115 have BLKmode argument or take the address of a parameter. */
116 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
117 {
118 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
119 TREE_ADDRESSABLE (parms) = 1;
120 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
121 return "no prototype, and parameter address used; cannot be inline";
122 }
123 #endif
124
125 /* We can't inline functions that return structures
126 the old-fashioned PCC way, copying into a static block. */
127 if (current_function_returns_pcc_struct)
128 return "inline functions not supported for this return value type";
129
130 /* We can't inline functions that return BLKmode structures in registers. */
131 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
132 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
133 return "inline functions not supported for this return value type";
134
135 /* We can't inline functions that return structures of varying size. */
136 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
137 return "function with varying-size return value cannot be inline";
138
139 /* Cannot inline a function with a varying size argument or one that
140 receives a transparent union. */
141 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
142 {
143 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
144 return "function with varying-size parameter cannot be inline";
145 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
146 return "function with transparent unit parameter cannot be inline";
147 }
148
149 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
150 {
151 for (ninsns = 0, insn = get_first_nonparm_insn ();
152 insn && ninsns < max_insns;
153 insn = NEXT_INSN (insn))
154 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
155 ninsns++;
156
157 if (ninsns >= max_insns)
158 return "function too large to be inline";
159 }
160
161 /* We cannot inline this function if forced_labels is non-zero. This
162 implies that a label in this function was used as an initializer.
163 Because labels cannot be duplicated, all labels in the function
164 will be renamed when it is inlined. However, there is no way to find
165 and fix all variables initialized with addresses of labels in this
166 function, hence inlining is impossible. */
167
168 if (forced_labels)
169 return "function with label addresses used in initializers cannot inline";
170
171 /* We cannot inline a nested function that jumps to a nonlocal label. */
172 if (current_function_has_nonlocal_goto)
173 return "function with nonlocal goto cannot be inline";
174
175 /* This is a hack, until the inliner is taught about eh regions at
176 the start of the function. */
177 for (insn = get_insns ();
178 insn
179 && ! (GET_CODE (insn) == NOTE
180 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
181 insn = NEXT_INSN (insn))
182 {
183 if (insn && GET_CODE (insn) == NOTE
184 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
185 return "function with complex parameters cannot be inline";
186 }
187
188 /* We can't inline functions that return a PARALLEL rtx. */
189 result = DECL_RTL (DECL_RESULT (fndecl));
190 if (result && GET_CODE (result) == PARALLEL)
191 return "inline functions not supported for this return value type";
192
193 return 0;
194 }
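/* Editorial sketch of the intended use (cf. rest_of_compilation in
   toplev.c; the exact caller details are assumptions, not part of this
   file).  A nonzero return value is treated as a warning format string:

     char *lose = function_cannot_inline_p (decl);
     if (lose != 0 && warn_inline)
       warning_with_decl (decl, lose);
*/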
195 \f
196 /* Variables used within save_for_inline. */
197
198 /* Mapping from old pseudo-register to new pseudo-registers.
199 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
200 It is allocated in `save_for_inline' and `expand_inline_function',
201 and deallocated on exit from each of those routines. */
202 static rtx *reg_map;
203
204 /* Mapping from old code-labels to new code-labels.
205 The first element of this map is label_map[min_labelno].
206 It is allocated in `save_for_inline' and `expand_inline_function',
207 and deallocated on exit from each of those routines. */
208 static rtx *label_map;
209
210 /* Mapping from old insn uid's to copied insns.
211 It is allocated in `save_for_inline' and `expand_inline_function',
212 and deallocated on exit from each of those routines. */
213 static rtx *insn_map;
214
215 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
216 Zero for a reg that isn't a parm's home.
217 Only reg numbers less than max_parm_reg are mapped here. */
218 static tree *parmdecl_map;
219
220 /* Keep track of first pseudo-register beyond those that are parms. */
221 extern int max_parm_reg;
222 extern rtx *parm_reg_stack_loc;
223
224 /* When an insn is being copied by copy_for_inline,
225 this is nonzero if we have copied an ASM_OPERANDS.
226 In that case, it is the original input-operand vector. */
227 static rtvec orig_asm_operands_vector;
228
229 /* When an insn is being copied by copy_for_inline,
230 this is nonzero if we have copied an ASM_OPERANDS.
231 In that case, it is the copied input-operand vector. */
232 static rtvec copy_asm_operands_vector;
233
234 /* Likewise, this is the copied constraints vector. */
235 static rtvec copy_asm_constraints_vector;
236
237 /* In save_for_inline, nonzero if past the parm-initialization insns. */
238 static int in_nonparm_insns;
239 \f
240 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
241 needed to save FNDECL's insns and info for future inline expansion. */
242
243 static rtx
244 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
245 tree fndecl;
246 int min_labelno;
247 int max_labelno;
248 int max_reg;
249 int copy;
250 {
251 int function_flags, i;
252 rtvec arg_vector;
253 tree parms;
254
255 /* Compute the values of any flags we must restore when inlining this. */
256
257 function_flags
258 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
259 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
260 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
261 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
262 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
263 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
264 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
265 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
266 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
267 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
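  /* Editorial note: these packed flags are decoded again at
     inline-expansion time with tests of the form used in
     expand_inline_function below:

       if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
         current_function_uses_pic_offset_table = 1;  */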
268
269 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
270 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
271 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
272
273 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
274 parms;
275 parms = TREE_CHAIN (parms), i++)
276 {
277 rtx p = DECL_RTL (parms);
278
279 if (GET_CODE (p) == MEM && copy)
280 {
281 /* Copy the rtl so that modifications of the addresses
282 later in compilation won't affect this arg_vector.
283 Virtual register instantiation can screw the address
284 of the rtl. */
285 rtx new = copy_rtx (p);
286
287 /* Don't leave the old copy anywhere in this decl. */
288 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
289 || (GET_CODE (DECL_RTL (parms)) == MEM
290 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
291 && (XEXP (DECL_RTL (parms), 0)
292 == XEXP (DECL_INCOMING_RTL (parms), 0))))
293 DECL_INCOMING_RTL (parms) = new;
294 DECL_RTL (parms) = new;
295 }
296
297 RTVEC_ELT (arg_vector, i) = p;
298
299 if (GET_CODE (p) == REG)
300 parmdecl_map[REGNO (p)] = parms;
301 else if (GET_CODE (p) == CONCAT)
302 {
303 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
304 rtx pimag = gen_imagpart (GET_MODE (preal), p);
305
306 if (GET_CODE (preal) == REG)
307 parmdecl_map[REGNO (preal)] = parms;
308 if (GET_CODE (pimag) == REG)
309 parmdecl_map[REGNO (pimag)] = parms;
310 }
311
312 /* This flag is cleared later
313 if the function ever modifies the value of the parm. */
314 TREE_READONLY (parms) = 1;
315 }
316
317 /* Assume we start out in the insns that set up the parameters. */
318 in_nonparm_insns = 0;
319
320 /* The list of DECL_SAVED_INSNS starts off with a header which
321 contains the following information:
322
323 the first insn of the function (not including the insns that copy
324 parameters into registers).
325 the first parameter insn of the function,
326 the first label used by that function,
327 the last label used by that function,
328 the highest register number used for parameters,
329 the total number of registers used,
330 the size of the incoming stack area for parameters,
331 the number of bytes popped on return,
332 the stack slot list,
333 the labels that are forced to exist,
334 some flags that are used to restore compiler globals,
335 the value of current_function_outgoing_args_size,
336 the original argument vector,
337 the original DECL_INITIAL,
338 and pointers to the table of pseudo regs, pointer flags, and alignment. */
339
340 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
341 max_parm_reg, max_reg,
342 current_function_args_size,
343 current_function_pops_args,
344 stack_slot_list, forced_labels, function_flags,
345 current_function_outgoing_args_size,
346 arg_vector, (rtx) DECL_INITIAL (fndecl),
347 (rtvec) regno_reg_rtx, regno_pointer_flag,
348 regno_pointer_align,
349 (rtvec) parm_reg_stack_loc);
350 }
351
352 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
353 things that must be done to make FNDECL expandable as an inline function.
354 HEAD contains the chain of insns to which FNDECL will expand. */
355
356 static void
357 finish_inline (fndecl, head)
358 tree fndecl;
359 rtx head;
360 {
361 FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
362 FIRST_PARM_INSN (head) = get_insns ();
363 DECL_SAVED_INSNS (fndecl) = head;
364 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
365 }
366
367 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
368 they all point to the new (copied) rtxs. */
369
370 static void
371 adjust_copied_decl_tree (block)
372 register tree block;
373 {
374 register tree subblock;
375 register rtx original_end;
376
377 original_end = BLOCK_END_NOTE (block);
378 if (original_end)
379 {
380 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
381 NOTE_SOURCE_FILE (original_end) = 0;
382 }
383
384 /* Process all subblocks. */
385 for (subblock = BLOCK_SUBBLOCKS (block);
386 subblock;
387 subblock = TREE_CHAIN (subblock))
388 adjust_copied_decl_tree (subblock);
389 }
390
391 /* Make the insns and PARM_DECLs of the current function permanent
392 and record other information in DECL_SAVED_INSNS to allow inlining
393 of this function in subsequent calls.
394
395 This function is called when we are going to immediately compile
396 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
397 modified by the compilation process, so we copy all of them to
398 new storage and consider the new insns to be the insn chain to be
399 compiled. Our caller (rest_of_compilation) saves the original
400 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
401
402 /* ??? The nonlocal_label list should be adjusted also. However, since
403 a function that contains a nested function never gets inlined currently,
404 the nonlocal_label list will always be empty, so we don't worry about
405 it for now. */
406
407 void
408 save_for_inline_copying (fndecl)
409 tree fndecl;
410 {
411 rtx first_insn, last_insn, insn;
412 rtx head, copy;
413 int max_labelno, min_labelno, i, len;
414 int max_reg;
415 int max_uid;
416 rtx first_nonparm_insn;
417 char *new, *new1;
418
419 /* Make and emit a return-label if we have not already done so.
420 Do this before recording the bounds on label numbers. */
421
422 if (return_label == 0)
423 {
424 return_label = gen_label_rtx ();
425 emit_label (return_label);
426 }
427
428 /* Get some bounds on the labels and registers used. */
429
430 max_labelno = max_label_num ();
431 min_labelno = get_first_label_num ();
432 max_reg = max_reg_num ();
433
434 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
435 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
436 Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
437 for the parms, prior to elimination of virtual registers.
438 These values are needed for substituting parms properly. */
439
440 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
441
442 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
443
444 if (current_function_uses_const_pool)
445 {
446 /* Replace any constant pool references with the actual constant. We
447 will put the constants back in the copy made below. */
448 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
449 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
450 {
451 save_constants (&PATTERN (insn));
452 if (REG_NOTES (insn))
453 save_constants (&REG_NOTES (insn));
454 }
455
456 /* Also scan all decls, and replace any constant pool references with the
457 actual constant. */
458 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
459
460 /* Clear out the constant pool so that we can recreate it with the
461 copied constants below. */
462 init_const_rtx_hash_table ();
463 clear_const_double_mem ();
464 }
465
466 max_uid = INSN_UID (head);
467
468 /* We have now allocated all that needs to be allocated permanently
469 on the rtx obstack. Set our high-water mark, so that we
470 can free the rest of this when the time comes. */
471
472 preserve_data ();
473
474 /* Copy the chain of insns of this function.
475 Install the copied chain as the insns of this function,
476 for continued compilation;
477 the original chain is recorded as the DECL_SAVED_INSNS
478 for inlining future calls. */
479
480 /* If there are insns that copy parms from the stack into pseudo registers,
481 those insns are not copied. `expand_inline_function' must
482 emit the correct code to handle such things. */
483
484 insn = get_insns ();
485 if (GET_CODE (insn) != NOTE)
486 abort ();
487 first_insn = rtx_alloc (NOTE);
488 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
489 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
490 INSN_UID (first_insn) = INSN_UID (insn);
491 PREV_INSN (first_insn) = NULL;
492 NEXT_INSN (first_insn) = NULL;
493 last_insn = first_insn;
494
495 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
496 Make these new rtx's now, and install them in regno_reg_rtx, so they
497 will be the official pseudo-reg rtx's for the rest of compilation. */
498
499 reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));
500
501 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
502 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
503 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
504 regno_reg_rtx[i], len);
505
506 regno_reg_rtx = reg_map;
507
508 /* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
509 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
510 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
511 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
512 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
513
514 /* Likewise each label rtx must have a unique rtx as its copy. */
515
516 /* We used to use alloca here, but the size of what it would try to
517 allocate would occasionally cause it to exceed the stack limit and
518 cause unpredictable core dumps. Some examples were > 2Mb in size. */
519 label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
520
521 for (i = min_labelno; i < max_labelno; i++)
522 label_map[i] = gen_label_rtx ();
523
524 /* Record the mapping of old insns to copied insns. */
525
526 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
527 bzero ((char *) insn_map, max_uid * sizeof (rtx));
528
529 /* Get the insn which signals the end of parameter setup code. */
530 first_nonparm_insn = get_first_nonparm_insn ();
531
532 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
533 (the former occurs when a variable has its address taken)
534 since these may be shared and can be changed by virtual
535 register instantiation. DECL_RTL values for our arguments
536 have already been copied by initialize_for_inline. */
537 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
538 if (GET_CODE (regno_reg_rtx[i]) == MEM)
539 XEXP (regno_reg_rtx[i], 0)
540 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
541
542 /* Copy the tree of subblocks of the function, and the decls in them.
543 We will use the copy for compiling this function, then restore the original
544 subblocks and decls for use when inlining this function.
545
546 Several parts of the compiler modify BLOCK trees. In particular,
547 instantiate_virtual_regs will instantiate any virtual regs
548 mentioned in the DECL_RTLs of the decls, and loop
549 unrolling will replicate any BLOCK trees inside an unrolled loop.
550
551 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
552 which we will use for inlining. The rtl might even contain pseudoregs
553 whose space has been freed. */
554
555 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
556 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
557
558 /* Now copy each DECL_RTL which is a MEM,
559 so it is safe to modify their addresses. */
560 copy_decl_rtls (DECL_INITIAL (fndecl));
561
562 /* The fndecl node acts as its own progenitor, so mark it as such. */
563 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
564
565 /* Now copy the chain of insns. Do this twice: the first time copy the insn
566 itself and its body; the second time copy the REG_NOTES. This is because
567 a REG_NOTE may have a forward pointer to another insn. */
568
569 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
570 {
571 orig_asm_operands_vector = 0;
572
573 if (insn == first_nonparm_insn)
574 in_nonparm_insns = 1;
575
576 switch (GET_CODE (insn))
577 {
578 case NOTE:
579 /* No need to keep these. */
580 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
581 continue;
582
583 copy = rtx_alloc (NOTE);
584 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
585 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
586 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
587 else
588 {
589 NOTE_SOURCE_FILE (insn) = (char *) copy;
590 NOTE_SOURCE_FILE (copy) = 0;
591 }
592 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
593 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
594 {
595 /* We have to forward these both to match the new exception
596 region. */
597 NOTE_BLOCK_NUMBER (copy)
598 = CODE_LABEL_NUMBER (label_map[NOTE_BLOCK_NUMBER (copy)]);
599
600 }
601 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
602 break;
603
604 case INSN:
605 case JUMP_INSN:
606 case CALL_INSN:
607 copy = rtx_alloc (GET_CODE (insn));
608
609 if (GET_CODE (insn) == CALL_INSN)
610 CALL_INSN_FUNCTION_USAGE (copy)
611 = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
612
613 PATTERN (copy) = copy_for_inline (PATTERN (insn));
614 INSN_CODE (copy) = -1;
615 LOG_LINKS (copy) = NULL_RTX;
616 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
617 break;
618
619 case CODE_LABEL:
620 copy = label_map[CODE_LABEL_NUMBER (insn)];
621 LABEL_NAME (copy) = LABEL_NAME (insn);
622 break;
623
624 case BARRIER:
625 copy = rtx_alloc (BARRIER);
626 break;
627
628 default:
629 abort ();
630 }
631 INSN_UID (copy) = INSN_UID (insn);
632 insn_map[INSN_UID (insn)] = copy;
633 NEXT_INSN (last_insn) = copy;
634 PREV_INSN (copy) = last_insn;
635 last_insn = copy;
636 }
637
638 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
639
640 /* Now copy the REG_NOTES. */
641 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
642 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
643 && insn_map[INSN_UID(insn)])
644 REG_NOTES (insn_map[INSN_UID (insn)])
645 = copy_for_inline (REG_NOTES (insn));
646
647 NEXT_INSN (last_insn) = NULL;
648
649 finish_inline (fndecl, head);
650
651 /* Make new versions of the register tables. */
652 new = (char *) savealloc (regno_pointer_flag_length);
653 bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
654 new1 = (char *) savealloc (regno_pointer_flag_length);
655 bcopy (regno_pointer_align, new1, regno_pointer_flag_length);
656
657 regno_pointer_flag = new;
658 regno_pointer_align = new1;
659
660 set_new_first_and_last_insn (first_insn, last_insn);
661
662 if (label_map)
663 free (label_map);
664 }
665
666 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
667 For example, this can copy a list made of TREE_LIST nodes. While copying,
668 for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
669 set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
670 point to the corresponding (abstract) original node. */
671
672 static tree
673 copy_decl_list (list)
674 tree list;
675 {
676 tree head;
677 register tree prev, next;
678
679 if (list == 0)
680 return 0;
681
682 head = prev = copy_node (list);
683 if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
684 DECL_ABSTRACT_ORIGIN (head) = list;
685 next = TREE_CHAIN (list);
686 while (next)
687 {
688 register tree copy;
689
690 copy = copy_node (next);
691 if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
692 DECL_ABSTRACT_ORIGIN (copy) = next;
693 TREE_CHAIN (prev) = copy;
694 prev = copy;
695 next = TREE_CHAIN (next);
696 }
697 return head;
698 }
699
700 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
701
702 static tree
703 copy_decl_tree (block)
704 tree block;
705 {
706 tree t, vars, subblocks;
707
708 vars = copy_decl_list (BLOCK_VARS (block));
709 subblocks = 0;
710
711 /* Process all subblocks. */
712 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
713 {
714 tree copy = copy_decl_tree (t);
715 TREE_CHAIN (copy) = subblocks;
716 subblocks = copy;
717 }
718
719 t = copy_node (block);
720 BLOCK_VARS (t) = vars;
721 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
722 /* If the BLOCK being cloned is already marked as having been instantiated
723 from something else, then leave that `origin' marking alone. Otherwise,
724 mark the clone as having originated from the BLOCK we are cloning. */
725 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
726 BLOCK_ABSTRACT_ORIGIN (t) = block;
727 return t;
728 }
729
730 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
731
732 static void
733 copy_decl_rtls (block)
734 tree block;
735 {
736 tree t;
737
738 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
739 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
740 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
741
742 /* Process all subblocks. */
743 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
744 copy_decl_rtls (t);
745 }
746
747 /* Make the insns and PARM_DECLs of the current function permanent
748 and record other information in DECL_SAVED_INSNS to allow inlining
749 of this function in subsequent calls.
750
751 This routine need not copy any insns because we are not going
752 to immediately compile the insns in the insn chain. There
753 are two cases when we would compile the insns for FNDECL:
754 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
755 be output at the end of other compilation, because somebody took
756 its address. In the first case, the insns of FNDECL are copied
757 as it is expanded inline, so FNDECL's saved insns are not
758 modified. In the second case, FNDECL is used for the last time,
759 so modifying the rtl is not a problem.
760
761 We don't have to worry about FNDECL being inline expanded by
762 other functions which are written at the end of compilation
763 because flag_no_inline is turned on when we begin writing
764 functions at the end of compilation. */
765
766 void
767 save_for_inline_nocopy (fndecl)
768 tree fndecl;
769 {
770 rtx insn;
771 rtx head;
772 rtx first_nonparm_insn;
773
774 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
775 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
776 Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
777 for the parms, prior to elimination of virtual registers.
778 These values are needed for substituting parms properly. */
779
780 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
781
782 /* Make and emit a return-label if we have not already done so. */
783
784 if (return_label == 0)
785 {
786 return_label = gen_label_rtx ();
787 emit_label (return_label);
788 }
789
790 head = initialize_for_inline (fndecl, get_first_label_num (),
791 max_label_num (), max_reg_num (), 0);
792
793 /* If there are insns that copy parms from the stack into pseudo registers,
794 those insns are not copied. `expand_inline_function' must
795 emit the correct code to handle such things. */
796
797 insn = get_insns ();
798 if (GET_CODE (insn) != NOTE)
799 abort ();
800
801 /* Get the insn which signals the end of parameter setup code. */
802 first_nonparm_insn = get_first_nonparm_insn ();
803
804 /* Now just scan the chain of insns to see what happens to our
805 PARM_DECLs. If a PARM_DECL is used but never modified, we
806 can substitute its rtl directly when expanding inline (and
807 perform constant folding when its incoming value is constant).
808 Otherwise, we have to copy its value into a new register and track
809 the new register's life. */
810
811 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
812 {
813 if (insn == first_nonparm_insn)
814 in_nonparm_insns = 1;
815
816 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
817 {
818 if (current_function_uses_const_pool)
819 {
820 /* Replace any constant pool references with the actual constant.
821 We will put the constant back if we need to write the
822 function out after all. */
823 save_constants (&PATTERN (insn));
824 if (REG_NOTES (insn))
825 save_constants (&REG_NOTES (insn));
826 }
827
828 /* Record what interesting things happen to our parameters. */
829 note_stores (PATTERN (insn), note_modified_parmregs);
830 }
831 }
832
833 /* Also scan all decls, and replace any constant pool references with the
834 actual constant. */
835 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
836
837 /* We have now allocated all that needs to be allocated permanently
838 on the rtx obstack. Set our high-water mark, so that we
839 can free the rest of this when the time comes. */
840
841 preserve_data ();
842
843 finish_inline (fndecl, head);
844 }
845 \f
846 /* Given PX, a pointer into an insn, search for references to the constant
847 pool. Replace each with a CONST that has the mode of the original
848 constant, contains the constant, and has RTX_INTEGRATED_P set.
849 Similarly, constant pool addresses not enclosed in a MEM are replaced
850 with an ADDRESS and CONST rtx which also gives the constant, its
851 mode, the mode of the address, and has RTX_INTEGRATED_P set. */
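/* Editorial sketch of the transformation (the RTL shapes shown are
   illustrative):

     (mem:SI (symbol_ref "*.LC0"))  becomes  (const:SI <pool constant>)
     (symbol_ref "*.LC0")           becomes  (address:Pmode
                                               (const:M <pool constant>))

   with RTX_INTEGRATED_P set on each replacement.  */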
852
853 static void
854 save_constants (px)
855 rtx *px;
856 {
857 rtx x;
858 int i, j;
859
860 again:
861 x = *px;
862
863 /* If this is a CONST_DOUBLE, don't try to fix things up in
864 CONST_DOUBLE_MEM, because this is an infinite recursion. */
865 if (GET_CODE (x) == CONST_DOUBLE)
866 return;
867 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
868 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
869 {
870 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
871 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
872 RTX_INTEGRATED_P (new) = 1;
873
874 /* If the MEM was in a different mode than the constant (perhaps we
875 were only looking at the low-order part), surround it with a
876 SUBREG so we can save both modes. */
877
878 if (GET_MODE (x) != const_mode)
879 {
880 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
881 RTX_INTEGRATED_P (new) = 1;
882 }
883
884 *px = new;
885 save_constants (&XEXP (*px, 0));
886 }
887 else if (GET_CODE (x) == SYMBOL_REF
888 && CONSTANT_POOL_ADDRESS_P (x))
889 {
890 *px = gen_rtx (ADDRESS, GET_MODE (x),
891 gen_rtx (CONST, get_pool_mode (x),
892 get_pool_constant (x)));
893 save_constants (&XEXP (*px, 0));
894 RTX_INTEGRATED_P (*px) = 1;
895 }
896
897 else
898 {
899 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
900 int len = GET_RTX_LENGTH (GET_CODE (x));
901
902 for (i = len-1; i >= 0; i--)
903 {
904 switch (fmt[i])
905 {
906 case 'E':
907 for (j = 0; j < XVECLEN (x, i); j++)
908 save_constants (&XVECEXP (x, i, j));
909 break;
910
911 case 'e':
912 if (XEXP (x, i) == 0)
913 continue;
914 if (i == 0)
915 {
916 /* Hack tail-recursion here. */
917 px = &XEXP (x, 0);
918 goto again;
919 }
920 save_constants (&XEXP (x, i));
921 break;
922 }
923 }
924 }
925 }
926 \f
927 /* Note whether a parameter is modified or not. */
928
929 static void
930 note_modified_parmregs (reg, x)
931 rtx reg;
932 rtx x;
933 {
934 if (GET_CODE (reg) == REG && in_nonparm_insns
935 && REGNO (reg) < max_parm_reg
936 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
937 && parmdecl_map[REGNO (reg)] != 0)
938 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
939 }
940
941 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
942 according to `reg_map' and `label_map'. The original rtl insns
943 will be saved for inlining; this routine makes the copy
944 that is used to finish compiling the inline function itself.
945
946 If we find a "saved" constant pool entry, one which was replaced with
947 the value of the constant, convert it back to a constant pool entry.
948 Since the pool wasn't touched, this should simply restore the old
949 address.
950
951 All other kinds of rtx are copied except those that can never be
952 changed during compilation. */
953
954 static rtx
955 copy_for_inline (orig)
956 rtx orig;
957 {
958 register rtx x = orig;
959 register rtx new;
960 register int i;
961 register enum rtx_code code;
962 register char *format_ptr;
963
964 if (x == 0)
965 return x;
966
967 code = GET_CODE (x);
968
969 /* These types may be freely shared. */
970
971 switch (code)
972 {
973 case QUEUED:
974 case CONST_INT:
975 case SYMBOL_REF:
976 case PC:
977 case CC0:
978 return x;
979
980 case CONST_DOUBLE:
981 /* We have to make a new CONST_DOUBLE to ensure that we account for
982 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
983 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
984 {
985 REAL_VALUE_TYPE d;
986
987 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
988 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
989 }
990 else
991 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
992 VOIDmode);
993
994 case CONST:
995 /* Get constant pool entry for constant in the pool. */
996 if (RTX_INTEGRATED_P (x))
997 return validize_mem (force_const_mem (GET_MODE (x),
998 copy_for_inline (XEXP (x, 0))));
999 break;
1000
1001 case SUBREG:
1002 /* Get constant pool entry, but access in different mode. */
1003 if (RTX_INTEGRATED_P (x))
1004 {
1005 new = force_const_mem (GET_MODE (SUBREG_REG (x)),
1006 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
1007
1008 PUT_MODE (new, GET_MODE (x));
1009 return validize_mem (new);
1010 }
1011 break;
1012
1013 case ADDRESS:
1014 /* If this is not the special constant-pool form, it is an error.
1015 Otherwise get the constant pool address. */
1016 if (! RTX_INTEGRATED_P (x))
1017 abort ();
1018
1019 new = force_const_mem (GET_MODE (XEXP (x, 0)),
1020 copy_for_inline (XEXP (XEXP (x, 0), 0)));
1021 new = XEXP (new, 0);
1022
1023 #ifdef POINTERS_EXTEND_UNSIGNED
1024 if (GET_MODE (new) != GET_MODE (x))
1025 new = convert_memory_address (GET_MODE (x), new);
1026 #endif
1027
1028 return new;
1029
1030 case ASM_OPERANDS:
1031 /* If a single asm insn contains multiple output operands
1032 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1033 We must make sure that the copied insn continues to share it. */
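	 /* For example (editorial illustration; "foo" is a made-up
	    instruction), the GNU C extended asm

	      asm ("foo %2,%0,%1" : "=r" (hi), "=r" (lo) : "r" (x));

	    expands to two ASM_OPERANDS rtx's, one per output, sharing a
	    single input-operand vector as operand 3.  */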
1034 if (orig_asm_operands_vector == XVEC (orig, 3))
1035 {
1036 x = rtx_alloc (ASM_OPERANDS);
1037 x->volatil = orig->volatil;
1038 XSTR (x, 0) = XSTR (orig, 0);
1039 XSTR (x, 1) = XSTR (orig, 1);
1040 XINT (x, 2) = XINT (orig, 2);
1041 XVEC (x, 3) = copy_asm_operands_vector;
1042 XVEC (x, 4) = copy_asm_constraints_vector;
1043 XSTR (x, 5) = XSTR (orig, 5);
1044 XINT (x, 6) = XINT (orig, 6);
1045 return x;
1046 }
1047 break;
1048
1049 case MEM:
1050 /* A MEM is usually allowed to be shared if its address is constant
1051 or is a constant plus one of the special registers.
1052
1053 We do not allow sharing of addresses that are either a special
1054 register or the sum of a constant and a special register because
1055 it is possible for unshare_all_rtl to copy the address, into memory
1056 that won't be saved. Although the MEM can safely be shared, and
1057 won't be copied there, the address itself cannot be shared, and may
1058 need to be copied.
1059
1060 There are also two exceptions with constants: The first is if the
1061 constant is a LABEL_REF or the sum of the LABEL_REF
1062 and an integer. This case can happen if we have an inline
1063 function that supplies a constant operand to the call of another
1064 inline function that uses it in a switch statement. In this case,
1065 we will be replacing the LABEL_REF, so we have to replace this MEM
1066 as well.
1067
1068 The second case is if we have a (const (plus (address ..) ...)).
1069 In that case we need to put back the address of the constant pool
1070 entry. */
1071
1072 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1073 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1074 && ! (GET_CODE (XEXP (x, 0)) == CONST
1075 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1076 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1077 == LABEL_REF)
1078 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1079 == ADDRESS)))))
1080 return x;
1081 break;
1082
1083 case LABEL_REF:
1084 /* If this is a non-local label, just make a new LABEL_REF.
1085 Otherwise, use the new label as well. */
1086 x = gen_rtx (LABEL_REF, GET_MODE (orig),
1087 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1088 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1089 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1090 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1091 return x;
1092
1093 case REG:
1094 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1095 return reg_map [REGNO (x)];
1096 else
1097 return x;
1098
1099 case SET:
1100 /* If a parm that gets modified lives in a pseudo-reg,
1101 clear its TREE_READONLY to prevent certain optimizations. */
1102 {
1103 rtx dest = SET_DEST (x);
1104
1105 while (GET_CODE (dest) == STRICT_LOW_PART
1106 || GET_CODE (dest) == ZERO_EXTRACT
1107 || GET_CODE (dest) == SUBREG)
1108 dest = XEXP (dest, 0);
1109
1110 if (GET_CODE (dest) == REG
1111 && REGNO (dest) < max_parm_reg
1112 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1113 && parmdecl_map[REGNO (dest)] != 0
1114 /* The insn to load an arg pseudo from a stack slot
1115 does not count as modifying it. */
1116 && in_nonparm_insns)
1117 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1118 }
1119 break;
1120
1121 #if 0 /* This is a good idea, but here is the wrong place for it. */
1122 /* Arrange that CONST_INTs always appear as the second operand
1123 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1124 always appear as the first. */
1125 case PLUS:
1126 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1127 || (XEXP (x, 1) == frame_pointer_rtx
1128 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1129 && XEXP (x, 1) == arg_pointer_rtx)))
1130 {
1131 rtx t = XEXP (x, 0);
1132 XEXP (x, 0) = XEXP (x, 1);
1133 XEXP (x, 1) = t;
1134 }
1135 break;
1136 #endif
1137 default:
1138 break;
1139 }
1140
1141 /* Replace this rtx with a copy of itself. */
1142
1143 x = rtx_alloc (code);
1144 bcopy ((char *) orig, (char *) x,
1145 (sizeof (*x) - sizeof (x->fld)
1146 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
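  /* (Editorial note: the size expression above copies the rtx header
     plus exactly GET_RTX_LENGTH (code) operand slots, so operands and
     vectors are still shared until rewritten below.)  */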
1147
1148 /* Now scan the subexpressions recursively.
1149 We can store any replaced subexpressions directly into X
1150 since we know X is not shared! Any vectors in X
1151 must be copied if X was copied. */
1152
1153 format_ptr = GET_RTX_FORMAT (code);
1154
1155 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1156 {
1157 switch (*format_ptr++)
1158 {
1159 case 'e':
1160 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1161 break;
1162
1163 case 'u':
1164 /* Change any references to old-insns to point to the
1165 corresponding copied insns. */
1166 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1167 break;
1168
1169 case 'E':
1170 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1171 {
1172 register int j;
1173
1174 XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
1175 for (j = 0; j < XVECLEN (x, i); j++)
1176 XVECEXP (x, i, j)
1177 = copy_for_inline (XVECEXP (x, i, j));
1178 }
1179 break;
1180 }
1181 }
1182
1183 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1184 {
1185 orig_asm_operands_vector = XVEC (orig, 3);
1186 copy_asm_operands_vector = XVEC (x, 3);
1187 copy_asm_constraints_vector = XVEC (x, 4);
1188 }
1189
1190 return x;
1191 }
1192
1193 /* Unfortunately, we need a global copy of the const_equiv map for communication
1194 with a function called from note_stores. Be *very* careful that this
1195 is used properly in the presence of recursion. */
1196
1197 rtx *global_const_equiv_map;
1198 int global_const_equiv_map_size;
1199 \f
1200 #define FIXED_BASE_PLUS_P(X) \
1201 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1202 && GET_CODE (XEXP (X, 0)) == REG \
1203 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1204 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
1205
1206 /* Integrate the procedure defined by FNDECL. Note that this function
1207 may wind up calling itself. Since the static variables are not
1208 reentrant, we do not assign them until after the possibility
1209 of recursion is eliminated.
1210
1211 If IGNORE is nonzero, do not produce a value.
1212 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1213
1214 Value is:
1215 (rtx)-1 if we could not substitute the function
1216 0 if we substituted it and it does not produce a value
1217 else an rtx for where the value is stored. */
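/* Editorial sketch of a typical call site (cf. expand_call in calls.c;
   the surrounding details are assumptions, not definitions from this
   file):

     temp = expand_inline_function (fndecl, actparms, target, ignore,
                                    TREE_TYPE (exp), structure_value_addr);
     if (temp == (rtx) (HOST_WIDE_INT) -1)
       ... fall back to emitting an ordinary call ...
*/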
1218
1219 rtx
1220 expand_inline_function (fndecl, parms, target, ignore, type,
1221 structure_value_addr)
1222 tree fndecl, parms;
1223 rtx target;
1224 int ignore;
1225 tree type;
1226 rtx structure_value_addr;
1227 {
1228 tree formal, actual, block;
1229 rtx header = DECL_SAVED_INSNS (fndecl);
1230 rtx insns = FIRST_FUNCTION_INSN (header);
1231 rtx parm_insns = FIRST_PARM_INSN (header);
1232 tree *arg_trees;
1233 rtx *arg_vals;
1234 rtx insn;
1235 int max_regno;
1236 register int i;
1237 int min_labelno = FIRST_LABELNO (header);
1238 int max_labelno = LAST_LABELNO (header);
1239 int nargs;
1240 rtx local_return_label = 0;
1241 rtx loc;
1242 rtx stack_save = 0;
1243 rtx temp;
1244 struct inline_remap *map;
1245 rtx cc0_insn = 0;
1246 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1247 rtx static_chain_value = 0;
1248
1249 /* The pointer used to track the true location of the memory used
1250 for MAP->LABEL_MAP. */
1251 rtx *real_label_map = 0;
1252
1253 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1254 max_regno = MAX_REGNUM (header) + 3;
1255 if (max_regno < FIRST_PSEUDO_REGISTER)
1256 abort ();
1257
1258 nargs = list_length (DECL_ARGUMENTS (fndecl));
1259
1260 /* Check that the types of the parms match and that sufficient arguments were
1261 passed. Since the appropriate conversions or default promotions have
1262 already been applied, the machine modes should match exactly. */
1263
1264 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
1265 formal;
1266 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
1267 {
1268 tree arg;
1269 enum machine_mode mode;
1270
1271 if (actual == 0)
1272 return (rtx) (HOST_WIDE_INT) -1;
1273
1274 arg = TREE_VALUE (actual);
1275 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1276
1277 if (mode != TYPE_MODE (TREE_TYPE (arg))
1278 /* If they are block mode, the types should match exactly.
1279 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1280 which could happen if the parameter has incomplete type. */
1281 || (mode == BLKmode
1282 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
1283 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
1284 return (rtx) (HOST_WIDE_INT) -1;
1285 }
1286
1287 /* Extra arguments are valid, but will be ignored below, so we must
1288 evaluate them here for side-effects. */
1289 for (; actual; actual = TREE_CHAIN (actual))
1290 expand_expr (TREE_VALUE (actual), const0_rtx,
1291 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1292
1293 /* Make a binding contour to keep inline cleanups called at
1294 outer function-scope level from looking like they are shadowing
1295 parameter declarations. */
1296 pushlevel (0);
1297
1298 /* Expand the function arguments. Do this first so that any
1299 new registers get created before we allocate the maps. */
1300
1301 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1302 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1303
1304 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1305 formal;
1306 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1307 {
1308 /* Actual parameter, converted to the type of the argument within the
1309 function. */
1310 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1311 /* Mode of the variable used within the function. */
1312 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1313 int invisiref = 0;
1314
1315 arg_trees[i] = arg;
1316 loc = RTVEC_ELT (arg_vector, i);
1317
1318 /* If this is an object passed by invisible reference, we copy the
1319 object into a stack slot and save its address. If this will go
1320 into memory, we do nothing now. Otherwise, we just expand the
1321 argument. */
1322 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1323 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1324 {
1325 rtx stack_slot
1326 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1327 int_size_in_bytes (TREE_TYPE (arg)), 1);
1328 MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));
1329
1330 store_expr (arg, stack_slot, 0);
1331
1332 arg_vals[i] = XEXP (stack_slot, 0);
1333 invisiref = 1;
1334 }
1335 else if (GET_CODE (loc) != MEM)
1336 {
1337 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1338 /* The modes of LOC and ARG can differ if LOC was a variable
1339 that had its mode promoted via PROMOTED_MODE. */
1340 arg_vals[i] = convert_modes (GET_MODE (loc),
1341 TYPE_MODE (TREE_TYPE (arg)),
1342 expand_expr (arg, NULL_RTX, mode,
1343 EXPAND_SUM),
1344 TREE_UNSIGNED (TREE_TYPE (formal)));
1345 else
1346 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1347 }
1348 else
1349 arg_vals[i] = 0;
1350
1351 if (arg_vals[i] != 0
1352 && (! TREE_READONLY (formal)
1353 /* If the parameter is not read-only, copy our argument through
1354 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1355 TARGET in any way. In the inline function, they will likely
1356 be two different pseudos, and `safe_from_p' will make all
1357 sorts of smart assumptions about their not conflicting.
1358 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1359 wrong, so put ARG_VALS[I] into a fresh register.
1360 Don't worry about invisible references, since their stack
1361 temps will never overlap the target. */
1362 || (target != 0
1363 && ! invisiref
1364 && (GET_CODE (arg_vals[i]) == REG
1365 || GET_CODE (arg_vals[i]) == SUBREG
1366 || GET_CODE (arg_vals[i]) == MEM)
1367 && reg_overlap_mentioned_p (arg_vals[i], target))
1368 /* ??? We must always copy a SUBREG into a REG, because it might
1369 get substituted into an address, and not all ports correctly
1370 handle SUBREGs in addresses. */
1371 || (GET_CODE (arg_vals[i]) == SUBREG)))
1372 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1373
1374 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
1375 && TREE_CODE (TREE_TYPE (formal)) == POINTER_TYPE)
1376 mark_reg_pointer (arg_vals[i],
1377 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
1378 / BITS_PER_UNIT));
1379 }
1380
1381 /* Allocate the structures we use to remap things. */
1382
1383 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1384 map->fndecl = fndecl;
1385
1386 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1387 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1388
1389 /* We used to use alloca here, but the size of what it would try to
1390 allocate would occasionally cause it to exceed the stack limit and
1391 cause unpredictable core dumps. */
1392 real_label_map
1393 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
1394 map->label_map = real_label_map;
1395
1396 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1397 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1398 map->min_insnno = 0;
1399 map->max_insnno = INSN_UID (header);
1400
1401 map->integrating = 1;
1402
1403 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1404 be large enough for all our pseudos. This is the number we are currently
1405 using plus the number in the called routine, plus 15 for each arg,
1406 five to compute the virtual frame pointer, and five for the return value.
1407 This should be enough for most cases. We do not reference entries
1408 outside the range of the map.
1409
1410 ??? These numbers are quite arbitrary and were obtained by
1411 experimentation. At some point, we should try to allocate the
1412 table after all the parameters are set up so we can more accurately
1413 estimate the number of pseudos we will need. */
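  /* Worked example (editorial, illustrative numbers only): if
     max_reg_num () is 100, the callee used 50 pseudos beyond
     FIRST_PSEUDO_REGISTER, and nargs is 2, the table is sized
     100 + 50 + 15 * 2 + 10 = 190 entries.  */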
1414
1415 map->const_equiv_map_size
1416 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1417
1418 map->const_equiv_map
1419 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1420 bzero ((char *) map->const_equiv_map,
1421 map->const_equiv_map_size * sizeof (rtx));
1422
1423 map->const_age_map
1424 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1425 bzero ((char *) map->const_age_map,
1426 map->const_equiv_map_size * sizeof (unsigned));
1427 map->const_age = 0;
1428
1429 /* Record the current insn in case we have to set up pointers to frame
1430 and argument memory blocks. If there are no insns yet, add a dummy
1431 insn that can be used as an insertion point. */
1432 map->insns_at_start = get_last_insn ();
1433 if (map->insns_at_start == 0)
1434 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1435
1436 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1437 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1438
1439 /* Update the outgoing argument size to allow for those in the inlined
1440 function. */
1441 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1442 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1443
1444 /* If the inline function needs to make PIC references, that means
1445 that this function's PIC offset table must be used. */
1446 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1447 current_function_uses_pic_offset_table = 1;
1448
1449 /* If this function needs a context, set it up. */
1450 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1451 static_chain_value = lookup_static_chain (fndecl);
1452
1453 if (GET_CODE (parm_insns) == NOTE
1454 && NOTE_LINE_NUMBER (parm_insns) > 0)
1455 {
1456 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1457 NOTE_LINE_NUMBER (parm_insns));
1458 if (note)
1459 RTX_INTEGRATED_P (note) = 1;
1460 }
1461
1462 /* Process each argument. For each, set up things so that the function's
1463 reference to the argument will refer to the argument being passed.
1464 We only replace REG with REG here. Any simplifications are done
1465 via const_equiv_map.
1466
1467 We make two passes: In the first, we deal with parameters that will
1468 be placed into registers, since we need to ensure that the allocated
1469 register number fits in const_equiv_map. Then we store all non-register
1470 parameters into their memory location. */
1471
1472 /* Don't try to free temp stack slots here, because we may put one of the
1473 parameters into a temp stack slot. */
1474
1475 for (i = 0; i < nargs; i++)
1476 {
1477 rtx copy = arg_vals[i];
1478
1479 loc = RTVEC_ELT (arg_vector, i);
1480
1481 /* There are three cases, each handled separately. */
1482 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1483 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1484 {
1485 /* This must be an object passed by invisible reference (it could
1486 also be a variable-sized object, but we forbid inlining functions
1487 with variable-sized arguments). COPY is the address of the
1488 actual value (this computation will cause it to be copied). We
1489 map that address for the register, noting the actual address as
1490 an equivalent in case it can be substituted into the insns. */
1491
1492 if (GET_CODE (copy) != REG)
1493 {
1494 temp = copy_addr_to_reg (copy);
1495 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1496 && REGNO (temp) < map->const_equiv_map_size)
1497 {
1498 map->const_equiv_map[REGNO (temp)] = copy;
1499 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1500 }
1501 copy = temp;
1502 }
1503 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1504 }
1505 else if (GET_CODE (loc) == MEM)
1506 {
1507 /* This is the case of a parameter that lives in memory.
1508 It will live in the block we allocate in the called routine's
1509 frame that simulates the incoming argument area. Do nothing
1510 now; we will call store_expr later. */
1511 ;
1512 }
1513 else if (GET_CODE (loc) == REG)
1514 {
1515 /* This is the good case where the parameter is in a register.
1516 If it is read-only and our argument is a constant, set up the
1517 constant equivalence.
1518
1519 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1520 that flag set if it is a register.
1521
1522 Also, don't allow hard registers here; they might not be valid
1523 when substituted into insns. */
1524
1525 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1526 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1527 && ! REG_USERVAR_P (copy))
1528 || (GET_CODE (copy) == REG
1529 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1530 {
1531 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1532 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1533 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1534 && REGNO (temp) < map->const_equiv_map_size)
1535 {
1536 map->const_equiv_map[REGNO (temp)] = copy;
1537 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1538 }
1539 copy = temp;
1540 }
1541 map->reg_map[REGNO (loc)] = copy;
1542 }
1543 else if (GET_CODE (loc) == CONCAT)
1544 {
1545 /* This is the good case where the parameter is in a
1546 pair of separate pseudos.
1547 If it is read-only and our argument is a constant, set up the
1548 constant equivalence.
1549
1550 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1551 that flag set if it is a register.
1552
1553 Also, don't allow hard registers here; they might not be valid
1554 when substituted into insns. */
1555 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1556 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1557 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1558 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1559
1560 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1561 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1562 && ! REG_USERVAR_P (copyreal))
1563 || (GET_CODE (copyreal) == REG
1564 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1565 {
1566 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1567 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1568 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1569 && REGNO (temp) < map->const_equiv_map_size)
1570 {
1571 map->const_equiv_map[REGNO (temp)] = copyreal;
1572 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1573 }
1574 copyreal = temp;
1575 }
1576 map->reg_map[REGNO (locreal)] = copyreal;
1577
1578 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1579 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1580 && ! REG_USERVAR_P (copyimag))
1581 || (GET_CODE (copyimag) == REG
1582 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1583 {
1584 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1585 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1586 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1587 && REGNO (temp) < map->const_equiv_map_size)
1588 {
1589 map->const_equiv_map[REGNO (temp)] = copyimag;
1590 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1591 }
1592 copyimag = temp;
1593 }
1594 map->reg_map[REGNO (locimag)] = copyimag;
1595 }
1596 else
1597 abort ();
1598 }
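/* Illustrative note (not from the original source): if the inlined function
   receives a `const int' parameter in pseudo 60 and the caller passes the
   literal 5, the loop above ends up recording roughly

       map->reg_map[60] = temp;                  (TEMP is a fresh pseudo)
       map->const_equiv_map[REGNO (temp)] = GEN_INT (5);
       map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;

   so that subst_constants can later fold uses of the parameter into the
   constant wherever the resulting insn still matches its pattern.  */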
1599
1600 /* Now do the parameters that will be placed in memory. */
1601
1602 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1603 formal; formal = TREE_CHAIN (formal), i++)
1604 {
1605 loc = RTVEC_ELT (arg_vector, i);
1606
1607 if (GET_CODE (loc) == MEM
1608 /* Exclude case handled above. */
1609 && ! (GET_CODE (XEXP (loc, 0)) == REG
1610 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1611 {
1612 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1613 DECL_SOURCE_LINE (formal));
1614 if (note)
1615 RTX_INTEGRATED_P (note) = 1;
1616
1617 /* Compute the address in the area we reserved and store the
1618 value there. */
1619 temp = copy_rtx_and_substitute (loc, map);
1620 subst_constants (&temp, NULL_RTX, map);
1621 apply_change_group ();
1622 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1623 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1624 store_expr (arg_trees[i], temp, 0);
1625 }
1626 }
1627
1628 /* Deal with the places that the function puts its result.
1629 We are driven by what is placed into DECL_RESULT.
1630
1631 Initially, we assume that we don't need any special handling for
1632 REG_FUNCTION_VALUE_P. */
1633
1634 map->inline_target = 0;
1635 loc = DECL_RTL (DECL_RESULT (fndecl));
1636 if (TYPE_MODE (type) == VOIDmode)
1637 /* There is no return value to worry about. */
1638 ;
1639 else if (GET_CODE (loc) == MEM)
1640 {
1641 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1642 abort ();
1643
1644 /* Pass the function the address in which to return a structure value.
1645 Note that a constructor can cause someone to call us with
1646 STRUCTURE_VALUE_ADDR, but the initialization takes place
1647 via the first parameter, rather than the struct return address.
1648
1649 We have two cases: If the address is a simple register indirect,
1650 use the mapping mechanism to point that register to our structure
1651 return address. Otherwise, store the structure return value into
1652 the place that it will be referenced from. */
1653
1654 if (GET_CODE (XEXP (loc, 0)) == REG)
1655 {
1656 temp = force_reg (Pmode,
1657 force_operand (structure_value_addr, NULL_RTX));
1658 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1659 if ((CONSTANT_P (structure_value_addr)
1660 || GET_CODE (structure_value_addr) == ADDRESSOF
1661 || (GET_CODE (structure_value_addr) == PLUS
1662 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1663 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1664 && REGNO (temp) < map->const_equiv_map_size)
1665 {
1666 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1667 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1668 }
1669 }
1670 else
1671 {
1672 temp = copy_rtx_and_substitute (loc, map);
1673 subst_constants (&temp, NULL_RTX, map);
1674 apply_change_group ();
1675 emit_move_insn (temp, structure_value_addr);
1676 }
1677 }
1678 else if (ignore)
1679 /* We will ignore the result value, so don't look at its structure.
1680 Note that preparations for an aggregate return value
1681 do need to be made (above) even if it will be ignored. */
1682 ;
1683 else if (GET_CODE (loc) == REG)
1684 {
1685 /* The function returns an object in a register and we use the return
1686 value. Set up our target for remapping. */
1687
1688 /* Machine mode the function was declared to return. */
1689 enum machine_mode departing_mode = TYPE_MODE (type);
1690 /* (Possibly wider) machine mode it actually computes
1691 (for the sake of callers that fail to declare it right).
1692 We have to use the mode of the result's RTL, rather than
1693 its type, since expand_function_start may have promoted it. */
1694 enum machine_mode arriving_mode
1695 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1696 rtx reg_to_map;
1697
1698 /* Don't use MEMs as direct targets because on some machines
1699 substituting a MEM for a REG makes invalid insns.
1700 Let the combiner substitute the MEM if that is valid. */
1701 if (target == 0 || GET_CODE (target) != REG
1702 || GET_MODE (target) != departing_mode)
1703 target = gen_reg_rtx (departing_mode);
1704
1705 /* If function's value was promoted before return,
1706 avoid machine mode mismatch when we substitute INLINE_TARGET.
1707 But TARGET is what we will return to the caller. */
1708 if (arriving_mode != departing_mode)
1709 {
1710 /* Avoid creating a paradoxical subreg wider than
1711 BITS_PER_WORD, since that is illegal. */
1712 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1713 {
1714 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1715 GET_MODE_BITSIZE (arriving_mode)))
1716 /* Maybe could be handled by using convert_move () ? */
1717 abort ();
1718 reg_to_map = gen_reg_rtx (arriving_mode);
1719 target = gen_lowpart (departing_mode, reg_to_map);
1720 }
1721 else
1722 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1723 }
1724 else
1725 reg_to_map = target;
1726
1727 /* Usually, the result value is the machine's return register.
1728 Sometimes it may be a pseudo. Handle both cases. */
1729 if (REG_FUNCTION_VALUE_P (loc))
1730 map->inline_target = reg_to_map;
1731 else
1732 map->reg_map[REGNO (loc)] = reg_to_map;
1733 }
1734 else
1735 abort ();
1736
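/* Worked example of the mode juggling above (illustrative, assuming a
   32-bit target that promotes narrow return values): a function declared
   to return `short' yields departing_mode == HImode but computes its
   result in SImode, so arriving_mode == SImode.  Since SImode fits in a
   word, reg_to_map becomes (subreg:SI (reg:HI target) 0); the copied
   insns then see a correctly-moded SImode value while the caller still
   reads TARGET in HImode.  */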
1737 /* Make a fresh binding contour that we can easily remove. Do this after
1738 expanding our arguments so cleanups are properly scoped. */
1739 pushlevel (0);
1740 expand_start_bindings (0);
1741
1742 /* Make new label equivalences for the labels in the called function. */
1743 for (i = min_labelno; i < max_labelno; i++)
1744 map->label_map[i] = gen_label_rtx ();
1745
1746 /* Perform postincrements before actually calling the function. */
1747 emit_queue ();
1748
1749 /* Clean up stack so that variables might have smaller offsets. */
1750 do_pending_stack_adjust ();
1751
1752 /* Save a copy of the location of const_equiv_map for mark_stores, called
1753 via note_stores. */
1754 global_const_equiv_map = map->const_equiv_map;
1755 global_const_equiv_map_size = map->const_equiv_map_size;
1756
1757 /* If the called function does an alloca, save and restore the
1758 stack pointer around the call. This saves stack space, but
1759 also is required if this inline is being done between two
1760 pushes. */
1761 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1762 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1763
1764 /* Now copy the insns one by one. Do this in two passes, first the insns and
1765 then their REG_NOTES, just like save_for_inline. */
1766
1767 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1768
1769 for (insn = insns; insn; insn = NEXT_INSN (insn))
1770 {
1771 rtx copy, pattern, set;
1772
1773 map->orig_asm_operands_vector = 0;
1774
1775 switch (GET_CODE (insn))
1776 {
1777 case INSN:
1778 pattern = PATTERN (insn);
1779 set = single_set (insn);
1780 copy = 0;
1781 if (GET_CODE (pattern) == USE
1782 && GET_CODE (XEXP (pattern, 0)) == REG
1783 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1784 /* The (USE (REG n)) at return from the function should
1785 be ignored since we are changing (REG n) into
1786 inline_target. */
1787 break;
1788
1789 /* Ignore setting a function value that we don't want to use. */
1790 if (map->inline_target == 0
1791 && set != 0
1792 && GET_CODE (SET_DEST (set)) == REG
1793 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1794 {
1795 if (volatile_refs_p (SET_SRC (set)))
1796 {
1797 rtx new_set;
1798
1799 /* If we must not delete the source,
1800 load it into a new temporary. */
1801 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1802
1803 new_set = single_set (copy);
1804 if (new_set == 0)
1805 abort ();
1806
1807 SET_DEST (new_set)
1808 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1809 }
1810 /* If the source and destination are the same and it
1811 has a note on it, keep the insn. */
1812 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1813 && REG_NOTES (insn) != 0)
1814 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1815 else
1816 break;
1817 }
1818
1819 /* If this is setting the static chain rtx, omit it. */
1820 else if (static_chain_value != 0
1821 && set != 0
1822 && GET_CODE (SET_DEST (set)) == REG
1823 && rtx_equal_p (SET_DEST (set),
1824 static_chain_incoming_rtx))
1825 break;
1826
1827 /* If this is setting the static chain pseudo, set it from
1828 the value we want to give it instead. */
1829 else if (static_chain_value != 0
1830 && set != 0
1831 && rtx_equal_p (SET_SRC (set),
1832 static_chain_incoming_rtx))
1833 {
1834 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1835
1836 copy = emit_move_insn (newdest, static_chain_value);
1837 static_chain_value = 0;
1838 }
1839 else
1840 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1841 /* REG_NOTES will be copied later. */
1842
1843 #ifdef HAVE_cc0
1844 /* If this insn is setting CC0, it may need to look at
1845 the insn that uses CC0 to see what type of insn it is.
1846 In that case, the call to recog via validate_change will
1847 fail. So don't substitute constants here. Instead,
1848 do it when we emit the following insn.
1849
1850 For example, see the pyr.md file. That machine has signed and
1851 unsigned compares. The compare patterns must check the
1852 following branch insn to see what kind of compare to
1853 emit.
1854
1855 If the previous insn set CC0, substitute constants on it as
1856 well. */
1857 if (sets_cc0_p (PATTERN (copy)) != 0)
1858 cc0_insn = copy;
1859 else
1860 {
1861 if (cc0_insn)
1862 try_constants (cc0_insn, map);
1863 cc0_insn = 0;
1864 try_constants (copy, map);
1865 }
1866 #else
1867 try_constants (copy, map);
1868 #endif
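/* Sketch of the deferral above (not from the original): given a cc0 pair

       (set (cc0) (compare (reg 70) (const_int 0)))
       (set (pc) (if_then_else (lt (cc0) (const_int 0)) ...))

   the copied compare is remembered in cc0_insn, and try_constants runs on
   it only once the branch has been emitted, so recog can examine the user
   of cc0 while validating the substitution.  */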
1869 break;
1870
1871 case JUMP_INSN:
1872 if (GET_CODE (PATTERN (insn)) == RETURN
1873 || (GET_CODE (PATTERN (insn)) == PARALLEL
1874 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1875 {
1876 if (local_return_label == 0)
1877 local_return_label = gen_label_rtx ();
1878 pattern = gen_jump (local_return_label);
1879 }
1880 else
1881 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1882
1883 copy = emit_jump_insn (pattern);
1884
1885 #ifdef HAVE_cc0
1886 if (cc0_insn)
1887 try_constants (cc0_insn, map);
1888 cc0_insn = 0;
1889 #endif
1890 try_constants (copy, map);
1891
1892 /* If this used to be a conditional jump insn whose branch
1893 direction is now known, we must do something special. */
1894 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1895 {
1896 #ifdef HAVE_cc0
1897 /* The previous insn set cc0 for us. So delete it. */
1898 delete_insn (PREV_INSN (copy));
1899 #endif
1900
1901 /* If this is now a no-op, delete it. */
1902 if (map->last_pc_value == pc_rtx)
1903 {
1904 delete_insn (copy);
1905 copy = 0;
1906 }
1907 else
1908 /* Otherwise, this is an unconditional jump so we must put a
1909 BARRIER after it. We could do some dead code elimination
1910 here, but jump.c will do it just as well. */
1911 emit_barrier ();
1912 }
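/* Illustration (not in the original): when constant substitution decided
   the copied condition, map->last_pc_value is either pc_rtx, meaning the
   branch falls through and the copy was just deleted, or a LABEL_REF,
   meaning the jump is now unconditional and the BARRIER emitted above
   terminates the block.  */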
1913 break;
1914
1915 case CALL_INSN:
1916 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1917 copy = emit_call_insn (pattern);
1918
1919 /* Because the USAGE information potentially contains objects other
1920 than hard registers, we need to copy it. */
1921 CALL_INSN_FUNCTION_USAGE (copy)
1922 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1923
1924 #ifdef HAVE_cc0
1925 if (cc0_insn)
1926 try_constants (cc0_insn, map);
1927 cc0_insn = 0;
1928 #endif
1929 try_constants (copy, map);
1930
1931 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1932 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1933 map->const_equiv_map[i] = 0;
1934 break;
1935
1936 case CODE_LABEL:
1937 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1938 LABEL_NAME (copy) = LABEL_NAME (insn);
1939 map->const_age++;
1940 break;
1941
1942 case BARRIER:
1943 copy = emit_barrier ();
1944 break;
1945
1946 case NOTE:
1947 /* It is important to discard function-end and function-beg notes,
1948 so we have only one of each in the current function.
1949 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1950 deleted these in the copy used for continuing compilation,
1951 not the copy used for inlining). */
1952 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1953 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1954 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1955 {
1956 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1957 if (copy && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
1958 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
1959 {
1960 rtx label = map->label_map[NOTE_BLOCK_NUMBER (copy)];
1961
1962 /* We have to forward these both to match the new exception
1963 region. */
1964 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
1965 }
1966 }
1967 else
1968 copy = 0;
1969 break;
1970
1971 default:
1972 abort ();
1973 break;
1974 }
1975
1976 if (copy)
1977 RTX_INTEGRATED_P (copy) = 1;
1978
1979 map->insn_map[INSN_UID (insn)] = copy;
1980 }
1981
1982 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1983 from parameters can be substituted in. These are the only ones that
1984 are valid across the entire function. */
1985 map->const_age++;
1986 for (insn = insns; insn; insn = NEXT_INSN (insn))
1987 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1988 && map->insn_map[INSN_UID (insn)]
1989 && REG_NOTES (insn))
1990 {
1991 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
1992 /* We must also do subst_constants, in case one of our parameters
1993 has const type and constant value. */
1994 subst_constants (&tem, NULL_RTX, map);
1995 apply_change_group ();
1996 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
1997 }
1998
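/* For illustration: incrementing const_age before this pass means only
   equivalences stamped CONST_AGE_PARM pass the check

       map->const_age_map[regno] >= map->const_age

   in subst_constants, so equivalences discovered mid-body, which hold
   only over part of the insn stream, cannot leak into the copied
   REG_NOTES.  */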
1999 if (local_return_label)
2000 emit_label (local_return_label);
2001
2002 /* Restore the stack pointer if we saved it above. */
2003 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2004 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2005
2006 /* Make copies of the decls of the symbols in the inline function, so that
2007 the copies of the variables get declared in the current function. Set
2008 up things so that lookup_static_chain knows to interpret registers
2009 in SAVE_EXPRs for TYPE_SIZEs as local. */
2010
2011 inline_function_decl = fndecl;
2012 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2013 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2014 inline_function_decl = 0;
2015
2016 /* End the scope containing the copied formal parameter variables
2017 and copied LABEL_DECLs. */
2018
2019 expand_end_bindings (getdecls (), 1, 1);
2020 block = poplevel (1, 1, 0);
2021 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2022 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2023 poplevel (0, 0, 0);
2024
2025 /* Must mark the line number note after inlined functions as a repeat, so
2026 that the test coverage code can avoid counting the call twice. This
2027 just tells the code to ignore the immediately following line note, since
2028 there already exists a copy of this note before the expanded inline call.
2029 This line number note is still needed for debugging though, so we can't
2030 delete it. */
2031 if (flag_test_coverage)
2032 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2033
2034 emit_line_note (input_filename, lineno);
2035
2036 if (structure_value_addr)
2037 {
2038 target = gen_rtx (MEM, TYPE_MODE (type),
2039 memory_address (TYPE_MODE (type), structure_value_addr));
2040 MEM_IN_STRUCT_P (target) = 1;
2041 }
2042
2043 /* Make sure we free the things we explicitly allocated with xmalloc. */
2044 if (real_label_map)
2045 free (real_label_map);
2046
2047 return target;
2048 }
2049 \f
2050 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2051 push all of those decls and give each one the corresponding home. */
2052
2053 static void
2054 integrate_parm_decls (args, map, arg_vector)
2055 tree args;
2056 struct inline_remap *map;
2057 rtvec arg_vector;
2058 {
2059 register tree tail;
2060 register int i;
2061
2062 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2063 {
2064 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2065 TREE_TYPE (tail));
2066 rtx new_decl_rtl
2067 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2068
2069 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2070 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2071 here, but that's going to require some more work. */
2072 /* DECL_INCOMING_RTL (decl) = ?; */
2073 /* These args would always appear unused, if not for this. */
2074 TREE_USED (decl) = 1;
2075 /* Prevent warning for shadowing with these. */
2076 DECL_ABSTRACT_ORIGIN (decl) = tail;
2077 pushdecl (decl);
2078 /* Fully instantiate the address with the equivalent form so that the
2079 debugging information contains the actual register, instead of the
2080 virtual register. Do this by not passing an insn to
2081 subst_constants. */
2082 subst_constants (&new_decl_rtl, NULL_RTX, map);
2083 apply_change_group ();
2084 DECL_RTL (decl) = new_decl_rtl;
2085 }
2086 }
2087
2088 /* Given a BLOCK node LET, push decls and levels so as to construct in the
2089 current function a tree of contexts isomorphic to the one that is given.
2090
2091 LEVEL indicates how far down into the BLOCK tree the node we are
2092 currently traversing is. It is always zero except for recursive calls.
2093
2094 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2095 registers used in the DECL_RTL field should be remapped. If it is zero,
2096 no mapping is necessary. */
2097
2098 static void
2099 integrate_decl_tree (let, level, map)
2100 tree let;
2101 int level;
2102 struct inline_remap *map;
2103 {
2104 tree t, node;
2105
2106 if (level > 0)
2107 pushlevel (0);
2108
2109 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2110 {
2111 tree d;
2112
2113 push_obstacks_nochange ();
2114 saveable_allocation ();
2115 d = copy_node (t);
2116 pop_obstacks ();
2117
2118 if (DECL_RTL (t) != 0)
2119 {
2120 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2121 /* Fully instantiate the address with the equivalent form so that the
2122 debugging information contains the actual register, instead of the
2123 virtual register. Do this by not passing an insn to
2124 subst_constants. */
2125 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2126 apply_change_group ();
2127 }
2128 /* These args would always appear unused, if not for this. */
2129 TREE_USED (d) = 1;
2130 /* Prevent warning for shadowing with these. */
2131 DECL_ABSTRACT_ORIGIN (d) = t;
2132
2133 if (DECL_LANG_SPECIFIC (d))
2134 copy_lang_decl (d);
2135
2136 pushdecl (d);
2137 }
2138
2139 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2140 integrate_decl_tree (t, level + 1, map);
2141
2142 if (level > 0)
2143 {
2144 node = poplevel (1, 0, 0);
2145 if (node)
2146 {
2147 TREE_USED (node) = TREE_USED (let);
2148 BLOCK_ABSTRACT_ORIGIN (node) = let;
2149 }
2150 }
2151 }
2152
2153 /* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2154 through save_constants. */
2155
2156 static void
2157 save_constants_in_decl_trees (let)
2158 tree let;
2159 {
2160 tree t;
2161
2162 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2163 if (DECL_RTL (t) != 0)
2164 save_constants (&DECL_RTL (t));
2165
2166 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2167 save_constants_in_decl_trees (t);
2168 }
2169 \f
2170 /* Create a new copy of an rtx.
2171 Recursively copies the operands of the rtx,
2172 except for those few rtx codes that are sharable.
2173
2174 We always return an rtx that is similar to the incoming rtx, with the
2175 exception of possibly changing a REG to a SUBREG or vice versa. No
2176 rtl is ever emitted.
2177
2178 Handle constants that need to be placed in the constant pool by
2179 calling `force_const_mem'. */
2180
2181 rtx
2182 copy_rtx_and_substitute (orig, map)
2183 register rtx orig;
2184 struct inline_remap *map;
2185 {
2186 register rtx copy, temp;
2187 register int i, j;
2188 register RTX_CODE code;
2189 register enum machine_mode mode;
2190 register char *format_ptr;
2191 int regno;
2192
2193 if (orig == 0)
2194 return 0;
2195
2196 code = GET_CODE (orig);
2197 mode = GET_MODE (orig);
2198
2199 switch (code)
2200 {
2201 case REG:
2202 /* If the stack pointer register shows up, it must be part of
2203 stack-adjustments (*not* because we eliminated the frame pointer!).
2204 Small hard registers are returned as-is. Pseudo-registers
2205 go through their `reg_map'. */
2206 regno = REGNO (orig);
2207 if (regno <= LAST_VIRTUAL_REGISTER)
2208 {
2209 /* Some hard registers are also mapped,
2210 but others are not translated. */
2211 if (map->reg_map[regno] != 0)
2212 return map->reg_map[regno];
2213
2214 /* If this is the virtual frame pointer, make space in current
2215 function's stack frame for the stack frame of the inline function.
2216
2217 Copy the address of this area into a pseudo. Map
2218 virtual_stack_vars_rtx to this pseudo and set up a constant
2219 equivalence for it to be the address. This will substitute the
2220 address into insns where it can be substituted and use the new
2221 pseudo where it can't. */
2222 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2223 {
2224 rtx loc, seq;
2225 int size = DECL_FRAME_SIZE (map->fndecl);
2226
2227 #ifdef FRAME_GROWS_DOWNWARD
2228 /* In this case, virtual_stack_vars_rtx points to one byte
2229 higher than the top of the frame area. So make sure we
2230 allocate a big enough chunk to keep the frame pointer
2231 aligned like a real one. */
2232 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2233 #endif
2234 start_sequence ();
2235 loc = assign_stack_temp (BLKmode, size, 1);
2236 loc = XEXP (loc, 0);
2237 #ifdef FRAME_GROWS_DOWNWARD
2238 /* In this case, virtual_stack_vars_rtx points to one byte
2239 higher than the top of the frame area. So compute the offset
2240 to one byte higher than our substitute frame. */
2241 loc = plus_constant (loc, size);
2242 #endif
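/* Worked example (illustrative): for a 40-byte inlined frame with
   BIGGEST_ALIGNMENT of 64 bits and FRAME_GROWS_DOWNWARD defined, SIZE
   is first rounded with CEIL_ROUND (40, 8) == 40, a 40-byte BLKmode
   temporary is carved out of the caller's frame, and LOC then points
   one byte past its top -- exactly what virtual_stack_vars_rtx denoted
   in the inlined function.  */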
2243 map->reg_map[regno] = temp
2244 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2245
2246 #ifdef STACK_BOUNDARY
2247 mark_reg_pointer (map->reg_map[regno],
2248 STACK_BOUNDARY / BITS_PER_UNIT);
2249 #endif
2250
2251 if (REGNO (temp) < map->const_equiv_map_size)
2252 {
2253 map->const_equiv_map[REGNO (temp)] = loc;
2254 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2255 }
2256
2257 seq = gen_sequence ();
2258 end_sequence ();
2259 emit_insn_after (seq, map->insns_at_start);
2260 return temp;
2261 }
2262 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2263 {
2264 /* Do the same for a block to contain any arguments referenced
2265 in memory. */
2266 rtx loc, seq;
2267 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2268
2269 start_sequence ();
2270 loc = assign_stack_temp (BLKmode, size, 1);
2271 loc = XEXP (loc, 0);
2272 /* When arguments grow downward, the virtual incoming
2273 args pointer points to the top of the argument block,
2274 so the remapped location better do the same. */
2275 #ifdef ARGS_GROW_DOWNWARD
2276 loc = plus_constant (loc, size);
2277 #endif
2278 map->reg_map[regno] = temp
2279 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2280
2281 #ifdef STACK_BOUNDARY
2282 mark_reg_pointer (map->reg_map[regno],
2283 STACK_BOUNDARY / BITS_PER_UNIT);
2284 #endif
2285
2286 if (REGNO (temp) < map->const_equiv_map_size)
2287 {
2288 map->const_equiv_map[REGNO (temp)] = loc;
2289 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2290 }
2291
2292 seq = gen_sequence ();
2293 end_sequence ();
2294 emit_insn_after (seq, map->insns_at_start);
2295 return temp;
2296 }
2297 else if (REG_FUNCTION_VALUE_P (orig))
2298 {
2299 /* This is a reference to the function return value. If
2300 the function doesn't have a return value, error. If the
2301 mode doesn't agree, make a SUBREG. */
2302 if (map->inline_target == 0)
2303 /* Must be unrolling loops or replicating code if we
2304 reach here, so return the register unchanged. */
2305 return orig;
2306 else if (mode != GET_MODE (map->inline_target))
2307 return gen_lowpart (mode, map->inline_target);
2308 else
2309 return map->inline_target;
2310 }
2311 return orig;
2312 }
2313 if (map->reg_map[regno] == NULL)
2314 {
2315 map->reg_map[regno] = gen_reg_rtx (mode);
2316 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2317 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2318 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2319 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2320
2321 if (map->regno_pointer_flag[regno])
2322 mark_reg_pointer (map->reg_map[regno],
2323 map->regno_pointer_align[regno]);
2324 }
2325 return map->reg_map[regno];
2326
2327 case SUBREG:
2328 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2329 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2330 if (GET_CODE (copy) == SUBREG)
2331 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
2332 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2333 else if (GET_CODE (copy) == CONCAT)
2334 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2335 else
2336 return gen_rtx (SUBREG, GET_MODE (orig), copy,
2337 SUBREG_WORD (orig));
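/* Example of the word arithmetic above (not from the source): if ORIG is
   (subreg:SI (reg:DI 65) 1) and reg 65 was itself remapped to
   (subreg:DI (reg:TI 80) 2), the result is (subreg:SI (reg:TI 80) 3);
   the SUBREG_WORDs simply add, avoiding a nested SUBREG.  */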
2338
2339 case ADDRESSOF:
2340 copy = gen_rtx (ADDRESSOF, mode,
2341 copy_rtx_and_substitute (XEXP (orig, 0), map));
2342 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2343 regno = ADDRESSOF_REGNO (orig);
2344 if (map->reg_map[regno])
2345 regno = REGNO (map->reg_map[regno]);
2346 else if (regno > LAST_VIRTUAL_REGISTER)
2347 {
2348 temp = XEXP (orig, 0);
2349 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2350 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2351 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2352 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2353 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2354
2355 if (map->regno_pointer_flag[regno])
2356 mark_reg_pointer (map->reg_map[regno],
2357 map->regno_pointer_align[regno]);
2358 regno = REGNO (map->reg_map[regno]);
2359 }
2360 ADDRESSOF_REGNO (copy) = regno;
2361 return copy;
2362
2363 case USE:
2364 case CLOBBER:
2365 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2366 to (use foo) if the original insn didn't have a subreg.
2367 Removing the subreg distorts the VAX movstrhi pattern
2368 by changing the mode of an operand. */
2369 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2370 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2371 copy = SUBREG_REG (copy);
2372 return gen_rtx (code, VOIDmode, copy);
2373
2374 case CODE_LABEL:
2375 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
2376 = LABEL_PRESERVE_P (orig);
2377 return map->label_map[CODE_LABEL_NUMBER (orig)];
2378
2379 case LABEL_REF:
2380 copy = gen_rtx (LABEL_REF, mode,
2381 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2382 : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
2383 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2384
2385 /* The fact that this label was previously nonlocal does not mean
2386 it still is, so we must check if it is within the range of
2387 this function's labels. */
2388 LABEL_REF_NONLOCAL_P (copy)
2389 = (LABEL_REF_NONLOCAL_P (orig)
2390 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2391 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2392
2393 /* If we have made a nonlocal label local, it means that this
2394 inlined call will be referring to our nonlocal goto handler.
2395 So make sure we create one for this block; we normally would
2396 not since this is not otherwise considered a "call". */
2397 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2398 function_call_count++;
2399
2400 return copy;
2401
2402 case PC:
2403 case CC0:
2404 case CONST_INT:
2405 return orig;
2406
2407 case SYMBOL_REF:
2408 /* Symbols which represent the address of a label stored in the constant
2409 pool must be modified to point to a constant pool entry for the
2410 remapped label. Otherwise, symbols are returned unchanged. */
2411 if (CONSTANT_POOL_ADDRESS_P (orig))
2412 {
2413 rtx constant = get_pool_constant (orig);
2414 if (GET_CODE (constant) == LABEL_REF)
2415 return XEXP (force_const_mem (GET_MODE (orig),
2416 copy_rtx_and_substitute (constant,
2417 map)),
2418 0);
2419 }
2420
2421 return orig;
2422
2423 case CONST_DOUBLE:
2424 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2425 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2426 duplicate of a CONST_DOUBLE we have already seen. */
2427 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2428 {
2429 REAL_VALUE_TYPE d;
2430
2431 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2432 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2433 }
2434 else
2435 return immed_double_const (CONST_DOUBLE_LOW (orig),
2436 CONST_DOUBLE_HIGH (orig), VOIDmode);
2437
2438 case CONST:
2439 /* Make new constant pool entry for a constant
2440 that was in the pool of the inline function. */
2441 if (RTX_INTEGRATED_P (orig))
2442 {
2443 /* If this was an address of a constant pool entry that itself
2444 had to be placed in the constant pool, it might not be a
2445 valid address. So the recursive call below might turn it
2446 into a register. In that case, it isn't a constant any
2447 more, so return it. This has the potential of changing a
2448 MEM into a REG, but we'll assume that it is safe. */
2449 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2450 if (! CONSTANT_P (temp))
2451 return temp;
2452 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2453 }
2454 break;
2455
2456 case ADDRESS:
2457 /* If from constant pool address, make new constant pool entry and
2458 return its address. */
2459 if (! RTX_INTEGRATED_P (orig))
2460 abort ();
2461
2462 temp
2463 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2464 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2465 map));
2466
2467 #if 0
2468 /* Legitimizing the address here is incorrect.
2469
2470 The only ADDRESS rtx's that can reach here are ones created by
2471 save_constants. Hence the operand of the ADDRESS is always valid
2472 in this position of the instruction, since the original rtx without
2473 the ADDRESS was valid.
2474
2475 The reason we don't legitimize the address here is that on the
2476 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2477 This code forces the operand of the address to a register, which
2478 fails because we can not take the HIGH part of a register.
2479
2480 Also, change_address may create new registers. These registers
2481 will not have valid reg_map entries. This can cause try_constants()
2482 to fail because it assumes that all registers in the rtx have valid
2483 reg_map entries, and it may end up replacing one of these new
2484 registers with junk. */
2485
2486 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2487 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2488 #endif
2489
2490 temp = XEXP (temp, 0);
2491
2492 #ifdef POINTERS_EXTEND_UNSIGNED
2493 if (GET_MODE (temp) != GET_MODE (orig))
2494 temp = convert_memory_address (GET_MODE (orig), temp);
2495 #endif
2496
2497 return temp;
2498
2499 case ASM_OPERANDS:
2500 /* If a single asm insn contains multiple output operands
2501 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2502 We must make sure that the copied insn continues to share it. */
2503 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2504 {
2505 copy = rtx_alloc (ASM_OPERANDS);
2506 copy->volatil = orig->volatil;
2507 XSTR (copy, 0) = XSTR (orig, 0);
2508 XSTR (copy, 1) = XSTR (orig, 1);
2509 XINT (copy, 2) = XINT (orig, 2);
2510 XVEC (copy, 3) = map->copy_asm_operands_vector;
2511 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2512 XSTR (copy, 5) = XSTR (orig, 5);
2513 XINT (copy, 6) = XINT (orig, 6);
2514 return copy;
2515 }
2516 break;
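/* Illustration (hypothetical source): an asm with two outputs, say
   asm ("..." : "=r" (a), "=r" (b) : "r" (c)), expands to two SETs whose
   sources are distinct ASM_OPERANDS sharing one operand vector in
   XVEC (..., 3).  The first ASM_OPERANDS copied takes the generic path
   at the bottom of this function and records the fresh vectors in MAP;
   the second hits the case above and reuses them, so the sharing
   survives in the copy.  */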
2517
2518 case CALL:
2519 /* This is given special treatment because the first
2520 operand of a CALL is a (MEM ...) which may get
2521 forced into a register for cse. This is undesirable
2522 if function-address cse isn't wanted or if we won't do cse. */
2523 #ifndef NO_FUNCTION_CSE
2524 if (! (optimize && ! flag_no_function_cse))
2525 #endif
2526 return gen_rtx (CALL, GET_MODE (orig),
2527 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2528 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2529 copy_rtx_and_substitute (XEXP (orig, 1), map));
2530 break;
2531
2532 #if 0
2533 /* Must be ifdefed out for loop unrolling to work. */
2534 case RETURN:
2535 abort ();
2536 #endif
2537
2538 case SET:
2539 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2540 Adjust the setting by the offset of the area we made.
2541 If the nonlocal goto is into the current function,
2542 this will result in unnecessarily bad code, but should work. */
2543 if (SET_DEST (orig) == virtual_stack_vars_rtx
2544 || SET_DEST (orig) == virtual_incoming_args_rtx)
2545 {
2546 /* In case a translation hasn't occurred already, make one now. */
2547 rtx junk = copy_rtx_and_substitute (SET_DEST (orig), map);
2548 rtx equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2549 rtx equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
2550 HOST_WIDE_INT loc_offset
2551 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2552
2553 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2554 force_operand
2555 (plus_constant
2556 (copy_rtx_and_substitute (SET_SRC (orig), map),
2557 - loc_offset),
2558 NULL_RTX));
2559 }
2560 break;
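/* Hypothetical example: suppose the simulated frame gave
   virtual_stack_vars_rtx the equivalent address
   (plus (reg 90) (const_int 16)).  Then LOC_OFFSET is 16, and a nonlocal
   goto's assignment to the frame pointer is rewritten to the substituted
   source minus 16, keeping the stored value consistent with the offset
   frame area created above.  */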
2561
2562 case MEM:
2563 copy = rtx_alloc (MEM);
2564 PUT_MODE (copy, mode);
2565 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2566 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2567 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2568
2569 /* If doing function inlining, this MEM might not be const in the
2570 function that it is being inlined into, and thus may not be
2571 unchanging after function inlining. Constant pool references are
2572 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2573 for them. */
2574 if (! map->integrating)
2575 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2576
2577 return copy;
2578
2579 default:
2580 break;
2581 }
2582
2583 copy = rtx_alloc (code);
2584 PUT_MODE (copy, mode);
2585 copy->in_struct = orig->in_struct;
2586 copy->volatil = orig->volatil;
2587 copy->unchanging = orig->unchanging;
2588
2589 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2590
2591 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2592 {
2593 switch (*format_ptr++)
2594 {
2595 case '0':
2596 XEXP (copy, i) = XEXP (orig, i);
2597 break;
2598
2599 case 'e':
2600 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2601 break;
2602
2603 case 'u':
2604 /* Change any references to old-insns to point to the
2605 corresponding copied insns. */
2606 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2607 break;
2608
2609 case 'E':
2610 XVEC (copy, i) = XVEC (orig, i);
2611 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2612 {
2613 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2614 for (j = 0; j < XVECLEN (copy, i); j++)
2615 XVECEXP (copy, i, j)
2616 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2617 }
2618 break;
2619
2620 case 'w':
2621 XWINT (copy, i) = XWINT (orig, i);
2622 break;
2623
2624 case 'i':
2625 XINT (copy, i) = XINT (orig, i);
2626 break;
2627
2628 case 's':
2629 XSTR (copy, i) = XSTR (orig, i);
2630 break;
2631
2632 default:
2633 abort ();
2634 }
2635 }
2636
2637 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2638 {
2639 map->orig_asm_operands_vector = XVEC (orig, 3);
2640 map->copy_asm_operands_vector = XVEC (copy, 3);
2641 map->copy_asm_constraints_vector = XVEC (copy, 4);
2642 }
2643
2644 return copy;
2645 }
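/* Note on the generic path above (illustrative): the RTL format string
   drives the copy.  For instance GET_RTX_FORMAT (PLUS) is "ee", so both
   operands are copied recursively, while a 'u' field -- a reference to
   another insn -- is redirected through map->insn_map so REG_NOTES and
   similar links point at the copied insns.  */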
2646 \f
2647 /* Substitute known constant values into INSN, if that is valid. */
2648
2649 void
2650 try_constants (insn, map)
2651 rtx insn;
2652 struct inline_remap *map;
2653 {
2654 int i;
2655
2656 map->num_sets = 0;
2657 subst_constants (&PATTERN (insn), insn, map);
2658
2659 /* Apply the changes if they are valid; otherwise discard them. */
2660 apply_change_group ();
2661
2662 /* Show we don't know the value of anything stored or clobbered. */
2663 note_stores (PATTERN (insn), mark_stores);
2664 map->last_pc_value = 0;
2665 #ifdef HAVE_cc0
2666 map->last_cc0_value = 0;
2667 #endif
2668
2669 /* Set up any constant equivalences made in this insn. */
2670 for (i = 0; i < map->num_sets; i++)
2671 {
2672 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2673 {
2674 int regno = REGNO (map->equiv_sets[i].dest);
2675
2676 if (regno < map->const_equiv_map_size
2677 && (map->const_equiv_map[regno] == 0
2678 /* Following clause is a hack to make case work where GNU C++
2679 reassigns a variable to make cse work right. */
2680 || ! rtx_equal_p (map->const_equiv_map[regno],
2681 map->equiv_sets[i].equiv)))
2682 {
2683 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2684 map->const_age_map[regno] = map->const_age;
2685 }
2686 }
2687 else if (map->equiv_sets[i].dest == pc_rtx)
2688 map->last_pc_value = map->equiv_sets[i].equiv;
2689 #ifdef HAVE_cc0
2690 else if (map->equiv_sets[i].dest == cc0_rtx)
2691 map->last_cc0_value = map->equiv_sets[i].equiv;
2692 #endif
2693 }
2694 }
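/* Usage sketch (illustrative): for a copied insn
   (set (reg 75) (plus (reg 60) (const_int 4))) where reg 60 is known
   equivalent to (const_int 8), subst_constants queues the change to
   (set (reg 75) (const_int 12)); apply_change_group keeps it only if the
   insn still matches its pattern, and the loop above then records reg 75
   as equivalent to (const_int 12) for use in later insns.  */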
2695 \f
2696 /* Substitute known constants for pseudo regs in the contents of LOC,
2697 which are part of INSN.
2698 If INSN is zero, the substitution should always be done (this is used to
2699 update DECL_RTL).
2700 These changes are taken out by try_constants if the result is not valid.
2701
2702 Note that we are more concerned with determining when the result of a SET
2703 is a constant, for further propagation, than actually inserting constants
2704 into insns; cse will do the latter task better.
2705
2706 This function is also used to adjust the address of items previously addressed
2707 via the virtual stack variable or virtual incoming arguments registers. */
2708
2709 static void
2710 subst_constants (loc, insn, map)
2711 rtx *loc;
2712 rtx insn;
2713 struct inline_remap *map;
2714 {
2715 rtx x = *loc;
2716 register int i;
2717 register enum rtx_code code;
2718 register char *format_ptr;
2719 int num_changes = num_validated_changes ();
2720 rtx new = 0;
2721 enum machine_mode op0_mode;
2722
2723 code = GET_CODE (x);
2724
2725 switch (code)
2726 {
2727 case PC:
2728 case CONST_INT:
2729 case CONST_DOUBLE:
2730 case SYMBOL_REF:
2731 case CONST:
2732 case LABEL_REF:
2733 case ADDRESS:
2734 return;
2735
2736 #ifdef HAVE_cc0
2737 case CC0:
2738 validate_change (insn, loc, map->last_cc0_value, 1);
2739 return;
2740 #endif
2741
2742 case USE:
2743 case CLOBBER:
2744 /* The only thing we can do with a USE or CLOBBER is possibly do
2745 some substitutions in a MEM within it. */
2746 if (GET_CODE (XEXP (x, 0)) == MEM)
2747 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2748 return;
2749
2750 case REG:
2751 /* Substitute for parms and known constants. Don't replace
2752 hard regs used as user variables with constants. */
2753 {
2754 int regno = REGNO (x);
2755
2756 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2757 && regno < map->const_equiv_map_size
2758 && map->const_equiv_map[regno] != 0
2759 && map->const_age_map[regno] >= map->const_age)
2760 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2761 return;
2762 }
2763
2764 case SUBREG:
2765 /* SUBREG applied to something other than a reg
2766 should be treated as ordinary, since that must
2767 be a special hack and we don't know how to treat it specially.
2768 Consider for example mulsidi3 in m68k.md.
2769 Ordinary SUBREG of a REG needs this special treatment. */
2770 if (GET_CODE (SUBREG_REG (x)) == REG)
2771 {
2772 rtx inner = SUBREG_REG (x);
2773 rtx new = 0;
2774
2775 /* We can't call subst_constants on &SUBREG_REG (x) because any
2776 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2777 see what is inside, try to form the new SUBREG and see if that is
2778 valid. We handle two cases: extracting a full word in an
2779 integral mode and extracting the low part. */
2780 subst_constants (&inner, NULL_RTX, map);
2781
2782 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2783 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2784 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2785 new = operand_subword (inner, SUBREG_WORD (x), 0,
2786 GET_MODE (SUBREG_REG (x)));
2787
2788 cancel_changes (num_changes);
2789 if (new == 0 && subreg_lowpart_p (x))
2790 new = gen_lowpart_common (GET_MODE (x), inner);
2791
2792 if (new)
2793 validate_change (insn, loc, new, 1);
2794
2795 return;
2796 }
2797 break;
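/* Example of the two extractions above (not from the source, assuming a
   32-bit target): for (subreg:SI (reg:DI 70) 1) where reg 70 now has a
   constant equivalent, operand_subword extracts the requested word as a
   CONST_INT; for (subreg:QI (reg:SI 71) 0), gen_lowpart_common extracts
   the low byte.  Either result replaces the whole SUBREG via
   validate_change.  */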
2798
2799 case MEM:
2800 subst_constants (&XEXP (x, 0), insn, map);
2801
2802 /* If a memory address got spoiled, change it back. */
2803 if (insn != 0 && num_validated_changes () != num_changes
2804 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2805 cancel_changes (num_changes);
2806 return;
2807
2808 case SET:
2809 {
2810 /* Substitute constants in our source, and in any arguments to a
2811 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2812 itself. */
2813 rtx *dest_loc = &SET_DEST (x);
2814 rtx dest = *dest_loc;
2815 rtx src, tem;
2816
2817 subst_constants (&SET_SRC (x), insn, map);
2818 src = SET_SRC (x);
2819
2820 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2821 || GET_CODE (*dest_loc) == SUBREG
2822 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2823 {
2824 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2825 {
2826 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2827 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2828 }
2829 dest_loc = &XEXP (*dest_loc, 0);
2830 }
2831
2832 /* Do substitute in the address of a destination in memory. */
2833 if (GET_CODE (*dest_loc) == MEM)
2834 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2835
2836 /* Check for the case where DEST is a SUBREG, both it and the underlying
2837 register are no wider than one word, and the SUBREG has the wider mode.
2838 In that case, we are really setting the underlying register to the
2839 source converted to the mode of DEST. So indicate that. */
2840 if (GET_CODE (dest) == SUBREG
2841 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2842 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2843 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2844 <= GET_MODE_SIZE (GET_MODE (dest)))
2845 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2846 src)))
2847 src = tem, dest = SUBREG_REG (dest);
2848
2849 /* If storing a recognizable value, save it for later recording. */
2850 if ((map->num_sets < MAX_RECOG_OPERANDS)
2851 && (CONSTANT_P (src)
2852 || (GET_CODE (src) == REG
2853 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2854 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2855 || (GET_CODE (src) == PLUS
2856 && GET_CODE (XEXP (src, 0)) == REG
2857 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2858 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2859 && CONSTANT_P (XEXP (src, 1)))
2860 || GET_CODE (src) == COMPARE
2861 #ifdef HAVE_cc0
2862 || dest == cc0_rtx
2863 #endif
2864 || (dest == pc_rtx
2865 && (src == pc_rtx || GET_CODE (src) == RETURN
2866 || GET_CODE (src) == LABEL_REF))))
2867 {
2868 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2869 it will cause us to save the COMPARE with any constants
2870 substituted, which is what we want for later. */
2871 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2872 map->equiv_sets[map->num_sets++].dest = dest;
2873 }
2874 }
2875 return;
2876
2877 default:
2878 break;
2879 }
2880
2881 format_ptr = GET_RTX_FORMAT (code);
2882
2883 /* If the first operand is an expression, save its mode for later. */
2884 if (*format_ptr == 'e')
2885 op0_mode = GET_MODE (XEXP (x, 0));
2886
2887 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2888 {
2889 switch (*format_ptr++)
2890 {
2891 case '0':
2892 break;
2893
2894 case 'e':
2895 if (XEXP (x, i))
2896 subst_constants (&XEXP (x, i), insn, map);
2897 break;
2898
2899 case 'u':
2900 case 'i':
2901 case 's':
2902 case 'w':
2903 break;
2904
2905 case 'E':
2906 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2907 {
2908 int j;
2909 for (j = 0; j < XVECLEN (x, i); j++)
2910 subst_constants (&XVECEXP (x, i, j), insn, map);
2911 }
2912 break;
2913
2914 default:
2915 abort ();
2916 }
2917 }
2918
2919 /* If this is a commutative operation, move a constant to the second
2920 operand unless the second operand is already a CONST_INT. */
2921 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2922 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2923 {
2924 rtx tem = XEXP (x, 0);
2925 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2926 validate_change (insn, &XEXP (x, 1), tem, 1);
2927 }
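/* Illustration: if substitution produced (plus (const_int 4) (reg 66)),
   the swap above canonicalizes it to (plus (reg 66) (const_int 4)), the
   operand order expected by recog and by the simplify_* routines invoked
   below.  */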
2928
2929 /* Simplify the expression in case we put in some constants. */
2930 switch (GET_RTX_CLASS (code))
2931 {
2932 case '1':
2933 new = simplify_unary_operation (code, GET_MODE (x),
2934 XEXP (x, 0), op0_mode);
2935 break;
2936
2937 case '<':
2938 {
2939 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2940 if (op_mode == VOIDmode)
2941 op_mode = GET_MODE (XEXP (x, 1));
2942 new = simplify_relational_operation (code, op_mode,
2943 XEXP (x, 0), XEXP (x, 1));
2944 #ifdef FLOAT_STORE_FLAG_VALUE
2945 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2946 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2947 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
2948 GET_MODE (x)));
2949 #endif
2950 break;
2951 }
2952
2953 case '2':
2954 case 'c':
2955 new = simplify_binary_operation (code, GET_MODE (x),
2956 XEXP (x, 0), XEXP (x, 1));
2957 break;
2958
2959 case 'b':
2960 case '3':
2961 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2962 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2963 break;
2964 }
2965
2966 if (new)
2967 validate_change (insn, loc, new, 1);
2968 }
2969
2970 /* Show that the registers modified no longer contain known constants. We are
2971 called from note_stores with parts of the new insn. */
2972
2973 void
2974 mark_stores (dest, x)
2975 rtx dest;
2976 rtx x;
2977 {
2978 int regno = -1;
2979 enum machine_mode mode;
2980
2981 /* DEST is always the innermost thing set, except in the case of
2982 SUBREGs of hard registers. */
2983
2984 if (GET_CODE (dest) == REG)
2985 regno = REGNO (dest), mode = GET_MODE (dest);
2986 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2987 {
2988 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2989 mode = GET_MODE (SUBREG_REG (dest));
2990 }
2991
2992 if (regno >= 0)
2993 {
2994 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2995 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2996 int i;
2997
2998 /* Ignore virtual stack var or virtual arg register since those
2999 are handled separately. */
3000 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3001 && regno != VIRTUAL_STACK_VARS_REGNUM)
3002 for (i = regno; i <= last_reg; i++)
3003 if (i < global_const_equiv_map_size)
3004 global_const_equiv_map[i] = 0;
3005 }
3006 }
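/* Illustrative example (not in the original): on a 32-bit target, a store
   to the hard register pair (reg:DI 4) gives
   HARD_REGNO_NREGS (4, DImode) == 2, so the loop above clears
   global_const_equiv_map[4] and global_const_equiv_map[5] -- both halves
   were modified.  */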
3007 \f
3008 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3009 pointed to by PX, they represent constants in the constant pool.
3010 Replace these with a new memory reference obtained from force_const_mem.
3011 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3012 address of a constant pool entry. Replace them with the address of
3013 a new constant pool entry obtained from force_const_mem. */
3014
3015 static void
3016 restore_constants (px)
3017 rtx *px;
3018 {
3019 rtx x = *px;
3020 int i, j;
3021 char *fmt;
3022
3023 if (x == 0)
3024 return;
3025
3026 if (GET_CODE (x) == CONST_DOUBLE)
3027 {
3028 /* We have to make a new CONST_DOUBLE to ensure that we account for
3029 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
3030 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3031 {
3032 REAL_VALUE_TYPE d;
3033
3034 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
3035 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
3036 }
3037 else
3038 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
3039 VOIDmode);
3040 }
3041
3042 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
3043 {
3044 restore_constants (&XEXP (x, 0));
3045 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
3046 }
3047 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
3048 {
3049 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
3050 rtx new = XEXP (SUBREG_REG (x), 0);
3051
3052 restore_constants (&new);
3053 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
3054 PUT_MODE (new, GET_MODE (x));
3055 *px = validize_mem (new);
3056 }
3057 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
3058 {
3059 rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
3060 XEXP (XEXP (x, 0), 0)),
3061 0);
3062
3063 #ifdef POINTERS_EXTEND_UNSIGNED
3064 if (GET_MODE (new) != GET_MODE (x))
3065 new = convert_memory_address (GET_MODE (x), new);
3066 #endif
3067
3068 *px = new;
3069 }
3070 else
3071 {
3072 fmt = GET_RTX_FORMAT (GET_CODE (x));
3073 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
3074 {
3075 switch (*fmt++)
3076 {
3077 case 'E':
3078 for (j = 0; j < XVECLEN (x, i); j++)
3079 restore_constants (&XVECEXP (x, i, j));
3080 break;
3081
3082 case 'e':
3083 restore_constants (&XEXP (x, i));
3084 break;
3085 }
3086 }
3087 }
3088 }
3089 \f
3090 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
3091 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
3092 that it points to the node itself, thus indicating that the node is its
3093 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
3094 the given node is NULL, recursively descend the decl/block tree which
3095 it is the root of, and for each other ..._DECL or BLOCK node contained
3096 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
3097 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
3098 values to point to themselves. */
3099
3100 static void
3101 set_block_origin_self (stmt)
3102 register tree stmt;
3103 {
3104 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
3105 {
3106 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
3107
3108 {
3109 register tree local_decl;
3110
3111 for (local_decl = BLOCK_VARS (stmt);
3112 local_decl != NULL_TREE;
3113 local_decl = TREE_CHAIN (local_decl))
3114 set_decl_origin_self (local_decl); /* Potential recursion. */
3115 }
3116
3117 {
3118 register tree subblock;
3119
3120 for (subblock = BLOCK_SUBBLOCKS (stmt);
3121 subblock != NULL_TREE;
3122 subblock = BLOCK_CHAIN (subblock))
3123 set_block_origin_self (subblock); /* Recurse. */
3124 }
3125 }
3126 }
3127
3128 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3129 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3130 node so that it points to the node itself, thus indicating that the
3131 node represents its own (abstract) origin. Additionally, if the
3132 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3133 the decl/block tree of which the given node is the root, and for
3134 each other ..._DECL or BLOCK node contained therein whose
3135 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3136 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3137 point to themselves. */
3138
3139 static void
3140 set_decl_origin_self (decl)
3141 register tree decl;
3142 {
3143 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3144 {
3145 DECL_ABSTRACT_ORIGIN (decl) = decl;
3146 if (TREE_CODE (decl) == FUNCTION_DECL)
3147 {
3148 register tree arg;
3149
3150 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3151 DECL_ABSTRACT_ORIGIN (arg) = arg;
3152 if (DECL_INITIAL (decl) != NULL_TREE
3153 && DECL_INITIAL (decl) != error_mark_node)
3154 set_block_origin_self (DECL_INITIAL (decl));
3155 }
3156 }
3157 }
3158 \f
3159 /* Given a pointer to some BLOCK node, and a boolean value to set the
3160 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
3161 the given block, and for all local decls and all local sub-blocks
3162 (recursively) which are contained therein. */
3163
3164 static void
3165 set_block_abstract_flags (stmt, setting)
3166 register tree stmt;
3167 register int setting;
3168 {
3169 register tree local_decl;
3170 register tree subblock;
3171
3172 BLOCK_ABSTRACT (stmt) = setting;
3173
3174 for (local_decl = BLOCK_VARS (stmt);
3175 local_decl != NULL_TREE;
3176 local_decl = TREE_CHAIN (local_decl))
3177 set_decl_abstract_flags (local_decl, setting);
3178
3179 for (subblock = BLOCK_SUBBLOCKS (stmt);
3180 subblock != NULL_TREE;
3181 subblock = BLOCK_CHAIN (subblock))
3182 set_block_abstract_flags (subblock, setting);
3183 }
3184
3185 /* Given a pointer to some ..._DECL node, and a boolean value to set the
3186 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
3187 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3188 set the abstract flags for all of the parameters, local vars, local
3189 blocks and sub-blocks (recursively) to the same setting. */
3190
3191 void
3192 set_decl_abstract_flags (decl, setting)
3193 register tree decl;
3194 register int setting;
3195 {
3196 DECL_ABSTRACT (decl) = setting;
3197 if (TREE_CODE (decl) == FUNCTION_DECL)
3198 {
3199 register tree arg;
3200
3201 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3202 DECL_ABSTRACT (arg) = setting;
3203 if (DECL_INITIAL (decl) != NULL_TREE
3204 && DECL_INITIAL (decl) != error_mark_node)
3205 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3206 }
3207 }
3208 \f
3209 /* Output the assembly language code for the function FNDECL
3210 from its DECL_SAVED_INSNS. Used for inline functions that are output
3211 at end of compilation instead of where they came in the source. */
3212
3213 void
3214 output_inline_function (fndecl)
3215 tree fndecl;
3216 {
3217 rtx head;
3218 rtx last;
3219 int save_flag_no_inline = flag_no_inline;
3220
3221 if (output_bytecode)
3222 {
3223 warning ("`inline' ignored for bytecode output");
3224 return;
3225 }
3226
3227 /* Things we allocate from here on are part of this function, not
3228 permanent. */
3229 temporary_allocation ();
3230
3231 head = DECL_SAVED_INSNS (fndecl);
3232 current_function_decl = fndecl;
3233
3234 /* This call is only used to initialize global variables. */
3235 init_function_start (fndecl, "lossage", 1);
3236
3237 /* Redo parameter determinations in case the FUNCTION_...
3238 macros took machine-specific actions that need to be redone. */
3239 assign_parms (fndecl, 1);
3240
3241 /* Set stack frame size. */
3242 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3243
3244 /* The first is a bit of a lie (the array may be larger), but it doesn't
3245 matter too much and it isn't worth saving the actual bound. */
3246 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3247 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3248 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3249 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3250 max_parm_reg = MAX_PARMREG (head);
3251 parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);
3252
3253 stack_slot_list = STACK_SLOT_LIST (head);
3254 forced_labels = FORCED_LABELS (head);
3255
3256 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3257 current_function_calls_alloca = 1;
3258
3259 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3260 current_function_calls_setjmp = 1;
3261
3262 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3263 current_function_calls_longjmp = 1;
3264
3265 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3266 current_function_returns_struct = 1;
3267
3268 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3269 current_function_returns_pcc_struct = 1;
3270
3271 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3272 current_function_needs_context = 1;
3273
3274 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3275 current_function_has_nonlocal_label = 1;
3276
3277 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3278 current_function_returns_pointer = 1;
3279
3280 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3281 current_function_uses_const_pool = 1;
3282
3283 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3284 current_function_uses_pic_offset_table = 1;
3285
3286 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3287 current_function_pops_args = POPS_ARGS (head);
3288
3289 /* This is the only thing the expand_function_end call that used to be here
3290 actually did, and that call can cause problems. */
3291 immediate_size_expand--;
3292
3293 /* Find last insn and rebuild the constant pool. */
3294 for (last = FIRST_PARM_INSN (head);
3295 NEXT_INSN (last); last = NEXT_INSN (last))
3296 {
3297 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3298 {
3299 restore_constants (&PATTERN (last));
3300 restore_constants (&REG_NOTES (last));
3301 }
3302 }
3303
3304 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3305 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3306
3307 /* We must have already output DWARF debugging information for the
3308 original (abstract) inline function declaration/definition, so
3309 we want to make sure that the debugging information we generate
3310 for this special instance of the inline function refers back to
3311 the information we already generated. To make sure that happens,
3312 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3313 node (and for all of the local ..._DECL nodes which are its children)
3314 so that they all point to themselves. */
3315
3316 set_decl_origin_self (fndecl);
3317
3318 /* We're not deferring this any longer. */
3319 DECL_DEFER_OUTPUT (fndecl) = 0;
3320
3321 /* Integrating function calls isn't safe anymore, so turn on
3322 flag_no_inline. */
3323 flag_no_inline = 1;
3324
3325 /* Compile this function all the way down to assembly code. */
3326 rest_of_compilation (fndecl);
3327
3328 /* Reset flag_no_inline to its original value. */
3329 flag_no_inline = save_flag_no_inline;
3330
3331 current_function_decl = 0;
3332 }