/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "insn-flags.h"
#include "integrate.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
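/* For example, CEIL_ROUND (13, 8) yields 16 and CEIL_ROUND (16, 8)
   stays 16.  Note that ALIGN must be a power of two for this mask
   trick to be valid.  */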
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
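/* For example, by this default heuristic a function taking two
   arguments may have at most 8 * (8 + 2) == 80 insns and still be
   considered for inlining.  */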
static rtx initialize_for_inline	PROTO((tree, int, int, int, int));
static void finish_inline		PROTO((tree, rtx));
static void adjust_copied_decl_tree	PROTO((tree));
static tree copy_decl_list		PROTO((tree));
static tree copy_decl_tree		PROTO((tree));
static void copy_decl_rtls		PROTO((tree));
static void save_constants		PROTO((rtx *));
static void note_modified_parmregs	PROTO((rtx, rtx));
static rtx copy_for_inline		PROTO((rtx));
static void integrate_parm_decls	PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree	PROTO((tree, int, struct inline_remap *));
static void save_constants_in_decl_trees PROTO ((tree));
static void subst_constants		PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants		PROTO((rtx *));
static void set_block_origin_self	PROTO((tree));
static void set_decl_origin_self	PROTO((tree));
static void set_block_abstract_flags	PROTO((tree, int));

void set_decl_abstract_flags		PROTO((tree, int));
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */
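/* A caller is expected to use the returned message roughly as follows
   (hypothetical usage sketch; the actual call sites live elsewhere in
   the compiler, e.g. in rest_of_compilation):

     char *lose = function_cannot_inline_p (fndecl);
     if (lose)
       warning_with_decl (fndecl, lose);  */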
char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;
  rtx result;
  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This code
     is put in to catch the volunteers.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";
  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";
  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return "no prototype, and parameter address used; cannot be inline";
    }
  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";
  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return "function with varying-size parameter cannot be inline";
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return "function with transparent union parameter cannot be inline";
    }
  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return "function too large to be inline";
    }
  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels can not be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";
  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";
  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	return "function with complex parameters cannot be inline";
    }
  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return "inline functions not supported for this return value type";

  return 0;
}
/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;
/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
extern int max_parm_reg;
extern rtx *parm_reg_stack_loc;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
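  /* Each current_function_* flag used above is either 0 or 1, and each
     FUNCTION_FLAGS_* constant is a distinct bit, so the sum of products
     above simply packs the flags into a single bit mask.  */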
  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == MEM && copy)
	{
	  /* Copy the rtl so that modifications of the addresses
	     later in compilation won't affect this arg_vector.
	     Virtual register instantiation can screw the address
	     of the rtl.  */
	  rtx new = copy_rtx (p);

	  /* Don't leave the old copy anywhere in this decl.  */
	  if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
	      || (GET_CODE (DECL_RTL (parms)) == MEM
		  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
		  && (XEXP (DECL_RTL (parms), 0)
		      == XEXP (DECL_INCOMING_RTL (parms), 0))))
	    DECL_INCOMING_RTL (parms) = new;
	  DECL_RTL (parms) = new;
	}

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }
  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;
  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers).
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     the labels that are forced to exist,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     the original DECL_INITIAL,
     and pointers to the table of pseudo regs, pointer flags, and alignment. */
  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
				max_parm_reg, max_reg,
				current_function_args_size,
				current_function_pops_args,
				stack_slot_list, forced_labels, function_flags,
				current_function_outgoing_args_size,
				arg_vector, (rtx) DECL_INITIAL (fndecl),
				(rtvec) regno_reg_rtx, regno_pointer_flag,
				regno_pointer_align,
				(rtvec) parm_reg_stack_loc);
}
/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}
/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;
  char *new, *new1;
  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();
  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
	 will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  {
	    save_constants (&PATTERN (insn));
	    if (REG_NOTES (insn))
	      save_constants (&REG_NOTES (insn));
	  }

      /* Also scan all decls, and replace any constant pool references with the
	 actual constant.  */
      save_constants_in_decl_trees (DECL_INITIAL (fndecl));

      /* Clear out the constant pool so that we can recreate it with the
	 copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }
  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();
  /* Copy the chain insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;
  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
				     regno_reg_rtx[i], len);

  regno_reg_rtx = reg_map;
  /* Put copies of all the virtual register rtx into the new regno_reg_rtx.  */
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  /* Likewise each label rtx must have a unique rtx as its copy.  */

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  Some examples were > 2Mb in size.  */
  label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();
  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();
  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
	= copy_for_inline (XEXP (regno_reg_rtx[i], 0));
  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
  /* Now copy the chain of insns.  Do this twice.  The first time,
     copy the insn itself and its body; the second time, copy the
     REG_NOTES.  This is because a REG_NOTE may have a forward pointer
     to another insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* No need to keep these.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
	    continue;

	  copy = rtx_alloc (NOTE);
	  NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
	    NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  else
	    {
	      NOTE_SOURCE_FILE (insn) = (char *) copy;
	      NOTE_SOURCE_FILE (copy) = 0;
	    }
	  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
	      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
	    {
	      /* We have to forward these both to match the new exception
		 region.  */
	      NOTE_BLOCK_NUMBER (copy)
		= CODE_LABEL_NUMBER (label_map[NOTE_BLOCK_NUMBER (copy)]);
	    }
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  copy = rtx_alloc (GET_CODE (insn));

	  if (GET_CODE (insn) == CALL_INSN)
	    CALL_INSN_FUNCTION_USAGE (copy)
	      = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

	  PATTERN (copy) = copy_for_inline (PATTERN (insn));
	  INSN_CODE (copy) = -1;
	  LOG_LINKS (copy) = NULL_RTX;
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case CODE_LABEL:
	  copy = label_map[CODE_LABEL_NUMBER (insn)];
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  break;

	case BARRIER:
	  copy = rtx_alloc (BARRIER);
	  break;

	default:
	  abort ();
	}
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));
  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
	= copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);
  /* Make new versions of the register tables.  */
  new = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
  new1 = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_align, new1, regno_pointer_flag_length);

  regno_pointer_flag = new;
  regno_pointer_align = new1;

  set_new_first_and_last_insn (first_insn, last_insn);

  if (label_map)
    free (label_map);
}
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
   set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
   point to the corresponding (abstract) original node.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
    DECL_ABSTRACT_ORIGIN (head) = list;
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_node (next);
      if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
	DECL_ABSTRACT_ORIGIN (copy) = next;
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}
/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Otherwise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;
  return t;
}
/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
				max_label_num (), max_reg_num (), 0);
  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();
  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  if (current_function_uses_const_pool)
	    {
	      /* Replace any constant pool references with the actual constant.
		 We will put the constant back if we need to write the
		 function out after all.  */
	      save_constants (&PATTERN (insn));
	      if (REG_NOTES (insn))
		save_constants (&REG_NOTES (insn));
	    }

	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs);
	}
    }
  /* Also scan all decls, and replace any constant pool references with the
     actual constant.  */
  save_constants_in_decl_trees (DECL_INITIAL (fndecl));

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}
/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS and CONST rtx which also gives the constant, its
   mode, the mode of the address, and has RTX_INTEGRATED_P set.  */
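/* For example, a constant pool reference such as

     (mem:SI (symbol_ref ...))           ; address of a pool entry

   is rewritten here as

     (const:SI ...the pool constant...)  ; with RTX_INTEGRATED_P set

   so that the value survives even after the pool is cleared;
   copy_for_inline and restore_constants later recognize the
   RTX_INTEGRATED_P marker and re-enter the constant into the pool.  */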
static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
	 were only looking at the low-order part), surround it with a
	 SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
	{
	  new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
	  RTX_INTEGRATED_P (new) = 1;
	}

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx (ADDRESS, GET_MODE (x),
		     gen_rtx (CONST, get_pool_mode (x),
			      get_pool_constant (x)));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }

  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
	{
	  switch (fmt[i])
	    {
	    case 'E':
	      for (j = 0; j < XVECLEN (x, i); j++)
		save_constants (&XVECEXP (x, i, j));
	      break;

	    case 'e':
	      if (XEXP (x, i) == 0)
		continue;
	      if (i == 0)
		{
		  /* Hack tail-recursion here.  */
		  px = &XEXP (x, 0);
		  goto again;
		}
	      save_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register rtx new;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      /* These types may be freely shared.  */
      return x;
    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				   GET_MODE (x));
    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
	return validize_mem (force_const_mem (GET_MODE (x),
					      copy_for_inline (XEXP (x, 0))));
      break;
    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
	{
	  new = force_const_mem (GET_MODE (SUBREG_REG (x)),
				 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
	  PUT_MODE (new, GET_MODE (x));
	  return validize_mem (new);
	}
      break;
    case ADDRESS:
      /* If not special for constant pool error.  Else get constant pool
	 address.  */
      if (! RTX_INTEGRATED_P (x))
	abort ();

      new = force_const_mem (GET_MODE (XEXP (x, 0)),
			     copy_for_inline (XEXP (XEXP (x, 0), 0)));
      new = XEXP (new, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (new) != GET_MODE (x))
	new = convert_memory_address (GET_MODE (x), new);
#endif

      return new;
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
	{
	  x = rtx_alloc (ASM_OPERANDS);
	  x->volatil = orig->volatil;
	  XSTR (x, 0) = XSTR (orig, 0);
	  XSTR (x, 1) = XSTR (orig, 1);
	  XINT (x, 2) = XINT (orig, 2);
	  XVEC (x, 3) = copy_asm_operands_vector;
	  XVEC (x, 4) = copy_asm_constraints_vector;
	  XSTR (x, 5) = XSTR (orig, 5);
	  XINT (x, 6) = XINT (orig, 6);
	  return x;
	}
      break;
    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
	 or is a constant plus one of the special registers.

	 We do not allow sharing of addresses that are either a special
	 register or the sum of a constant and a special register because
	 it is possible for unshare_all_rtl to copy the address, into memory
	 that won't be saved.  Although the MEM can safely be shared, and
	 won't be copied there, the address itself cannot be shared, and may
	 need to be copied.

	 There are also two exceptions with constants: The first is if the
	 constant is a LABEL_REF or the sum of the LABEL_REF
	 and an integer.  This case can happen if we have an inline
	 function that supplies a constant operand to the call of another
	 inline function that uses it in a switch statement.  In this case,
	 we will be replacing the LABEL_REF, so we have to replace this MEM
	 as well.

	 The second case is if we have a (const (plus (address ..) ...)).
	 In that case we need to put back the address of the constant pool
	 entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 0)) != LABEL_REF
	  && ! (GET_CODE (XEXP (x, 0)) == CONST
		&& (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
		    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			 == LABEL_REF)
			|| (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			    == ADDRESS)))))
	return x;
      break;
    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
	 Otherwise, use the new label as well.  */
      x = gen_rtx (LABEL_REF, GET_MODE (orig),
		   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
		   : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;
    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
	return reg_map[REGNO (x)];
      else
	return x;
    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
	 clear its TREE_READONLY to prevent certain optimizations.  */
      {
	rtx dest = SET_DEST (x);

	while (GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == ZERO_EXTRACT
	       || GET_CODE (dest) == SUBREG)
	  dest = XEXP (dest, 0);

	if (GET_CODE (dest) == REG
	    && REGNO (dest) < max_parm_reg
	    && REGNO (dest) >= FIRST_PSEUDO_REGISTER
	    && parmdecl_map[REGNO (dest)] != 0
	    /* The insn to load an arg pseudo from a stack slot
	       does not count as modifying it.  */
	    && in_nonparm_insns)
	  TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;
#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
	 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
	 always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  || (XEXP (x, 1) == frame_pointer_rtx
	      || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		  && XEXP (x, 1) == arg_pointer_rtx)))
	{
	  rtx t = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = t;
	}
      break;
#endif
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
	 (sizeof (*x) - sizeof (x->fld)
	  + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (x, i) = copy_for_inline (XEXP (x, i));
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      register int j;

	      XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
	      for (j = 0; j < XVECLEN (x, i); j++)
		XVECEXP (x, i, j)
		  = copy_for_inline (XVECEXP (x, i, j));
	    }
	  break;
	}
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;
#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT	\
   && GET_CODE (XEXP (X, 0)) == REG				\
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER		\
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
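/* So FIXED_BASE_PLUS_P matches addresses of the form

     (plus (reg VIRTUAL_xxx_REGNUM) (const_int N))

   i.e. a constant offset from one of the virtual frame or argument
   pointer registers, which denotes a stack location that is fixed for
   the lifetime of the function.  */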
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
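/* A caller would typically test the result along these lines
   (hypothetical sketch, not an actual call site in this file):

     rtx val = expand_inline_function (fndecl, parms, target, ignore,
				       type, structure_value_addr);
     if (val == (rtx) (HOST_WIDE_INT) -1)
       ...could not inline; emit an ordinary call instead...  */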
rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;
  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));
  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }
  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);
  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));
  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && TREE_CODE (TREE_TYPE (formal)) == POINTER_TYPE)
	mark_reg_pointer (arg_vals[i],
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
			   / BITS_PER_UNIT));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;
  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero ((char *) map->const_equiv_map,
	 map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero ((char *) map->const_age_map,
	 map->const_equiv_map_size * sizeof (unsigned));
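  /* For instance, inlining a 3-argument function into a caller that
     currently uses 200 pseudos, where the called function itself uses
     50 registers beyond FIRST_PSEUDO_REGISTER, reserves
     200 + 50 + 15 * 3 + 10 == 305 entries by the formula above.  */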
  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
  map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    static_chain_value = lookup_static_chain (fndecl);
  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }
  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */
  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copy;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.
	     It will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     now; we will call store_expr later.  */
	  ;
	}
      else if (GET_CODE (loc) == REG)
	{
	  /* This is the good case where the parameter is in a register.
	     If it is read-only and our argument is a constant, set up the
	     constant equivalence.

	     If LOC is REG_USERVAR_P, the usual case, COPY must also have
	     that flag set if it is a register.

	     Also, don't allow hard registers here; they might not be valid
	     when substituted into insns.  */

	  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
	      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
		  && ! REG_USERVAR_P (copy))
	      || (GET_CODE (copy) == REG
		  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (loc), copy);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
	      if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copy;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copy = temp;
	    }
	  map->reg_map[REGNO (loc)] = copy;
	}
      else if (GET_CODE (loc) == CONCAT)
	{
	  /* This is the good case where the parameter is in a
	     pair of separate pseudos.
	     If it is read-only and our argument is a constant, set up the
	     constant equivalence.

	     If LOC is REG_USERVAR_P, the usual case, COPY must also have
	     that flag set if it is a register.

	     Also, don't allow hard registers here; they might not be valid
	     when substituted into insns.  */
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
	      || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
		  && ! REG_USERVAR_P (copyreal))
	      || (GET_CODE (copyreal) == REG
		  && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
	      if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copyreal;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copyreal = temp;
	    }
	  map->reg_map[REGNO (locreal)] = copyreal;

	  if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
	      || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
		  && ! REG_USERVAR_P (copyimag))
	      || (GET_CODE (copyimag) == REG
		  && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
	      if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copyimag;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copyimag = temp;
	    }
	  map->reg_map[REGNO (locimag)] = copyimag;
	}
      else
	abort ();
    }
  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have anything special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
	abort ();

      /* Pass the function the address in which to return a structure value.
	 Note that a constructor can cause someone to call us with
	 STRUCTURE_VALUE_ADDR, but the initialization takes place
	 via the first parameter, rather than the struct return address.

	 We have two cases:  If the address is a simple register indirect,
	 use the mapping mechanism to point that register to our structure
	 return address.  Otherwise, store the structure return value into
	 the place that it will be referenced from.  */

      if (GET_CODE (XEXP (loc, 0)) == REG)
	{
	  temp = force_reg (Pmode,
			    force_operand (structure_value_addr, NULL_RTX));
	  map->reg_map[REGNO (XEXP (loc, 0))] = temp;
	  if ((CONSTANT_P (structure_value_addr)
	       || GET_CODE (structure_value_addr) == ADDRESSOF
	       || (GET_CODE (structure_value_addr) == PLUS
		   && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
	      && REGNO (temp) < map->const_equiv_map_size)
	    {
	      map->const_equiv_map[REGNO (temp)] = structure_value_addr;
	      map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
	    }
	}
      else
	{
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  emit_move_insn (temp, structure_value_addr);
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	target = gen_reg_rtx (departing_mode);

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else
    abort ();
  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  pushlevel (0);
  expand_start_bindings (0);

  /* Make new label equivalences for the labels in the called function.  */
  for (i = min_labelno; i < max_labelno; i++)
    map->label_map[i] = gen_label_rtx ();
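  /* Gloss (ours, not original commentary): if the saved body of the
     inline function contains (code_label 12), every copied reference is
     routed through map->label_map[12], which now holds a fresh label
     unique to the current function, so several inlined copies of the
     same function can never clash over label numbers.  */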
  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_map for mark_stores, called
     via note_stores.  */
  global_const_equiv_map = map->const_equiv_map;
  global_const_equiv_map_size = map->const_equiv_map_size;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
  /* Now copy the insns one by one.  Do this in two passes, first the insns
     and then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	      else
		break;
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }
	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;
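	  /* Hedged example of the static-chain handling above (register
	     number invented): when a nested function is inlined, a saved
	     insn such as (set (reg 60) (reg static_chain_incoming)) is
	     not copied verbatim; it is replaced by a move of
	     STATIC_CHAIN_VALUE, the chain the caller would have passed,
	     into the remapped destination.  */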
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN
	      || (GET_CODE (PATTERN (insn)) == PARALLEL
		  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn whose branch
	     direction is now known, we must do something special.  */
	  if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* The previous insn set cc0 for us.  So delete it.  */
	      delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;
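	  /* For illustration (our gloss): after constant substitution the
	     branch condition of a copied conditional jump may be known.
	     If map->last_pc_value is pc_rtx the jump simply falls
	     through, so the copy is deleted; otherwise it has become an
	     unconditional jump and the BARRIER emitted above marks the
	     code after it as unreachable.  */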
	case CALL_INSN:
	  pattern = copy_rtx_and_substitute (PATTERN (insn), map);
	  copy = emit_call_insn (pattern);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */
	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    map->const_equiv_map[i] = 0;
	  break;
	case CODE_LABEL:
	  copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;
	case NOTE:
	  /* It is important to discard function-end and function-beg notes,
	     so we have only one of each in the current function.
	     Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
	     deleted these in the copy used for continuing compilation,
	     not the copy used for inlining).  */
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
			   || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
		{
		  rtx label = map->label_map[NOTE_BLOCK_NUMBER (copy)];

		  /* We have to forward these both to match the new exception
		     region.  */
		  NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
		}
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	  break;
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&tem, NULL_RTX, map);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }
  if (local_return_label)
    emit_label (local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows how to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */

  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
  inline_function_decl = 0;

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  */

  expand_end_bindings (getdecls (), 1, 1);
  block = poplevel (1, 1, 0);
  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
				   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);
  if (structure_value_addr)
    {
      target = gen_rtx (MEM, TYPE_MODE (type),
			memory_address (TYPE_MODE (type),
					structure_value_addr));
      MEM_IN_STRUCT_P (target) = 1;
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);

  return target;
}
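/* Usage sketch, under the assumption that callers follow the usual
   expand_call protocol for this file:

	temp = expand_inline_function (fndecl, actparms, target, ignore,
				       type, structure_value_addr);
	if (temp != (rtx) (HOST_WIDE_INT) -1)
	  return temp;	/* Inlining succeeded.  */

   i.e. the caller tests for the special failure value and falls back to
   emitting a real CALL_INSN when inlining was refused.  */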
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
				       TREE_TYPE (tail));
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);

      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (decl) = tail;
      pushdecl (decl);
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}
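/* Gloss (ours, not original commentary): after integrate_parm_decls runs,
   each formal of the inlined function exists in the caller as a VAR_DECL
   whose DECL_ABSTRACT_ORIGIN points at the original PARM_DECL, which is
   what lets debuggers display the inlined parameters under their
   source-level names.  */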
/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   LEVEL indicates how far down into the BLOCK tree is the node we are
   currently traversing.  It is always zero except for recursive calls.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static void
integrate_decl_tree (let, level, map)
     tree let;
     int level;
     struct inline_remap *map;
{
  tree t, node;

  if (level > 0)
    pushlevel (0);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_node (t);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
	{
	  DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
	  /* Fully instantiate the address with the equivalent form so that
	     the debugging information contains the actual register, instead
	     of the virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  subst_constants (&DECL_RTL (d), NULL_RTX, map);
	  apply_change_group ();
	}
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (d) = t;

      if (DECL_LANG_SPECIFIC (d))
	copy_lang_decl (d);

      pushdecl (d);
    }

  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    integrate_decl_tree (t, level + 1, map);

  if (level > 0)
    {
      node = poplevel (1, 0, 0);
      if (node)
	{
	  TREE_USED (node) = TREE_USED (let);
	  BLOCK_ABSTRACT_ORIGIN (node) = let;
	}
    }
}
/* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
   through save_constants.  */

static void
save_constants_in_decl_trees (let)
     tree let;
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) != 0)
      save_constants (&DECL_RTL (t));

  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    save_constants_in_decl_trees (t);
}
/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */
rtx
copy_rtx_and_substitute (orig, map)
     register rtx orig;
     struct inline_remap *map;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);
  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline
	     function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size = DECL_FRAME_SIZE (map->fndecl);

#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno],
				STACK_BOUNDARY / BITS_PER_UNIT);
#endif

	      if (REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = loc;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno],
				STACK_BOUNDARY / BITS_PER_UNIT);
#endif

	      if (REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = loc;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      /* This is a reference to the function return value.  If
		 the function doesn't have a return value, error.  If the
		 mode doesn't agree, make a SUBREG.  */
	      if (map->inline_target == 0)
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	      else if (mode != GET_MODE (map->inline_target))
		return gen_lowpart (mode, map->inline_target);
	      else
		return map->inline_target;
	    }
	  return orig;
	}

      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (map->regno_pointer_flag[regno])
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];
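      /* Worked example for the VIRTUAL_STACK_VARS_REGNUM arm above
	 (illustrative, with an invented offset): a callee reference such
	 as (plus (reg virtual-stack-vars) (const_int -4)) is copied using
	 the new pseudo, and because LOC was also recorded in
	 const_equiv_map, try_constants can later fold the pseudo back
	 into plain caller-frame address arithmetic wherever the result
	 is a valid insn.  */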
    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
	return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
			SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
	return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
      else
	return gen_rtx (SUBREG, GET_MODE (orig), copy,
			SUBREG_WORD (orig));
    case ADDRESSOF:
      copy = gen_rtx (ADDRESSOF, mode,
		      copy_rtx_and_substitute (XEXP (orig, 0), map));
      SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
	regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
	{
	  temp = XEXP (orig, 0);
	  map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (map->regno_pointer_flag[regno])
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	  regno = REGNO (map->reg_map[regno]);
	}
      ADDRESSOF_REGNO (copy) = regno;
      return copy;
    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movstrhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx (code, VOIDmode, copy);
    case CODE_LABEL:
      LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
	= LABEL_PRESERVE_P (orig);
      return map->label_map[CODE_LABEL_NUMBER (orig)];

    case LABEL_REF:
      copy = gen_rtx (LABEL_REF, mode,
		      LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
		      : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
	 inlined call will be referring to our nonlocal goto handler.
	 So make sure we create one for this block; we normally would
	 not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
	function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
      return orig;
    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  rtx constant = get_pool_constant (orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem (GET_MODE (orig),
					  copy_rtx_and_substitute (constant,
								   map)),
			 0);
	}

      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
	 want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);
    case CONST:
      /* Make a new constant pool entry for a constant
	 that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
	{
	  /* If this was an address of a constant pool entry that itself
	     had to be placed in the constant pool, it might not be a
	     valid address.  So the recursive call below might turn it
	     into a register.  In that case, it isn't a constant any
	     more, so return it.  This has the potential of changing a
	     MEM into a REG, but we'll assume that it is safe.  */
	  temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
	  if (! CONSTANT_P (temp))
	    return temp;
	  return validize_mem (force_const_mem (GET_MODE (orig), temp));
	}
      break;

    case ADDRESS:
      /* If from a constant pool address, make a new constant pool entry and
	 return its address.  */
      if (! RTX_INTEGRATED_P (orig))
	abort ();

      temp
	= force_const_mem (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map));

#if 0
      /* Legitimizing the address here is incorrect.

	 The only ADDRESS rtx's that can reach here are ones created by
	 save_constants.  Hence the operand of the ADDRESS is always valid
	 in this position of the instruction, since the original rtx without
	 the ADDRESS was valid.

	 The reason we don't legitimize the address here is that on the
	 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
	 This code forces the operand of the address to a register, which
	 fails because we can not take the HIGH part of a register.

	 Also, change_address may create new registers.  These registers
	 will not have valid reg_map entries.  This can cause try_constants()
	 to fail because it assumes that all registers in the rtx have valid
	 reg_map entries, and it may end up replacing one of these new
	 registers with junk.  */

      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif

      temp = XEXP (temp, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (temp) != GET_MODE (orig))
	temp = convert_memory_address (GET_MODE (orig), temp);
#endif

      return temp;
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  copy->volatil = orig->volatil;
	  XSTR (copy, 0) = XSTR (orig, 0);
	  XSTR (copy, 1) = XSTR (orig, 1);
	  XINT (copy, 2) = XINT (orig, 2);
	  XVEC (copy, 3) = map->copy_asm_operands_vector;
	  XVEC (copy, 4) = map->copy_asm_constraints_vector;
	  XSTR (copy, 5) = XSTR (orig, 5);
	  XINT (copy, 6) = XINT (orig, 6);
	  return copy;
	}
      break;
    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	return gen_rtx (CALL, GET_MODE (orig),
			gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
				 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
			copy_rtx_and_substitute (XEXP (orig, 1), map));
      break;
#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx junk = copy_rtx_and_substitute (SET_DEST (orig), map);
	  rtx equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  rtx equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
	  HOST_WIDE_INT loc_offset
	    = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx (SET, VOIDmode, SET_DEST (orig),
			  plus_constant
			  (copy_rtx_and_substitute (SET_SRC (orig), map),
			   - loc_offset));
	}
      break;

    case MEM:
      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
      MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
      MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);

      /* If doing function inlining, this MEM might not be const in the
	 function that it is being inlined into, and thus may not be
	 unchanging after function inlining.  Constant pool references are
	 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
	 for the constant pool.  */
      if (! map->integrating)
	RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);

      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;
  subst_constants (&PATTERN (insn), insn, map);

  /* Apply the changes if they are valid; otherwise discard them.  */
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  if (regno < map->const_equiv_map_size
	      && (map->const_equiv_map[regno] == 0
		  /* Following clause is a hack to make the case work where
		     GNU C++ reassigns a variable to make cse work right.  */
		  || ! rtx_equal_p (map->const_equiv_map[regno],
				    map->equiv_sets[i].equiv)))
	    {
	      map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
	      map->const_age_map[regno] = map->const_age;
	    }
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
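/* A note on the aging scheme (our reading of the code, not original
   commentary): an equivalence is honored by subst_constants only while
   const_age_map[regno] >= map->const_age.  Bumping map->const_age at
   each CODE_LABEL therefore retires equivalences that may be wrong when
   control re-enters from elsewhere, while parameter equivalences tagged
   with CONST_AGE_PARM remain usable throughout the whole body.  */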
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the address of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.  */

static void
subst_constants (loc, insn, map)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
{
  rtx x = *loc;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      {
	int regno = REGNO (x);

	if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	    && regno < map->const_equiv_map_size
	    && map->const_equiv_map[regno] != 0
	    && map->const_age_map[regno] >= map->const_age)
	  validate_change (insn, loc, map->const_equiv_map[regno], 1);
	return;
      }
    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (GET_CODE (SUBREG_REG (x)) == REG)
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.
	     Instead, see what is inside, try to form the new SUBREG and
	     see if that is valid.  We handle two cases: extracting a full
	     word in an integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map);

	  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	      && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
	    new = operand_subword (inner, SUBREG_WORD (x), 0,
				   GET_MODE (SUBREG_REG (x)));

	  cancel_changes (num_changes);
	  if (new == 0 && subreg_lowpart_p (x))
	    new = gen_lowpart_common (GET_MODE (x), inner);

	  if (new)
	    validate_change (insn, loc, new, 1);

	  return;
	}
      break;
    case MEM:
      subst_constants (&XEXP (x, 0), insn, map);

      /* If a memory address got spoiled, change it back.  */
      if (insn != 0 && num_validated_changes () != num_changes
	  && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;
    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;

	subst_constants (&SET_SRC (x), insn, map);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map);
		subst_constants (&XEXP (*dest_loc, 2), insn, map);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (GET_CODE (*dest_loc) == MEM)
	  subst_constants (&XEXP (*dest_loc, 0), insn, map);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider
	   mode.  In that case, we are really setting the underlying
	   register to the source converted to the mode of DEST.  So
	   indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value, save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (GET_CODE (src) == REG
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && GET_CODE (XEXP (src, 0)) == REG
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
		|| dest == cc0_rtx
#endif
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a
	       COMPARE it will cause us to save the COMPARE with any
	       constants substituted, which is what we want for later.  */
	    map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
	    map->equiv_sets[map->num_sets++].dest = dest;
	  }

	return;
      }

    default:
      break;
    }
  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      int j;
	      for (j = 0; j < XVECLEN (x, i); j++)
		subst_constants (&XVECEXP (x, i, j), insn, map);
	    }
	  break;

	default:
	  abort ();
	}
    }
  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }
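  /* Example (not from the original sources): the swap above rewrites
     (plus (const_int 4) (reg 61)) as (plus (reg 61) (const_int 4)),
     the canonical RTL operand order, so that the simplify_*_operation
     calls below and later insn recognition see the form they expect.  */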
  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      new = simplify_unary_operation (code, GET_MODE (x),
				      XEXP (x, 0), op0_mode);
      break;

    case '<':
      {
	enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
	if (op_mode == VOIDmode)
	  op_mode = GET_MODE (XEXP (x, 1));
	new = simplify_relational_operation (code, op_mode,
					     XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
	if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	  new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
		 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
						 GET_MODE (x)));
#endif
	break;
      }

    case '2':
    case 'c':
      new = simplify_binary_operation (code, GET_MODE (x),
				       XEXP (x, 0), XEXP (x, 1));
      break;

    case 'b':
    case '3':
      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					XEXP (x, 0), XEXP (x, 1),
					XEXP (x, 2));
      break;
    }

  if (new)
    validate_change (insn, loc, new, 1);
}
/* Show that the register modified no longer contains known constants.  We
   are called from note_stores with parts of the new insn.  */

void
mark_stores (dest, x)
     rtx dest;
     rtx x;
{
  int regno = -1;
  enum machine_mode mode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
		      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && regno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = regno; i <= last_reg; i++)
	  if (i < global_const_equiv_map_size)
	    global_const_equiv_map[i] = 0;
    }
}
/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
   pointed to by PX, they represent constants in the constant pool.
   Replace these with a new memory reference obtained from force_const_mem.
   Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
   address of a constant pool entry.  Replace them with the address of
   a new constant pool entry obtained from force_const_mem.  */

static void
restore_constants (px)
     rtx *px;
{
  rtx x = *px;
  int i, j;
  char *fmt;

  if (x == 0)
    return;

  if (GET_CODE (x) == CONST_DOUBLE)
    {
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	*px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				  VOIDmode);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
    {
      restore_constants (&XEXP (x, 0));
      *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
    {
      /* This must be (subreg/i:M1 (const/i:M2 ...) 0).  */
      rtx new = XEXP (SUBREG_REG (x), 0);

      restore_constants (&new);
      new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
      PUT_MODE (new, GET_MODE (x));
      *px = validize_mem (new);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
    {
      rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
				       XEXP (XEXP (x, 0), 0)),
		      0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (new) != GET_MODE (x))
	new = convert_memory_address (GET_MODE (x), new);
#endif

      *px = new;
    }
  else
    {
      fmt = GET_RTX_FORMAT (GET_CODE (x));
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
	{
	  switch (*fmt++)
	    {
	    case 'E':
	      for (j = 0; j < XVECLEN (x, i); j++)
		restore_constants (&XVECEXP (x, i, j));
	      break;

	    case 'e':
	      restore_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	register tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  register tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}
/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head;
  rtx last;
  int save_flag_no_inline = flag_no_inline;

  if (output_bytecode)
    {
      warning ("`inline' ignored for bytecode output");
      return;
    }

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  /* The first is a bit of a lie (the array may be larger), but doesn't
     matter too much and it isn't worth saving the actual bound.  */
  reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
  regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
  regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
  regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
  max_parm_reg = MAX_PARMREG (head);
  parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);

  stack_slot_list = STACK_SLOT_LIST (head);
  forced_labels = FORCED_LABELS (head);

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* This is the only thing the expand_function_end call that used to be
     here actually does and that call can cause problems.  */
  immediate_size_expand--;

  /* Find the last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))
    {
      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
	{
	  restore_constants (&PATTERN (last));
	  restore_constants (&REG_NOTES (last));
	}
    }

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* Integrating function calls isn't safe anymore, so turn on
     flag_no_inline.  */
  flag_no_inline = 1;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  /* Reset flag_no_inline to its original value.  */
  flag_no_inline = save_flag_no_inline;

  current_function_decl = 0;
}