1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include <stdio.h>
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "flags.h"
29 #include "insn-config.h"
30 #include "insn-flags.h"
31 #include "expr.h"
32 #include "output.h"
33 #include "recog.h"
34 #include "integrate.h"
35 #include "real.h"
36 #include "except.h"
37 #include "function.h"
38 #include "bytecode.h"
39
40 #include "obstack.h"
41 #define obstack_chunk_alloc xmalloc
42 #define obstack_chunk_free free
43
44 extern struct obstack *function_maybepermanent_obstack;
45
46 extern tree pushdecl ();
47 extern tree poplevel ();
48
49 /* Round VALUE up to the next highest integer that meets the
50    alignment ALIGN.  */
51 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
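/* Worked example (illustrative; not in the original source): rounding
   works by adding ALIGN-1 and masking off the low bits, so ALIGN must be
   a power of two.  CEIL_ROUND (13, 8) = (13 + 7) & ~7 = 16, and an
   already-aligned value is unchanged: CEIL_ROUND (16, 8) = 16.  */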
52
53 /* Default max number of insns a function can have and still be inline.
54 This is overridden on RISC machines. */
55 #ifndef INTEGRATE_THRESHOLD
56 #define INTEGRATE_THRESHOLD(DECL) \
57 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
58 #endif
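/* Worked example (illustrative; not in the original source): with the
   default threshold above, a function taking two arguments may have up
   to 8 * (8 + 2) = 80 insns and still be considered for inlining.  */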
59 \f
60 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
61 static void finish_inline PROTO((tree, rtx));
62 static void adjust_copied_decl_tree PROTO((tree));
63 static tree copy_decl_list PROTO((tree));
64 static tree copy_decl_tree PROTO((tree));
65 static void copy_decl_rtls PROTO((tree));
66 static void save_constants PROTO((rtx *));
67 static void note_modified_parmregs PROTO((rtx, rtx));
68 static rtx copy_for_inline PROTO((rtx));
69 static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
70 static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
71 static void save_constants_in_decl_trees PROTO ((tree));
72 static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
73 static void restore_constants PROTO((rtx *));
74 static void set_block_origin_self PROTO((tree));
75 static void set_decl_origin_self PROTO((tree));
76 static void set_block_abstract_flags PROTO((tree, int));
77
78 void set_decl_abstract_flags PROTO((tree, int));
79 \f
80 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
81 is safe and reasonable to integrate into other functions.
82 Nonzero means value is a warning message with a single %s
83 for the function's name. */
84
85 char *
86 function_cannot_inline_p (fndecl)
87 register tree fndecl;
88 {
89 register rtx insn;
90 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
91 int max_insns = INTEGRATE_THRESHOLD (fndecl);
92 register int ninsns = 0;
93 register tree parms;
94 rtx result;
95
96   /* No inlines with varargs.  `grokdeclarator' gives a warning
97      message about that if `inline' is specified.  This code
98      is put in to catch the volunteers.  */
99 if ((last && TREE_VALUE (last) != void_type_node)
100 || current_function_varargs)
101 return "varargs function cannot be inline";
102
103 if (current_function_calls_alloca)
104 return "function using alloca cannot be inline";
105
106 if (current_function_contains_functions)
107 return "function with nested functions cannot be inline";
108
109   /* If it's not even close, don't even look.  */
110 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
111 return "function too large to be inline";
112
113 #if 0
114   /* Don't inline functions which do not specify a function prototype and
115      have a BLKmode argument or take the address of a parameter.  */
116 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
117 {
118 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
119 TREE_ADDRESSABLE (parms) = 1;
120 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
121 return "no prototype, and parameter address used; cannot be inline";
122 }
123 #endif
124
125 /* We can't inline functions that return structures
126 the old-fashioned PCC way, copying into a static block. */
127 if (current_function_returns_pcc_struct)
128 return "inline functions not supported for this return value type";
129
130 /* We can't inline functions that return BLKmode structures in registers. */
131 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
132 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
133 return "inline functions not supported for this return value type";
134
135 /* We can't inline functions that return structures of varying size. */
136 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
137 return "function with varying-size return value cannot be inline";
138
139 /* Cannot inline a function with a varying size argument or one that
140 receives a transparent union. */
141 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
142 {
143 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
144 return "function with varying-size parameter cannot be inline";
145 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
146 return "function with transparent unit parameter cannot be inline";
147 }
148
149 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
150 {
151 for (ninsns = 0, insn = get_first_nonparm_insn ();
152 insn && ninsns < max_insns;
153 insn = NEXT_INSN (insn))
154 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
155 ninsns++;
156
157 if (ninsns >= max_insns)
158 return "function too large to be inline";
159 }
160
161   /* We cannot inline this function if forced_labels is nonzero.  This
162      implies that a label in this function was used as an initializer.
163      Because labels cannot be duplicated, all labels in the function
164      will be renamed when it is inlined.  However, there is no way to find
165      and fix all variables initialized with addresses of labels in this
166      function, hence inlining is impossible.  */
167
168 if (forced_labels)
169 return "function with label addresses used in initializers cannot inline";
170
171 /* We cannot inline a nested function that jumps to a nonlocal label. */
172 if (current_function_has_nonlocal_goto)
173 return "function with nonlocal goto cannot be inline";
174
175 /* This is a hack, until the inliner is taught about eh regions at
176 the start of the function. */
177 for (insn = get_insns ();
178 insn
179 && ! (GET_CODE (insn) == NOTE
180 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
181 insn = NEXT_INSN (insn))
182 {
183 if (insn && GET_CODE (insn) == NOTE
184 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
185 return "function with complex parameters cannot be inline";
186 }
187
188 /* We can't inline functions that return a PARALLEL rtx. */
189 result = DECL_RTL (DECL_RESULT (fndecl));
190 if (result && GET_CODE (result) == PARALLEL)
191 return "inline functions not supported for this return value type";
192
193 return 0;
194 }
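/* Usage sketch (illustrative; not part of the original file).  A caller
   such as rest_of_compilation treats the returned string as a warning
   message and only saves the function for later inlining when we return
   zero, roughly:

     char *lose = function_cannot_inline_p (fndecl);
     if (lose != 0)
       warning_with_decl (fndecl, lose);
     else
       save_for_inline_nocopy (fndecl);

   The real caller logic lives in toplev.c and differs in detail.  */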
195 \f
196 /* Variables used within save_for_inline. */
197
198 /* Mapping from old pseudo-register to new pseudo-registers.
199 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
200 It is allocated in `save_for_inline' and `expand_inline_function',
201 and deallocated on exit from each of those routines. */
202 static rtx *reg_map;
203
204 /* Mapping from old code-labels to new code-labels.
205 The first element of this map is label_map[min_labelno].
206 It is allocated in `save_for_inline' and `expand_inline_function',
207 and deallocated on exit from each of those routines. */
208 static rtx *label_map;
209
210 /* Mapping from old insn uid's to copied insns.
211 It is allocated in `save_for_inline' and `expand_inline_function',
212 and deallocated on exit from each of those routines. */
213 static rtx *insn_map;
214
215 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
216 Zero for a reg that isn't a parm's home.
217 Only reg numbers less than max_parm_reg are mapped here. */
218 static tree *parmdecl_map;
219
220 /* Keep track of first pseudo-register beyond those that are parms. */
221 extern int max_parm_reg;
222 extern rtx *parm_reg_stack_loc;
223
224 /* When an insn is being copied by copy_for_inline,
225 this is nonzero if we have copied an ASM_OPERANDS.
226 In that case, it is the original input-operand vector. */
227 static rtvec orig_asm_operands_vector;
228
229 /* When an insn is being copied by copy_for_inline,
230 this is nonzero if we have copied an ASM_OPERANDS.
231 In that case, it is the copied input-operand vector. */
232 static rtvec copy_asm_operands_vector;
233
234 /* Likewise, this is the copied constraints vector. */
235 static rtvec copy_asm_constraints_vector;
236
237 /* In save_for_inline, nonzero if past the parm-initialization insns. */
238 static int in_nonparm_insns;
239 \f
240 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
241 needed to save FNDECL's insns and info for future inline expansion. */
242
243 static rtx
244 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
245 tree fndecl;
246 int min_labelno;
247 int max_labelno;
248 int max_reg;
249 int copy;
250 {
251 int function_flags, i;
252 rtvec arg_vector;
253 tree parms;
254
255 /* Compute the values of any flags we must restore when inlining this. */
256
257 function_flags
258 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
259 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
260 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
261 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
262 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
263 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
264 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
265 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
266 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
267 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
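  /* Illustrative note (not in the original source): each current_function_*
     value above is either 0 or 1 and each FUNCTION_FLAGS_* mask is a
     distinct power of two, so this sum is equivalent to OR-ing together
     the selected bits.  A consumer can later test one flag with, e.g.

       if (function_flags & FUNCTION_FLAGS_CALLS_ALLOCA)
         ...

     as expand_inline_function does via FUNCTION_FLAGS (header).  */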
268
269 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
270 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
271 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
272
273 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
274 parms;
275 parms = TREE_CHAIN (parms), i++)
276 {
277 rtx p = DECL_RTL (parms);
278
279 if (GET_CODE (p) == MEM && copy)
280 {
281 /* Copy the rtl so that modifications of the addresses
282 later in compilation won't affect this arg_vector.
283 Virtual register instantiation can screw the address
284 of the rtl. */
285 rtx new = copy_rtx (p);
286
287 /* Don't leave the old copy anywhere in this decl. */
288 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
289 || (GET_CODE (DECL_RTL (parms)) == MEM
290 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
291 && (XEXP (DECL_RTL (parms), 0)
292 == XEXP (DECL_INCOMING_RTL (parms), 0))))
293 DECL_INCOMING_RTL (parms) = new;
294 DECL_RTL (parms) = new;
295 }
296
297 RTVEC_ELT (arg_vector, i) = p;
298
299 if (GET_CODE (p) == REG)
300 parmdecl_map[REGNO (p)] = parms;
301 else if (GET_CODE (p) == CONCAT)
302 {
303 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
304 rtx pimag = gen_imagpart (GET_MODE (preal), p);
305
306 if (GET_CODE (preal) == REG)
307 parmdecl_map[REGNO (preal)] = parms;
308 if (GET_CODE (pimag) == REG)
309 parmdecl_map[REGNO (pimag)] = parms;
310 }
311
312 /* This flag is cleared later
313 if the function ever modifies the value of the parm. */
314 TREE_READONLY (parms) = 1;
315 }
316
317 /* Assume we start out in the insns that set up the parameters. */
318 in_nonparm_insns = 0;
319
320   /* The list of DECL_SAVED_INSNS starts off with a header which
321 contains the following information:
322
323 the first insn of the function (not including the insns that copy
324 parameters into registers).
325 the first parameter insn of the function,
326 the first label used by that function,
327 the last label used by that function,
328 the highest register number used for parameters,
329 the total number of registers used,
330 the size of the incoming stack area for parameters,
331 the number of bytes popped on return,
332 the stack slot list,
333 the labels that are forced to exist,
334 some flags that are used to restore compiler globals,
335 the value of current_function_outgoing_args_size,
336 the original argument vector,
337 the original DECL_INITIAL,
338 and pointers to the table of pseudo regs, pointer flags, and alignment. */
339
340 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
341 max_parm_reg, max_reg,
342 current_function_args_size,
343 current_function_pops_args,
344 stack_slot_list, forced_labels, function_flags,
345 current_function_outgoing_args_size,
346 arg_vector, (rtx) DECL_INITIAL (fndecl),
347 (rtvec) regno_reg_rtx, regno_pointer_flag,
348 regno_pointer_align,
349 (rtvec) parm_reg_stack_loc);
350 }
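/* Illustrative note (not in the original source): the fields packed into
   the header rtx above are read back through accessor macros declared in
   integrate.h, for example in expand_inline_function below:

     rtx header = DECL_SAVED_INSNS (fndecl);
     rtx insns = FIRST_FUNCTION_INSN (header);
     int min_labelno = FIRST_LABELNO (header);
     int max_labelno = LAST_LABELNO (header);

   so the argument order here must stay in sync with those accessors.  */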
351
352 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
353 things that must be done to make FNDECL expandable as an inline function.
354 HEAD contains the chain of insns to which FNDECL will expand. */
355
356 static void
357 finish_inline (fndecl, head)
358 tree fndecl;
359 rtx head;
360 {
361 FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
362 FIRST_PARM_INSN (head) = get_insns ();
363 DECL_SAVED_INSNS (fndecl) = head;
364 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
365 }
366
367 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
368 they all point to the new (copied) rtxs. */
369
370 static void
371 adjust_copied_decl_tree (block)
372 register tree block;
373 {
374 register tree subblock;
375 register rtx original_end;
376
377 original_end = BLOCK_END_NOTE (block);
378 if (original_end)
379 {
380 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
381 NOTE_SOURCE_FILE (original_end) = 0;
382 }
383
384 /* Process all subblocks. */
385 for (subblock = BLOCK_SUBBLOCKS (block);
386 subblock;
387 subblock = TREE_CHAIN (subblock))
388 adjust_copied_decl_tree (subblock);
389 }
390
391 /* Make the insns and PARM_DECLs of the current function permanent
392 and record other information in DECL_SAVED_INSNS to allow inlining
393 of this function in subsequent calls.
394
395 This function is called when we are going to immediately compile
396 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
397 modified by the compilation process, so we copy all of them to
398 new storage and consider the new insns to be the insn chain to be
399 compiled. Our caller (rest_of_compilation) saves the original
400 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
401
402 /* ??? The nonlocal_label list should be adjusted also. However, since
403 a function that contains a nested function never gets inlined currently,
404 the nonlocal_label list will always be empty, so we don't worry about
405 it for now. */
406
407 void
408 save_for_inline_copying (fndecl)
409 tree fndecl;
410 {
411 rtx first_insn, last_insn, insn;
412 rtx head, copy;
413 int max_labelno, min_labelno, i, len;
414 int max_reg;
415 int max_uid;
416 rtx first_nonparm_insn;
417 char *new, *new1;
418 rtx *new2;
419
420 /* Make and emit a return-label if we have not already done so.
421 Do this before recording the bounds on label numbers. */
422
423 if (return_label == 0)
424 {
425 return_label = gen_label_rtx ();
426 emit_label (return_label);
427 }
428
429 /* Get some bounds on the labels and registers used. */
430
431 max_labelno = max_label_num ();
432 min_labelno = get_first_label_num ();
433 max_reg = max_reg_num ();
434
435 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
436 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
437 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
438 for the parms, prior to elimination of virtual registers.
439 These values are needed for substituting parms properly. */
440
441 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
442
443 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
444
445 if (current_function_uses_const_pool)
446 {
447 /* Replace any constant pool references with the actual constant. We
448 will put the constants back in the copy made below. */
449 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
450 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
451 {
452 save_constants (&PATTERN (insn));
453 if (REG_NOTES (insn))
454 save_constants (&REG_NOTES (insn));
455 }
456
457 /* Also scan all decls, and replace any constant pool references with the
458 actual constant. */
459 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
460
461 /* Clear out the constant pool so that we can recreate it with the
462 copied constants below. */
463 init_const_rtx_hash_table ();
464 clear_const_double_mem ();
465 }
466
467 max_uid = INSN_UID (head);
468
469 /* We have now allocated all that needs to be allocated permanently
470 on the rtx obstack. Set our high-water mark, so that we
471 can free the rest of this when the time comes. */
472
473 preserve_data ();
474
475   /* Copy the chain of insns of this function.
476 Install the copied chain as the insns of this function,
477 for continued compilation;
478 the original chain is recorded as the DECL_SAVED_INSNS
479 for inlining future calls. */
480
481 /* If there are insns that copy parms from the stack into pseudo registers,
482 those insns are not copied. `expand_inline_function' must
483 emit the correct code to handle such things. */
484
485 insn = get_insns ();
486 if (GET_CODE (insn) != NOTE)
487 abort ();
488 first_insn = rtx_alloc (NOTE);
489 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
490 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
491 INSN_UID (first_insn) = INSN_UID (insn);
492 PREV_INSN (first_insn) = NULL;
493 NEXT_INSN (first_insn) = NULL;
494 last_insn = first_insn;
495
496 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
497 Make these new rtx's now, and install them in regno_reg_rtx, so they
498 will be the official pseudo-reg rtx's for the rest of compilation. */
499
500 reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));
501
502 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
503 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
504 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
505 regno_reg_rtx[i], len);
506
507 regno_reg_rtx = reg_map;
508
509 /* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
510 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
511 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
512 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
513 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
514
515 /* Likewise each label rtx must have a unique rtx as its copy. */
516
517 /* We used to use alloca here, but the size of what it would try to
518 allocate would occasionally cause it to exceed the stack limit and
519 cause unpredictable core dumps. Some examples were > 2Mb in size. */
520 label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
521
522 for (i = min_labelno; i < max_labelno; i++)
523 label_map[i] = gen_label_rtx ();
524
525 /* Record the mapping of old insns to copied insns. */
526
527 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
528 bzero ((char *) insn_map, max_uid * sizeof (rtx));
529
530 /* Get the insn which signals the end of parameter setup code. */
531 first_nonparm_insn = get_first_nonparm_insn ();
532
533 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
534 (the former occurs when a variable has its address taken)
535 since these may be shared and can be changed by virtual
536 register instantiation. DECL_RTL values for our arguments
537 have already been copied by initialize_for_inline. */
538 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
539 if (GET_CODE (regno_reg_rtx[i]) == MEM)
540 XEXP (regno_reg_rtx[i], 0)
541 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
542
543 /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
544 contained in it. */
545 new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
546 bcopy ((char *) parm_reg_stack_loc, (char *) new2,
547 max_parm_reg * sizeof (rtx));
548 parm_reg_stack_loc = new2;
549 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
550 if (parm_reg_stack_loc[i])
551 parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
552
553 /* Copy the tree of subblocks of the function, and the decls in them.
554 We will use the copy for compiling this function, then restore the original
555 subblocks and decls for use when inlining this function.
556
557 Several parts of the compiler modify BLOCK trees. In particular,
558 instantiate_virtual_regs will instantiate any virtual regs
559 mentioned in the DECL_RTLs of the decls, and loop
560 unrolling will replicate any BLOCK trees inside an unrolled loop.
561
562 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
563 which we will use for inlining. The rtl might even contain pseudoregs
564 whose space has been freed. */
565
566 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
567 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
568
569 /* Now copy each DECL_RTL which is a MEM,
570 so it is safe to modify their addresses. */
571 copy_decl_rtls (DECL_INITIAL (fndecl));
572
573 /* The fndecl node acts as its own progenitor, so mark it as such. */
574 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
575
576   /* Now copy the chain of insns.  Do this twice: the first time copy the
577      insn itself and its body; the second time copy the REG_NOTES.  This is
578      because a REG_NOTE may have a forward pointer to another insn.  */
579
580 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
581 {
582 orig_asm_operands_vector = 0;
583
584 if (insn == first_nonparm_insn)
585 in_nonparm_insns = 1;
586
587 switch (GET_CODE (insn))
588 {
589 case NOTE:
590 /* No need to keep these. */
591 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
592 continue;
593
594 copy = rtx_alloc (NOTE);
595 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
596 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
597 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
598 else
599 {
600 NOTE_SOURCE_FILE (insn) = (char *) copy;
601 NOTE_SOURCE_FILE (copy) = 0;
602 }
603 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
604 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
605 {
606 /* We have to forward these both to match the new exception
607 region. */
608 NOTE_BLOCK_NUMBER (copy)
609 = CODE_LABEL_NUMBER (label_map[NOTE_BLOCK_NUMBER (copy)]);
610
611 }
612 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
613 break;
614
615 case INSN:
616 case JUMP_INSN:
617 case CALL_INSN:
618 copy = rtx_alloc (GET_CODE (insn));
619
620 if (GET_CODE (insn) == CALL_INSN)
621 CALL_INSN_FUNCTION_USAGE (copy)
622 = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
623
624 PATTERN (copy) = copy_for_inline (PATTERN (insn));
625 INSN_CODE (copy) = -1;
626 LOG_LINKS (copy) = NULL_RTX;
627 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
628 break;
629
630 case CODE_LABEL:
631 copy = label_map[CODE_LABEL_NUMBER (insn)];
632 LABEL_NAME (copy) = LABEL_NAME (insn);
633 break;
634
635 case BARRIER:
636 copy = rtx_alloc (BARRIER);
637 break;
638
639 default:
640 abort ();
641 }
642 INSN_UID (copy) = INSN_UID (insn);
643 insn_map[INSN_UID (insn)] = copy;
644 NEXT_INSN (last_insn) = copy;
645 PREV_INSN (copy) = last_insn;
646 last_insn = copy;
647 }
648
649 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
650
651 /* Now copy the REG_NOTES. */
652 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
653 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
654 && insn_map[INSN_UID(insn)])
655 REG_NOTES (insn_map[INSN_UID (insn)])
656 = copy_for_inline (REG_NOTES (insn));
657
658 NEXT_INSN (last_insn) = NULL;
659
660 finish_inline (fndecl, head);
661
662 /* Make new versions of the register tables. */
663 new = (char *) savealloc (regno_pointer_flag_length);
664 bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
665 new1 = (char *) savealloc (regno_pointer_flag_length);
666 bcopy (regno_pointer_align, new1, regno_pointer_flag_length);
667
668 regno_pointer_flag = new;
669 regno_pointer_align = new1;
670
671 set_new_first_and_last_insn (first_insn, last_insn);
672
673 if (label_map)
674 free (label_map);
675 }
676
677 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
678 For example, this can copy a list made of TREE_LIST nodes. While copying,
679    for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
680 set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
681 point to the corresponding (abstract) original node. */
682
683 static tree
684 copy_decl_list (list)
685 tree list;
686 {
687 tree head;
688 register tree prev, next;
689
690 if (list == 0)
691 return 0;
692
693 head = prev = copy_node (list);
694 if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
695 DECL_ABSTRACT_ORIGIN (head) = list;
696 next = TREE_CHAIN (list);
697 while (next)
698 {
699 register tree copy;
700
701 copy = copy_node (next);
702 if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
703 DECL_ABSTRACT_ORIGIN (copy) = next;
704 TREE_CHAIN (prev) = copy;
705 prev = copy;
706 next = TREE_CHAIN (next);
707 }
708 return head;
709 }
710
711 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
712
713 static tree
714 copy_decl_tree (block)
715 tree block;
716 {
717 tree t, vars, subblocks;
718
719 vars = copy_decl_list (BLOCK_VARS (block));
720 subblocks = 0;
721
722 /* Process all subblocks. */
723 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
724 {
725 tree copy = copy_decl_tree (t);
726 TREE_CHAIN (copy) = subblocks;
727 subblocks = copy;
728 }
729
730 t = copy_node (block);
731 BLOCK_VARS (t) = vars;
732 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
733 /* If the BLOCK being cloned is already marked as having been instantiated
734 from something else, then leave that `origin' marking alone. Otherwise,
735 mark the clone as having originated from the BLOCK we are cloning. */
736 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
737 BLOCK_ABSTRACT_ORIGIN (t) = block;
738 return t;
739 }
740
741 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
742
743 static void
744 copy_decl_rtls (block)
745 tree block;
746 {
747 tree t;
748
749 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
750 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
751 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
752
753 /* Process all subblocks. */
754 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
755 copy_decl_rtls (t);
756 }
757
758 /* Make the insns and PARM_DECLs of the current function permanent
759 and record other information in DECL_SAVED_INSNS to allow inlining
760 of this function in subsequent calls.
761
762 This routine need not copy any insns because we are not going
763 to immediately compile the insns in the insn chain. There
764 are two cases when we would compile the insns for FNDECL:
765 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
766 be output at the end of other compilation, because somebody took
767 its address. In the first case, the insns of FNDECL are copied
768 as it is expanded inline, so FNDECL's saved insns are not
769 modified. In the second case, FNDECL is used for the last time,
770 so modifying the rtl is not a problem.
771
772 We don't have to worry about FNDECL being inline expanded by
773 other functions which are written at the end of compilation
774 because flag_no_inline is turned on when we begin writing
775 functions at the end of compilation. */
776
777 void
778 save_for_inline_nocopy (fndecl)
779 tree fndecl;
780 {
781 rtx insn;
782 rtx head;
783 rtx first_nonparm_insn;
784
785 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
786 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
787 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
788 for the parms, prior to elimination of virtual registers.
789 These values are needed for substituting parms properly. */
790
791 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
792
793 /* Make and emit a return-label if we have not already done so. */
794
795 if (return_label == 0)
796 {
797 return_label = gen_label_rtx ();
798 emit_label (return_label);
799 }
800
801 head = initialize_for_inline (fndecl, get_first_label_num (),
802 max_label_num (), max_reg_num (), 0);
803
804 /* If there are insns that copy parms from the stack into pseudo registers,
805 those insns are not copied. `expand_inline_function' must
806 emit the correct code to handle such things. */
807
808 insn = get_insns ();
809 if (GET_CODE (insn) != NOTE)
810 abort ();
811
812 /* Get the insn which signals the end of parameter setup code. */
813 first_nonparm_insn = get_first_nonparm_insn ();
814
815 /* Now just scan the chain of insns to see what happens to our
816 PARM_DECLs. If a PARM_DECL is used but never modified, we
817 can substitute its rtl directly when expanding inline (and
818 perform constant folding when its incoming value is constant).
819 Otherwise, we have to copy its value into a new register and track
820 the new register's life. */
821
822 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
823 {
824 if (insn == first_nonparm_insn)
825 in_nonparm_insns = 1;
826
827 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
828 {
829 if (current_function_uses_const_pool)
830 {
831 /* Replace any constant pool references with the actual constant.
832 We will put the constant back if we need to write the
833 function out after all. */
834 save_constants (&PATTERN (insn));
835 if (REG_NOTES (insn))
836 save_constants (&REG_NOTES (insn));
837 }
838
839 /* Record what interesting things happen to our parameters. */
840 note_stores (PATTERN (insn), note_modified_parmregs);
841 }
842 }
843
844 /* Also scan all decls, and replace any constant pool references with the
845 actual constant. */
846 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
847
848 /* We have now allocated all that needs to be allocated permanently
849 on the rtx obstack. Set our high-water mark, so that we
850 can free the rest of this when the time comes. */
851
852 preserve_data ();
853
854 finish_inline (fndecl, head);
855 }
856 \f
857 /* Given PX, a pointer into an insn, search for references to the constant
858 pool. Replace each with a CONST that has the mode of the original
859 constant, contains the constant, and has RTX_INTEGRATED_P set.
860 Similarly, constant pool addresses not enclosed in a MEM are replaced
861 with an ADDRESS and CONST rtx which also gives the constant, its
862 mode, the mode of the address, and has RTX_INTEGRATED_P set. */
863
864 static void
865 save_constants (px)
866 rtx *px;
867 {
868 rtx x;
869 int i, j;
870
871 again:
872 x = *px;
873
874 /* If this is a CONST_DOUBLE, don't try to fix things up in
875 CONST_DOUBLE_MEM, because this is an infinite recursion. */
876 if (GET_CODE (x) == CONST_DOUBLE)
877 return;
878 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
879 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
880 {
881 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
882 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
883 RTX_INTEGRATED_P (new) = 1;
884
885 /* If the MEM was in a different mode than the constant (perhaps we
886 were only looking at the low-order part), surround it with a
887 SUBREG so we can save both modes. */
888
889 if (GET_MODE (x) != const_mode)
890 {
891 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
892 RTX_INTEGRATED_P (new) = 1;
893 }
894
895 *px = new;
896 save_constants (&XEXP (*px, 0));
897 }
898 else if (GET_CODE (x) == SYMBOL_REF
899 && CONSTANT_POOL_ADDRESS_P (x))
900 {
901 *px = gen_rtx (ADDRESS, GET_MODE (x),
902 gen_rtx (CONST, get_pool_mode (x),
903 get_pool_constant (x)));
904 save_constants (&XEXP (*px, 0));
905 RTX_INTEGRATED_P (*px) = 1;
906 }
907
908 else
909 {
910 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
911 int len = GET_RTX_LENGTH (GET_CODE (x));
912
913 for (i = len-1; i >= 0; i--)
914 {
915 switch (fmt[i])
916 {
917 case 'E':
918 for (j = 0; j < XVECLEN (x, i); j++)
919 save_constants (&XVECEXP (x, i, j));
920 break;
921
922 case 'e':
923 if (XEXP (x, i) == 0)
924 continue;
925 if (i == 0)
926 {
927 /* Hack tail-recursion here. */
928 px = &XEXP (x, 0);
929 goto again;
930 }
931 save_constants (&XEXP (x, i));
932 break;
933 }
934 }
935 }
936 }
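/* Illustrative example (not in the original source): a pool reference
   such as (mem:DF (symbol_ref ...)) whose SYMBOL_REF satisfies
   CONSTANT_POOL_ADDRESS_P becomes (const:DF <pool constant>) with
   RTX_INTEGRATED_P set; if the MEM's mode differs from the constant's
   mode, the CONST is further wrapped in a (subreg ...) that records the
   access mode.  copy_for_inline and restore_constants later invert the
   transformation with force_const_mem.  */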
937 \f
938 /* Note whether a parameter is modified or not. */
939
940 static void
941 note_modified_parmregs (reg, x)
942 rtx reg;
943 rtx x;
944 {
945 if (GET_CODE (reg) == REG && in_nonparm_insns
946 && REGNO (reg) < max_parm_reg
947 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
948 && parmdecl_map[REGNO (reg)] != 0)
949 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
950 }
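/* Illustrative note (not in the original source): this function is the
   callback passed to note_stores, which invokes it once per store in an
   insn pattern, binding REG to the stored-into location and X to the
   enclosing SET or CLOBBER:

     note_stores (PATTERN (insn), note_modified_parmregs);

   exactly as save_for_inline_nocopy does above.  */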
951
952 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
953 according to `reg_map' and `label_map'. The original rtl insns
954 will be saved for inlining; this is used to make a copy
955 which is used to finish compiling the inline function itself.
956
957 If we find a "saved" constant pool entry, one which was replaced with
958 the value of the constant, convert it back to a constant pool entry.
959 Since the pool wasn't touched, this should simply restore the old
960 address.
961
962 All other kinds of rtx are copied except those that can never be
963 changed during compilation. */
964
965 static rtx
966 copy_for_inline (orig)
967 rtx orig;
968 {
969 register rtx x = orig;
970 register rtx new;
971 register int i;
972 register enum rtx_code code;
973 register char *format_ptr;
974
975 if (x == 0)
976 return x;
977
978 code = GET_CODE (x);
979
980 /* These types may be freely shared. */
981
982 switch (code)
983 {
984 case QUEUED:
985 case CONST_INT:
986 case SYMBOL_REF:
987 case PC:
988 case CC0:
989 return x;
990
991 case CONST_DOUBLE:
992 /* We have to make a new CONST_DOUBLE to ensure that we account for
993 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
994 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
995 {
996 REAL_VALUE_TYPE d;
997
998 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
999 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
1000 }
1001 else
1002 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
1003 VOIDmode);
1004
1005 case CONST:
1006 /* Get constant pool entry for constant in the pool. */
1007 if (RTX_INTEGRATED_P (x))
1008 return validize_mem (force_const_mem (GET_MODE (x),
1009 copy_for_inline (XEXP (x, 0))));
1010 break;
1011
1012 case SUBREG:
1013 /* Get constant pool entry, but access in different mode. */
1014 if (RTX_INTEGRATED_P (x))
1015 {
1016 new = force_const_mem (GET_MODE (SUBREG_REG (x)),
1017 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
1018
1019 PUT_MODE (new, GET_MODE (x));
1020 return validize_mem (new);
1021 }
1022 break;
1023
1024 case ADDRESS:
1025       /* Unless this is the special ADDRESS for a constant pool entry, it
1026          is an error.  Else get the constant pool address.  */
1027 if (! RTX_INTEGRATED_P (x))
1028 abort ();
1029
1030 new = force_const_mem (GET_MODE (XEXP (x, 0)),
1031 copy_for_inline (XEXP (XEXP (x, 0), 0)));
1032 new = XEXP (new, 0);
1033
1034 #ifdef POINTERS_EXTEND_UNSIGNED
1035 if (GET_MODE (new) != GET_MODE (x))
1036 new = convert_memory_address (GET_MODE (x), new);
1037 #endif
1038
1039 return new;
1040
1041 case ASM_OPERANDS:
1042 /* If a single asm insn contains multiple output operands
1043 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1044 We must make sure that the copied insn continues to share it. */
1045 if (orig_asm_operands_vector == XVEC (orig, 3))
1046 {
1047 x = rtx_alloc (ASM_OPERANDS);
1048 x->volatil = orig->volatil;
1049 XSTR (x, 0) = XSTR (orig, 0);
1050 XSTR (x, 1) = XSTR (orig, 1);
1051 XINT (x, 2) = XINT (orig, 2);
1052 XVEC (x, 3) = copy_asm_operands_vector;
1053 XVEC (x, 4) = copy_asm_constraints_vector;
1054 XSTR (x, 5) = XSTR (orig, 5);
1055 XINT (x, 6) = XINT (orig, 6);
1056 return x;
1057 }
1058 break;
1059
1060 case MEM:
1061 /* A MEM is usually allowed to be shared if its address is constant
1062 or is a constant plus one of the special registers.
1063
1064 We do not allow sharing of addresses that are either a special
1065 register or the sum of a constant and a special register because
1066          it is possible for unshare_all_rtl to copy the address into memory
1067 that won't be saved. Although the MEM can safely be shared, and
1068 won't be copied there, the address itself cannot be shared, and may
1069 need to be copied.
1070
1071 There are also two exceptions with constants: The first is if the
1072 constant is a LABEL_REF or the sum of the LABEL_REF
1073 and an integer. This case can happen if we have an inline
1074 function that supplies a constant operand to the call of another
1075 inline function that uses it in a switch statement. In this case,
1076 we will be replacing the LABEL_REF, so we have to replace this MEM
1077 as well.
1078
1079 The second case is if we have a (const (plus (address ..) ...)).
1080 In that case we need to put back the address of the constant pool
1081 entry. */
1082
1083 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1084 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1085 && ! (GET_CODE (XEXP (x, 0)) == CONST
1086 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1087 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1088 == LABEL_REF)
1089 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1090 == ADDRESS)))))
1091 return x;
1092 break;
1093
1094 case LABEL_REF:
1095 /* If this is a non-local label, just make a new LABEL_REF.
1096 Otherwise, use the new label as well. */
1097 x = gen_rtx (LABEL_REF, GET_MODE (orig),
1098 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1099 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1100 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1101 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1102 return x;
1103
1104 case REG:
1105 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1106 return reg_map [REGNO (x)];
1107 else
1108 return x;
1109
1110 case SET:
1111 /* If a parm that gets modified lives in a pseudo-reg,
1112 clear its TREE_READONLY to prevent certain optimizations. */
1113 {
1114 rtx dest = SET_DEST (x);
1115
1116 while (GET_CODE (dest) == STRICT_LOW_PART
1117 || GET_CODE (dest) == ZERO_EXTRACT
1118 || GET_CODE (dest) == SUBREG)
1119 dest = XEXP (dest, 0);
1120
1121 if (GET_CODE (dest) == REG
1122 && REGNO (dest) < max_parm_reg
1123 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1124 && parmdecl_map[REGNO (dest)] != 0
1125 /* The insn to load an arg pseudo from a stack slot
1126 does not count as modifying it. */
1127 && in_nonparm_insns)
1128 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1129 }
1130 break;
1131
1132 #if 0 /* This is a good idea, but here is the wrong place for it. */
1133 /* Arrange that CONST_INTs always appear as the second operand
1134 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1135 always appear as the first. */
1136 case PLUS:
1137 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1138 || (XEXP (x, 1) == frame_pointer_rtx
1139 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1140 && XEXP (x, 1) == arg_pointer_rtx)))
1141 {
1142 rtx t = XEXP (x, 0);
1143 XEXP (x, 0) = XEXP (x, 1);
1144 XEXP (x, 1) = t;
1145 }
1146 break;
1147 #endif
1148 default:
1149 break;
1150 }
1151
1152 /* Replace this rtx with a copy of itself. */
1153
1154 x = rtx_alloc (code);
1155 bcopy ((char *) orig, (char *) x,
1156 (sizeof (*x) - sizeof (x->fld)
1157 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1158
1159 /* Now scan the subexpressions recursively.
1160 We can store any replaced subexpressions directly into X
1161 since we know X is not shared! Any vectors in X
1162 must be copied if X was copied. */
1163
1164 format_ptr = GET_RTX_FORMAT (code);
1165
1166 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1167 {
1168 switch (*format_ptr++)
1169 {
1170 case 'e':
1171 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1172 break;
1173
1174 case 'u':
1175         /* Change any references to old insns to point to the
1176 corresponding copied insns. */
1177 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1178 break;
1179
1180 case 'E':
1181 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1182 {
1183 register int j;
1184
1185 XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
1186 for (j = 0; j < XVECLEN (x, i); j++)
1187 XVECEXP (x, i, j)
1188 = copy_for_inline (XVECEXP (x, i, j));
1189 }
1190 break;
1191 }
1192 }
1193
1194 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1195 {
1196 orig_asm_operands_vector = XVEC (orig, 3);
1197 copy_asm_operands_vector = XVEC (x, 3);
1198 copy_asm_constraints_vector = XVEC (x, 4);
1199 }
1200
1201 return x;
1202 }
1203
1204 /* Unfortunately, we need a global copy of const_equiv map for communication
1205 with a function called from note_stores. Be *very* careful that this
1206 is used properly in the presence of recursion. */
1207
1208 rtx *global_const_equiv_map;
1209 int global_const_equiv_map_size;
1210 \f
1211 #define FIXED_BASE_PLUS_P(X) \
1212 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1213 && GET_CODE (XEXP (X, 0)) == REG \
1214 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1215 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
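/* Illustrative example (not in the original source): FIXED_BASE_PLUS_P
   matches rtl of the shape

     (plus (reg VIRTUAL_STACK_VARS_REGNUM) (const_int 8))

   i.e. a constant offset from one of the fixed virtual base registers,
   which is why such addresses are safe to record in const_equiv_map.  */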
1216
1217 /* Integrate the procedure defined by FNDECL. Note that this function
1218 may wind up calling itself. Since the static variables are not
1219 reentrant, we do not assign them until after the possibility
1220 of recursion is eliminated.
1221
1222 If IGNORE is nonzero, do not produce a value.
1223 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1224
1225 Value is:
1226 (rtx)-1 if we could not substitute the function
1227 0 if we substituted it and it does not produce a value
1228 else an rtx for where the value is stored. */
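/* Sketch of how a caller (for example expand_call in calls.c) might
   consume that contract; illustrative, not part of the original file:

     rtx temp = expand_inline_function (fndecl, actparms, target, ignore,
                                        TREE_TYPE (exp), structure_value_addr);
     if (temp == (rtx) (HOST_WIDE_INT) -1)
       ...emit an ordinary call instead...
     else
       ...use temp, which is 0 when no value is produced...  */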
1229
1230 rtx
1231 expand_inline_function (fndecl, parms, target, ignore, type,
1232 structure_value_addr)
1233 tree fndecl, parms;
1234 rtx target;
1235 int ignore;
1236 tree type;
1237 rtx structure_value_addr;
1238 {
1239 tree formal, actual, block;
1240 rtx header = DECL_SAVED_INSNS (fndecl);
1241 rtx insns = FIRST_FUNCTION_INSN (header);
1242 rtx parm_insns = FIRST_PARM_INSN (header);
1243 tree *arg_trees;
1244 rtx *arg_vals;
1245 rtx insn;
1246 int max_regno;
1247 register int i;
1248 int min_labelno = FIRST_LABELNO (header);
1249 int max_labelno = LAST_LABELNO (header);
1250 int nargs;
1251 rtx local_return_label = 0;
1252 rtx loc;
1253 rtx stack_save = 0;
1254 rtx temp;
1255 struct inline_remap *map;
1256 rtx cc0_insn = 0;
1257 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1258 rtx static_chain_value = 0;
1259
1260 /* The pointer used to track the true location of the memory used
1261 for MAP->LABEL_MAP. */
1262 rtx *real_label_map = 0;
1263
1264 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1265 max_regno = MAX_REGNUM (header) + 3;
1266 if (max_regno < FIRST_PSEUDO_REGISTER)
1267 abort ();
1268
1269 nargs = list_length (DECL_ARGUMENTS (fndecl));
1270
1271   /* Check that the parms' types match and that sufficient arguments were
1272 passed. Since the appropriate conversions or default promotions have
1273 already been applied, the machine modes should match exactly. */
1274
1275 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
1276 formal;
1277 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
1278 {
1279 tree arg;
1280 enum machine_mode mode;
1281
1282 if (actual == 0)
1283 return (rtx) (HOST_WIDE_INT) -1;
1284
1285 arg = TREE_VALUE (actual);
1286 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1287
1288 if (mode != TYPE_MODE (TREE_TYPE (arg))
1289 /* If they are block mode, the types should match exactly.
1290 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1291 which could happen if the parameter has incomplete type. */
1292 || (mode == BLKmode
1293 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
1294 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
1295 return (rtx) (HOST_WIDE_INT) -1;
1296 }
1297
1298 /* Extra arguments are valid, but will be ignored below, so we must
1299 evaluate them here for side-effects. */
1300 for (; actual; actual = TREE_CHAIN (actual))
1301 expand_expr (TREE_VALUE (actual), const0_rtx,
1302 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1303
1304 /* Make a binding contour to keep inline cleanups called at
1305 outer function-scope level from looking like they are shadowing
1306 parameter declarations. */
1307 pushlevel (0);
1308
1309 /* Expand the function arguments. Do this first so that any
1310 new registers get created before we allocate the maps. */
1311
1312 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1313 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1314
1315 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1316 formal;
1317 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1318 {
1319 /* Actual parameter, converted to the type of the argument within the
1320 function. */
1321 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1322 /* Mode of the variable used within the function. */
1323 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1324 int invisiref = 0;
1325
1326 arg_trees[i] = arg;
1327 loc = RTVEC_ELT (arg_vector, i);
1328
1329 /* If this is an object passed by invisible reference, we copy the
1330 object into a stack slot and save its address. If this will go
1331 into memory, we do nothing now. Otherwise, we just expand the
1332 argument. */
1333 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1334 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1335 {
1336 rtx stack_slot
1337 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1338 int_size_in_bytes (TREE_TYPE (arg)), 1);
1339 MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));
1340
1341 store_expr (arg, stack_slot, 0);
1342
1343 arg_vals[i] = XEXP (stack_slot, 0);
1344 invisiref = 1;
1345 }
1346 else if (GET_CODE (loc) != MEM)
1347 {
1348 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1349             /* The modes of LOC and ARG can differ if LOC was a variable
1350                that had its mode promoted via PROMOTE_MODE.  */
1351 arg_vals[i] = convert_modes (GET_MODE (loc),
1352 TYPE_MODE (TREE_TYPE (arg)),
1353 expand_expr (arg, NULL_RTX, mode,
1354 EXPAND_SUM),
1355 TREE_UNSIGNED (TREE_TYPE (formal)));
1356 else
1357 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1358 }
1359 else
1360 arg_vals[i] = 0;
1361
1362 if (arg_vals[i] != 0
1363 && (! TREE_READONLY (formal)
1364 /* If the parameter is not read-only, copy our argument through
1365 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1366 TARGET in any way. In the inline function, they will likely
1367 be two different pseudos, and `safe_from_p' will make all
1368 sorts of smart assumptions about their not conflicting.
1369 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1370 wrong, so put ARG_VALS[I] into a fresh register.
1371 Don't worry about invisible references, since their stack
1372 temps will never overlap the target. */
1373 || (target != 0
1374 && ! invisiref
1375 && (GET_CODE (arg_vals[i]) == REG
1376 || GET_CODE (arg_vals[i]) == SUBREG
1377 || GET_CODE (arg_vals[i]) == MEM)
1378 && reg_overlap_mentioned_p (arg_vals[i], target))
1379 /* ??? We must always copy a SUBREG into a REG, because it might
1380 get substituted into an address, and not all ports correctly
1381 handle SUBREGs in addresses. */
1382 || (GET_CODE (arg_vals[i]) == SUBREG)))
1383 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1384
1385 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
1386 && TREE_CODE (TREE_TYPE (formal)) == POINTER_TYPE)
1387 mark_reg_pointer (arg_vals[i],
1388 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
1389 / BITS_PER_UNIT));
1390 }
1391
1392 /* Allocate the structures we use to remap things. */
1393
1394 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1395 map->fndecl = fndecl;
1396
1397 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1398 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1399
1400 /* We used to use alloca here, but the size of what it would try to
1401 allocate would occasionally cause it to exceed the stack limit and
1402 cause unpredictable core dumps. */
1403 real_label_map
1404 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
1405 map->label_map = real_label_map;
1406
1407 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1408 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1409 map->min_insnno = 0;
1410 map->max_insnno = INSN_UID (header);
1411
1412 map->integrating = 1;
1413
1414 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1415 be large enough for all our pseudos. This is the number we are currently
1416 using plus the number in the called routine, plus 15 for each arg,
1417 five to compute the virtual frame pointer, and five for the return value.
1418 This should be enough for most cases. We do not reference entries
1419 outside the range of the map.
1420
1421 ??? These numbers are quite arbitrary and were obtained by
1422 experimentation. At some point, we should try to allocate the
1423      table after all the parameters are set up so we can more accurately
1424 estimate the number of pseudos we will need. */
1425
1426 map->const_equiv_map_size
1427 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1428
1429 map->const_equiv_map
1430 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1431 bzero ((char *) map->const_equiv_map,
1432 map->const_equiv_map_size * sizeof (rtx));
1433
1434 map->const_age_map
1435 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1436 bzero ((char *) map->const_age_map,
1437 map->const_equiv_map_size * sizeof (unsigned));
1438 map->const_age = 0;
1439
1440 /* Record the current insn in case we have to set up pointers to frame
1441 and argument memory blocks. If there are no insns yet, add a dummy
1442 insn that can be used as an insertion point. */
1443 map->insns_at_start = get_last_insn ();
1444 if (map->insns_at_start == 0)
1445 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1446
1447 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1448 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1449
1450 /* Update the outgoing argument size to allow for those in the inlined
1451 function. */
1452 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1453 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1454
1455 /* If the inline function needs to make PIC references, that means
1456 that this function's PIC offset table must be used. */
1457 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1458 current_function_uses_pic_offset_table = 1;
1459
1460 /* If this function needs a context, set it up. */
1461 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1462 static_chain_value = lookup_static_chain (fndecl);
1463
1464 if (GET_CODE (parm_insns) == NOTE
1465 && NOTE_LINE_NUMBER (parm_insns) > 0)
1466 {
1467 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1468 NOTE_LINE_NUMBER (parm_insns));
1469 if (note)
1470 RTX_INTEGRATED_P (note) = 1;
1471 }
1472
1473 /* Process each argument. For each, set up things so that the function's
1474 reference to the argument will refer to the argument being passed.
1475 We only replace REG with REG here. Any simplifications are done
1476 via const_equiv_map.
1477
1478 We make two passes: In the first, we deal with parameters that will
1479 be placed into registers, since we need to ensure that the allocated
1480 register number fits in const_equiv_map. Then we store all non-register
1481 parameters into their memory location. */
1482
1483 /* Don't try to free temp stack slots here, because we may put one of the
1484 parameters into a temp stack slot. */
1485
1486 for (i = 0; i < nargs; i++)
1487 {
1488 rtx copy = arg_vals[i];
1489
1490 loc = RTVEC_ELT (arg_vector, i);
1491
1492 /* There are three cases, each handled separately. */
1493 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1494 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1495 {
1496 /* This must be an object passed by invisible reference (it could
1497 also be a variable-sized object, but we forbid inlining functions
1498 with variable-sized arguments). COPY is the address of the
1499 actual value (this computation will cause it to be copied). We
1500 map that address for the register, noting the actual address as
1501 an equivalent in case it can be substituted into the insns. */
1502
1503 if (GET_CODE (copy) != REG)
1504 {
1505 temp = copy_addr_to_reg (copy);
1506 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1507 && REGNO (temp) < map->const_equiv_map_size)
1508 {
1509 map->const_equiv_map[REGNO (temp)] = copy;
1510 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1511 }
1512 copy = temp;
1513 }
1514 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1515 }
1516 else if (GET_CODE (loc) == MEM)
1517 {
1518 /* This is the case of a parameter that lives in memory.
1519 It will live in the block we allocate in the called routine's
1520 frame that simulates the incoming argument area. Do nothing
1521 now; we will call store_expr later. */
1522 ;
1523 }
1524 else if (GET_CODE (loc) == REG)
1525 {
1526 /* This is the good case where the parameter is in a register.
1527 If it is read-only and our argument is a constant, set up the
1528 constant equivalence.
1529
1530 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1531 that flag set if it is a register.
1532
1533 Also, don't allow hard registers here; they might not be valid
1534 when substituted into insns. */
1535
1536 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1537 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1538 && ! REG_USERVAR_P (copy))
1539 || (GET_CODE (copy) == REG
1540 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1541 {
1542 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1543 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1544 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1545 && REGNO (temp) < map->const_equiv_map_size)
1546 {
1547 map->const_equiv_map[REGNO (temp)] = copy;
1548 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1549 }
1550 copy = temp;
1551 }
1552 map->reg_map[REGNO (loc)] = copy;
1553 }
1554 else if (GET_CODE (loc) == CONCAT)
1555 {
1556 /* This is the good case where the parameter is in a
1557 pair of separate pseudos.
1558 If it is read-only and our argument is a constant, set up the
1559 constant equivalence.
1560
1561 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1562 that flag set if it is a register.
1563
1564 Also, don't allow hard registers here; they might not be valid
1565 when substituted into insns. */
1566 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1567 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1568 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1569 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1570
1571 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1572 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1573 && ! REG_USERVAR_P (copyreal))
1574 || (GET_CODE (copyreal) == REG
1575 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1576 {
1577 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1578 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1579 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1580 && REGNO (temp) < map->const_equiv_map_size)
1581 {
1582 map->const_equiv_map[REGNO (temp)] = copyreal;
1583 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1584 }
1585 copyreal = temp;
1586 }
1587 map->reg_map[REGNO (locreal)] = copyreal;
1588
1589 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1590 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1591 && ! REG_USERVAR_P (copyimag))
1592 || (GET_CODE (copyimag) == REG
1593 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1594 {
1595 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1596 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1597 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1598 && REGNO (temp) < map->const_equiv_map_size)
1599 {
1600 map->const_equiv_map[REGNO (temp)] = copyimag;
1601 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1602 }
1603 copyimag = temp;
1604 }
1605 map->reg_map[REGNO (locimag)] = copyimag;
1606 }
1607 else
1608 abort ();
1609 }
1610
1611 /* Now do the parameters that will be placed in memory. */
1612
1613 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1614 formal; formal = TREE_CHAIN (formal), i++)
1615 {
1616 loc = RTVEC_ELT (arg_vector, i);
1617
1618 if (GET_CODE (loc) == MEM
1619 /* Exclude case handled above. */
1620 && ! (GET_CODE (XEXP (loc, 0)) == REG
1621 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1622 {
1623 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1624 DECL_SOURCE_LINE (formal));
1625 if (note)
1626 RTX_INTEGRATED_P (note) = 1;
1627
1628 /* Compute the address in the area we reserved and store the
1629 value there. */
1630 temp = copy_rtx_and_substitute (loc, map);
1631 subst_constants (&temp, NULL_RTX, map);
1632 apply_change_group ();
1633 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1634 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1635 store_expr (arg_trees[i], temp, 0);
1636 }
1637 }
1638
1639 /* Deal with the places that the function puts its result.
1640 We are driven by what is placed into DECL_RESULT.
1641
1642 Initially, we assume that we don't need any special handling for
1643 REG_FUNCTION_VALUE_P. */
1644
1645 map->inline_target = 0;
1646 loc = DECL_RTL (DECL_RESULT (fndecl));
1647 if (TYPE_MODE (type) == VOIDmode)
1648 /* There is no return value to worry about. */
1649 ;
1650 else if (GET_CODE (loc) == MEM)
1651 {
1652 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1653 abort ();
1654
1655 /* Pass the function the address in which to return a structure value.
1656 Note that a constructor can cause someone to call us with
1657 STRUCTURE_VALUE_ADDR, but the initialization takes place
1658 via the first parameter, rather than the struct return address.
1659
1660 We have two cases: if the address is a simple register indirect,
1661 use the mapping mechanism to point that register to our structure
1662 return address. Otherwise, store the structure return value into
1663 the place that it will be referenced from. */
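/* Sketch of the register-indirect case (illustrative only): if the
   inline function's DECL_RESULT is (mem:BLK (reg 61)), register 61 is
   simply remapped to a pseudo loaded with STRUCTURE_VALUE_ADDR, and a
   constant equivalence is recorded when that address is constant or a
   fixed offset from the virtual stack variables register.  */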
1664
1665 if (GET_CODE (XEXP (loc, 0)) == REG)
1666 {
1667 temp = force_reg (Pmode,
1668 force_operand (structure_value_addr, NULL_RTX));
1669 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1670 if ((CONSTANT_P (structure_value_addr)
1671 || GET_CODE (structure_value_addr) == ADDRESSOF
1672 || (GET_CODE (structure_value_addr) == PLUS
1673 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1674 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1675 && REGNO (temp) < map->const_equiv_map_size)
1676 {
1677 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1678 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1679 }
1680 }
1681 else
1682 {
1683 temp = copy_rtx_and_substitute (loc, map);
1684 subst_constants (&temp, NULL_RTX, map);
1685 apply_change_group ();
1686 emit_move_insn (temp, structure_value_addr);
1687 }
1688 }
1689 else if (ignore)
1690 /* We will ignore the result value, so don't look at its structure.
1691 Note that preparations for an aggregate return value
1692 do need to be made (above) even if it will be ignored. */
1693 ;
1694 else if (GET_CODE (loc) == REG)
1695 {
1696 /* The function returns an object in a register and we use the return
1697 value. Set up our target for remapping. */
1698
1699 /* Machine mode the function was declared to return. */
1700 enum machine_mode departing_mode = TYPE_MODE (type);
1701 /* (Possibly wider) machine mode it actually computes
1702 (for the sake of callers that fail to declare it right).
1703 We have to use the mode of the result's RTL, rather than
1704 its type, since expand_function_start may have promoted it. */
1705 enum machine_mode arriving_mode
1706 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1707 rtx reg_to_map;
1708
1709 /* Don't use MEMs as direct targets because on some machines
1710 substituting a MEM for a REG makes invalid insns.
1711 Let the combiner substitute the MEM if that is valid. */
1712 if (target == 0 || GET_CODE (target) != REG
1713 || GET_MODE (target) != departing_mode)
1714 target = gen_reg_rtx (departing_mode);
1715
1716 /* If function's value was promoted before return,
1717 avoid machine mode mismatch when we substitute INLINE_TARGET.
1718 But TARGET is what we will return to the caller. */
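/* Hypothetical illustration: a function declared to return QImode
   whose value was promoted to SImode by expand_function_start.  Then
   DEPARTING_MODE is QImode, ARRIVING_MODE is SImode, and below we set

       reg_to_map = (subreg:SI (reg:QI TARGET) 0)

   so substitutions inside the body are done in the promoted mode
   while the caller still sees a QImode TARGET.  */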
1719 if (arriving_mode != departing_mode)
1720 {
1721 /* Avoid creating a paradoxical subreg wider than
1722 BITS_PER_WORD, since that is illegal. */
1723 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1724 {
1725 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1726 GET_MODE_BITSIZE (arriving_mode)))
1727 /* Maybe could be handled by using convert_move () ? */
1728 abort ();
1729 reg_to_map = gen_reg_rtx (arriving_mode);
1730 target = gen_lowpart (departing_mode, reg_to_map);
1731 }
1732 else
1733 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1734 }
1735 else
1736 reg_to_map = target;
1737
1738 /* Usually, the result value is the machine's return register.
1739 Sometimes it may be a pseudo. Handle both cases. */
1740 if (REG_FUNCTION_VALUE_P (loc))
1741 map->inline_target = reg_to_map;
1742 else
1743 map->reg_map[REGNO (loc)] = reg_to_map;
1744 }
1745 else
1746 abort ();
1747
1748 /* Make a fresh binding contour that we can easily remove. Do this after
1749 expanding our arguments so cleanups are properly scoped. */
1750 pushlevel (0);
1751 expand_start_bindings (0);
1752
1753 /* Make new label equivalences for the labels in the called function. */
1754 for (i = min_labelno; i < max_labelno; i++)
1755 map->label_map[i] = gen_label_rtx ();
1756
1757 /* Perform postincrements before actually calling the function. */
1758 emit_queue ();
1759
1760 /* Clean up stack so that variables might have smaller offsets. */
1761 do_pending_stack_adjust ();
1762
1763 /* Save a copy of the location of const_equiv_map for mark_stores, called
1764 via note_stores. */
1765 global_const_equiv_map = map->const_equiv_map;
1766 global_const_equiv_map_size = map->const_equiv_map_size;
1767
1768 /* If the called function does an alloca, save and restore the
1769 stack pointer around the call. This saves stack space, but
1770 also is required if this inline is being done between two
1771 pushes. */
1772 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1773 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1774
1775 /* Now copy the insns one by one. Do this in two passes, first the insns and
1776 then their REG_NOTES, just like save_for_inline. */
1777
1778 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1779
1780 for (insn = insns; insn; insn = NEXT_INSN (insn))
1781 {
1782 rtx copy, pattern, set;
1783
1784 map->orig_asm_operands_vector = 0;
1785
1786 switch (GET_CODE (insn))
1787 {
1788 case INSN:
1789 pattern = PATTERN (insn);
1790 set = single_set (insn);
1791 copy = 0;
1792 if (GET_CODE (pattern) == USE
1793 && GET_CODE (XEXP (pattern, 0)) == REG
1794 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1795 /* The (USE (REG n)) at return from the function should
1796 be ignored since we are changing (REG n) into
1797 inline_target. */
1798 break;
1799
1800 /* Ignore setting a function value that we don't want to use. */
1801 if (map->inline_target == 0
1802 && set != 0
1803 && GET_CODE (SET_DEST (set)) == REG
1804 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1805 {
1806 if (volatile_refs_p (SET_SRC (set)))
1807 {
1808 rtx new_set;
1809
1810 /* If we must not delete the source,
1811 load it into a new temporary. */
1812 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1813
1814 new_set = single_set (copy);
1815 if (new_set == 0)
1816 abort ();
1817
1818 SET_DEST (new_set)
1819 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1820 }
1821 /* If the source and destination are the same and it
1822 has a note on it, keep the insn. */
1823 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1824 && REG_NOTES (insn) != 0)
1825 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1826 else
1827 break;
1828 }
1829
1830 /* If this is setting the static chain rtx, omit it. */
1831 else if (static_chain_value != 0
1832 && set != 0
1833 && GET_CODE (SET_DEST (set)) == REG
1834 && rtx_equal_p (SET_DEST (set),
1835 static_chain_incoming_rtx))
1836 break;
1837
1838 /* If this is setting the static chain pseudo, set it from
1839 the value we want to give it instead. */
1840 else if (static_chain_value != 0
1841 && set != 0
1842 && rtx_equal_p (SET_SRC (set),
1843 static_chain_incoming_rtx))
1844 {
1845 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1846
1847 copy = emit_move_insn (newdest, static_chain_value);
1848 static_chain_value = 0;
1849 }
1850 else
1851 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1852 /* REG_NOTES will be copied later. */
1853
1854 #ifdef HAVE_cc0
1855 /* If this insn is setting CC0, it may need to look at
1856 the insn that uses CC0 to see what type of insn it is.
1857 In that case, the call to recog via validate_change will
1858 fail. So don't substitute constants here. Instead,
1859 do it when we emit the following insn.
1860
1861 For example, see the pyr.md file. That machine has signed and
1862 unsigned compares. The compare patterns must check the
1863 following branch insn to see what kind of compare to
1864 emit.
1865
1866 If the previous insn set CC0, substitute constants on it as
1867 well. */
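/* For instance (illustrative RTL, assuming a cc0 target):

       (set (cc0) (compare (reg:SI 60) (const_int 0)))
       (set (pc) (if_then_else (lt (cc0) (const_int 0)) ...))

   constants are substituted into the copied compare only once the
   branch that consumes cc0 has been emitted, so the pair is
   validated together.  */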
1868 if (sets_cc0_p (PATTERN (copy)) != 0)
1869 cc0_insn = copy;
1870 else
1871 {
1872 if (cc0_insn)
1873 try_constants (cc0_insn, map);
1874 cc0_insn = 0;
1875 try_constants (copy, map);
1876 }
1877 #else
1878 try_constants (copy, map);
1879 #endif
1880 break;
1881
1882 case JUMP_INSN:
1883 if (GET_CODE (PATTERN (insn)) == RETURN
1884 || (GET_CODE (PATTERN (insn)) == PARALLEL
1885 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1886 {
1887 if (local_return_label == 0)
1888 local_return_label = gen_label_rtx ();
1889 pattern = gen_jump (local_return_label);
1890 }
1891 else
1892 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1893
1894 copy = emit_jump_insn (pattern);
1895
1896 #ifdef HAVE_cc0
1897 if (cc0_insn)
1898 try_constants (cc0_insn, map);
1899 cc0_insn = 0;
1900 #endif
1901 try_constants (copy, map);
1902
1903 /* If this used to be a conditional jump insn whose branch
1904 direction is now known, we must do something special. */
1905 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1906 {
1907 #ifdef HAVE_cc0
1908 /* The previous insn set cc0 for us. So delete it. */
1909 delete_insn (PREV_INSN (copy));
1910 #endif
1911
1912 /* If this is now a no-op, delete it. */
1913 if (map->last_pc_value == pc_rtx)
1914 {
1915 delete_insn (copy);
1916 copy = 0;
1917 }
1918 else
1919 /* Otherwise, this is an unconditional jump so we must put a
1920 BARRIER after it. We could do some dead code elimination
1921 here, but jump.c will do it just as well. */
1922 emit_barrier ();
1923 }
1924 break;
1925
1926 case CALL_INSN:
1927 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1928 copy = emit_call_insn (pattern);
1929
1930 /* Because the USAGE information potentially contains objects other
1931 than hard registers, we need to copy it. */
1932 CALL_INSN_FUNCTION_USAGE (copy)
1933 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1934
1935 #ifdef HAVE_cc0
1936 if (cc0_insn)
1937 try_constants (cc0_insn, map);
1938 cc0_insn = 0;
1939 #endif
1940 try_constants (copy, map);
1941
1942 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1943 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1944 map->const_equiv_map[i] = 0;
1945 break;
1946
1947 case CODE_LABEL:
1948 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1949 LABEL_NAME (copy) = LABEL_NAME (insn);
1950 map->const_age++;
1951 break;
1952
1953 case BARRIER:
1954 copy = emit_barrier ();
1955 break;
1956
1957 case NOTE:
1958 /* It is important to discard function-end and function-beg notes,
1959 so we have only one of each in the current function.
1960 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1961 deleted these in the copy used for continuing compilation,
1962 not the copy used for inlining). */
1963 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1964 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1965 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1966 {
1967 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1968 if (copy && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
1969 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
1970 {
1971 rtx label = map->label_map[NOTE_BLOCK_NUMBER (copy)];
1972
1973 /* We have to forward these both to match the new exception
1974 region. */
1975 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
1976 }
1977 }
1978 else
1979 copy = 0;
1980 break;
1981
1982 default:
1983 abort ();
1984 break;
1985 }
1986
1987 if (copy)
1988 RTX_INTEGRATED_P (copy) = 1;
1989
1990 map->insn_map[INSN_UID (insn)] = copy;
1991 }
1992
1993 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1994 from parameters can be substituted in. These are the only ones that
1995 are valid across the entire function. */
1996 map->const_age++;
1997 for (insn = insns; insn; insn = NEXT_INSN (insn))
1998 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1999 && map->insn_map[INSN_UID (insn)]
2000 && REG_NOTES (insn))
2001 {
2002 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2003 /* We must also do subst_constants, in case one of our parameters
2004 has const type and constant value. */
2005 subst_constants (&tem, NULL_RTX, map);
2006 apply_change_group ();
2007 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2008 }
2009
2010 if (local_return_label)
2011 emit_label (local_return_label);
2012
2013 /* Restore the stack pointer if we saved it above. */
2014 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2015 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2016
2017 /* Make copies of the decls of the symbols in the inline function, so that
2018 the copies of the variables get declared in the current function. Set
2019 up things so that lookup_static_chain knows to interpret registers
2020 in SAVE_EXPRs for TYPE_SIZEs as local. */
2021
2022 inline_function_decl = fndecl;
2023 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2024 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2025 inline_function_decl = 0;
2026
2027 /* End the scope containing the copied formal parameter variables
2028 and copied LABEL_DECLs. */
2029
2030 expand_end_bindings (getdecls (), 1, 1);
2031 block = poplevel (1, 1, 0);
2032 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2033 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2034 poplevel (0, 0, 0);
2035
2036 /* Must mark the line number note after inlined functions as a repeat, so
2037 that the test coverage code can avoid counting the call twice. This
2038 just tells the code to ignore the immediately following line note, since
2039 there already exists a copy of this note before the expanded inline call.
2040 This line number note is still needed for debugging though, so we can't
2041 delete it. */
2042 if (flag_test_coverage)
2043 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2044
2045 emit_line_note (input_filename, lineno);
2046
2047 if (structure_value_addr)
2048 {
2049 target = gen_rtx (MEM, TYPE_MODE (type),
2050 memory_address (TYPE_MODE (type), structure_value_addr));
2051 MEM_IN_STRUCT_P (target) = 1;
2052 }
2053
2054 /* Make sure we free the things we explicitly allocated with xmalloc. */
2055 if (real_label_map)
2056 free (real_label_map);
2057
2058 return target;
2059 }
2060 \f
2061 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2062 push all of those decls and give each one the corresponding home. */
2063
2064 static void
2065 integrate_parm_decls (args, map, arg_vector)
2066 tree args;
2067 struct inline_remap *map;
2068 rtvec arg_vector;
2069 {
2070 register tree tail;
2071 register int i;
2072
2073 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2074 {
2075 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2076 TREE_TYPE (tail));
2077 rtx new_decl_rtl
2078 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2079
2080 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2081 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2082 here, but that's going to require some more work. */
2083 /* DECL_INCOMING_RTL (decl) = ?; */
2084 /* These args would always appear unused, if not for this. */
2085 TREE_USED (decl) = 1;
2086 /* Prevent warning for shadowing with these. */
2087 DECL_ABSTRACT_ORIGIN (decl) = tail;
2088 pushdecl (decl);
2089 /* Fully instantiate the address with the equivalent form so that the
2090 debugging information contains the actual register, instead of the
2091 virtual register. Do this by not passing an insn to
2092 subst_constants. */
2093 subst_constants (&new_decl_rtl, NULL_RTX, map);
2094 apply_change_group ();
2095 DECL_RTL (decl) = new_decl_rtl;
2096 }
2097 }
2098
2099 /* Given a BLOCK node LET, push decls and levels so as to construct in the
2100 current function a tree of contexts isomorphic to the one that is given.
2101
2102 LEVEL indicates how far down into the BLOCK tree is the node we are
2103 currently traversing. It is always zero except for recursive calls.
2104
2105 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2106 registers used in the DECL_RTL field should be remapped. If it is zero,
2107 no mapping is necessary. */
2108
2109 static void
2110 integrate_decl_tree (let, level, map)
2111 tree let;
2112 int level;
2113 struct inline_remap *map;
2114 {
2115 tree t, node;
2116
2117 if (level > 0)
2118 pushlevel (0);
2119
2120 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2121 {
2122 tree d;
2123
2124 push_obstacks_nochange ();
2125 saveable_allocation ();
2126 d = copy_node (t);
2127 pop_obstacks ();
2128
2129 if (DECL_RTL (t) != 0)
2130 {
2131 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2132 /* Fully instantiate the address with the equivalent form so that the
2133 debugging information contains the actual register, instead of the
2134 virtual register. Do this by not passing an insn to
2135 subst_constants. */
2136 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2137 apply_change_group ();
2138 }
2139 /* These decls would always appear unused, if not for this. */
2140 TREE_USED (d) = 1;
2141 /* Prevent warning for shadowing with these. */
2142 DECL_ABSTRACT_ORIGIN (d) = t;
2143
2144 if (DECL_LANG_SPECIFIC (d))
2145 copy_lang_decl (d);
2146
2147 pushdecl (d);
2148 }
2149
2150 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2151 integrate_decl_tree (t, level + 1, map);
2152
2153 if (level > 0)
2154 {
2155 node = poplevel (1, 0, 0);
2156 if (node)
2157 {
2158 TREE_USED (node) = TREE_USED (let);
2159 BLOCK_ABSTRACT_ORIGIN (node) = let;
2160 }
2161 }
2162 }
2163
2164 /* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2165 through save_constants. */
2166
2167 static void
2168 save_constants_in_decl_trees (let)
2169 tree let;
2170 {
2171 tree t;
2172
2173 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2174 if (DECL_RTL (t) != 0)
2175 save_constants (&DECL_RTL (t));
2176
2177 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2178 save_constants_in_decl_trees (t);
2179 }
2180 \f
2181 /* Create a new copy of an rtx.
2182 Recursively copies the operands of the rtx,
2183 except for those few rtx codes that are sharable.
2184
2185 We always return an rtx that is similar to that incoming rtx, with the
2186 exception of possibly changing a REG to a SUBREG or vice versa. No
2187 rtl is ever emitted.
2188
2189 Handle constants that need to be placed in the constant pool by
2190 calling `force_const_mem'. */
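/* Minimal usage sketch (hypothetical operands): given ORIG of
   (plus:SI (reg:SI 57) (const_int 4)) and a MAP whose reg_map sends
   pseudo 57 to pseudo 90, the result is a freshly allocated
   (plus:SI (reg:SI 90) (const_int 4)); sharable codes such as
   CONST_INT are returned without copying.  */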
2191
2192 rtx
2193 copy_rtx_and_substitute (orig, map)
2194 register rtx orig;
2195 struct inline_remap *map;
2196 {
2197 register rtx copy, temp;
2198 register int i, j;
2199 register RTX_CODE code;
2200 register enum machine_mode mode;
2201 register char *format_ptr;
2202 int regno;
2203
2204 if (orig == 0)
2205 return 0;
2206
2207 code = GET_CODE (orig);
2208 mode = GET_MODE (orig);
2209
2210 switch (code)
2211 {
2212 case REG:
2213 /* If the stack pointer register shows up, it must be part of
2214 stack-adjustments (*not* because we eliminated the frame pointer!).
2215 Small hard registers are returned as-is. Pseudo-registers
2216 go through their `reg_map'. */
2217 regno = REGNO (orig);
2218 if (regno <= LAST_VIRTUAL_REGISTER)
2219 {
2220 /* Some hard registers are also mapped,
2221 but others are not translated. */
2222 if (map->reg_map[regno] != 0)
2223 return map->reg_map[regno];
2224
2225 /* If this is the virtual frame pointer, make space in current
2226 function's stack frame for the stack frame of the inline function.
2227
2228 Copy the address of this area into a pseudo. Map
2229 virtual_stack_vars_rtx to this pseudo and set up a constant
2230 equivalence for it to be the address. This will substitute the
2231 address into insns where it can be substituted and use the new
2232 pseudo where it can't. */
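/* Sketch, assuming FRAME_GROWS_DOWNWARD and a 16-byte inline frame:
   a BLKmode stack temporary is allocated in the caller, the address
   one byte past its top is copied into a new pseudo, and that pseudo
   (with the address recorded as a constant equivalence) becomes the
   remapping of virtual_stack_vars_rtx.  */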
2233 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2234 {
2235 rtx loc, seq;
2236 int size = DECL_FRAME_SIZE (map->fndecl);
2237
2238 #ifdef FRAME_GROWS_DOWNWARD
2239 /* In this case, virtual_stack_vars_rtx points to one byte
2240 higher than the top of the frame area. So make sure we
2241 allocate a big enough chunk to keep the frame pointer
2242 aligned like a real one. */
2243 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2244 #endif
2245 start_sequence ();
2246 loc = assign_stack_temp (BLKmode, size, 1);
2247 loc = XEXP (loc, 0);
2248 #ifdef FRAME_GROWS_DOWNWARD
2249 /* In this case, virtual_stack_vars_rtx points to one byte
2250 higher than the top of the frame area. So compute the offset
2251 to one byte higher than our substitute frame. */
2252 loc = plus_constant (loc, size);
2253 #endif
2254 map->reg_map[regno] = temp
2255 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2256
2257 #ifdef STACK_BOUNDARY
2258 mark_reg_pointer (map->reg_map[regno],
2259 STACK_BOUNDARY / BITS_PER_UNIT);
2260 #endif
2261
2262 if (REGNO (temp) < map->const_equiv_map_size)
2263 {
2264 map->const_equiv_map[REGNO (temp)] = loc;
2265 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2266 }
2267
2268 seq = gen_sequence ();
2269 end_sequence ();
2270 emit_insn_after (seq, map->insns_at_start);
2271 return temp;
2272 }
2273 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2274 {
2275 /* Do the same for a block to contain any arguments referenced
2276 in memory. */
2277 rtx loc, seq;
2278 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2279
2280 start_sequence ();
2281 loc = assign_stack_temp (BLKmode, size, 1);
2282 loc = XEXP (loc, 0);
2283 /* When arguments grow downward, the virtual incoming
2284 args pointer points to the top of the argument block,
2285 so the remapped location better do the same. */
2286 #ifdef ARGS_GROW_DOWNWARD
2287 loc = plus_constant (loc, size);
2288 #endif
2289 map->reg_map[regno] = temp
2290 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2291
2292 #ifdef STACK_BOUNDARY
2293 mark_reg_pointer (map->reg_map[regno],
2294 STACK_BOUNDARY / BITS_PER_UNIT);
2295 #endif
2296
2297 if (REGNO (temp) < map->const_equiv_map_size)
2298 {
2299 map->const_equiv_map[REGNO (temp)] = loc;
2300 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2301 }
2302
2303 seq = gen_sequence ();
2304 end_sequence ();
2305 emit_insn_after (seq, map->insns_at_start);
2306 return temp;
2307 }
2308 else if (REG_FUNCTION_VALUE_P (orig))
2309 {
2310 /* This is a reference to the function return value. If
2311 the function doesn't have a return value, error. If the
2312 mode doesn't agree, make a SUBREG. */
2313 if (map->inline_target == 0)
2314 /* Must be unrolling loops or replicating code if we
2315 reach here, so return the register unchanged. */
2316 return orig;
2317 else if (mode != GET_MODE (map->inline_target))
2318 return gen_lowpart (mode, map->inline_target);
2319 else
2320 return map->inline_target;
2321 }
2322 return orig;
2323 }
2324 if (map->reg_map[regno] == NULL)
2325 {
2326 map->reg_map[regno] = gen_reg_rtx (mode);
2327 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2328 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2329 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2330 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2331
2332 if (map->regno_pointer_flag[regno])
2333 mark_reg_pointer (map->reg_map[regno],
2334 map->regno_pointer_align[regno]);
2335 }
2336 return map->reg_map[regno];
2337
2338 case SUBREG:
2339 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2340 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2341 if (GET_CODE (copy) == SUBREG)
2342 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
2343 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2344 else if (GET_CODE (copy) == CONCAT)
2345 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2346 else
2347 return gen_rtx (SUBREG, GET_MODE (orig), copy,
2348 SUBREG_WORD (orig));
2349
2350 case ADDRESSOF:
2351 copy = gen_rtx (ADDRESSOF, mode,
2352 copy_rtx_and_substitute (XEXP (orig, 0), map));
2353 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2354 regno = ADDRESSOF_REGNO (orig);
2355 if (map->reg_map[regno])
2356 regno = REGNO (map->reg_map[regno]);
2357 else if (regno > LAST_VIRTUAL_REGISTER)
2358 {
2359 temp = XEXP (orig, 0);
2360 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2361 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2362 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2363 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2364 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2365
2366 if (map->regno_pointer_flag[regno])
2367 mark_reg_pointer (map->reg_map[regno],
2368 map->regno_pointer_align[regno]);
2369 regno = REGNO (map->reg_map[regno]);
2370 }
2371 ADDRESSOF_REGNO (copy) = regno;
2372 return copy;
2373
2374 case USE:
2375 case CLOBBER:
2376 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2377 to (use foo) if the original insn didn't have a subreg.
2378 Removing the subreg distorts the VAX movstrhi pattern
2379 by changing the mode of an operand. */
2380 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2381 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2382 copy = SUBREG_REG (copy);
2383 return gen_rtx (code, VOIDmode, copy);
2384
2385 case CODE_LABEL:
2386 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
2387 = LABEL_PRESERVE_P (orig);
2388 return map->label_map[CODE_LABEL_NUMBER (orig)];
2389
2390 case LABEL_REF:
2391 copy = gen_rtx (LABEL_REF, mode,
2392 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2393 : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
2394 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2395
2396 /* The fact that this label was previously nonlocal does not mean
2397 it still is, so we must check if it is within the range of
2398 this function's labels. */
2399 LABEL_REF_NONLOCAL_P (copy)
2400 = (LABEL_REF_NONLOCAL_P (orig)
2401 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2402 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2403
2404 /* If we have made a nonlocal label local, it means that this
2405 inlined call will be referring to our nonlocal goto handler.
2406 So make sure we create one for this block; we normally would
2407 not since this is not otherwise considered a "call". */
2408 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2409 function_call_count++;
2410
2411 return copy;
2412
2413 case PC:
2414 case CC0:
2415 case CONST_INT:
2416 return orig;
2417
2418 case SYMBOL_REF:
2419 /* Symbols which represent the address of a label stored in the constant
2420 pool must be modified to point to a constant pool entry for the
2421 remapped label. Otherwise, symbols are returned unchanged. */
2422 if (CONSTANT_POOL_ADDRESS_P (orig))
2423 {
2424 rtx constant = get_pool_constant (orig);
2425 if (GET_CODE (constant) == LABEL_REF)
2426 return XEXP (force_const_mem (GET_MODE (orig),
2427 copy_rtx_and_substitute (constant,
2428 map)),
2429 0);
2430 }
2431
2432 return orig;
2433
2434 case CONST_DOUBLE:
2435 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2436 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2437 duplicate of a CONST_DOUBLE we have already seen. */
2438 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2439 {
2440 REAL_VALUE_TYPE d;
2441
2442 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2443 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2444 }
2445 else
2446 return immed_double_const (CONST_DOUBLE_LOW (orig),
2447 CONST_DOUBLE_HIGH (orig), VOIDmode);
2448
2449 case CONST:
2450 /* Make new constant pool entry for a constant
2451 that was in the pool of the inline function. */
2452 if (RTX_INTEGRATED_P (orig))
2453 {
2454 /* If this was an address of a constant pool entry that itself
2455 had to be placed in the constant pool, it might not be a
2456 valid address. So the recursive call below might turn it
2457 into a register. In that case, it isn't a constant any
2458 more, so return it. This has the potential of changing a
2459 MEM into a REG, but we'll assume that it is safe. */
2460 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2461 if (! CONSTANT_P (temp))
2462 return temp;
2463 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2464 }
2465 break;
2466
2467 case ADDRESS:
2468 /* If from constant pool address, make new constant pool entry and
2469 return its address. */
2470 if (! RTX_INTEGRATED_P (orig))
2471 abort ();
2472
2473 temp
2474 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2475 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2476 map));
2477
2478 #if 0
2479 /* Legitimizing the address here is incorrect.
2480
2481 The only ADDRESS rtx's that can reach here are ones created by
2482 save_constants. Hence the operand of the ADDRESS is always valid
2483 in this position of the instruction, since the original rtx without
2484 the ADDRESS was valid.
2485
2486 The reason we don't legitimize the address here is that on the
2487 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2488 This code forces the operand of the address to a register, which
2489 fails because we can not take the HIGH part of a register.
2490
2491 Also, change_address may create new registers. These registers
2492 will not have valid reg_map entries. This can cause try_constants()
2493 to fail because it assumes that all registers in the rtx have valid
2494 reg_map entries, and it may end up replacing one of these new
2495 registers with junk. */
2496
2497 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2498 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2499 #endif
2500
2501 temp = XEXP (temp, 0);
2502
2503 #ifdef POINTERS_EXTEND_UNSIGNED
2504 if (GET_MODE (temp) != GET_MODE (orig))
2505 temp = convert_memory_address (GET_MODE (orig), temp);
2506 #endif
2507
2508 return temp;
2509
2510 case ASM_OPERANDS:
2511 /* If a single asm insn contains multiple output operands
2512 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2513 We must make sure that the copied insn continues to share it. */
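/* Hypothetical source: asm ("..." : "=r" (a), "=r" (b) : "r" (c));
   this expands to a PARALLEL of two SETs whose ASM_OPERANDS share
   operand 3, the input vector.  The copy of the second ASM_OPERANDS
   must reuse map->copy_asm_operands_vector rather than copying the
   vector again, so the outputs still belong to a single asm.  */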
2514 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2515 {
2516 copy = rtx_alloc (ASM_OPERANDS);
2517 copy->volatil = orig->volatil;
2518 XSTR (copy, 0) = XSTR (orig, 0);
2519 XSTR (copy, 1) = XSTR (orig, 1);
2520 XINT (copy, 2) = XINT (orig, 2);
2521 XVEC (copy, 3) = map->copy_asm_operands_vector;
2522 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2523 XSTR (copy, 5) = XSTR (orig, 5);
2524 XINT (copy, 6) = XINT (orig, 6);
2525 return copy;
2526 }
2527 break;
2528
2529 case CALL:
2530 /* This is given special treatment because the first
2531 operand of a CALL is a (MEM ...) which may get
2532 forced into a register for cse. This is undesirable
2533 if function-address cse isn't wanted or if we won't do cse. */
2534 #ifndef NO_FUNCTION_CSE
2535 if (! (optimize && ! flag_no_function_cse))
2536 #endif
2537 return gen_rtx (CALL, GET_MODE (orig),
2538 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2539 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2540 copy_rtx_and_substitute (XEXP (orig, 1), map));
2541 break;
2542
2543 #if 0
2544 /* Must be ifdefed out for loop unrolling to work. */
2545 case RETURN:
2546 abort ();
2547 #endif
2548
2549 case SET:
2550 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2551 Adjust the setting by the offset of the area we made.
2552 If the nonlocal goto is into the current function,
2553 this will result in unnecessarily bad code, but should work. */
2554 if (SET_DEST (orig) == virtual_stack_vars_rtx
2555 || SET_DEST (orig) == virtual_incoming_args_rtx)
2556 {
2557 /* In case a translation hasn't occurred already, make one now. */
2558 rtx junk = copy_rtx_and_substitute (SET_DEST (orig), map);
2559 rtx equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2560 rtx equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
2561 HOST_WIDE_INT loc_offset
2562 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2563
2564 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2565 force_operand
2566 (plus_constant
2567 (copy_rtx_and_substitute (SET_SRC (orig), map),
2568 - loc_offset),
2569 NULL_RTX));
2570 }
2571 break;
2572
2573 case MEM:
2574 copy = rtx_alloc (MEM);
2575 PUT_MODE (copy, mode);
2576 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2577 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2578 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2579
2580 /* If doing function inlining, this MEM might not be const in the
2581 function that it is being inlined into, and thus may not be
2582 unchanging after function inlining. Constant pool references are
2583 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2584 for them. */
2585 if (! map->integrating)
2586 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2587
2588 return copy;
2589
2590 default:
2591 break;
2592 }
2593
2594 copy = rtx_alloc (code);
2595 PUT_MODE (copy, mode);
2596 copy->in_struct = orig->in_struct;
2597 copy->volatil = orig->volatil;
2598 copy->unchanging = orig->unchanging;
2599
2600 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2601
2602 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2603 {
2604 switch (*format_ptr++)
2605 {
2606 case '0':
2607 XEXP (copy, i) = XEXP (orig, i);
2608 break;
2609
2610 case 'e':
2611 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2612 break;
2613
2614 case 'u':
2615 /* Change any references to old-insns to point to the
2616 corresponding copied insns. */
2617 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2618 break;
2619
2620 case 'E':
2621 XVEC (copy, i) = XVEC (orig, i);
2622 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2623 {
2624 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2625 for (j = 0; j < XVECLEN (copy, i); j++)
2626 XVECEXP (copy, i, j)
2627 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2628 }
2629 break;
2630
2631 case 'w':
2632 XWINT (copy, i) = XWINT (orig, i);
2633 break;
2634
2635 case 'i':
2636 XINT (copy, i) = XINT (orig, i);
2637 break;
2638
2639 case 's':
2640 XSTR (copy, i) = XSTR (orig, i);
2641 break;
2642
2643 default:
2644 abort ();
2645 }
2646 }
2647
2648 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2649 {
2650 map->orig_asm_operands_vector = XVEC (orig, 3);
2651 map->copy_asm_operands_vector = XVEC (copy, 3);
2652 map->copy_asm_constraints_vector = XVEC (copy, 4);
2653 }
2654
2655 return copy;
2656 }
2657 \f
2658 /* Substitute known constant values into INSN, if that is valid. */
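/* Illustrative effect (hypothetical registers): after inlining f (5),
   an insn such as

       (set (reg:SI 90) (plus:SI (reg:SI 57) (const_int 1)))

   where reg 57 has the constant equivalence (const_int 5) is, when
   the change validates, rewritten to set (const_int 6), and reg 90
   then acquires that equivalence for later substitutions.  */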
2659
2660 void
2661 try_constants (insn, map)
2662 rtx insn;
2663 struct inline_remap *map;
2664 {
2665 int i;
2666
2667 map->num_sets = 0;
2668 subst_constants (&PATTERN (insn), insn, map);
2669
2670 /* Apply the changes if they are valid; otherwise discard them. */
2671 apply_change_group ();
2672
2673 /* Show we don't know the value of anything stored or clobbered. */
2674 note_stores (PATTERN (insn), mark_stores);
2675 map->last_pc_value = 0;
2676 #ifdef HAVE_cc0
2677 map->last_cc0_value = 0;
2678 #endif
2679
2680 /* Set up any constant equivalences made in this insn. */
2681 for (i = 0; i < map->num_sets; i++)
2682 {
2683 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2684 {
2685 int regno = REGNO (map->equiv_sets[i].dest);
2686
2687 if (regno < map->const_equiv_map_size
2688 && (map->const_equiv_map[regno] == 0
2689 /* Following clause is a hack to make the case work where GNU C++
2690 reassigns a variable to make cse work right. */
2691 || ! rtx_equal_p (map->const_equiv_map[regno],
2692 map->equiv_sets[i].equiv)))
2693 {
2694 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2695 map->const_age_map[regno] = map->const_age;
2696 }
2697 }
2698 else if (map->equiv_sets[i].dest == pc_rtx)
2699 map->last_pc_value = map->equiv_sets[i].equiv;
2700 #ifdef HAVE_cc0
2701 else if (map->equiv_sets[i].dest == cc0_rtx)
2702 map->last_cc0_value = map->equiv_sets[i].equiv;
2703 #endif
2704 }
2705 }
2706 \f
2707 /* Substitute known constants for pseudo regs in the contents of LOC,
2708 which are part of INSN.
2709 If INSN is zero, the substitution should always be done (this is used to
2710 update DECL_RTL).
2711 These changes are taken out by try_constants if the result is not valid.
2712
2713 Note that we are more concerned with determining when the result of a SET
2714 is a constant, for further propagation, than actually inserting constants
2715 into insns; cse will do the latter task better.
2716
2717 This function is also used to adjust the addresses of items previously addressed
2718 via the virtual stack variable or virtual incoming arguments registers. */
2719
2720 static void
2721 subst_constants (loc, insn, map)
2722 rtx *loc;
2723 rtx insn;
2724 struct inline_remap *map;
2725 {
2726 rtx x = *loc;
2727 register int i;
2728 register enum rtx_code code;
2729 register char *format_ptr;
2730 int num_changes = num_validated_changes ();
2731 rtx new = 0;
2732 enum machine_mode op0_mode;
2733
2734 code = GET_CODE (x);
2735
2736 switch (code)
2737 {
2738 case PC:
2739 case CONST_INT:
2740 case CONST_DOUBLE:
2741 case SYMBOL_REF:
2742 case CONST:
2743 case LABEL_REF:
2744 case ADDRESS:
2745 return;
2746
2747 #ifdef HAVE_cc0
2748 case CC0:
2749 validate_change (insn, loc, map->last_cc0_value, 1);
2750 return;
2751 #endif
2752
2753 case USE:
2754 case CLOBBER:
2755 /* The only thing we can do with a USE or CLOBBER is possibly do
2756 some substitutions in a MEM within it. */
2757 if (GET_CODE (XEXP (x, 0)) == MEM)
2758 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2759 return;
2760
2761 case REG:
2762 /* Substitute for parms and known constants. Don't replace
2763 hard regs used as user variables with constants. */
2764 {
2765 int regno = REGNO (x);
2766
2767 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2768 && regno < map->const_equiv_map_size
2769 && map->const_equiv_map[regno] != 0
2770 && map->const_age_map[regno] >= map->const_age)
2771 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2772 return;
2773 }
2774
2775 case SUBREG:
2776 /* SUBREG applied to something other than a reg
2777 should be treated as ordinary, since that must
2778 be a special hack and we don't know how to treat it specially.
2779 Consider for example mulsidi3 in m68k.md.
2780 Ordinary SUBREG of a REG needs this special treatment. */
2781 if (GET_CODE (SUBREG_REG (x)) == REG)
2782 {
2783 rtx inner = SUBREG_REG (x);
2784 rtx new = 0;
2785
2786 /* We can't call subst_constants on &SUBREG_REG (x) because any
2787 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2788 see what is inside, try to form the new SUBREG and see if that is
2789 valid. We handle two cases: extracting a full word in an
2790 integral mode and extracting the low part. */
2791 subst_constants (&inner, NULL_RTX, map);
2792
2793 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2794 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2795 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2796 new = operand_subword (inner, SUBREG_WORD (x), 0,
2797 GET_MODE (SUBREG_REG (x)));
2798
2799 cancel_changes (num_changes);
2800 if (new == 0 && subreg_lowpart_p (x))
2801 new = gen_lowpart_common (GET_MODE (x), inner);
2802
2803 if (new)
2804 validate_change (insn, loc, new, 1);
2805
2806 return;
2807 }
2808 break;
2809
2810 case MEM:
2811 subst_constants (&XEXP (x, 0), insn, map);
2812
2813 /* If a memory address got spoiled, change it back. */
2814 if (insn != 0 && num_validated_changes () != num_changes
2815 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2816 cancel_changes (num_changes);
2817 return;
2818
2819 case SET:
2820 {
2821 /* Substitute constants in our source, and in any arguments to a
2822 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2823 itself. */
2824 rtx *dest_loc = &SET_DEST (x);
2825 rtx dest = *dest_loc;
2826 rtx src, tem;
2827
2828 subst_constants (&SET_SRC (x), insn, map);
2829 src = SET_SRC (x);
2830
2831 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2832 || GET_CODE (*dest_loc) == SUBREG
2833 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2834 {
2835 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2836 {
2837 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2838 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2839 }
2840 dest_loc = &XEXP (*dest_loc, 0);
2841 }
2842
2843 /* Do substitute in the address of a destination in memory. */
2844 if (GET_CODE (*dest_loc) == MEM)
2845 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2846
2847 /* Check for the case where DEST is a SUBREG, both it and the underlying
2848 register are no larger than one word, and the SUBREG has the wider mode.
2849 In that case, we are really setting the underlying register to the
2850 source converted to the mode of DEST. So indicate that. */
2851 if (GET_CODE (dest) == SUBREG
2852 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2853 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2854 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2855 <= GET_MODE_SIZE (GET_MODE (dest)))
2856 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2857 src)))
2858 src = tem, dest = SUBREG_REG (dest);
2859
2860 /* If we are storing a recognizable value, save it for later recording. */
2861 if ((map->num_sets < MAX_RECOG_OPERANDS)
2862 && (CONSTANT_P (src)
2863 || (GET_CODE (src) == REG
2864 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2865 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2866 || (GET_CODE (src) == PLUS
2867 && GET_CODE (XEXP (src, 0)) == REG
2868 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2869 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2870 && CONSTANT_P (XEXP (src, 1)))
2871 || GET_CODE (src) == COMPARE
2872 #ifdef HAVE_cc0
2873 || dest == cc0_rtx
2874 #endif
2875 || (dest == pc_rtx
2876 && (src == pc_rtx || GET_CODE (src) == RETURN
2877 || GET_CODE (src) == LABEL_REF))))
2878 {
2879 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2880 it will cause us to save the COMPARE with any constants
2881 substituted, which is what we want for later. */
2882 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2883 map->equiv_sets[map->num_sets++].dest = dest;
2884 }
2885 }
2886 return;
2887
2888 default:
2889 break;
2890 }
2891
2892 format_ptr = GET_RTX_FORMAT (code);
2893
2894 /* If the first operand is an expression, save its mode for later. */
2895 if (*format_ptr == 'e')
2896 op0_mode = GET_MODE (XEXP (x, 0));
2897
2898 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2899 {
2900 switch (*format_ptr++)
2901 {
2902 case '0':
2903 break;
2904
2905 case 'e':
2906 if (XEXP (x, i))
2907 subst_constants (&XEXP (x, i), insn, map);
2908 break;
2909
2910 case 'u':
2911 case 'i':
2912 case 's':
2913 case 'w':
2914 break;
2915
2916 case 'E':
2917 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2918 {
2919 int j;
2920 for (j = 0; j < XVECLEN (x, i); j++)
2921 subst_constants (&XVECEXP (x, i, j), insn, map);
2922 }
2923 break;
2924
2925 default:
2926 abort ();
2927 }
2928 }
2929
2930 /* If this is a commutative operation, move a constant to the second
2931 operand unless the second operand is already a CONST_INT. */
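/* E.g. a hypothetical (plus:SI (const_int 4) (reg:SI 90)) becomes
   (plus:SI (reg:SI 90) (const_int 4)), the canonical order expected
   by recog and by the simplify_*_operation calls below.  */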
2932 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2933 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2934 {
2935 rtx tem = XEXP (x, 0);
2936 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2937 validate_change (insn, &XEXP (x, 1), tem, 1);
2938 }
2939
2940 /* Simplify the expression in case we put in some constants. */
2941 switch (GET_RTX_CLASS (code))
2942 {
2943 case '1':
2944 new = simplify_unary_operation (code, GET_MODE (x),
2945 XEXP (x, 0), op0_mode);
2946 break;
2947
2948 case '<':
2949 {
2950 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2951 if (op_mode == VOIDmode)
2952 op_mode = GET_MODE (XEXP (x, 1));
2953 new = simplify_relational_operation (code, op_mode,
2954 XEXP (x, 0), XEXP (x, 1));
2955 #ifdef FLOAT_STORE_FLAG_VALUE
2956 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2957 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2958 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
2959 GET_MODE (x)));
2960 #endif
2961 break;
2962 }
2963
2964 case '2':
2965 case 'c':
2966 new = simplify_binary_operation (code, GET_MODE (x),
2967 XEXP (x, 0), XEXP (x, 1));
2968 break;
2969
2970 case 'b':
2971 case '3':
2972 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2973 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2974 break;
2975 }
2976
2977 if (new)
2978 validate_change (insn, loc, new, 1);
2979 }
2980
2981 /* Show that the registers modified no longer contain known constants. We
2982 are called from note_stores with parts of the new insn. */
2983
2984 void
2985 mark_stores (dest, x)
2986 rtx dest;
2987 rtx x;
2988 {
2989 int regno = -1;
2990 enum machine_mode mode;
2991
2992 /* DEST is always the innermost thing set, except in the case of
2993 SUBREGs of hard registers. */
2994
2995 if (GET_CODE (dest) == REG)
2996 regno = REGNO (dest), mode = GET_MODE (dest);
2997 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2998 {
2999 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
3000 mode = GET_MODE (SUBREG_REG (dest));
3001 }
3002
3003 if (regno >= 0)
3004 {
3005 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
3006 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
3007 int i;
3008
3009 /* Ignore virtual stack var or virtual arg register since those
3010 are handled separately. */
3011 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3012 && regno != VIRTUAL_STACK_VARS_REGNUM)
3013 for (i = regno; i <= last_reg; i++)
3014 if (i < global_const_equiv_map_size)
3015 global_const_equiv_map[i] = 0;
3016 }
3017 }
3018 \f
3019 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3020 pointed to by PX, they represent constants in the constant pool.
3021 Replace these with a new memory reference obtained from force_const_mem.
3022 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3023 address of a constant pool entry. Replace them with the address of
3024 a new constant pool entry obtained from force_const_mem. */
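/* Sketch: a (const ...) marked RTX_INTEGRATED_P stands for a constant
   that lived in the inline function's constant pool and is replaced
   by a MEM from force_const_mem in this function's pool; an
   integrated (address ...) likewise becomes the address, XEXP (m, 0),
   of such a new pool entry.  */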
3025
3026 static void
3027 restore_constants (px)
3028 rtx *px;
3029 {
3030 rtx x = *px;
3031 int i, j;
3032 char *fmt;
3033
3034 if (x == 0)
3035 return;
3036
3037 if (GET_CODE (x) == CONST_DOUBLE)
3038 {
3039 /* We have to make a new CONST_DOUBLE to ensure that we account for
3040 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
3041 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3042 {
3043 REAL_VALUE_TYPE d;
3044
3045 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
3046 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
3047 }
3048 else
3049 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
3050 VOIDmode);
3051 }
3052
3053 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
3054 {
3055 restore_constants (&XEXP (x, 0));
3056 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
3057 }
3058 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
3059 {
3060 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
3061 rtx new = XEXP (SUBREG_REG (x), 0);
3062
3063 restore_constants (&new);
3064 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
3065 PUT_MODE (new, GET_MODE (x));
3066 *px = validize_mem (new);
3067 }
3068 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
3069 {
3070 rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
3071 XEXP (XEXP (x, 0), 0)),
3072 0);
3073
3074 #ifdef POINTERS_EXTEND_UNSIGNED
3075 if (GET_MODE (new) != GET_MODE (x))
3076 new = convert_memory_address (GET_MODE (x), new);
3077 #endif
3078
3079 *px = new;
3080 }
3081 else
3082 {
3083 fmt = GET_RTX_FORMAT (GET_CODE (x));
3084 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
3085 {
3086 switch (*fmt++)
3087 {
3088 case 'E':
3089 for (j = 0; j < XVECLEN (x, i); j++)
3090 restore_constants (&XVECEXP (x, i, j));
3091 break;
3092
3093 case 'e':
3094 restore_constants (&XEXP (x, i));
3095 break;
3096 }
3097 }
3098 }
3099 }
3100 \f
3101 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
3102 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
3103 that it points to the node itself, thus indicating that the node is its
3104 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
3105 the given node is NULL, recursively descend the decl/block tree which
3106 it is the root of, and for each other ..._DECL or BLOCK node contained
3107 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
3108 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
3109 values to point to themselves. */
3110
3111 static void
3112 set_block_origin_self (stmt)
3113 register tree stmt;
3114 {
3115 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
3116 {
3117 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
3118
3119 {
3120 register tree local_decl;
3121
3122 for (local_decl = BLOCK_VARS (stmt);
3123 local_decl != NULL_TREE;
3124 local_decl = TREE_CHAIN (local_decl))
3125 set_decl_origin_self (local_decl); /* Potential recursion. */
3126 }
3127
3128 {
3129 register tree subblock;
3130
3131 for (subblock = BLOCK_SUBBLOCKS (stmt);
3132 subblock != NULL_TREE;
3133 subblock = BLOCK_CHAIN (subblock))
3134 set_block_origin_self (subblock); /* Recurse. */
3135 }
3136 }
3137 }
3138
3139 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3140 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3141 node so that it points to the node itself, thus indicating that the
3142 node represents its own (abstract) origin. Additionally, if the
3143 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3144 the decl/block tree of which the given node is the root, and for
3145 each other ..._DECL or BLOCK node contained therein whose
3146 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3147 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3148 point to themselves. */
3149
3150 static void
3151 set_decl_origin_self (decl)
3152 register tree decl;
3153 {
3154 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3155 {
3156 DECL_ABSTRACT_ORIGIN (decl) = decl;
3157 if (TREE_CODE (decl) == FUNCTION_DECL)
3158 {
3159 register tree arg;
3160
3161 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3162 DECL_ABSTRACT_ORIGIN (arg) = arg;
3163 if (DECL_INITIAL (decl) != NULL_TREE
3164 && DECL_INITIAL (decl) != error_mark_node)
3165 set_block_origin_self (DECL_INITIAL (decl));
3166 }
3167 }
3168 }
3169 \f
3170 /* Given a pointer to some BLOCK node, and a boolean value to set the
3171 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
3172 the given block, and for all local decls and all local sub-blocks
3173 (recursively) which are contained therein. */
3174
3175 static void
3176 set_block_abstract_flags (stmt, setting)
3177 register tree stmt;
3178 register int setting;
3179 {
3180 register tree local_decl;
3181 register tree subblock;
3182
3183 BLOCK_ABSTRACT (stmt) = setting;
3184
3185 for (local_decl = BLOCK_VARS (stmt);
3186 local_decl != NULL_TREE;
3187 local_decl = TREE_CHAIN (local_decl))
3188 set_decl_abstract_flags (local_decl, setting);
3189
3190 for (subblock = BLOCK_SUBBLOCKS (stmt);
3191 subblock != NULL_TREE;
3192 subblock = BLOCK_CHAIN (subblock))
3193 set_block_abstract_flags (subblock, setting);
3194 }
3195
3196 /* Given a pointer to some ..._DECL node, and a boolean value to set the
3197 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
3198 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3199 set the abstract flags for all of the parameters, local vars, local
3200 blocks and sub-blocks (recursively) to the same setting. */
3201
3202 void
3203 set_decl_abstract_flags (decl, setting)
3204 register tree decl;
3205 register int setting;
3206 {
3207 DECL_ABSTRACT (decl) = setting;
3208 if (TREE_CODE (decl) == FUNCTION_DECL)
3209 {
3210 register tree arg;
3211
3212 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3213 DECL_ABSTRACT (arg) = setting;
3214 if (DECL_INITIAL (decl) != NULL_TREE
3215 && DECL_INITIAL (decl) != error_mark_node)
3216 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3217 }
3218 }
3219 \f
3220 /* Output the assembly language code for the function FNDECL
3221 from its DECL_SAVED_INSNS. Used for inline functions that are output
3222 at end of compilation instead of where they came in the source. */
3223
3224 void
3225 output_inline_function (fndecl)
3226 tree fndecl;
3227 {
3228 rtx head;
3229 rtx last;
3230
3231 if (output_bytecode)
3232 {
3233 warning ("`inline' ignored for bytecode output");
3234 return;
3235 }
3236
3237 /* Things we allocate from here on are part of this function, not
3238 permanent. */
3239 temporary_allocation ();
3240
3241 head = DECL_SAVED_INSNS (fndecl);
3242 current_function_decl = fndecl;
3243
3244 /* This call is only used to initialize global variables. */
3245 init_function_start (fndecl, "lossage", 1);
3246
3247 /* Redo parameter determinations in case the FUNCTION_...
3248 macros took machine-specific actions that need to be redone. */
3249 assign_parms (fndecl, 1);
3250
3251 /* Set stack frame size. */
3252 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3253
3254 /* The first is a bit of a lie (the array may be larger), but it doesn't
3255 matter too much and it isn't worth saving the actual bound. */
3256 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3257 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3258 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3259 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3260 max_parm_reg = MAX_PARMREG (head);
3261 parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);
3262
3263 stack_slot_list = STACK_SLOT_LIST (head);
3264 forced_labels = FORCED_LABELS (head);
3265
3266 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3267 current_function_calls_alloca = 1;
3268
3269 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3270 current_function_calls_setjmp = 1;
3271
3272 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3273 current_function_calls_longjmp = 1;
3274
3275 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3276 current_function_returns_struct = 1;
3277
3278 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3279 current_function_returns_pcc_struct = 1;
3280
3281 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3282 current_function_needs_context = 1;
3283
3284 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3285 current_function_has_nonlocal_label = 1;
3286
3287 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3288 current_function_returns_pointer = 1;
3289
3290 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3291 current_function_uses_const_pool = 1;
3292
3293 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3294 current_function_uses_pic_offset_table = 1;
3295
3296 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3297 current_function_pops_args = POPS_ARGS (head);
3298
3299 /* This is the only thing that the expand_function_end call that used to
3300 be here actually did, and that call can cause problems. */
3301 immediate_size_expand--;
3302
3303 /* Find last insn and rebuild the constant pool. */
3304 for (last = FIRST_PARM_INSN (head);
3305 NEXT_INSN (last); last = NEXT_INSN (last))
3306 {
3307 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3308 {
3309 restore_constants (&PATTERN (last));
3310 restore_constants (&REG_NOTES (last));
3311 }
3312 }
3313
3314 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3315 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3316
3317 /* We must have already output DWARF debugging information for the
3318 original (abstract) inline function declaration/definition, so
3319 we want to make sure that the debugging information we generate
3320 for this special instance of the inline function refers back to
3321 the information we already generated. To make sure that happens,
3322 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3323 node (and for all of the local ..._DECL nodes which are its children)
3324 so that they all point to themselves. */
3325
3326 set_decl_origin_self (fndecl);
3327
3328 /* We're not deferring this any longer. */
3329 DECL_DEFER_OUTPUT (fndecl) = 0;
3330
3331 /* We can't inline this anymore. */
3332 DECL_INLINE (fndecl) = 0;
3333
3334 /* Compile this function all the way down to assembly code. */
3335 rest_of_compilation (fndecl);
3336
3337 current_function_decl = 0;
3338 }