1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20
21
22 #include <stdio.h>
23
24 #include "config.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "insn-flags.h"
30 #include "expr.h"
31 #include "output.h"
32 #include "integrate.h"
33 #include "real.h"
34 #include "function.h"
35
36 #include "obstack.h"
37 #define obstack_chunk_alloc xmalloc
38 #define obstack_chunk_free free
39
40 extern struct obstack *function_maybepermanent_obstack;
41
42 extern tree pushdecl ();
43 extern tree poplevel ();
44
45 /* Similar, but round to the next highest integer that meets the
46 alignment. */
47 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
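/* A worked example of the rounding above (the bit-mask trick assumes
   ALIGN is a power of two):
     CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16
     CEIL_ROUND (16, 8) == (16 + 7) & ~7 == 16   (already-aligned values
   are left unchanged).  */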
48
49 /* Default max number of insns a function can have and still be inline.
50 This is overridden on RISC machines. */
51 #ifndef INTEGRATE_THRESHOLD
52 #define INTEGRATE_THRESHOLD(DECL) \
53 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
54 #endif
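/* For illustration, under the default definition above a function with
   two parameters may contain up to 8 * (8 + 2) == 80 insns and still be
   considered for inlining; each additional parameter raises the limit by
   another 8 insns.  */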
55 \f
56 /* Save any constant pool constants in an insn. */
57 static void save_constants ();
58
59 /* Note when parameter registers are the destination of a SET. */
60 static void note_modified_parmregs ();
61
62 /* Copy an rtx for save_for_inline_copying. */
63 static rtx copy_for_inline ();
64
65 /* Make copies of MEMs in DECL_RTLs. */
66 static void copy_decl_rtls ();
67
68 static tree copy_decl_tree ();
69 static tree copy_decl_list ();
70
71 /* Return the constant equivalent of a given rtx, or 0 if none. */
72 static rtx const_equiv ();
73
74 static void integrate_parm_decls ();
75 static void integrate_decl_tree ();
76
77 static void subst_constants ();
78 static rtx fold_out_const_cc0 ();
79 \f
80 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
81 is safe and reasonable to integrate into other functions.
82 Nonzero means value is a warning message with a single %s
83 for the function's name. */
84
85 char *
86 function_cannot_inline_p (fndecl)
87 register tree fndecl;
88 {
89 register rtx insn;
90 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
91 int max_insns = INTEGRATE_THRESHOLD (fndecl);
92 register int ninsns = 0;
93 register tree parms;
94
95 /* No inlines with varargs. `grokdeclarator' gives a warning
96 message about that if `inline' is specified. This code
97 is put in to catch the volunteers. */
98 if ((last && TREE_VALUE (last) != void_type_node)
99 || (DECL_ARGUMENTS (fndecl) && DECL_NAME (DECL_ARGUMENTS (fndecl))
100 && ! strcmp (IDENTIFIER_POINTER (DECL_NAME (DECL_ARGUMENTS (fndecl))),
101 "__builtin_va_alist")))
102 return "varargs function cannot be inline";
103
104 if (current_function_calls_alloca)
105 return "function using alloca cannot be inline";
106
107 if (current_function_contains_functions)
108 return "function with nested functions cannot be inline";
109
110 /* This restriction may be eliminated sometime soon. But for now, don't
111 worry about remapping the static chain. */
112 if (current_function_needs_context)
113 return "nested function cannot be inline";
114
115 /* If it's not even close, don't even look. */
116 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
117 return "function too large to be inline";
118
119 #if 0
120 /* Large stacks are OK now that inlined functions can share them. */
121 /* Don't inline functions with large stack usage,
122 since they can make other recursive functions burn up stack. */
123 if (!DECL_INLINE (fndecl) && get_frame_size () > 100)
124 return "function stack frame for inlining";
125 #endif
126
127 #if 0
128 /* Don't inline functions which do not specify a function prototype and
129 have BLKmode argument or take the address of a parameter. */
130 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
131 {
132 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
133 TREE_ADDRESSABLE (parms) = 1;
134 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
135 return "no prototype, and parameter address used; cannot be inline";
136 }
137 #endif
138
139 /* We can't inline functions that return structures
140 the old-fashioned PCC way, copying into a static block. */
141 if (current_function_returns_pcc_struct)
142 return "inline functions not supported for this return value type";
143
144 /* We can't inline functions that return structures of varying size. */
145 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
146 return "function with varying-size return value cannot be inline";
147
148 /* Cannot inline a function with a varying size argument. */
149 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
150 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
151 return "function with varying-size parameter cannot be inline";
152
153 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
154 {
155 for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns;
156 insn = NEXT_INSN (insn))
157 {
158 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
159 ninsns++;
160 }
161
162 if (ninsns >= max_insns)
163 return "function too large to be inline";
164 }
165
166 /* We cannot inline this function if forced_labels is non-zero. This
167 implies that a label in this function was used as an initializer.
168 Because labels cannot be duplicated, all labels in the function
169 will be renamed when it is inlined. However, there is no way to find
170 and fix all variables initialized with addresses of labels in this
171 function, hence inlining is impossible. */
172
173 if (forced_labels)
174 return "function with label addresses used in initializers cannot inline";
175
176 return 0;
177 }
178 \f
179 /* Variables used within save_for_inline. */
180
181 /* Mapping from old pseudo-register to new pseudo-registers.
182 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
183 It is allocated in `save_for_inline' and `expand_inline_function',
184 and deallocated on exit from each of those routines. */
185 static rtx *reg_map;
186
187 /* Mapping from old code-labels to new code-labels.
188 The first element of this map is label_map[min_labelno].
189 It is allocated in `save_for_inline' and `expand_inline_function',
190 and deallocated on exit from each of those routines. */
191 static rtx *label_map;
192
193 /* Mapping from old insn uid's to copied insns.
194 It is allocated in `save_for_inline' and `expand_inline_function',
195 and deallocated on exit from each of those routines. */
196 static rtx *insn_map;
197
198 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
199 Zero for a reg that isn't a parm's home.
200 Only reg numbers less than max_parm_reg are mapped here. */
201 static tree *parmdecl_map;
202
203 /* Keep track of first pseudo-register beyond those that are parms. */
204 static int max_parm_reg;
205
206 /* When an insn is being copied by copy_for_inline,
207 this is nonzero if we have copied an ASM_OPERANDS.
208 In that case, it is the original input-operand vector. */
209 static rtvec orig_asm_operands_vector;
210
211 /* When an insn is being copied by copy_for_inline,
212 this is nonzero if we have copied an ASM_OPERANDS.
213 In that case, it is the copied input-operand vector. */
214 static rtvec copy_asm_operands_vector;
215
216 /* Likewise, this is the copied constraints vector. */
217 static rtvec copy_asm_constraints_vector;
218
219 /* In save_for_inline, nonzero if past the parm-initialization insns. */
220 static int in_nonparm_insns;
221 \f
222 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
223 needed to save FNDECL's insns and info for future inline expansion. */
224
225 static rtx
226 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
227 tree fndecl;
228 int min_labelno;
229 int max_labelno;
230 int max_reg;
231 int copy;
232 {
233 int function_flags, i;
234 rtvec arg_vector;
235 tree parms;
236
237 /* Compute the values of any flags we must restore when inlining this. */
238
239 function_flags
240 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
241 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
242 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
243 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
244 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
245 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
246 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
247 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
248 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
249 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
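/* Illustrative note: each current_function_* value above is 0 or 1, and
   assuming the FUNCTION_FLAGS_* masks (from integrate.h) are distinct
   single bits, the sum is equivalent to OR-ing together the masks that
   apply.  For example, a function that calls alloca and returns a
   pointer would get
     FUNCTION_FLAGS_CALLS_ALLOCA | FUNCTION_FLAGS_RETURNS_POINTER.  */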
250
251 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
252 bzero (parmdecl_map, max_parm_reg * sizeof (tree));
253 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
254
255 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
256 parms;
257 parms = TREE_CHAIN (parms), i++)
258 {
259 rtx p = DECL_RTL (parms);
260
261 if (GET_CODE (p) == MEM && copy)
262 {
263 /* Copy the rtl so that modifications of the addresses
264 later in compilation won't affect this arg_vector.
265 Virtual register instantiation can screw the address
266 of the rtl. */
267 rtx new = copy_rtx (p);
268
269 /* Don't leave the old copy anywhere in this decl. */
270 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
271 || (GET_CODE (DECL_RTL (parms)) == MEM
272 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
273 && (XEXP (DECL_RTL (parms), 0)
274 == XEXP (DECL_INCOMING_RTL (parms), 0))))
275 DECL_INCOMING_RTL (parms) = new;
276 DECL_RTL (parms) = new;
277 }
278
279 RTVEC_ELT (arg_vector, i) = p;
280
281 if (GET_CODE (p) == REG)
282 parmdecl_map[REGNO (p)] = parms;
283 /* This flag is cleared later
284 if the function ever modifies the value of the parm. */
285 TREE_READONLY (parms) = 1;
286 }
287
288 /* Assume we start out in the insns that set up the parameters. */
289 in_nonparm_insns = 0;
290
291 /* The list of DECL_SAVED_INSNS starts off with a header which
292 contains the following information:
293
294 the first insn of the function (not including the insns that copy
295 parameters into registers),
296 the first parameter insn of the function,
297 the first label used by that function,
298 the last label used by that function,
299 the highest register number used for parameters,
300 the total number of registers used,
301 the size of the incoming stack area for parameters,
302 the number of bytes popped on return,
303 the stack slot list,
304 some flags that are used to restore compiler globals,
305 the value of current_function_outgoing_args_size,
306 the original argument vector,
307 and the original DECL_INITIAL. */
308
309 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
310 max_parm_reg, max_reg,
311 current_function_args_size,
312 current_function_pops_args,
313 stack_slot_list, function_flags,
314 current_function_outgoing_args_size,
315 arg_vector, (rtx) DECL_INITIAL (fndecl));
316 }
317
318 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
319 things that must be done to make FNDECL expandable as an inline function.
320 HEAD contains the chain of insns to which FNDECL will expand. */
321
322 static void
323 finish_inline (fndecl, head)
324 tree fndecl;
325 rtx head;
326 {
327 NEXT_INSN (head) = get_first_nonparm_insn ();
328 FIRST_PARM_INSN (head) = get_insns ();
329 DECL_SAVED_INSNS (fndecl) = head;
330 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
331 DECL_INLINE (fndecl) = 1;
332 }
333
334 /* Make the insns and PARM_DECLs of the current function permanent
335 and record other information in DECL_SAVED_INSNS to allow inlining
336 of this function in subsequent calls.
337
338 This function is called when we are going to immediately compile
339 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
340 modified by the compilation process, so we copy all of them to
341 new storage and consider the new insns to be the insn chain to be
342 compiled. Our caller (rest_of_compilation) saves the original
343 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
344
345 void
346 save_for_inline_copying (fndecl)
347 tree fndecl;
348 {
349 rtx first_insn, last_insn, insn;
350 rtx head, copy;
351 int max_labelno, min_labelno, i, len;
352 int max_reg;
353 int max_uid;
354 rtx first_nonparm_insn;
355
356 /* Make and emit a return-label if we have not already done so.
357 Do this before recording the bounds on label numbers. */
358
359 if (return_label == 0)
360 {
361 return_label = gen_label_rtx ();
362 emit_label (return_label);
363 }
364
365 /* Get some bounds on the labels and registers used. */
366
367 max_labelno = max_label_num ();
368 min_labelno = get_first_label_num ();
369 max_reg = max_reg_num ();
370
371 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
372 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
373 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
374 for the parms, prior to elimination of virtual registers.
375 These values are needed for substituting parms properly. */
376
377 max_parm_reg = max_parm_reg_num ();
378 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
379
380 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
381
382 if (current_function_uses_const_pool)
383 {
384 /* Replace any constant pool references with the actual constant. We
385 will put the constants back in the copy made below. */
386 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
387 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
388 {
389 save_constants (&PATTERN (insn));
390 if (REG_NOTES (insn))
391 save_constants (&REG_NOTES (insn));
392 }
393
394 /* Clear out the constant pool so that we can recreate it with the
395 copied constants below. */
396 init_const_rtx_hash_table ();
397 clear_const_double_mem ();
398 }
399
400 max_uid = INSN_UID (head);
401
402 /* We have now allocated all that needs to be allocated permanently
403 on the rtx obstack. Set our high-water mark, so that we
404 can free the rest of this when the time comes. */
405
406 preserve_data ();
407
408 /* Copy the chain of insns of this function.
409 Install the copied chain as the insns of this function,
410 for continued compilation;
411 the original chain is recorded as the DECL_SAVED_INSNS
412 for inlining future calls. */
413
414 /* If there are insns that copy parms from the stack into pseudo registers,
415 those insns are not copied. `expand_inline_function' must
416 emit the correct code to handle such things. */
417
418 insn = get_insns ();
419 if (GET_CODE (insn) != NOTE)
420 abort ();
421 first_insn = rtx_alloc (NOTE);
422 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
423 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
424 INSN_UID (first_insn) = INSN_UID (insn);
425 PREV_INSN (first_insn) = NULL;
426 NEXT_INSN (first_insn) = NULL;
427 last_insn = first_insn;
428
429 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
430 Make these new rtx's now, and install them in regno_reg_rtx, so they
431 will be the official pseudo-reg rtx's for the rest of compilation. */
432
433 reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));
434
435 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
436 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
437 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
438 regno_reg_rtx[i], len);
439
440 bcopy (reg_map + LAST_VIRTUAL_REGISTER + 1,
441 regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1,
442 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
443
444 /* Likewise each label rtx must have a unique rtx as its copy. */
445
446 label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
447 label_map -= min_labelno;
448
449 for (i = min_labelno; i < max_labelno; i++)
450 label_map[i] = gen_label_rtx ();
451
452 /* Record the mapping of old insns to copied insns. */
453
454 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
455 bzero (insn_map, max_uid * sizeof (rtx));
456
457 /* Get the insn which signals the end of parameter setup code. */
458 first_nonparm_insn = get_first_nonparm_insn ();
459
460 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
461 (the former occurs when a variable has its address taken)
462 since these may be shared and can be changed by virtual
463 register instantiation. DECL_RTL values for our arguments
464 have already been copied by initialize_for_inline. */
465 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
466 if (GET_CODE (regno_reg_rtx[i]) == MEM)
467 XEXP (regno_reg_rtx[i], 0)
468 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
469
470 /* Copy the tree of subblocks of the function, and the decls in them.
471 We will use the copy for compiling this function, then restore the original
472 subblocks and decls for use when inlining this function.
473
474 Several parts of the compiler modify BLOCK trees. In particular,
475 instantiate_virtual_regs will instantiate any virtual regs
476 mentioned in the DECL_RTLs of the decls, and loop
477 unrolling will replicate any BLOCK trees inside an unrolled loop.
478
479 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
480 which we will use for inlining. The rtl might even contain pseudoregs
481 whose space has been freed. */
482
483 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
484 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
485
486 /* Now copy each DECL_RTL which is a MEM,
487 so it is safe to modify their addresses. */
488 copy_decl_rtls (DECL_INITIAL (fndecl));
489
490 /* The fndecl node acts as its own progenitor, so mark it as such. */
491 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
492
493 /* Now copy the chain of insns. Do this in two passes: the first copies
494 the insn itself and its body; the second copies the REG_NOTES. This is
495 because a REG_NOTE may have a forward pointer to another insn. */
496
497 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
498 {
499 orig_asm_operands_vector = 0;
500
501 if (insn == first_nonparm_insn)
502 in_nonparm_insns = 1;
503
504 switch (GET_CODE (insn))
505 {
506 case NOTE:
507 /* No need to keep these. */
508 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
509 continue;
510
511 copy = rtx_alloc (NOTE);
512 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
513 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
514 break;
515
516 case INSN:
517 case CALL_INSN:
518 case JUMP_INSN:
519 copy = rtx_alloc (GET_CODE (insn));
520 PATTERN (copy) = copy_for_inline (PATTERN (insn));
521 INSN_CODE (copy) = -1;
522 LOG_LINKS (copy) = NULL;
523 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
524 break;
525
526 case CODE_LABEL:
527 copy = label_map[CODE_LABEL_NUMBER (insn)];
528 LABEL_NAME (copy) = LABEL_NAME (insn);
529 break;
530
531 case BARRIER:
532 copy = rtx_alloc (BARRIER);
533 break;
534
535 default:
536 abort ();
537 }
538 INSN_UID (copy) = INSN_UID (insn);
539 insn_map[INSN_UID (insn)] = copy;
540 NEXT_INSN (last_insn) = copy;
541 PREV_INSN (copy) = last_insn;
542 last_insn = copy;
543 }
544
545 /* Now copy the REG_NOTES. */
546 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
547 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
548 && insn_map[INSN_UID(insn)])
549 REG_NOTES (insn_map[INSN_UID (insn)])
550 = copy_for_inline (REG_NOTES (insn));
551
552 NEXT_INSN (last_insn) = NULL;
553
554 finish_inline (fndecl, head);
555
556 set_new_first_and_last_insn (first_insn, last_insn);
557 }
558
559 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
560 For example, this can copy a list made of TREE_LIST nodes. While copying,
561 for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
562 set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
563 point to the corresponding (abstract) original node. */
564
565 static tree
566 copy_decl_list (list)
567 tree list;
568 {
569 tree head;
570 register tree prev, next;
571
572 if (list == 0)
573 return 0;
574
575 head = prev = copy_node (list);
576 if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
577 DECL_ABSTRACT_ORIGIN (head) = list;
578 next = TREE_CHAIN (list);
579 while (next)
580 {
581 register tree copy;
582
583 copy = copy_node (next);
584 if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
585 DECL_ABSTRACT_ORIGIN (copy) = next;
586 TREE_CHAIN (prev) = copy;
587 prev = copy;
588 next = TREE_CHAIN (next);
589 }
590 return head;
591 }
592
593 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
594
595 static tree
596 copy_decl_tree (block)
597 tree block;
598 {
599 tree t, vars, subblocks;
600
601 vars = copy_decl_list (BLOCK_VARS (block));
602 subblocks = 0;
603
604 /* Process all subblocks. */
605 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
606 {
607 tree copy = copy_decl_tree (t);
608 TREE_CHAIN (copy) = subblocks;
609 subblocks = copy;
610 }
611
612 t = copy_node (block);
613 BLOCK_VARS (t) = vars;
614 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
615 /* If the BLOCK being cloned is already marked as having been instantiated
616 from something else, then leave that `origin' marking alone. Elsewise,
617 mark the clone as having originated from the BLOCK we are cloning. */
618 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
619 BLOCK_ABSTRACT_ORIGIN (t) = block;
620 return t;
621 }
622
623 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
624
625 static void
626 copy_decl_rtls (block)
627 tree block;
628 {
629 tree t;
630
631 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
632 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
633 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
634
635 /* Process all subblocks. */
636 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
637 copy_decl_rtls (t);
638 }
639
640 /* Make the insns and PARM_DECLs of the current function permanent
641 and record other information in DECL_SAVED_INSNS to allow inlining
642 of this function in subsequent calls.
643
644 This routine need not copy any insns because we are not going
645 to immediately compile the insns in the insn chain. There
646 are two cases when we would compile the insns for FNDECL:
647 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
648 be output at the end of other compilation, because somebody took
649 its address. In the first case, the insns of FNDECL are copied
650 as it is expanded inline, so FNDECL's saved insns are not
651 modified. In the second case, FNDECL is used for the last time,
652 so modifying the rtl is not a problem.
653
654 ??? Actually, we do not verify that FNDECL is not inline expanded
655 by other functions which must also be written down at the end
656 of compilation. We could set flag_no_inline to nonzero when
657 the time comes to write down such functions. */
658
659 void
660 save_for_inline_nocopy (fndecl)
661 tree fndecl;
662 {
663 rtx insn;
664 rtx head, copy;
665 tree parms;
666 int max_labelno, min_labelno, i, len;
667 int max_reg;
668 int max_uid;
669 rtx first_nonparm_insn;
670 int function_flags;
671
672 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
673 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
674 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
675 for the parms, prior to elimination of virtual registers.
676 These values are needed for substituting parms properly. */
677
678 max_parm_reg = max_parm_reg_num ();
679 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
680
681 /* Make and emit a return-label if we have not already done so. */
682
683 if (return_label == 0)
684 {
685 return_label = gen_label_rtx ();
686 emit_label (return_label);
687 }
688
689 head = initialize_for_inline (fndecl, get_first_label_num (),
690 max_label_num (), max_reg_num (), 0);
691
692 /* If there are insns that copy parms from the stack into pseudo registers,
693 those insns are not copied. `expand_inline_function' must
694 emit the correct code to handle such things. */
695
696 insn = get_insns ();
697 if (GET_CODE (insn) != NOTE)
698 abort ();
699
700 /* Get the insn which signals the end of parameter setup code. */
701 first_nonparm_insn = get_first_nonparm_insn ();
702
703 /* Now just scan the chain of insns to see what happens to our
704 PARM_DECLs. If a PARM_DECL is used but never modified, we
705 can substitute its rtl directly when expanding inline (and
706 perform constant folding when its incoming value is constant).
707 Otherwise, we have to copy its value into a new register and track
708 the new register's life. */
709
710 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
711 {
712 if (insn == first_nonparm_insn)
713 in_nonparm_insns = 1;
714
715 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
716 {
717 if (current_function_uses_const_pool)
718 {
719 /* Replace any constant pool references with the actual constant.
720 We will put the constant back if we need to write the
721 function out after all. */
722 save_constants (&PATTERN (insn));
723 if (REG_NOTES (insn))
724 save_constants (&REG_NOTES (insn));
725 }
726
727 /* Record what interesting things happen to our parameters. */
728 note_stores (PATTERN (insn), note_modified_parmregs);
729 }
730 }
731
732 /* We have now allocated all that needs to be allocated permanently
733 on the rtx obstack. Set our high-water mark, so that we
734 can free the rest of this when the time comes. */
735
736 preserve_data ();
737
738 finish_inline (fndecl, head);
739 }
740 \f
741 /* Given PX, a pointer into an insn, search for references to the constant
742 pool. Replace each with a CONST that has the mode of the original
743 constant, contains the constant, and has RTX_INTEGRATED_P set.
744 Similarly, constant pool addresses not enclosed in a MEM are replaced
745 with an ADDRESS rtx which also gives the constant, mode, and has
746 RTX_INTEGRATED_P set. */
747
748 static void
749 save_constants (px)
750 rtx *px;
751 {
752 rtx x;
753 int i, j;
754
755 again:
756 x = *px;
757
758 /* If this is a CONST_DOUBLE, don't try to fix things up in
759 CONST_DOUBLE_MEM, because that would recurse infinitely. */
760 if (GET_CODE (x) == CONST_DOUBLE)
761 return;
762 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
763 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
764 {
765 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
766 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
767 RTX_INTEGRATED_P (new) = 1;
768
769 /* If the MEM was in a different mode than the constant (perhaps we
770 were only looking at the low-order part), surround it with a
771 SUBREG so we can save both modes. */
772
773 if (GET_MODE (x) != const_mode)
774 {
775 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
776 RTX_INTEGRATED_P (new) = 1;
777 }
778
779 *px = new;
780 save_constants (&XEXP (*px, 0));
781 }
782 else if (GET_CODE (x) == SYMBOL_REF
783 && CONSTANT_POOL_ADDRESS_P (x))
784 {
785 *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
786 save_constants (&XEXP (*px, 0));
787 RTX_INTEGRATED_P (*px) = 1;
788 }
789
790 else
791 {
792 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
793 int len = GET_RTX_LENGTH (GET_CODE (x));
794
795 for (i = len-1; i >= 0; i--)
796 {
797 switch (fmt[i])
798 {
799 case 'E':
800 for (j = 0; j < XVECLEN (x, i); j++)
801 save_constants (&XVECEXP (x, i, j));
802 break;
803
804 case 'e':
805 if (XEXP (x, i) == 0)
806 continue;
807 if (i == 0)
808 {
809 /* Hack tail-recursion here. */
810 px = &XEXP (x, 0);
811 goto again;
812 }
813 save_constants (&XEXP (x, i));
814 break;
815 }
816 }
817 }
818 }
819 \f
820 /* Note whether a parameter is modified or not. */
821
822 static void
823 note_modified_parmregs (reg, x)
824 rtx reg;
825 rtx x;
826 {
827 if (GET_CODE (reg) == REG && in_nonparm_insns
828 && REGNO (reg) < max_parm_reg
829 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
830 && parmdecl_map[REGNO (reg)] != 0)
831 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
832 }
833
834 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
835 according to `reg_map' and `label_map'. The original rtl insns
836 will be saved for inlining; this is used to make a copy
837 which is used to finish compiling the inline function itself.
838
839 If we find a "saved" constant pool entry, one which was replaced with
840 the value of the constant, convert it back to a constant pool entry.
841 Since the pool wasn't touched, this should simply restore the old
842 address.
843
844 All other kinds of rtx are copied except those that can never be
845 changed during compilation. */
846
847 static rtx
848 copy_for_inline (orig)
849 rtx orig;
850 {
851 register rtx x = orig;
852 register int i;
853 register enum rtx_code code;
854 register char *format_ptr;
855
856 if (x == 0)
857 return x;
858
859 code = GET_CODE (x);
860
861 /* These types may be freely shared. */
862
863 switch (code)
864 {
865 case QUEUED:
866 case CONST_INT:
867 case SYMBOL_REF:
868 case PC:
869 case CC0:
870 return x;
871
872 case CONST_DOUBLE:
873 /* We have to make a new CONST_DOUBLE to ensure that we account for
874 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
875 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
876 {
877 REAL_VALUE_TYPE d;
878
879 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
880 return immed_real_const_1 (d, GET_MODE (x));
881 }
882 else
883 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
884 VOIDmode);
885
886 case CONST:
887 /* Get constant pool entry for constant in the pool. */
888 if (RTX_INTEGRATED_P (x))
889 return validize_mem (force_const_mem (GET_MODE (x),
890 copy_for_inline (XEXP (x, 0))));
891 break;
892
893 case SUBREG:
894 /* Get constant pool entry, but access in different mode. */
895 if (RTX_INTEGRATED_P (x))
896 {
897 rtx new
898 = force_const_mem (GET_MODE (SUBREG_REG (x)),
899 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
900
901 PUT_MODE (new, GET_MODE (x));
902 return validize_mem (new);
903 }
904 break;
905
906 case ADDRESS:
907 /* If this is not a saved constant pool reference, it is an error.
908 Else get the constant pool address. */
909 if (! RTX_INTEGRATED_P (x))
910 abort ();
911
912 return XEXP (force_const_mem (GET_MODE (x),
913 copy_for_inline (XEXP (x, 0))), 0);
914
915 case ASM_OPERANDS:
916 /* If a single asm insn contains multiple output operands
917 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
918 We must make sure that the copied insn continues to share it. */
919 if (orig_asm_operands_vector == XVEC (orig, 3))
920 {
921 x = rtx_alloc (ASM_OPERANDS);
922 XSTR (x, 0) = XSTR (orig, 0);
923 XSTR (x, 1) = XSTR (orig, 1);
924 XINT (x, 2) = XINT (orig, 2);
925 XVEC (x, 3) = copy_asm_operands_vector;
926 XVEC (x, 4) = copy_asm_constraints_vector;
927 XSTR (x, 5) = XSTR (orig, 5);
928 XINT (x, 6) = XINT (orig, 6);
929 return x;
930 }
931 break;
932
933 case MEM:
934 /* A MEM is usually allowed to be shared if its address is constant
935 or is a constant plus one of the special registers.
936
937 We do not allow sharing of addresses that are either a special
938 register or the sum of a constant and a special register because
939 it is possible for unshare_all_rtl to copy the address into memory
940 that won't be saved. Although the MEM can safely be shared, and
941 won't be copied there, the address itself cannot be shared, and may
942 need to be copied.
943
944 There are also two exceptions with constants: The first is if the
945 constant is a LABEL_REF or the sum of the LABEL_REF
946 and an integer. This case can happen if we have an inline
947 function that supplies a constant operand to the call of another
948 inline function that uses it in a switch statement. In this case,
949 we will be replacing the LABEL_REF, so we have to replace this MEM
950 as well.
951
952 The second case is if we have a (const (plus (address ..) ...)).
953 In that case we need to put back the address of the constant pool
954 entry. */
955
956 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
957 && GET_CODE (XEXP (x, 0)) != LABEL_REF
958 && ! (GET_CODE (XEXP (x, 0)) == CONST
959 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
960 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
961 == LABEL_REF)
962 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
963 == ADDRESS)))))
964 return x;
965 break;
966
967 case LABEL_REF:
968 {
969 /* Must point to the new insn. */
970 return gen_rtx (LABEL_REF, GET_MODE (orig),
971 label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
972 }
973
974 case REG:
975 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
976 return reg_map [REGNO (x)];
977 else
978 return x;
979
980 case SET:
981 /* If a parm that gets modified lives in a pseudo-reg,
982 clear its TREE_READONLY to prevent certain optimizations. */
983 {
984 rtx dest = SET_DEST (x);
985
986 while (GET_CODE (dest) == STRICT_LOW_PART
987 || GET_CODE (dest) == ZERO_EXTRACT
988 || GET_CODE (dest) == SUBREG)
989 dest = XEXP (dest, 0);
990
991 if (GET_CODE (dest) == REG
992 && REGNO (dest) < max_parm_reg
993 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
994 && parmdecl_map[REGNO (dest)] != 0
995 /* The insn to load an arg pseudo from a stack slot
996 does not count as modifying it. */
997 && in_nonparm_insns)
998 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
999 }
1000 break;
1001
1002 #if 0 /* This is a good idea, but here is the wrong place for it. */
1003 /* Arrange that CONST_INTs always appear as the second operand
1004 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1005 always appear as the first. */
1006 case PLUS:
1007 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1008 || (XEXP (x, 1) == frame_pointer_rtx
1009 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1010 && XEXP (x, 1) == arg_pointer_rtx)))
1011 {
1012 rtx t = XEXP (x, 0);
1013 XEXP (x, 0) = XEXP (x, 1);
1014 XEXP (x, 1) = t;
1015 }
1016 break;
1017 #endif
1018 }
1019
1020 /* Replace this rtx with a copy of itself. */
1021
1022 x = rtx_alloc (code);
1023 bcopy (orig, x, (sizeof (*x) - sizeof (x->fld)
1024 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1025
1026 /* Now scan the subexpressions recursively.
1027 We can store any replaced subexpressions directly into X
1028 since we know X is not shared! Any vectors in X
1029 must be copied if X was copied. */
1030
1031 format_ptr = GET_RTX_FORMAT (code);
1032
1033 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1034 {
1035 switch (*format_ptr++)
1036 {
1037 case 'e':
1038 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1039 break;
1040
1041 case 'u':
1042 /* Change any references to old-insns to point to the
1043 corresponding copied insns. */
1044 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1045 break;
1046
1047 case 'E':
1048 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1049 {
1050 register int j;
1051
1052 XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
1053 for (j = 0; j < XVECLEN (x, i); j++)
1054 XVECEXP (x, i, j)
1055 = copy_for_inline (XVECEXP (x, i, j));
1056 }
1057 break;
1058 }
1059 }
1060
1061 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1062 {
1063 orig_asm_operands_vector = XVEC (orig, 3);
1064 copy_asm_operands_vector = XVEC (x, 3);
1065 copy_asm_constraints_vector = XVEC (x, 4);
1066 }
1067
1068 return x;
1069 }
1070
1071 /* Unfortunately, we need a global copy of const_equiv map for communication
1072 with a function called from note_stores. Be *very* careful that this
1073 is used properly in the presence of recursion. */
1074
1075 rtx *global_const_equiv_map;
1076 \f
1077 #define FIXED_BASE_PLUS_P(X) \
1078 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1079 && GET_CODE (XEXP (X, 0)) == REG \
1080 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1081 && REGNO (XEXP (X, 0)) < LAST_VIRTUAL_REGISTER)
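/* For example, FIXED_BASE_PLUS_P matches a constant offset from one of
   the virtual frame registers, such as
     (plus (reg virtual-stack-vars) (const_int 8))
   but not a PLUS whose base is an ordinary pseudo register or whose
   offset is not a CONST_INT.  */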
1082
1083 /* Integrate the procedure defined by FNDECL. Note that this function
1084 may wind up calling itself. Since the static variables are not
1085 reentrant, we do not assign them until after the possibility
1086 of recursion is eliminated.
1087
1088 If IGNORE is nonzero, do not produce a value.
1089 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1090
1091 Value is:
1092 (rtx)-1 if we could not substitute the function
1093 0 if we substituted it and it does not produce a value
1094 else an rtx for where the value is stored. */
1095
1096 rtx
1097 expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
1098 tree fndecl, parms;
1099 rtx target;
1100 int ignore;
1101 tree type;
1102 rtx structure_value_addr;
1103 {
1104 tree formal, actual, block;
1105 rtx header = DECL_SAVED_INSNS (fndecl);
1106 rtx insns = FIRST_FUNCTION_INSN (header);
1107 rtx parm_insns = FIRST_PARM_INSN (header);
1108 tree *arg_trees;
1109 rtx *arg_vals;
1110 rtx insn;
1111 int max_regno;
1112 register int i;
1113 int min_labelno = FIRST_LABELNO (header);
1114 int max_labelno = LAST_LABELNO (header);
1115 int nargs;
1116 rtx local_return_label = 0;
1117 rtx loc;
1118 rtx temp;
1119 struct inline_remap *map;
1120 rtx cc0_insn = 0;
1121 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1122
1123 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1124 max_regno = MAX_REGNUM (header) + 3;
1125 if (max_regno < FIRST_PSEUDO_REGISTER)
1126 abort ();
1127
1128 nargs = list_length (DECL_ARGUMENTS (fndecl));
1129
1130 /* We expect PARMS to have the right length; don't crash if not. */
1131 if (list_length (parms) != nargs)
1132 return (rtx) (HOST_WIDE_INT) -1;
1133 /* Also check that the parm types match. Since the appropriate
1134 conversions or default promotions have already been applied,
1135 the machine modes should match exactly. */
1136 for (formal = DECL_ARGUMENTS (fndecl),
1137 actual = parms;
1138 formal;
1139 formal = TREE_CHAIN (formal),
1140 actual = TREE_CHAIN (actual))
1141 {
1142 tree arg = TREE_VALUE (actual);
1143 enum machine_mode mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1144 if (mode != TYPE_MODE (TREE_TYPE (arg)))
1145 return (rtx) (HOST_WIDE_INT) -1;
1146 /* If they are block mode, the types should match exactly.
1147 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1148 which could happen if the parameter has incomplete type. */
1149 if (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal))
1150 return (rtx) (HOST_WIDE_INT) -1;
1151 }
1152
1153 /* Make a binding contour to keep inline cleanups called at
1154 outer function-scope level from looking like they are shadowing
1155 parameter declarations. */
1156 pushlevel (0);
1157
1158 /* Make a fresh binding contour that we can easily remove. */
1159 pushlevel (0);
1160 expand_start_bindings (0);
1161 if (GET_CODE (parm_insns) == NOTE
1162 && NOTE_LINE_NUMBER (parm_insns) > 0)
1163 emit_note (NOTE_SOURCE_FILE (parm_insns), NOTE_LINE_NUMBER (parm_insns));
1164
1165 /* Expand the function arguments. Do this first so that any
1166 new registers get created before we allocate the maps. */
1167
1168 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1169 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1170
1171 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1172 formal;
1173 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1174 {
1175 /* Actual parameter, converted to the type of the argument within the
1176 function. */
1177 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1178 /* Mode of the variable used within the function. */
1179 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1180 /* Where parameter is located in the function. */
1181 rtx copy;
1182
1183 emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));
1184
1185 arg_trees[i] = arg;
1186 loc = RTVEC_ELT (arg_vector, i);
1187
1188 /* If this is an object passed by invisible reference, we copy the
1189 object into a stack slot and save its address. If this will go
1190 into memory, we do nothing now. Otherwise, we just expand the
1191 argument. */
1192 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1193 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1194 {
1195 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
1196 rtx stack_slot = assign_stack_temp (mode, int_size_in_bytes (TREE_TYPE (arg)), 1);
1197
1198 store_expr (arg, stack_slot, 0);
1199
1200 arg_vals[i] = XEXP (stack_slot, 0);
1201 }
1202 else if (GET_CODE (loc) != MEM)
1203 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1204 else
1205 arg_vals[i] = 0;
1206
1207 if (arg_vals[i] != 0
1208 && (! TREE_READONLY (formal)
1209 /* If the parameter is not read-only, copy our argument through
1210 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1211 TARGET in any way. In the inline function, they will likely
1212 be two different pseudos, and `safe_from_p' will make all
1213 sorts of smart assumptions about their not conflicting.
1214 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1215 wrong, so put ARG_VALS[I] into a fresh register. */
1216 || (target != 0
1217 && (GET_CODE (arg_vals[i]) == REG
1218 || GET_CODE (arg_vals[i]) == SUBREG
1219 || GET_CODE (arg_vals[i]) == MEM)
1220 && reg_overlap_mentioned_p (arg_vals[i], target))))
1221 arg_vals[i] = copy_to_mode_reg (mode, arg_vals[i]);
1222 }
1223
1224 /* Allocate the structures we use to remap things. */
1225
1226 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1227 map->fndecl = fndecl;
1228
1229 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1230 bzero (map->reg_map, max_regno * sizeof (rtx));
1231
1232 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1233 map->label_map -= min_labelno;
1234
1235 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1236 bzero (map->insn_map, INSN_UID (header) * sizeof (rtx));
1237 map->min_insnno = 0;
1238 map->max_insnno = INSN_UID (header);
1239
1240 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1241 be large enough for all our pseudos. This is the number we are currently
1242 using plus the number in the called routine, plus 15 for each arg,
1243 five to compute the virtual frame pointer, and five for the return value.
1244 This should be enough for most cases. We do not reference entries
1245 outside the range of the map.
1246
1247 ??? These numbers are quite arbitrary and were obtained by
1248 experimentation. At some point, we should try to allocate the
1249 table after all the parameters are set up so we can more accurately
1250 estimate the number of pseudos we will need. */
1251
1252 map->const_equiv_map_size
1253 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
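/* A rough worked example of the sizing rule above: if the caller
   currently uses 200 registers (max_reg_num ()), the saved function
   accounts for another 80 (max_regno - FIRST_PSEUDO_REGISTER), and
   there are 3 arguments, the table gets 200 + 80 + 15*3 + 10 == 335
   entries.  */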
1254
1255 map->const_equiv_map
1256 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1257 bzero (map->const_equiv_map, map->const_equiv_map_size * sizeof (rtx));
1258
1259 map->const_age_map
1260 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1261 bzero (map->const_age_map, map->const_equiv_map_size * sizeof (unsigned));
1262 map->const_age = 0;
1263
1264 /* Record the current insn in case we have to set up pointers to frame
1265 and argument memory blocks. */
1266 map->insns_at_start = get_last_insn ();
1267
1268 /* Update the outgoing argument size to allow for those in the inlined
1269 function. */
1270 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1271 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1272
1273 /* If the inline function needs to make PIC references, that means
1274 that this function's PIC offset table must be used. */
1275 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1276 current_function_uses_pic_offset_table = 1;
1277
1278 /* Process each argument. For each, set up things so that the function's
1279 reference to the argument will refer to the argument being passed.
1280 We only replace REG with REG here. Any simplifications are done
1281 via const_equiv_map.
1282
1283 We make two passes: In the first, we deal with parameters that will
1284 be placed into registers, since we need to ensure that the allocated
1285 register number fits in const_equiv_map. Then we store all non-register
1286 parameters into their memory location. */
1287
1288 for (i = 0; i < nargs; i++)
1289 {
1290 rtx copy = arg_vals[i];
1291
1292 loc = RTVEC_ELT (arg_vector, i);
1293
1294 /* There are three cases, each handled separately. */
1295 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1296 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1297 {
1298 /* This must be an object passed by invisible reference (it could
1299 also be a variable-sized object, but we forbid inlining functions
1300 with variable-sized arguments). COPY is the address of the
1301 actual value (this computation will cause it to be copied). We
1302 map that address for the register, noting the actual address as
1303 an equivalent in case it can be substituted into the insns. */
1304
1305 if (GET_CODE (copy) != REG)
1306 {
1307 temp = copy_addr_to_reg (copy);
1308 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1309 {
1310 map->const_equiv_map[REGNO (temp)] = copy;
1311 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1312 }
1313 copy = temp;
1314 }
1315 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1316 }
1317 else if (GET_CODE (loc) == MEM)
1318 {
1319 /* This is the case of a parameter that lives in memory.
1320 It will live in the block we allocate in the called routine's
1321 frame that simulates the incoming argument area. Do nothing
1322 now; we will call store_expr later. */
1323 ;
1324 }
1325 else if (GET_CODE (loc) == REG)
1326 {
1327 /* This is the good case where the parameter is in a register.
1328 If it is read-only and our argument is a constant, set up the
1329 constant equivalence. */
1330 if (GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1331 {
1332 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1333 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1334 {
1335 map->const_equiv_map[REGNO (temp)] = copy;
1336 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1337 }
1338 copy = temp;
1339 }
1340 map->reg_map[REGNO (loc)] = copy;
1341 }
1342 else
1343 abort ();
1344
1345 /* Free any temporaries we made setting up this parameter. */
1346 free_temp_slots ();
1347 }
1348
1349 /* Now do the parameters that will be placed in memory. */
1350
1351 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1352 formal; formal = TREE_CHAIN (formal), i++)
1353 {
1354 rtx copy = arg_vals[i];
1355
1356 loc = RTVEC_ELT (arg_vector, i);
1357
1358 if (GET_CODE (loc) == MEM
1359 /* Exclude case handled above. */
1360 && ! (GET_CODE (XEXP (loc, 0)) == REG
1361 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1362 {
1363 emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));
1364
1365 /* Compute the address in the area we reserved and store the
1366 value there. */
1367 temp = copy_rtx_and_substitute (loc, map);
1368 subst_constants (&temp, NULL_RTX, map);
1369 apply_change_group ();
1370 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1371 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1372 store_expr (arg_trees[i], temp, 0);
1373
1374 /* Free any temporaries we made setting up this parameter. */
1375 free_temp_slots ();
1376 }
1377 }
1378
1379 /* Deal with the places that the function puts its result.
1380 We are driven by what is placed into DECL_RESULT.
1381
1382 Initially, we assume that we don't need any special handling for
1383 REG_FUNCTION_VALUE_P. */
1384
1385 map->inline_target = 0;
1386 loc = DECL_RTL (DECL_RESULT (fndecl));
1387 if (TYPE_MODE (type) == VOIDmode)
1388 /* There is no return value to worry about. */
1389 ;
1390 else if (GET_CODE (loc) == MEM)
1391 {
1392 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1393 abort ();
1394
1395 /* Pass the function the address in which to return a structure value.
1396 Note that a constructor can cause someone to call us with
1397 STRUCTURE_VALUE_ADDR, but the initialization takes place
1398 via the first parameter, rather than the struct return address.
1399
1400 We have two cases: If the address is a simple register indirect,
1401 use the mapping mechanism to point that register to our structure
1402 return address. Otherwise, store the structure return value into
1403 the place that it will be referenced from. */
1404
1405 if (GET_CODE (XEXP (loc, 0)) == REG)
1406 {
1407 temp = force_reg (Pmode, structure_value_addr);
1408 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1409 if (CONSTANT_P (structure_value_addr)
1410 || (GET_CODE (structure_value_addr) == PLUS
1411 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1412 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1413 {
1414 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1415 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1416 }
1417 }
1418 else
1419 {
1420 temp = copy_rtx_and_substitute (loc, map);
1421 subst_constants (&temp, NULL_RTX, map);
1422 apply_change_group ();
1423 emit_move_insn (temp, structure_value_addr);
1424 }
1425 }
1426 else if (ignore)
1427 /* We will ignore the result value, so don't look at its structure.
1428 Note that preparations for an aggregate return value
1429 do need to be made (above) even if it will be ignored. */
1430 ;
1431 else if (GET_CODE (loc) == REG)
1432 {
1433 /* The function returns an object in a register and we use the return
1434 value. Set up our target for remapping. */
1435
1436 /* Machine mode function was declared to return. */
1437 enum machine_mode departing_mode = TYPE_MODE (type);
1438 /* (Possibly wider) machine mode it actually computes
1439 (for the sake of callers that fail to declare it right). */
1440 enum machine_mode arriving_mode
1441 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1442 rtx reg_to_map;
1443
1444 /* Don't use MEMs as direct targets because on some machines
1445 substituting a MEM for a REG makes invalid insns.
1446 Let the combiner substitute the MEM if that is valid. */
1447 if (target == 0 || GET_CODE (target) != REG
1448 || GET_MODE (target) != departing_mode)
1449 target = gen_reg_rtx (departing_mode);
1450
1451 /* If function's value was promoted before return,
1452 avoid machine mode mismatch when we substitute INLINE_TARGET.
1453 But TARGET is what we will return to the caller. */
1454 if (arriving_mode != departing_mode)
1455 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1456 else
1457 reg_to_map = target;
1458
1459 /* Usually, the result value is the machine's return register.
1460 Sometimes it may be a pseudo. Handle both cases. */
1461 if (REG_FUNCTION_VALUE_P (loc))
1462 map->inline_target = reg_to_map;
1463 else
1464 map->reg_map[REGNO (loc)] = reg_to_map;
1465 }
1466
1467 /* Make new label equivalences for the labels in the called function. */
1468 for (i = min_labelno; i < max_labelno; i++)
1469 map->label_map[i] = gen_label_rtx ();
1470
1471 /* Perform postincrements before actually calling the function. */
1472 emit_queue ();
1473
1474 /* Clean up stack so that variables might have smaller offsets. */
1475 do_pending_stack_adjust ();
1476
1477 /* Save a copy of the location of const_equiv_map for mark_stores, called
1478 via note_stores. */
1479 global_const_equiv_map = map->const_equiv_map;
1480
1481 /* Now copy the insns one by one. Do this in two passes, first the insns and
1482 then their REG_NOTES, just like save_for_inline. */
1483
1484 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1485
1486 for (insn = insns; insn; insn = NEXT_INSN (insn))
1487 {
1488 rtx copy, pattern;
1489
1490 map->orig_asm_operands_vector = 0;
1491
1492 switch (GET_CODE (insn))
1493 {
1494 case INSN:
1495 pattern = PATTERN (insn);
1496 copy = 0;
1497 if (GET_CODE (pattern) == USE
1498 && GET_CODE (XEXP (pattern, 0)) == REG
1499 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1500 /* The (USE (REG n)) at return from the function should
1501 be ignored since we are changing (REG n) into
1502 inline_target. */
1503 break;
1504
1505 /* Ignore setting a function value that we don't want to use. */
1506 if (map->inline_target == 0
1507 && GET_CODE (pattern) == SET
1508 && GET_CODE (SET_DEST (pattern)) == REG
1509 && REG_FUNCTION_VALUE_P (SET_DEST (pattern)))
1510 {
1511 if (volatile_refs_p (SET_SRC (pattern)))
1512 {
1513 /* If we must not delete the source,
1514 load it into a new temporary. */
1515 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1516 SET_DEST (PATTERN (copy))
1517 = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (copy))));
1518 }
1519 else
1520 break;
1521 }
1522 else
1523 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1524 /* REG_NOTES will be copied later. */
1525
1526 #ifdef HAVE_cc0
1527 /* If this insn is setting CC0, it may need to look at
1528 the insn that uses CC0 to see what type of insn it is.
1529 In that case, the call to recog via validate_change will
1530 fail. So don't substitute constants here. Instead,
1531 do it when we emit the following insn.
1532
1533 For example, see the pyr.md file. That machine has signed and
1534 unsigned compares. The compare patterns must check the
1535 following branch insn to see what kind of compare to
1536 emit.
1537
1538 If the previous insn set CC0, substitute constants on it as
1539 well. */
1540 if (sets_cc0_p (PATTERN (copy)) != 0)
1541 cc0_insn = copy;
1542 else
1543 {
1544 if (cc0_insn)
1545 try_constants (cc0_insn, map);
1546 cc0_insn = 0;
1547 try_constants (copy, map);
1548 }
1549 #else
1550 try_constants (copy, map);
1551 #endif
1552 break;
1553
1554 case JUMP_INSN:
1555 if (GET_CODE (PATTERN (insn)) == RETURN)
1556 {
1557 if (local_return_label == 0)
1558 local_return_label = gen_label_rtx ();
1559 pattern = gen_jump (local_return_label);
1560 }
1561 else
1562 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1563
1564 copy = emit_jump_insn (pattern);
1565
1566 #ifdef HAVE_cc0
1567 if (cc0_insn)
1568 try_constants (cc0_insn, map);
1569 cc0_insn = 0;
1570 #endif
1571 try_constants (copy, map);
1572
1573 /* If this used to be a conditional jump insn whose branch
1574 direction is now known, we must do something special. */
1575 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1576 {
1577 #ifdef HAVE_cc0
1578 /* The previous insn set cc0 for us. So delete it. */
1579 delete_insn (PREV_INSN (copy));
1580 #endif
1581
1582 /* If this is now a no-op, delete it. */
1583 if (map->last_pc_value == pc_rtx)
1584 {
1585 delete_insn (copy);
1586 copy = 0;
1587 }
1588 else
1589 /* Otherwise, this is an unconditional jump, so we must put a
1590 BARRIER after it. We could do some dead code elimination
1591 here, but jump.c will do it just as well. */
1592 emit_barrier ();
1593 }
1594 break;
1595
1596 case CALL_INSN:
1597 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1598 copy = emit_call_insn (pattern);
1599
1600 #ifdef HAVE_cc0
1601 if (cc0_insn)
1602 try_constants (cc0_insn, map);
1603 cc0_insn = 0;
1604 #endif
1605 try_constants (copy, map);
1606
1607 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1608 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1609 map->const_equiv_map[i] = 0;
1610 break;
1611
1612 case CODE_LABEL:
1613 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1614 LABEL_NAME (copy) = LABEL_NAME (insn);
1615 map->const_age++;
1616 break;
1617
1618 case BARRIER:
1619 copy = emit_barrier ();
1620 break;
1621
1622 case NOTE:
1623 /* It is important to discard function-end and function-beg notes,
1624 so we have only one of each in the current function.
1625 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1626 deleted these in the copy used for continuing compilation,
1627 not the copy used for inlining). */
1628 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1629 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1630 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1631 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1632 else
1633 copy = 0;
1634 break;
1635
1636 default:
1637 abort ();
1638 break;
1639 }
1640
1641 if (copy)
1642 RTX_INTEGRATED_P (copy) = 1;
1643
1644 map->insn_map[INSN_UID (insn)] = copy;
1645 }
1646
1647 /* Now copy the REG_NOTES. */
1648 for (insn = insns; insn; insn = NEXT_INSN (insn))
1649 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1650 && map->insn_map[INSN_UID (insn)])
1651 REG_NOTES (map->insn_map[INSN_UID (insn)])
1652 = copy_rtx_and_substitute (REG_NOTES (insn), map);
1653
1654 if (local_return_label)
1655 emit_label (local_return_label);
1656
1657 /* Make copies of the decls of the symbols in the inline function, so that
1658 the copies of the variables get declared in the current function. Set
1659 up things so that lookup_static_chain knows to interpret registers
1660 in SAVE_EXPRs for TYPE_SIZEs as local. */
1661
1662 inline_function_decl = fndecl;
1663 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1664 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
1665 inline_function_decl = 0;
1666
1667 /* End the scope containing the copied formal parameter variables
1668 and copied LABEL_DECLs. */
1669
1670 expand_end_bindings (getdecls (), 1, 1);
1671 block = poplevel (1, 1, 0);
1672 BLOCK_ABSTRACT_ORIGIN (block) = fndecl;
1673 poplevel (0, 0, 0);
1674 emit_line_note (input_filename, lineno);
1675
1676 if (structure_value_addr)
1677 return gen_rtx (MEM, TYPE_MODE (type),
1678 memory_address (TYPE_MODE (type), structure_value_addr));
1679 return target;
1680 }
1681 \f
1682 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1683 push all of those decls and give each one the corresponding home. */
1684
1685 static void
1686 integrate_parm_decls (args, map, arg_vector)
1687 tree args;
1688 struct inline_remap *map;
1689 rtvec arg_vector;
1690 {
1691 register tree tail;
1692 register int i;
1693
1694 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1695 {
1696 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1697 TREE_TYPE (tail));
1698 rtx new_decl_rtl
1699 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1700
1701 /* These args would always appear unused, if not for this. */
1702 TREE_USED (decl) = 1;
1703 /* Prevent warning for shadowing with these. */
1704 DECL_ABSTRACT_ORIGIN (decl) = tail;
1705 pushdecl (decl);
1706 /* Fully instantiate the address with the equivalent form so that the
1707 debugging information contains the actual register, instead of the
1708 virtual register. Do this by not passing an insn to
1709 subst_constants. */
1710 subst_constants (&new_decl_rtl, NULL_RTX, map);
1711 apply_change_group ();
1712 DECL_RTL (decl) = new_decl_rtl;
1713 }
1714 }
1715
1716 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1717 current function a tree of contexts isomorphic to the one that is given.
1718
1719 LEVEL indicates how far down into the BLOCK tree is the node we are
1720 currently traversing. It is always zero except for recursive calls.
1721
1722 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1723 registers used in the DECL_RTL field should be remapped. If it is zero,
1724 no mapping is necessary. */
1725
1726 static void
1727 integrate_decl_tree (let, level, map)
1728 tree let;
1729 int level;
1730 struct inline_remap *map;
1731 {
1732 tree t, node;
1733
1734 if (level > 0)
1735 pushlevel (0);
1736
1737 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1738 {
1739 tree d = build_decl (TREE_CODE (t), DECL_NAME (t), TREE_TYPE (t));
1740 DECL_SOURCE_LINE (d) = DECL_SOURCE_LINE (t);
1741 DECL_SOURCE_FILE (d) = DECL_SOURCE_FILE (t);
1742 if (map != 0 && DECL_RTL (t) != 0)
1743 {
1744 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
1745 /* Fully instantiate the address with the equivalent form so that the
1746 debugging information contains the actual register, instead of the
1747 virtual register. Do this by not passing an insn to
1748 subst_constants. */
1749 subst_constants (&DECL_RTL (d), NULL_RTX, map);
1750 apply_change_group ();
1751 }
1752 else if (DECL_RTL (t))
1753 DECL_RTL (d) = copy_rtx (DECL_RTL (t));
1754 DECL_EXTERNAL (d) = DECL_EXTERNAL (t);
1755 TREE_STATIC (d) = TREE_STATIC (t);
1756 TREE_PUBLIC (d) = TREE_PUBLIC (t);
1757 TREE_CONSTANT (d) = TREE_CONSTANT (t);
1758 TREE_ADDRESSABLE (d) = TREE_ADDRESSABLE (t);
1759 TREE_READONLY (d) = TREE_READONLY (t);
1760 TREE_SIDE_EFFECTS (d) = TREE_SIDE_EFFECTS (t);
1761 /* These args would always appear unused, if not for this. */
1762 TREE_USED (d) = 1;
1763 /* Prevent warning for shadowing with these. */
1764 DECL_ABSTRACT_ORIGIN (d) = t;
1765 pushdecl (d);
1766 }
1767
1768 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1769 integrate_decl_tree (t, level + 1, map);
1770
1771 if (level > 0)
1772 {
1773 node = poplevel (1, 0, 0);
1774 if (node)
1775 {
1776 TREE_USED (node) = TREE_USED (let);
1777 BLOCK_ABSTRACT_ORIGIN (node) = let;
1778 }
1779 }
1780 }
1781 \f
1782 /* Create a new copy of an rtx.
1783 Recursively copies the operands of the rtx,
1784 except for those few rtx codes that are sharable.
1785
1786 We always return an rtx that is similar to that incoming rtx, with the
1787 exception of possibly changing a REG to a SUBREG or vice versa. No
1788 rtl is ever emitted.
1789
1790 Handle constants that need to be placed in the constant pool by
1791 calling `force_const_mem'. */
1792
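/* Illustrative sketch of the substitution (register numbers invented):
if MAP->reg_map maps pseudo 50 of the inlined function to (reg:SI 120)
in the current function, then copying
(set (reg:SI 50) (plus:SI (reg:SI 50) (const_int 4)))
produces
(set (reg:SI 120) (plus:SI (reg:SI 120) (const_int 4))).
A pseudo with no reg_map entry yet simply gets a fresh pseudo of the
same mode allocated for it. */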
1793 rtx
1794 copy_rtx_and_substitute (orig, map)
1795 register rtx orig;
1796 struct inline_remap *map;
1797 {
1798 register rtx copy, temp;
1799 register int i, j;
1800 register RTX_CODE code;
1801 register enum machine_mode mode;
1802 register char *format_ptr;
1803 int regno;
1804
1805 if (orig == 0)
1806 return 0;
1807
1808 code = GET_CODE (orig);
1809 mode = GET_MODE (orig);
1810
1811 switch (code)
1812 {
1813 case REG:
1814 /* If the stack pointer register shows up, it must be part of
1815 stack-adjustments (*not* because we eliminated the frame pointer!).
1816 Small hard registers are returned as-is. Pseudo-registers
1817 go through their `reg_map'. */
1818 regno = REGNO (orig);
1819 if (regno <= LAST_VIRTUAL_REGISTER)
1820 {
1821 /* Some hard registers are also mapped,
1822 but others are not translated. */
1823 if (map->reg_map[regno] != 0)
1824 return map->reg_map[regno];
1825
1826 /* If this is the virtual frame pointer, make space in current
1827 function's stack frame for the stack frame of the inline function.
1828
1829 Copy the address of this area into a pseudo. Map
1830 virtual_stack_vars_rtx to this pseudo and set up a constant
1831 equivalence for it to be the address. This will substitute the
1832 address into insns where it can be substituted and use the new
1833 pseudo where it can't. */
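/* Concrete sketch (sizes invented): for a DECL_FRAME_SIZE of 24 bytes
we emit, at map->insns_at_start, insns that compute the address of a
24-byte stack temporary into a fresh pseudo, map
VIRTUAL_STACK_VARS_REGNUM to that pseudo, and record the address
expression itself as the pseudo's constant equivalent. */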
1834 if (regno == VIRTUAL_STACK_VARS_REGNUM)
1835 {
1836 rtx loc, seq;
1837 int size = DECL_FRAME_SIZE (map->fndecl);
1838 int rounded;
1839
1840 start_sequence ();
1841 loc = assign_stack_temp (BLKmode, size, 1);
1842 loc = XEXP (loc, 0);
1843 #ifdef FRAME_GROWS_DOWNWARD
1844 /* In this case, virtual_stack_vars_rtx points to one byte
1845 higher than the top of the frame area. So compute the offset
1846 to one byte higher than our substitute frame.
1847 Keep the fake frame pointer aligned like a real one. */
1848 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1849 loc = plus_constant (loc, rounded);
1850 #endif
1851 map->reg_map[regno] = temp = force_operand (loc, NULL_RTX);
1852 map->const_equiv_map[REGNO (temp)] = loc;
1853 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1854
1855 seq = gen_sequence ();
1856 end_sequence ();
1857 emit_insn_after (seq, map->insns_at_start);
1858 return temp;
1859 }
1860 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
1861 {
1862 /* Do the same for a block to contain any arguments referenced
1863 in memory. */
1864 rtx loc, seq;
1865 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
1866
1867 start_sequence ();
1868 loc = assign_stack_temp (BLKmode, size, 1);
1869 loc = XEXP (loc, 0);
1870 map->reg_map[regno] = temp = force_operand (loc, NULL_RTX);
1871 map->const_equiv_map[REGNO (temp)] = loc;
1872 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1873
1874 seq = gen_sequence ();
1875 end_sequence ();
1876 emit_insn_after (seq, map->insns_at_start);
1877 return temp;
1878 }
1879 else if (REG_FUNCTION_VALUE_P (orig))
1880 {
1881 /* This is a reference to the function return value. If
1882 the function doesn't have a return value, error. If the
1883 mode doesn't agree, make a SUBREG. */
1884 if (map->inline_target == 0)
1885 /* Must be unrolling loops or replicating code if we
1886 reach here, so return the register unchanged. */
1887 return orig;
1888 else if (mode != GET_MODE (map->inline_target))
1889 return gen_rtx (SUBREG, mode, map->inline_target, 0);
1890 else
1891 return map->inline_target;
1892 }
1893 return orig;
1894 }
1895 if (map->reg_map[regno] == NULL)
1896 {
1897 map->reg_map[regno] = gen_reg_rtx (mode);
1898 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1899 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1900 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1901 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1902 }
1903 return map->reg_map[regno];
1904
1905 case SUBREG:
1906 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
1907 /* SUBREG is ordinary, but don't make nested SUBREGs. */
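/* For example (invented register numbers, assuming a 32-bit word):
copying (subreg:SI (reg:DI 60) 1) when reg 60 is mapped to
(subreg:DI (reg:TI 200) 1) yields (subreg:SI (reg:TI 200) 2);
the two SUBREG_WORDs are summed instead of nesting the SUBREGs. */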
1908 if (GET_CODE (copy) == SUBREG)
1909 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
1910 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1911 else
1912 return gen_rtx (SUBREG, GET_MODE (orig), copy,
1913 SUBREG_WORD (orig));
1914
1915 case USE:
1916 case CLOBBER:
1917 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1918 to (use foo). */
1919 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
1920 if (GET_CODE (copy) == SUBREG)
1921 copy = SUBREG_REG (copy);
1922 return gen_rtx (code, VOIDmode, copy);
1923
1924 case CODE_LABEL:
1925 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
1926 = LABEL_PRESERVE_P (orig);
1927 return map->label_map[CODE_LABEL_NUMBER (orig)];
1928
1929 case LABEL_REF:
1930 copy = rtx_alloc (LABEL_REF);
1931 PUT_MODE (copy, mode);
1932 XEXP (copy, 0) = map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))];
1933 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1934 return copy;
1935
1936 case PC:
1937 case CC0:
1938 case CONST_INT:
1939 return orig;
1940
1941 case SYMBOL_REF:
1942 /* Symbols which represent the address of a label stored in the constant
1943 pool must be modified to point to a constant pool entry for the
1944 remapped label. Otherwise, symbols are returned unchanged. */
1945 if (CONSTANT_POOL_ADDRESS_P (orig))
1946 {
1947 rtx constant = get_pool_constant (orig);
1948 if (GET_CODE (constant) == LABEL_REF)
1949 {
1950 copy = rtx_alloc (LABEL_REF);
1951 PUT_MODE (copy, mode);
1952 XEXP (copy, 0)
1953 = map->label_map[CODE_LABEL_NUMBER (XEXP (constant, 0))];
1954 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1955 copy = force_const_mem (Pmode, copy);
1956 return XEXP (copy, 0);
1957 }
1958 }
1959 return orig;
1960
1961 case CONST_DOUBLE:
1962 /* We have to make a new copy of this CONST_DOUBLE because we don't want
1963 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
1964 duplicate of a CONST_DOUBLE we have already seen. */
1965 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
1966 {
1967 REAL_VALUE_TYPE d;
1968
1969 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
1970 return immed_real_const_1 (d, GET_MODE (orig));
1971 }
1972 else
1973 return immed_double_const (CONST_DOUBLE_LOW (orig),
1974 CONST_DOUBLE_HIGH (orig), VOIDmode);
1975
1976 case CONST:
1977 /* Make new constant pool entry for a constant
1978 that was in the pool of the inline function. */
1979 if (RTX_INTEGRATED_P (orig))
1980 {
1981 /* If this was an address of a constant pool entry that itself
1982 had to be placed in the constant pool, it might not be a
1983 valid address. So the recursive call below might turn it
1984 into a register. In that case, it isn't a constant any
1985 more, so return it. This has the potential of changing a
1986 MEM into a REG, but we'll assume that it is safe. */
1987 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
1988 if (! CONSTANT_P (temp))
1989 return temp;
1990 return validize_mem (force_const_mem (GET_MODE (orig), temp));
1991 }
1992 break;
1993
1994 case ADDRESS:
1995 /* If from constant pool address, make new constant pool entry and
1996 return its address. */
1997 if (! RTX_INTEGRATED_P (orig))
1998 abort ();
1999
2000 temp = force_const_mem (GET_MODE (orig),
2001 copy_rtx_and_substitute (XEXP (orig, 0), map));
2002
2003 #if 0
2004 /* Legitimizing the address here is incorrect.
2005
2006 The only ADDRESS rtx's that can reach here are ones created by
2007 save_constants. Hence the operand of the ADDRESS is always legal
2008 in this position of the instruction, since the original rtx without
2009 the ADDRESS was legal.
2010
2011 The reason we don't legitimize the address here is that on the
2012 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2013 This code forces the operand of the address to a register, which
2014 fails because we can not take the HIGH part of a register.
2015
2016 Also, change_address may create new registers. These registers
2017 will not have valid reg_map entries. This can cause try_constants()
2018 to fail because it assumes that all registers in the rtx have valid
2019 reg_map entries, and it may end up replacing one of these new
2020 registers with junk. */
2021
2022 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2023 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2024 #endif
2025
2026 return XEXP (temp, 0);
2027
2028 case ASM_OPERANDS:
2029 /* If a single asm insn contains multiple output operands
2030 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2031 We must make sure that the copied insn continues to share it. */
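/* For instance, a statement such as
asm ("..." : "=r" (a), "=r" (b) : "r" (c));
expands to a PARALLEL of two SETs whose sources are distinct
ASM_OPERANDS rtx's sharing one operand vector; the copy must keep
that sharing intact for the copied insn to remain valid. */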
2032 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2033 {
2034 copy = rtx_alloc (ASM_OPERANDS);
2035 XSTR (copy, 0) = XSTR (orig, 0);
2036 XSTR (copy, 1) = XSTR (orig, 1);
2037 XINT (copy, 2) = XINT (orig, 2);
2038 XVEC (copy, 3) = map->copy_asm_operands_vector;
2039 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2040 XSTR (copy, 5) = XSTR (orig, 5);
2041 XINT (copy, 6) = XINT (orig, 6);
2042 return copy;
2043 }
2044 break;
2045
2046 case CALL:
2047 /* This is given special treatment because the first
2048 operand of a CALL is a (MEM ...) which may get
2049 forced into a register for cse. This is undesirable
2050 if function-address cse isn't wanted or if we won't do cse. */
2051 #ifndef NO_FUNCTION_CSE
2052 if (! (optimize && ! flag_no_function_cse))
2053 #endif
2054 return gen_rtx (CALL, GET_MODE (orig),
2055 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2056 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2057 copy_rtx_and_substitute (XEXP (orig, 1), map));
2058 break;
2059
2060 #if 0
2061 /* Must be ifdefed out for loop unrolling to work. */
2062 case RETURN:
2063 abort ();
2064 #endif
2065
2066 case SET:
2067 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2068 Don't alter that.
2069 If the nonlocal goto is into the current function,
2070 this will result in unnecessarily bad code, but should work. */
2071 if (SET_DEST (orig) == virtual_stack_vars_rtx
2072 || SET_DEST (orig) == virtual_incoming_args_rtx)
2073 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2074 copy_rtx_and_substitute (SET_SRC (orig), map));
2075 break;
2076
2077 case MEM:
2078 copy = rtx_alloc (MEM);
2079 PUT_MODE (copy, mode);
2080 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2081 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2082 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2083 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2084 return copy;
2085 }
2086
2087 copy = rtx_alloc (code);
2088 PUT_MODE (copy, mode);
2089 copy->in_struct = orig->in_struct;
2090 copy->volatil = orig->volatil;
2091 copy->unchanging = orig->unchanging;
2092
2093 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2094
2095 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2096 {
2097 switch (*format_ptr++)
2098 {
2099 case '0':
2100 break;
2101
2102 case 'e':
2103 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2104 break;
2105
2106 case 'u':
2107 /* Change any references to old-insns to point to the
2108 corresponding copied insns. */
2109 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2110 break;
2111
2112 case 'E':
2113 XVEC (copy, i) = XVEC (orig, i);
2114 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2115 {
2116 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2117 for (j = 0; j < XVECLEN (copy, i); j++)
2118 XVECEXP (copy, i, j)
2119 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2120 }
2121 break;
2122
2123 case 'w':
2124 XWINT (copy, i) = XWINT (orig, i);
2125 break;
2126
2127 case 'i':
2128 XINT (copy, i) = XINT (orig, i);
2129 break;
2130
2131 case 's':
2132 XSTR (copy, i) = XSTR (orig, i);
2133 break;
2134
2135 default:
2136 abort ();
2137 }
2138 }
2139
2140 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2141 {
2142 map->orig_asm_operands_vector = XVEC (orig, 3);
2143 map->copy_asm_operands_vector = XVEC (copy, 3);
2144 map->copy_asm_constraints_vector = XVEC (copy, 4);
2145 }
2146
2147 return copy;
2148 }
2149 \f
2150 /* Substitute known constant values into INSN, if that is valid. */
2151
2152 void
2153 try_constants (insn, map)
2154 rtx insn;
2155 struct inline_remap *map;
2156 {
2157 int i;
2158
2159 map->num_sets = 0;
2160 subst_constants (&PATTERN (insn), insn, map);
2161
2162 /* Apply the changes if they are valid; otherwise discard them. */
2163 apply_change_group ();
2164
2165 /* Show we don't know the value of anything stored or clobbered. */
2166 note_stores (PATTERN (insn), mark_stores);
2167 map->last_pc_value = 0;
2168 #ifdef HAVE_cc0
2169 map->last_cc0_value = 0;
2170 #endif
2171
2172 /* Set up any constant equivalences made in this insn. */
2173 for (i = 0; i < map->num_sets; i++)
2174 {
2175 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2176 {
2177 int regno = REGNO (map->equiv_sets[i].dest);
2178
2179 if (map->const_equiv_map[regno] == 0
2180 /* Following clause is a hack to make the case work where GNU C++
2181 reassigns a variable to make cse work right. */
2182 || ! rtx_equal_p (map->const_equiv_map[regno],
2183 map->equiv_sets[i].equiv))
2184 {
2185 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2186 map->const_age_map[regno] = map->const_age;
2187 }
2188 }
2189 else if (map->equiv_sets[i].dest == pc_rtx)
2190 map->last_pc_value = map->equiv_sets[i].equiv;
2191 #ifdef HAVE_cc0
2192 else if (map->equiv_sets[i].dest == cc0_rtx)
2193 map->last_cc0_value = map->equiv_sets[i].equiv;
2194 #endif
2195 }
2196 }
2197 \f
2198 /* Substitute known constants for pseudo regs in the contents of LOC,
2199 which are part of INSN.
2200 If INSN is zero, the substitution should always be done (this is used to
2201 update DECL_RTL).
2202 These changes are taken out by try_constants if the result is not valid.
2203
2204 Note that we are more concerned with determining when the result of a SET
2205 is a constant, for further propagation, than actually inserting constants
2206 into insns; cse will do the latter task better.
2207
2208 This function is also used to adjust addresses of items previously addressed
2209 via the virtual stack variable or virtual incoming arguments registers. */
2210
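/* Illustrative example (register numbers and values invented): if
MAP->const_equiv_map[70] is (const_int 12) and its recorded age is
recent enough, then substituting into
(set (reg:SI 80) (plus:SI (reg:SI 70) (reg:SI 70)))
proposes (const_int 12) for each use of reg 70; the PLUS is then
folded to (const_int 24) by the simplify_* calls below, and
try_constants keeps the whole change group only if the rewritten
insn is still valid. */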
2211 static void
2212 subst_constants (loc, insn, map)
2213 rtx *loc;
2214 rtx insn;
2215 struct inline_remap *map;
2216 {
2217 rtx x = *loc;
2218 register int i;
2219 register enum rtx_code code;
2220 register char *format_ptr;
2221 int num_changes = num_validated_changes ();
2222 rtx new = 0;
2223 enum machine_mode op0_mode;
2224
2225 code = GET_CODE (x);
2226
2227 switch (code)
2228 {
2229 case PC:
2230 case CONST_INT:
2231 case CONST_DOUBLE:
2232 case SYMBOL_REF:
2233 case CONST:
2234 case LABEL_REF:
2235 case ADDRESS:
2236 return;
2237
2238 #ifdef HAVE_cc0
2239 case CC0:
2240 validate_change (insn, loc, map->last_cc0_value, 1);
2241 return;
2242 #endif
2243
2244 case USE:
2245 case CLOBBER:
2246 /* The only thing we can do with a USE or CLOBBER is possibly do
2247 some substitutions in a MEM within it. */
2248 if (GET_CODE (XEXP (x, 0)) == MEM)
2249 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2250 return;
2251
2252 case REG:
2253 /* Substitute for parms and known constants. Don't replace
2254 hard regs used as user variables with constants. */
2255 {
2256 int regno = REGNO (x);
2257
2258 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2259 && regno < map->const_equiv_map_size
2260 && map->const_equiv_map[regno] != 0
2261 && map->const_age_map[regno] >= map->const_age)
2262 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2263 return;
2264 }
2265
2266 case SUBREG:
2267 /* SUBREG is ordinary, but don't make nested SUBREGs, and try to simplify
2268 constants. */
2269 {
2270 rtx inner = SUBREG_REG (x);
2271 rtx new = 0;
2272
2273 /* We can't call subst_constants on &SUBREG_REG (x) because any
2274 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2275 see what is inside, try to form the new SUBREG and see if that is
2276 valid. We handle two cases: extracting a full word in an
2277 integral mode and extracting the low part. */
2278 subst_constants (&inner, NULL_RTX, map);
2279
2280 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2281 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2282 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2283 new = operand_subword (inner, SUBREG_WORD (x), 0,
2284 GET_MODE (SUBREG_REG (x)));
2285
2286 if (new == 0 && subreg_lowpart_p (x))
2287 new = gen_lowpart_common (GET_MODE (x), inner);
2288
2289 if (new)
2290 validate_change (insn, loc, new, 1);
2291
2292 return;
2293 }
2294
2295 case MEM:
2296 subst_constants (&XEXP (x, 0), insn, map);
2297
2298 /* If a memory address got spoiled, change it back. */
2299 if (insn != 0 && num_validated_changes () != num_changes
2300 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2301 cancel_changes (num_changes);
2302 return;
2303
2304 case SET:
2305 {
2306 /* Substitute constants in our source, and in any arguments to a
2307 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2308 itself. */
2309 rtx *dest_loc = &SET_DEST (x);
2310 rtx dest = *dest_loc;
2311 rtx src, tem;
2312
2313 subst_constants (&SET_SRC (x), insn, map);
2314 src = SET_SRC (x);
2315
2316 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2317 || GET_CODE (*dest_loc) == SIGN_EXTRACT
2318 || GET_CODE (*dest_loc) == SUBREG
2319 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2320 {
2321 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2322 {
2323 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2324 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2325 }
2326 dest_loc = &XEXP (*dest_loc, 0);
2327 }
2328
2329 /* Check for the case where DEST is a SUBREG, both it and the underlying
2330 register are no wider than one word, and the SUBREG has the wider mode.
2331 In that case, we are really setting the underlying register to the
2332 source converted to the mode of DEST. So indicate that. */
2333 if (GET_CODE (dest) == SUBREG
2334 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2335 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2336 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2337 <= GET_MODE_SIZE (GET_MODE (dest)))
2338 && (tem = gen_lowpart_if_possible (GET_MODE (dest), src)))
2339 src = tem, dest = SUBREG_REG (dest);
2340
2341 /* If storing a recognizable value, save it for later recording. */
2342 if ((map->num_sets < MAX_RECOG_OPERANDS)
2343 && (CONSTANT_P (src)
2344 || (GET_CODE (src) == PLUS
2345 && GET_CODE (XEXP (src, 0)) == REG
2346 && REGNO (XEXP (src, 0)) >= FIRST_VIRTUAL_REGISTER
2347 && REGNO (XEXP (src, 0)) <= LAST_VIRTUAL_REGISTER
2348 && CONSTANT_P (XEXP (src, 1)))
2349 || GET_CODE (src) == COMPARE
2350 #ifdef HAVE_cc0
2351 || dest == cc0_rtx
2352 #endif
2353 || (dest == pc_rtx
2354 && (src == pc_rtx || GET_CODE (src) == RETURN
2355 || GET_CODE (src) == LABEL_REF))))
2356 {
2357 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2358 it will cause us to save the COMPARE with any constants
2359 substituted, which is what we want for later. */
2360 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2361 map->equiv_sets[map->num_sets++].dest = dest;
2362 }
2363
2364 return;
2365 }
2366 }
2367
2368 format_ptr = GET_RTX_FORMAT (code);
2369
2370 /* If the first operand is an expression, save its mode for later. */
2371 if (*format_ptr == 'e')
2372 op0_mode = GET_MODE (XEXP (x, 0));
2373
2374 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2375 {
2376 switch (*format_ptr++)
2377 {
2378 case '0':
2379 break;
2380
2381 case 'e':
2382 if (XEXP (x, i))
2383 subst_constants (&XEXP (x, i), insn, map);
2384 break;
2385
2386 case 'u':
2387 case 'i':
2388 case 's':
2389 case 'w':
2390 break;
2391
2392 case 'E':
2393 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2394 {
2395 int j;
2396 for (j = 0; j < XVECLEN (x, i); j++)
2397 subst_constants (&XVECEXP (x, i, j), insn, map);
2398 }
2399 break;
2400
2401 default:
2402 abort ();
2403 }
2404 }
2405
2406 /* If this is a commutative operation, move a constant to the second
2407 operand unless the second operand is already a CONST_INT. */
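/* E.g. (invented register number) (plus:SI (const_int 4) (reg:SI 90))
is proposed as (plus:SI (reg:SI 90) (const_int 4)) so later code can
count on any CONST_INT appearing as the second operand. */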
2408 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2409 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2410 {
2411 rtx tem = XEXP (x, 0);
2412 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2413 validate_change (insn, &XEXP (x, 1), tem, 1);
2414 }
2415
2416 /* Simplify the expression in case we put in some constants. */
2417 switch (GET_RTX_CLASS (code))
2418 {
2419 case '1':
2420 new = simplify_unary_operation (code, GET_MODE (x),
2421 XEXP (x, 0), op0_mode);
2422 break;
2423
2424 case '<':
2425 {
2426 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2427 if (op_mode == VOIDmode)
2428 op_mode = GET_MODE (XEXP (x, 1));
2429 new = simplify_relational_operation (code, op_mode,
2430 XEXP (x, 0), XEXP (x, 1));
2431 #ifdef FLOAT_STORE_FLAG_VALUE
2432 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2433 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2434 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2435 #endif
2436 break;
2437 }
2438
2439 case '2':
2440 case 'c':
2441 new = simplify_binary_operation (code, GET_MODE (x),
2442 XEXP (x, 0), XEXP (x, 1));
2443 break;
2444
2445 case 'b':
2446 case '3':
2447 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2448 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2449 break;
2450 }
2451
2452 if (new)
2453 validate_change (insn, loc, new, 1);
2454 }
2455
2456 /* Show that registers modified no longer contain known constants. We are
2457 called from note_stores with parts of the new insn. */
2458
2459 void
2460 mark_stores (dest, x)
2461 rtx dest;
2462 rtx x;
2463 {
2464 if (GET_CODE (dest) == SUBREG)
2465 dest = SUBREG_REG (dest);
2466
2467 if (GET_CODE (dest) == REG)
2468 global_const_equiv_map[REGNO (dest)] = 0;
2469 }
2470 \f
2471 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2472 pointed to by PX, they represent constants in the constant pool.
2473 Replace these with a new memory reference obtained from force_const_mem.
2474 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2475 address of a constant pool entry. Replace them with the address of
2476 a new constant pool entry obtained from force_const_mem. */
2477
2478 static void
2479 restore_constants (px)
2480 rtx *px;
2481 {
2482 rtx x = *px;
2483 int i, j;
2484 char *fmt;
2485
2486 if (x == 0)
2487 return;
2488
2489 if (GET_CODE (x) == CONST_DOUBLE)
2490 {
2491 /* We have to make a new CONST_DOUBLE to ensure that we account for
2492 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2493 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2494 {
2495 REAL_VALUE_TYPE d;
2496
2497 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2498 *px = immed_real_const_1 (d, GET_MODE (x));
2499 }
2500 else
2501 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2502 VOIDmode);
2503 }
2504
2505 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2506 {
2507 restore_constants (&XEXP (x, 0));
2508 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2509 }
2510 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2511 {
2512 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2513 rtx new = XEXP (SUBREG_REG (x), 0);
2514
2515 restore_constants (&new);
2516 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2517 PUT_MODE (new, GET_MODE (x));
2518 *px = validize_mem (new);
2519 }
2520 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2521 {
2522 restore_constants (&XEXP (x, 0));
2523 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
2524 }
2525 else
2526 {
2527 fmt = GET_RTX_FORMAT (GET_CODE (x));
2528 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2529 {
2530 switch (*fmt++)
2531 {
2532 case 'E':
2533 for (j = 0; j < XVECLEN (x, i); j++)
2534 restore_constants (&XVECEXP (x, i, j));
2535 break;
2536
2537 case 'e':
2538 restore_constants (&XEXP (x, i));
2539 break;
2540 }
2541 }
2542 }
2543 }
2544 \f
2545 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2546 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2547 that it points to the node itself, thus indicating that the node is its
2548 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2549 the given node is NULL, recursively descend the decl/block tree which
2550 it is the root of, and for each other ..._DECL or BLOCK node contained
2551 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2552 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2553 values to point to themselves. */
2554
2555 static void set_decl_origin_self ();
2556
2557 static void
2558 set_block_origin_self (stmt)
2559 register tree stmt;
2560 {
2561 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2562 {
2563 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2564
2565 {
2566 register tree local_decl;
2567
2568 for (local_decl = BLOCK_VARS (stmt);
2569 local_decl != NULL_TREE;
2570 local_decl = TREE_CHAIN (local_decl))
2571 set_decl_origin_self (local_decl); /* Potential recursion. */
2572 }
2573
2574 {
2575 register tree subblock;
2576
2577 for (subblock = BLOCK_SUBBLOCKS (stmt);
2578 subblock != NULL_TREE;
2579 subblock = BLOCK_CHAIN (subblock))
2580 set_block_origin_self (subblock); /* Recurse. */
2581 }
2582 }
2583 }
2584
2585 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2586 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2587 node so that it points to the node itself, thus indicating that the
2588 node represents its own (abstract) origin. Additionally, if the
2589 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2590 the decl/block tree of which the given node is the root, and for
2591 each other ..._DECL or BLOCK node contained therein whose
2592 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2593 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2594 point to themselves. */
2595
2596 static void
2597 set_decl_origin_self (decl)
2598 register tree decl;
2599 {
2600 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2601 {
2602 DECL_ABSTRACT_ORIGIN (decl) = decl;
2603 if (TREE_CODE (decl) == FUNCTION_DECL)
2604 {
2605 register tree arg;
2606
2607 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2608 DECL_ABSTRACT_ORIGIN (arg) = arg;
2609 if (DECL_INITIAL (decl) != NULL_TREE)
2610 set_block_origin_self (DECL_INITIAL (decl));
2611 }
2612 }
2613 }
2614 \f
2615 /* Given a pointer to some BLOCK node, and a boolean value to set the
2616 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2617 the given block, and for all local decls and all local sub-blocks
2618 (recursively) which are contained therein. */
2619
2620 void set_decl_abstract_flags ();
2621
2622 static void
2623 set_block_abstract_flags (stmt, setting)
2624 register tree stmt;
2625 register int setting;
2626 {
2627 BLOCK_ABSTRACT (stmt) = setting;
2628
2629 {
2630 register tree local_decl;
2631
2632 for (local_decl = BLOCK_VARS (stmt);
2633 local_decl != NULL_TREE;
2634 local_decl = TREE_CHAIN (local_decl))
2635 set_decl_abstract_flags (local_decl, setting);
2636 }
2637
2638 {
2639 register tree subblock;
2640
2641 for (subblock = BLOCK_SUBBLOCKS (stmt);
2642 subblock != NULL_TREE;
2643 subblock = BLOCK_CHAIN (subblock))
2644 set_block_abstract_flags (subblock, setting);
2645 }
2646 }
2647
2648 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2649 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2650 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2651 set the abstract flags for all of the parameters, local vars, local
2652 blocks and sub-blocks (recursively) to the same setting. */
2653
2654 void
2655 set_decl_abstract_flags (decl, setting)
2656 register tree decl;
2657 register int setting;
2658 {
2659 DECL_ABSTRACT (decl) = setting;
2660 if (TREE_CODE (decl) == FUNCTION_DECL)
2661 {
2662 register tree arg;
2663
2664 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2665 DECL_ABSTRACT (arg) = setting;
2666 if (DECL_INITIAL (decl) != NULL_TREE)
2667 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2668 }
2669 }
2670 \f
2671 /* Output the assembly language code for the function FNDECL
2672 from its DECL_SAVED_INSNS. Used for inline functions that are output
2673 at end of compilation instead of where they came in the source. */
2674
2675 void
2676 output_inline_function (fndecl)
2677 tree fndecl;
2678 {
2679 rtx head = DECL_SAVED_INSNS (fndecl);
2680 rtx last;
2681
2682 temporary_allocation ();
2683
2684 current_function_decl = fndecl;
2685
2686 /* This call is only used to initialize global variables. */
2687 init_function_start (fndecl, "lossage", 1);
2688
2689 /* Redo parameter determinations in case the FUNCTION_...
2690 macros took machine-specific actions that need to be redone. */
2691 assign_parms (fndecl, 1);
2692
2693 /* Set stack frame size. */
2694 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
2695
2696 restore_reg_data (FIRST_PARM_INSN (head));
2697
2698 stack_slot_list = STACK_SLOT_LIST (head);
2699
2700 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
2701 current_function_calls_alloca = 1;
2702
2703 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
2704 current_function_calls_setjmp = 1;
2705
2706 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
2707 current_function_calls_longjmp = 1;
2708
2709 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
2710 current_function_returns_struct = 1;
2711
2712 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
2713 current_function_returns_pcc_struct = 1;
2714
2715 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
2716 current_function_needs_context = 1;
2717
2718 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
2719 current_function_has_nonlocal_label = 1;
2720
2721 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
2722 current_function_returns_pointer = 1;
2723
2724 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
2725 current_function_uses_const_pool = 1;
2726
2727 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
2728 current_function_uses_pic_offset_table = 1;
2729
2730 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
2731 current_function_pops_args = POPS_ARGS (head);
2732
2733 /* There is no need to output a return label again. */
2734 return_label = 0;
2735
2736 expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl));
2737
2738 /* Find last insn and rebuild the constant pool. */
2739 for (last = FIRST_PARM_INSN (head);
2740 NEXT_INSN (last); last = NEXT_INSN (last))
2741 {
2742 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
2743 {
2744 restore_constants (&PATTERN (last));
2745 restore_constants (&REG_NOTES (last));
2746 }
2747 }
2748
2749 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
2750 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
2751
2752 /* We must have already output DWARF debugging information for the
2753 original (abstract) inline function declaration/definition, so
2754 we want to make sure that the debugging information we generate
2755 for this special instance of the inline function refers back to
2756 the information we already generated. To make sure that happens,
2757 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
2758 node (and for all of the local ..._DECL nodes which are its children)
2759 so that they all point to themselves. */
2760
2761 set_decl_origin_self (fndecl);
2762
2763 /* Compile this function all the way down to assembly code. */
2764 rest_of_compilation (fndecl);
2765
2766 current_function_decl = 0;
2767
2768 permanent_allocation ();
2769 }