1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "hard-reg-set.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "stringpool.h"
38 #include "varasm.h"
39 #include "stor-layout.h"
40 #include "stmt.h"
41 #include "print-tree.h"
42 #include "tm_p.h"
43 #include "predict.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "dominance.h"
47 #include "cfg.h"
48 #include "cfgrtl.h"
49 #include "cfganal.h"
50 #include "cfgbuild.h"
51 #include "cfgcleanup.h"
52 #include "basic-block.h"
53 #include "insn-codes.h"
54 #include "optabs.h"
55 #include "flags.h"
56 #include "statistics.h"
57 #include "real.h"
58 #include "fixed-value.h"
59 #include "insn-config.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "calls.h"
64 #include "emit-rtl.h"
65 #include "expr.h"
66 #include "langhooks.h"
67 #include "bitmap.h"
68 #include "tree-ssa-alias.h"
69 #include "internal-fn.h"
70 #include "tree-eh.h"
71 #include "gimple-expr.h"
72 #include "is-a.h"
73 #include "gimple.h"
74 #include "gimple-iterator.h"
75 #include "gimple-walk.h"
76 #include "gimple-ssa.h"
77 #include "hash-map.h"
78 #include "plugin-api.h"
79 #include "ipa-ref.h"
80 #include "cgraph.h"
81 #include "tree-cfg.h"
82 #include "tree-phinodes.h"
83 #include "ssa-iterators.h"
84 #include "tree-ssanames.h"
85 #include "tree-dfa.h"
86 #include "tree-ssa.h"
87 #include "tree-pass.h"
88 #include "except.h"
89 #include "diagnostic.h"
90 #include "gimple-pretty-print.h"
91 #include "toplev.h"
92 #include "debug.h"
93 #include "params.h"
94 #include "tree-inline.h"
95 #include "value-prof.h"
96 #include "target.h"
97 #include "tree-ssa-live.h"
98 #include "tree-outof-ssa.h"
99 #include "sbitmap.h"
100 #include "cfgloop.h"
101 #include "regs.h" /* For reg_renumber. */
102 #include "insn-attr.h" /* For INSN_SCHEDULING. */
103 #include "asan.h"
104 #include "tree-ssa-address.h"
105 #include "recog.h"
106 #include "output.h"
107 #include "builtins.h"
108 #include "tree-chkp.h"
109 #include "rtl-chkp.h"
110
111 /* Some systems use __main in a way incompatible with its use in gcc; in these
112 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
113 give the same symbol without quotes for an alternative entry point. You
114 must define both, or neither. */
115 #ifndef NAME__MAIN
116 #define NAME__MAIN "__main"
117 #endif
118
119 /* This variable holds information helping the rewriting of SSA trees
120 into RTL. */
121 struct ssaexpand SA;
122
123 /* This variable holds the currently expanded gimple statement for purposes
124 of communicating the profile info to the builtin expanders. */
125 gimple currently_expanding_gimple_stmt;
126
127 static rtx expand_debug_expr (tree);
128
129 /* Return an expression tree corresponding to the RHS of GIMPLE
130 statement STMT. */
131
132 tree
133 gimple_assign_rhs_to_tree (gimple stmt)
134 {
135 tree t;
136 enum gimple_rhs_class grhs_class;
137
138 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
139
140 if (grhs_class == GIMPLE_TERNARY_RHS)
141 t = build3 (gimple_assign_rhs_code (stmt),
142 TREE_TYPE (gimple_assign_lhs (stmt)),
143 gimple_assign_rhs1 (stmt),
144 gimple_assign_rhs2 (stmt),
145 gimple_assign_rhs3 (stmt));
146 else if (grhs_class == GIMPLE_BINARY_RHS)
147 t = build2 (gimple_assign_rhs_code (stmt),
148 TREE_TYPE (gimple_assign_lhs (stmt)),
149 gimple_assign_rhs1 (stmt),
150 gimple_assign_rhs2 (stmt));
151 else if (grhs_class == GIMPLE_UNARY_RHS)
152 t = build1 (gimple_assign_rhs_code (stmt),
153 TREE_TYPE (gimple_assign_lhs (stmt)),
154 gimple_assign_rhs1 (stmt));
155 else if (grhs_class == GIMPLE_SINGLE_RHS)
156 {
157 t = gimple_assign_rhs1 (stmt);
158 /* Avoid modifying this tree in place below. */
159 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
160 && gimple_location (stmt) != EXPR_LOCATION (t))
161 || (gimple_block (stmt)
162 && currently_expanding_to_rtl
163 && EXPR_P (t)))
164 t = copy_node (t);
165 }
166 else
167 gcc_unreachable ();
168
169 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
170 SET_EXPR_LOCATION (t, gimple_location (stmt));
171
172 return t;
173 }
174
175
176 #ifndef STACK_ALIGNMENT_NEEDED
177 #define STACK_ALIGNMENT_NEEDED 1
178 #endif
179
180 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
181
182 /* Associate declaration T with storage space X. If T is not an
183 SSA name this is exactly SET_DECL_RTL; otherwise make the
184 partition of T associated with X. */
185 static inline void
186 set_rtl (tree t, rtx x)
187 {
188 if (TREE_CODE (t) == SSA_NAME)
189 {
190 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
191 if (x && !MEM_P (x))
192 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
193 /* For the benefit of debug information at -O0 (where vartracking
194 doesn't run) record the place also in the base DECL if it's
195 a normal variable (not a parameter). */
196 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
197 {
198 tree var = SSA_NAME_VAR (t);
199 /* If we don't yet have something recorded, just record it now. */
200 if (!DECL_RTL_SET_P (var))
201 SET_DECL_RTL (var, x);
202 /* If we have it set already to "multiple places" don't
203 change this. */
204 else if (DECL_RTL (var) == pc_rtx)
205 ;
206 /* If we have something recorded and it's not the same place
207 as we want to record now, we have multiple partitions for the
208 same base variable, with different places. We can't just
209 randomly choose one, hence we have to say that we don't know.
210 This only happens with optimization, and there var-tracking
211 will figure out the right thing. */
212 else if (DECL_RTL (var) != x)
213 SET_DECL_RTL (var, pc_rtx);
214 }
215 }
216 else
217 SET_DECL_RTL (t, x);
218 }
219
220 /* This structure holds data relevant to one variable that will be
221 placed in a stack slot. */
222 struct stack_var
223 {
224 /* The Variable. */
225 tree decl;
226
227 /* Initially, the size of the variable. Later, the size of the partition,
228 if this variable becomes its partition's representative. */
229 HOST_WIDE_INT size;
230
231 /* The *byte* alignment required for this variable. Or, as with the
232 size, the alignment for this partition. */
233 unsigned int alignb;
234
235 /* The partition representative. */
236 size_t representative;
237
238 /* The next stack variable in the partition, or EOC. */
239 size_t next;
240
241 /* The numbers of conflicting stack variables. */
242 bitmap conflicts;
243 };
244
245 #define EOC ((size_t)-1)
246
247 /* We have an array of such objects while deciding allocation. */
248 static struct stack_var *stack_vars;
249 static size_t stack_vars_alloc;
250 static size_t stack_vars_num;
251 static hash_map<tree, size_t> *decl_to_stack_part;
252
253 /* Conflict bitmaps go on this obstack. This allows us to destroy
254 all of them in one big sweep. */
255 static bitmap_obstack stack_var_bitmap_obstack;
256
257 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
258 is non-decreasing. */
259 static size_t *stack_vars_sorted;
260
261 /* The phase of the stack frame. This is the known misalignment of
262 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
263 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
264 static int frame_phase;
265
266 /* Used during expand_used_vars to remember if we saw any decls for
267 which we'd like to enable stack smashing protection. */
268 static bool has_protected_decls;
269
270 /* Used during expand_used_vars. Remember if we saw a character buffer
271 smaller than our cutoff threshold. Used for -Wstack-protector. */
272 static bool has_short_buffer;
273
274 /* Compute the byte alignment to use for DECL. Ignore alignment
275 we can't satisfy with the expected alignment of the stack boundary. */
276
277 static unsigned int
278 align_local_variable (tree decl)
279 {
280 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
281 DECL_ALIGN (decl) = align;
282 return align / BITS_PER_UNIT;
283 }
284
285 /* Align given offset BASE with ALIGN. Round up if ALIGN_UP is true,
286 round down otherwise. Return the aligned BASE value. */
287
288 static inline unsigned HOST_WIDE_INT
289 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
290 {
291 return align_up ? (base + align - 1) & -align : base & -align;
292 }
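
/* A worked example of the rounding above (illustrative; ALIGN must be a
   power of two for the bit trick to hold):
     align_base (37, 16, true)  == (37 + 15) & -16 == 48, and
     align_base (37, 16, false) == 37 & -16        == 32.  */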
293
294 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
295 Return the frame offset. */
296
297 static HOST_WIDE_INT
298 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
299 {
300 HOST_WIDE_INT offset, new_frame_offset;
301
302 if (FRAME_GROWS_DOWNWARD)
303 {
304 new_frame_offset
305 = align_base (frame_offset - frame_phase - size,
306 align, false) + frame_phase;
307 offset = new_frame_offset;
308 }
309 else
310 {
311 new_frame_offset
312 = align_base (frame_offset - frame_phase, align, true) + frame_phase;
313 offset = new_frame_offset;
314 new_frame_offset += size;
315 }
316 frame_offset = new_frame_offset;
317
318 if (frame_offset_overflow (frame_offset, cfun->decl))
319 frame_offset = offset = 0;
320
321 return offset;
322 }
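
/* A usage sketch with made-up numbers: on a FRAME_GROWS_DOWNWARD target
   with frame_phase == 0 and frame_offset == -8, the call
   alloc_stack_frame_space (4, 4) rounds -12 down to -12, sets
   frame_offset to -12 and returns -12 as the new slot's offset from
   virtual_stack_vars_rtx.  On an upward-growing frame the slot instead
   starts at the rounded-up old offset and frame_offset advances past it
   by SIZE.  */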
323
324 /* Accumulate DECL into STACK_VARS. */
325
326 static void
327 add_stack_var (tree decl)
328 {
329 struct stack_var *v;
330
331 if (stack_vars_num >= stack_vars_alloc)
332 {
333 if (stack_vars_alloc)
334 stack_vars_alloc = stack_vars_alloc * 3 / 2;
335 else
336 stack_vars_alloc = 32;
337 stack_vars
338 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
339 }
340 if (!decl_to_stack_part)
341 decl_to_stack_part = new hash_map<tree, size_t>;
342
343 v = &stack_vars[stack_vars_num];
344 decl_to_stack_part->put (decl, stack_vars_num);
345
346 v->decl = decl;
347 v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
348 /* Ensure that all variables have size, so that &a != &b for any two
349 variables that are simultaneously live. */
350 if (v->size == 0)
351 v->size = 1;
352 v->alignb = align_local_variable (SSAVAR (decl));
353 /* An alignment of zero can mightily confuse us later. */
354 gcc_assert (v->alignb != 0);
355
356 /* All variables are initially in their own partition. */
357 v->representative = stack_vars_num;
358 v->next = EOC;
359
360 /* All variables initially conflict with no other. */
361 v->conflicts = NULL;
362
363 /* Ensure that this decl doesn't get put onto the list twice. */
364 set_rtl (decl, pc_rtx);
365
366 stack_vars_num++;
367 }
368
369 /* Make the decls associated with indices X and Y conflict. */
370
371 static void
372 add_stack_var_conflict (size_t x, size_t y)
373 {
374 struct stack_var *a = &stack_vars[x];
375 struct stack_var *b = &stack_vars[y];
376 if (!a->conflicts)
377 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
378 if (!b->conflicts)
379 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
380 bitmap_set_bit (a->conflicts, y);
381 bitmap_set_bit (b->conflicts, x);
382 }
383
384 /* Check whether the decls associated with indices X and Y conflict. */
385
386 static bool
387 stack_var_conflict_p (size_t x, size_t y)
388 {
389 struct stack_var *a = &stack_vars[x];
390 struct stack_var *b = &stack_vars[y];
391 if (x == y)
392 return false;
393 /* Partitions containing an SSA name result from gimple registers
394 with things like unsupported modes. They are top-level and
395 hence conflict with everything else. */
396 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
397 return true;
398
399 if (!a->conflicts || !b->conflicts)
400 return false;
401 return bitmap_bit_p (a->conflicts, y);
402 }
403
404 /* Callback for walk_stmt_load_store_addr_ops. If OP is a decl touched
405 by add_stack_var, enter its partition number into bitmap DATA. */
406
407 static bool
408 visit_op (gimple, tree op, tree, void *data)
409 {
410 bitmap active = (bitmap)data;
411 op = get_base_address (op);
412 if (op
413 && DECL_P (op)
414 && DECL_RTL_IF_SET (op) == pc_rtx)
415 {
416 size_t *v = decl_to_stack_part->get (op);
417 if (v)
418 bitmap_set_bit (active, *v);
419 }
420 return false;
421 }
422
423 /* Callback for walk_stmt_load_store_addr_ops. If OP is a decl touched
424 by add_stack_var, record conflicts between it and all currently active
425 other partitions from bitmap DATA. */
426
427 static bool
428 visit_conflict (gimple, tree op, tree, void *data)
429 {
430 bitmap active = (bitmap)data;
431 op = get_base_address (op);
432 if (op
433 && DECL_P (op)
434 && DECL_RTL_IF_SET (op) == pc_rtx)
435 {
436 size_t *v = decl_to_stack_part->get (op);
437 if (v && bitmap_set_bit (active, *v))
438 {
439 size_t num = *v;
440 bitmap_iterator bi;
441 unsigned i;
442 gcc_assert (num < stack_vars_num);
443 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
444 add_stack_var_conflict (num, i);
445 }
446 }
447 return false;
448 }
449
450 /* Helper routine for add_scope_conflicts, calculating the active partitions
451 at the end of BB, leaving the result in WORK. We're called to generate
452 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
453 liveness. */
454
455 static void
456 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
457 {
458 edge e;
459 edge_iterator ei;
460 gimple_stmt_iterator gsi;
461 walk_stmt_load_store_addr_fn visit;
462
463 bitmap_clear (work);
464 FOR_EACH_EDGE (e, ei, bb->preds)
465 bitmap_ior_into (work, (bitmap)e->src->aux);
466
467 visit = visit_op;
468
469 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
470 {
471 gimple stmt = gsi_stmt (gsi);
472 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
473 }
474 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
475 {
476 gimple stmt = gsi_stmt (gsi);
477
478 if (gimple_clobber_p (stmt))
479 {
480 tree lhs = gimple_assign_lhs (stmt);
481 size_t *v;
482 /* Nested function lowering might introduce LHSs
483 that are COMPONENT_REFs. */
484 if (TREE_CODE (lhs) != VAR_DECL)
485 continue;
486 if (DECL_RTL_IF_SET (lhs) == pc_rtx
487 && (v = decl_to_stack_part->get (lhs)))
488 bitmap_clear_bit (work, *v);
489 }
490 else if (!is_gimple_debug (stmt))
491 {
492 if (for_conflict
493 && visit == visit_op)
494 {
495 /* If this is the first real instruction in this BB we need
496 to add conflicts for everything live at this point now.
497 Unlike classical liveness for named objects we can't
498 rely on seeing a def/use of the names we're interested in.
499 There might merely be indirect loads/stores. We'd not add any
500 conflicts for such partitions. */
501 bitmap_iterator bi;
502 unsigned i;
503 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
504 {
505 struct stack_var *a = &stack_vars[i];
506 if (!a->conflicts)
507 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
508 bitmap_ior_into (a->conflicts, work);
509 }
510 visit = visit_conflict;
511 }
512 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
513 }
514 }
515 }
516
517 /* Generate stack partition conflicts between all partitions that are
518 simultaneously live. */
519
520 static void
521 add_scope_conflicts (void)
522 {
523 basic_block bb;
524 bool changed;
525 bitmap work = BITMAP_ALLOC (NULL);
526 int *rpo;
527 int n_bbs;
528
529 /* We approximate the live range of a stack variable by taking the first
530 mention of its name as starting point(s), and by the end-of-scope
531 death clobber added by gimplify as ending point(s) of the range.
532 This overapproximates in the case where we, for instance, moved an
533 address-taken operation upward without also moving a dereference of it
534 upward. But it's conservatively correct, as a variable can never hold
535 values before its name is mentioned at least once.
536
537 We then do a mostly classical bitmap liveness algorithm. */
538
539 FOR_ALL_BB_FN (bb, cfun)
540 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
541
542 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
543 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
544
545 changed = true;
546 while (changed)
547 {
548 int i;
549 changed = false;
550 for (i = 0; i < n_bbs; i++)
551 {
552 bitmap active;
553 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
554 active = (bitmap)bb->aux;
555 add_scope_conflicts_1 (bb, work, false);
556 if (bitmap_ior_into (active, work))
557 changed = true;
558 }
559 }
560
561 FOR_EACH_BB_FN (bb, cfun)
562 add_scope_conflicts_1 (bb, work, true);
563
564 free (rpo);
565 BITMAP_FREE (work);
566 FOR_ALL_BB_FN (bb, cfun)
567 BITMAP_FREE (bb->aux);
568 }
569
570 /* A subroutine of partition_stack_vars. A comparison function for qsort,
571 sorting an array of indices by the properties of the object. */
572
573 static int
574 stack_var_cmp (const void *a, const void *b)
575 {
576 size_t ia = *(const size_t *)a;
577 size_t ib = *(const size_t *)b;
578 unsigned int aligna = stack_vars[ia].alignb;
579 unsigned int alignb = stack_vars[ib].alignb;
580 HOST_WIDE_INT sizea = stack_vars[ia].size;
581 HOST_WIDE_INT sizeb = stack_vars[ib].size;
582 tree decla = stack_vars[ia].decl;
583 tree declb = stack_vars[ib].decl;
584 bool largea, largeb;
585 unsigned int uida, uidb;
586
587 /* Primary compare on "large" alignment. Large comes first. */
588 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
589 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
590 if (largea != largeb)
591 return (int)largeb - (int)largea;
592
593 /* Secondary compare on size, decreasing. */
594 if (sizea > sizeb)
595 return -1;
596 if (sizea < sizeb)
597 return 1;
598
599 /* Tertiary compare on true alignment, decreasing. */
600 if (aligna < alignb)
601 return -1;
602 if (aligna > alignb)
603 return 1;
604
605 /* Final compare on ID for sort stability, increasing.
606 Two SSA names are compared by their version, SSA names come before
607 non-SSA names, and two normal decls are compared by their DECL_UID. */
608 if (TREE_CODE (decla) == SSA_NAME)
609 {
610 if (TREE_CODE (declb) == SSA_NAME)
611 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
612 else
613 return -1;
614 }
615 else if (TREE_CODE (declb) == SSA_NAME)
616 return 1;
617 else
618 uida = DECL_UID (decla), uidb = DECL_UID (declb);
619 if (uida < uidb)
620 return 1;
621 if (uida > uidb)
622 return -1;
623 return 0;
624 }
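
/* Illustrative resulting order: any variable whose required alignment
   exceeds MAX_SUPPORTED_STACK_ALIGNMENT sorts before all "small"
   alignment ones; among the rest, a 1024-byte buffer precedes a 16-byte
   scalar, equal sizes are then ordered by alignment, and remaining ties
   fall back to SSA version or DECL_UID so the qsort result is stable
   across runs.  */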
625
626 struct part_traits : default_hashmap_traits
627 {
628 template<typename T>
629 static bool
630 is_deleted (T &e)
631 { return e.m_value == reinterpret_cast<void *> (1); }
632
633 template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
634 template<typename T>
635 static void
636 mark_deleted (T &e)
637 { e.m_value = reinterpret_cast<void *> (1); }
638
639 template<typename T>
640 static void
641 mark_empty (T &e)
642 { e.m_value = NULL; }
643 };
644
645 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
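
/* Illustrative reading of the typedef above: part_hashmap maps a
   DECL_PT_UID to the bitmap of all DECL_PT_UIDs sharing its stack
   partition; per part_traits, a NULL m_value marks an empty slot and
   (void *) 1 a deleted one, so neither value may ever be stored as a
   real bitmap pointer.  */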
646
647 /* If the points-to solution *PI points to variables that are in a partition
648 together with other variables add all partition members to the pointed-to
649 variables bitmap. */
650
651 static void
652 add_partitioned_vars_to_ptset (struct pt_solution *pt,
653 part_hashmap *decls_to_partitions,
654 hash_set<bitmap> *visited, bitmap temp)
655 {
656 bitmap_iterator bi;
657 unsigned i;
658 bitmap *part;
659
660 if (pt->anything
661 || pt->vars == NULL
662 /* The pointed-to vars bitmap is shared, it is enough to
663 visit it once. */
664 || visited->add (pt->vars))
665 return;
666
667 bitmap_clear (temp);
668
669 /* By using a temporary bitmap to store all members of the partitions
670 we have to add, we make sure to visit each of the partitions only
671 once. */
672 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
673 if ((!temp
674 || !bitmap_bit_p (temp, i))
675 && (part = decls_to_partitions->get (i)))
676 bitmap_ior_into (temp, *part);
677 if (!bitmap_empty_p (temp))
678 bitmap_ior_into (pt->vars, temp);
679 }
680
681 /* Update points-to sets based on partition info, so we can use them on RTL.
682 The bitmaps representing stack partitions will be saved until expand,
683 where partitioned decls used as bases in memory expressions will be
684 rewritten. */
685
686 static void
687 update_alias_info_with_stack_vars (void)
688 {
689 part_hashmap *decls_to_partitions = NULL;
690 size_t i, j;
691 tree var = NULL_TREE;
692
693 for (i = 0; i < stack_vars_num; i++)
694 {
695 bitmap part = NULL;
696 tree name;
697 struct ptr_info_def *pi;
698
699 /* Not interested in partitions with a single variable. */
700 if (stack_vars[i].representative != i
701 || stack_vars[i].next == EOC)
702 continue;
703
704 if (!decls_to_partitions)
705 {
706 decls_to_partitions = new part_hashmap;
707 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
708 }
709
710 /* Create an SSA_NAME that points to the partition for use
711 as base during alias-oracle queries on RTL for bases that
712 have been partitioned. */
713 if (var == NULL_TREE)
714 var = create_tmp_var (ptr_type_node);
715 name = make_ssa_name (var);
716
717 /* Create bitmaps representing partitions. They will be used for
718 points-to sets later, so use GGC alloc. */
719 part = BITMAP_GGC_ALLOC ();
720 for (j = i; j != EOC; j = stack_vars[j].next)
721 {
722 tree decl = stack_vars[j].decl;
723 unsigned int uid = DECL_PT_UID (decl);
724 bitmap_set_bit (part, uid);
725 decls_to_partitions->put (uid, part);
726 cfun->gimple_df->decls_to_pointers->put (decl, name);
727 if (TREE_ADDRESSABLE (decl))
728 TREE_ADDRESSABLE (name) = 1;
729 }
730
731 /* Make the SSA name point to all partition members. */
732 pi = get_ptr_info (name);
733 pt_solution_set (&pi->pt, part, false);
734 }
735
736 /* Make all points-to sets that contain one member of a partition
737 contain all members of the partition. */
738 if (decls_to_partitions)
739 {
740 unsigned i;
741 hash_set<bitmap> visited;
742 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
743
744 for (i = 1; i < num_ssa_names; i++)
745 {
746 tree name = ssa_name (i);
747 struct ptr_info_def *pi;
748
749 if (name
750 && POINTER_TYPE_P (TREE_TYPE (name))
751 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
752 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
753 &visited, temp);
754 }
755
756 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
757 decls_to_partitions, &visited, temp);
758
759 delete decls_to_partitions;
760 BITMAP_FREE (temp);
761 }
762 }
763
764 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
765 partitioning algorithm. Partitions A and B are known to be non-conflicting.
766 Merge them into a single partition A. */
767
768 static void
769 union_stack_vars (size_t a, size_t b)
770 {
771 struct stack_var *vb = &stack_vars[b];
772 bitmap_iterator bi;
773 unsigned u;
774
775 gcc_assert (stack_vars[b].next == EOC);
776 /* Add B to A's partition. */
777 stack_vars[b].next = stack_vars[a].next;
778 stack_vars[b].representative = a;
779 stack_vars[a].next = b;
780
781 /* Update the required alignment of partition A to account for B. */
782 if (stack_vars[a].alignb < stack_vars[b].alignb)
783 stack_vars[a].alignb = stack_vars[b].alignb;
784
785 /* Update the interference graph and merge the conflicts. */
786 if (vb->conflicts)
787 {
788 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
789 add_stack_var_conflict (a, stack_vars[u].representative);
790 BITMAP_FREE (vb->conflicts);
791 }
792 }
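
/* A small illustration (made-up indices): if stack_vars[3] and
   stack_vars[7] are non-conflicting partition representatives,
   union_stack_vars (3, 7) splices 7 at the head of 3's member chain
   (stack_vars[3].next == 7, stack_vars[7].representative == 3), raises
   3's alignment to the stricter of the two, re-registers each of 7's
   conflicts against 3, and frees 7's conflict bitmap.  */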
793
794 /* A subroutine of expand_used_vars. Binpack the variables into
795 partitions constrained by the interference graph. The overall
796 algorithm used is as follows:
797 
798 Sort the objects by size in descending order.
799 For each object A {
800 S = size(A)
802 loop {
803 Look for the largest non-conflicting object B with size <= S.
804 UNION (A, B)
805 }
806 }
807 */
808
809 static void
810 partition_stack_vars (void)
811 {
812 size_t si, sj, n = stack_vars_num;
813
814 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
815 for (si = 0; si < n; ++si)
816 stack_vars_sorted[si] = si;
817
818 if (n == 1)
819 return;
820
821 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
822
823 for (si = 0; si < n; ++si)
824 {
825 size_t i = stack_vars_sorted[si];
826 unsigned int ialign = stack_vars[i].alignb;
827 HOST_WIDE_INT isize = stack_vars[i].size;
828
829 /* Ignore objects that aren't partition representatives. If we
830 see a var that is not a partition representative, it must
831 have been merged earlier. */
832 if (stack_vars[i].representative != i)
833 continue;
834
835 for (sj = si + 1; sj < n; ++sj)
836 {
837 size_t j = stack_vars_sorted[sj];
838 unsigned int jalign = stack_vars[j].alignb;
839 HOST_WIDE_INT jsize = stack_vars[j].size;
840
841 /* Ignore objects that aren't partition representatives. */
842 if (stack_vars[j].representative != j)
843 continue;
844
845 /* Do not mix objects of "small" (supported) alignment
846 and "large" (unsupported) alignment. */
847 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
848 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
849 break;
850
851 /* For Address Sanitizer do not mix objects with different
852 sizes, as the shorter vars wouldn't be adequately protected.
853 Don't do that for "large" (unsupported) alignment objects,
854 those aren't protected anyway. */
855 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
856 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
857 break;
858
859 /* Ignore conflicting objects. */
860 if (stack_var_conflict_p (i, j))
861 continue;
862
863 /* UNION the objects; J joins I's partition. */
864 union_stack_vars (i, j);
865 }
866 }
867
868 update_alias_info_with_stack_vars ();
869 }
870
871 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
872
873 static void
874 dump_stack_var_partition (void)
875 {
876 size_t si, i, j, n = stack_vars_num;
877
878 for (si = 0; si < n; ++si)
879 {
880 i = stack_vars_sorted[si];
881
882 /* Skip variables that aren't partition representatives, for now. */
883 if (stack_vars[i].representative != i)
884 continue;
885
886 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
887 " align %u\n", (unsigned long) i, stack_vars[i].size,
888 stack_vars[i].alignb);
889
890 for (j = i; j != EOC; j = stack_vars[j].next)
891 {
892 fputc ('\t', dump_file);
893 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
894 }
895 fputc ('\n', dump_file);
896 }
897 }
898
899 /* Assign rtl to DECL at BASE + OFFSET. */
900
901 static void
902 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
903 HOST_WIDE_INT offset)
904 {
905 unsigned align;
906 rtx x;
907
908 /* If this fails, we've overflowed the stack frame. Error nicely? */
909 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
910
911 x = plus_constant (Pmode, base, offset);
912 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
913
914 if (TREE_CODE (decl) != SSA_NAME)
915 {
916 /* Set the alignment we actually gave this decl if it isn't an SSA name.
917 If it is, we generate stack slots only accidentally, so it isn't as
918 important; we'll simply use the alignment that is already set. */
919 if (base == virtual_stack_vars_rtx)
920 offset -= frame_phase;
921 align = offset & -offset;
922 align *= BITS_PER_UNIT;
923 if (align == 0 || align > base_align)
924 align = base_align;
925
926 /* One would think that we could assert that we're not decreasing
927 alignment here, but (at least) the i386 port does exactly this
928 via the MINIMUM_ALIGNMENT hook. */
929
930 DECL_ALIGN (decl) = align;
931 DECL_USER_ALIGN (decl) = 0;
932 }
933
934 set_mem_attributes (x, SSAVAR (decl), true);
935 set_rtl (decl, x);
936 }
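
/* Illustrative note on the computation above: "offset & -offset"
   isolates the lowest set bit, e.g. an offset of 24 yields 8, so after
   scaling by BITS_PER_UNIT the slot can only be assumed 8-byte (64-bit)
   aligned relative to the frame base, capped by BASE_ALIGN.  */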
937
938 struct stack_vars_data
939 {
940 /* Vector of offset pairs, always the end of some padding followed
941 by the start of the padding that needs Address Sanitizer protection.
942 The vector is in reversed order, highest offset pairs come first. */
943 vec<HOST_WIDE_INT> asan_vec;
944
945 /* Vector of partition representative decls in between the paddings. */
946 vec<tree> asan_decl_vec;
947
948 /* Base pseudo register for Address Sanitizer protected automatic vars. */
949 rtx asan_base;
950
951 /* Alignment needed for the Address Sanitizer protected automatic vars. */
952 unsigned int asan_alignb;
953 };
954
955 /* A subroutine of expand_used_vars. Give each partition representative
956 a unique location within the stack frame. Update each partition member
957 with that location. */
958
959 static void
960 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
961 {
962 size_t si, i, j, n = stack_vars_num;
963 HOST_WIDE_INT large_size = 0, large_alloc = 0;
964 rtx large_base = NULL;
965 unsigned large_align = 0;
966 tree decl;
967
968 /* Determine if there are any variables requiring "large" alignment.
969 Since these are dynamically allocated, we only process them here if
970 no predicate is involved. */
971 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
972 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
973 {
974 /* Find the total size of these variables. */
975 for (si = 0; si < n; ++si)
976 {
977 unsigned alignb;
978
979 i = stack_vars_sorted[si];
980 alignb = stack_vars[i].alignb;
981
982 /* All "large" alignment decls come before all "small" alignment
983 decls, but "large" alignment decls are not sorted based on
984 their alignment. Increase large_align to track the largest
985 required alignment. */
986 if ((alignb * BITS_PER_UNIT) > large_align)
987 large_align = alignb * BITS_PER_UNIT;
988
989 /* Stop when we get to the first decl with "small" alignment. */
990 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
991 break;
992
993 /* Skip variables that aren't partition representatives. */
994 if (stack_vars[i].representative != i)
995 continue;
996
997 /* Skip variables that have already had rtl assigned. See also
998 add_stack_var where we perpetrate this pc_rtx hack. */
999 decl = stack_vars[i].decl;
1000 if ((TREE_CODE (decl) == SSA_NAME
1001 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1002 : DECL_RTL (decl)) != pc_rtx)
1003 continue;
1004
1005 large_size += alignb - 1;
1006 large_size &= -(HOST_WIDE_INT)alignb;
1007 large_size += stack_vars[i].size;
1008 }
1009
1010 /* If there were any, allocate space. */
1011 if (large_size > 0)
1012 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
1013 large_align, true);
1014 }
1015
1016 for (si = 0; si < n; ++si)
1017 {
1018 rtx base;
1019 unsigned base_align, alignb;
1020 HOST_WIDE_INT offset;
1021
1022 i = stack_vars_sorted[si];
1023
1024 /* Skip variables that aren't partition representatives, for now. */
1025 if (stack_vars[i].representative != i)
1026 continue;
1027
1028 /* Skip variables that have already had rtl assigned. See also
1029 add_stack_var where we perpetrate this pc_rtx hack. */
1030 decl = stack_vars[i].decl;
1031 if ((TREE_CODE (decl) == SSA_NAME
1032 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1033 : DECL_RTL (decl)) != pc_rtx)
1034 continue;
1035
1036 /* Check the predicate to see whether this variable should be
1037 allocated in this pass. */
1038 if (pred && !pred (i))
1039 continue;
1040
1041 alignb = stack_vars[i].alignb;
1042 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1043 {
1044 base = virtual_stack_vars_rtx;
1045 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
1046 {
1047 HOST_WIDE_INT prev_offset
1048 = align_base (frame_offset,
1049 MAX (alignb, ASAN_RED_ZONE_SIZE),
1050 FRAME_GROWS_DOWNWARD);
1051 tree repr_decl = NULL_TREE;
1052 offset
1053 = alloc_stack_frame_space (stack_vars[i].size
1054 + ASAN_RED_ZONE_SIZE,
1055 MAX (alignb, ASAN_RED_ZONE_SIZE));
1056
1057 data->asan_vec.safe_push (prev_offset);
1058 data->asan_vec.safe_push (offset + stack_vars[i].size);
1059 /* Find best representative of the partition.
1060 Prefer those with DECL_NAME, even better
1061 satisfying asan_protect_stack_decl predicate. */
1062 for (j = i; j != EOC; j = stack_vars[j].next)
1063 if (asan_protect_stack_decl (stack_vars[j].decl)
1064 && DECL_NAME (stack_vars[j].decl))
1065 {
1066 repr_decl = stack_vars[j].decl;
1067 break;
1068 }
1069 else if (repr_decl == NULL_TREE
1070 && DECL_P (stack_vars[j].decl)
1071 && DECL_NAME (stack_vars[j].decl))
1072 repr_decl = stack_vars[j].decl;
1073 if (repr_decl == NULL_TREE)
1074 repr_decl = stack_vars[i].decl;
1075 data->asan_decl_vec.safe_push (repr_decl);
1076 data->asan_alignb = MAX (data->asan_alignb, alignb);
1077 if (data->asan_base == NULL)
1078 data->asan_base = gen_reg_rtx (Pmode);
1079 base = data->asan_base;
1080
1081 if (!STRICT_ALIGNMENT)
1082 base_align = crtl->max_used_stack_slot_alignment;
1083 else
1084 base_align = MAX (crtl->max_used_stack_slot_alignment,
1085 GET_MODE_ALIGNMENT (SImode)
1086 << ASAN_SHADOW_SHIFT);
1087 }
1088 else
1089 {
1090 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1091 base_align = crtl->max_used_stack_slot_alignment;
1092 }
1093 }
1094 else
1095 {
1096 /* Large alignment is only processed in the last pass. */
1097 if (pred)
1098 continue;
1099 gcc_assert (large_base != NULL);
1100
1101 large_alloc += alignb - 1;
1102 large_alloc &= -(HOST_WIDE_INT)alignb;
1103 offset = large_alloc;
1104 large_alloc += stack_vars[i].size;
1105
1106 base = large_base;
1107 base_align = large_align;
1108 }
1109
1110 /* Create rtl for each variable based on their location within the
1111 partition. */
1112 for (j = i; j != EOC; j = stack_vars[j].next)
1113 {
1114 expand_one_stack_var_at (stack_vars[j].decl,
1115 base, base_align,
1116 offset);
1117 }
1118 }
1119
1120 gcc_assert (large_alloc == large_size);
1121 }
1122
1123 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1124 static HOST_WIDE_INT
1125 account_stack_vars (void)
1126 {
1127 size_t si, j, i, n = stack_vars_num;
1128 HOST_WIDE_INT size = 0;
1129
1130 for (si = 0; si < n; ++si)
1131 {
1132 i = stack_vars_sorted[si];
1133
1134 /* Skip variables that aren't partition representatives, for now. */
1135 if (stack_vars[i].representative != i)
1136 continue;
1137
1138 size += stack_vars[i].size;
1139 for (j = i; j != EOC; j = stack_vars[j].next)
1140 set_rtl (stack_vars[j].decl, NULL);
1141 }
1142 return size;
1143 }
1144
1145 /* A subroutine of expand_one_var. Called to immediately assign rtl
1146 to a variable to be allocated in the stack frame. */
1147
1148 static void
1149 expand_one_stack_var (tree var)
1150 {
1151 HOST_WIDE_INT size, offset;
1152 unsigned byte_align;
1153
1154 size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
1155 byte_align = align_local_variable (SSAVAR (var));
1156
1157 /* We handle highly aligned variables in expand_stack_vars. */
1158 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1159
1160 offset = alloc_stack_frame_space (size, byte_align);
1161
1162 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1163 crtl->max_used_stack_slot_alignment, offset);
1164 }
1165
1166 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1167 that will reside in a hard register. */
1168
1169 static void
1170 expand_one_hard_reg_var (tree var)
1171 {
1172 rest_of_decl_compilation (var, 0, 0);
1173 }
1174
1175 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1176 that will reside in a pseudo register. */
1177
1178 static void
1179 expand_one_register_var (tree var)
1180 {
1181 tree decl = SSAVAR (var);
1182 tree type = TREE_TYPE (decl);
1183 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1184 rtx x = gen_reg_rtx (reg_mode);
1185
1186 set_rtl (var, x);
1187
1188 /* Note if the object is a user variable. */
1189 if (!DECL_ARTIFICIAL (decl))
1190 mark_user_reg (x);
1191
1192 if (POINTER_TYPE_P (type))
1193 mark_reg_pointer (x, get_pointer_alignment (var));
1194 }
1195
1196 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1197 has some associated error, e.g. its type is error-mark. We just need
1198 to pick something that won't crash the rest of the compiler. */
1199
1200 static void
1201 expand_one_error_var (tree var)
1202 {
1203 machine_mode mode = DECL_MODE (var);
1204 rtx x;
1205
1206 if (mode == BLKmode)
1207 x = gen_rtx_MEM (BLKmode, const0_rtx);
1208 else if (mode == VOIDmode)
1209 x = const0_rtx;
1210 else
1211 x = gen_reg_rtx (mode);
1212
1213 SET_DECL_RTL (var, x);
1214 }
1215
1216 /* A subroutine of expand_one_var. VAR is a variable that will be
1217 allocated to the local stack frame. Return true if we wish to
1218 add VAR to STACK_VARS so that it will be coalesced with other
1219 variables. Return false to allocate VAR immediately.
1220
1221 This function is used to reduce the number of variables considered
1222 for coalescing, which reduces the size of the quadratic problem. */
1223
1224 static bool
1225 defer_stack_allocation (tree var, bool toplevel)
1226 {
1227 /* Whether the variable is small enough for immediate allocation not to be
1228 a problem with regard to the frame size. */
1229 bool smallish
1230 = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
1231 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1232
1233 /* If stack protection is enabled, *all* stack variables must be deferred,
1234 so that we can re-order the strings to the top of the frame.
1235 Similarly for Address Sanitizer. */
1236 if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
1237 return true;
1238
1239 /* We handle "large" alignment via dynamic allocation. We want to handle
1240 this extra complication in only one place, so defer them. */
1241 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1242 return true;
1243
1244 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1245 might be detached from their block and appear at toplevel when we reach
1246 here. We want to coalesce them with variables from other blocks when
1247 the immediate contribution to the frame size would be noticeable. */
1248 if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
1249 return true;
1250
1251 /* Variables declared in the outermost scope automatically conflict
1252 with every other variable. The only reason to want to defer them
1253 at all is that, after sorting, we can more efficiently pack
1254 small variables in the stack frame. Continue to defer at -O2. */
1255 if (toplevel && optimize < 2)
1256 return false;
1257
1258 /* Without optimization, *most* variables are allocated from the
1259 stack, which makes the quadratic problem large exactly when we
1260 want compilation to proceed as quickly as possible. On the
1261 other hand, we don't want the function's stack frame size to
1262 get completely out of hand. So we avoid adding scalars and
1263 "small" aggregates to the list at all. */
1264 if (optimize == 0 && smallish)
1265 return false;
1266
1267 return true;
1268 }
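
/* Two illustrative outcomes of the heuristic above (made-up sizes,
   assuming the default --param min-size-for-stack-sharing=32): at -O0 a
   16-byte scalar is "smallish" and is allocated immediately, while with
   -fstack-protector enabled every stack variable is deferred so that
   character buffers can later be re-ordered next to the guard.  */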
1269
1270 /* A subroutine of expand_used_vars. Expand one variable according to
1271 its flavor. Variables to be placed on the stack are not actually
1272 expanded yet, merely recorded.
1273 When REALLY_EXPAND is false, only add stack values to be allocated.
1274 Return the amount of stack space this variable is supposed to take. */
1276
1277 static HOST_WIDE_INT
1278 expand_one_var (tree var, bool toplevel, bool really_expand)
1279 {
1280 unsigned int align = BITS_PER_UNIT;
1281 tree origvar = var;
1282
1283 var = SSAVAR (var);
1284
1285 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1286 {
1287 /* Because we don't know if VAR will be in register or on stack,
1288 we conservatively assume it will be on stack even if VAR is
1289 eventually put into register after RA pass. For non-automatic
1290 variables, which won't be on stack, we collect alignment of
1291 type and ignore user specified alignment. Similarly for
1292 SSA_NAMEs for which use_register_for_decl returns true. */
1293 if (TREE_STATIC (var)
1294 || DECL_EXTERNAL (var)
1295 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1296 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1297 TYPE_MODE (TREE_TYPE (var)),
1298 TYPE_ALIGN (TREE_TYPE (var)));
1299 else if (DECL_HAS_VALUE_EXPR_P (var)
1300 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1301 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1302 or variables which were assigned a stack slot already by
1303 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1304 changed from the offset chosen to it. */
1305 align = crtl->stack_alignment_estimated;
1306 else
1307 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1308
1309 /* If the variable alignment is very large we'll dynamically allocate
1310 it, which means that the in-frame portion is just a pointer. */
1311 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1312 align = POINTER_SIZE;
1313 }
1314
1315 if (SUPPORTS_STACK_ALIGNMENT
1316 && crtl->stack_alignment_estimated < align)
1317 {
1318 /* stack_alignment_estimated shouldn't change after the stack
1319 realign decision has been made. */
1320 gcc_assert (!crtl->stack_realign_processed);
1321 crtl->stack_alignment_estimated = align;
1322 }
1323
1324 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1325 So here we only make sure stack_alignment_needed >= align. */
1326 if (crtl->stack_alignment_needed < align)
1327 crtl->stack_alignment_needed = align;
1328 if (crtl->max_used_stack_slot_alignment < align)
1329 crtl->max_used_stack_slot_alignment = align;
1330
1331 if (TREE_CODE (origvar) == SSA_NAME)
1332 {
1333 gcc_assert (TREE_CODE (var) != VAR_DECL
1334 || (!DECL_EXTERNAL (var)
1335 && !DECL_HAS_VALUE_EXPR_P (var)
1336 && !TREE_STATIC (var)
1337 && TREE_TYPE (var) != error_mark_node
1338 && !DECL_HARD_REGISTER (var)
1339 && really_expand));
1340 }
1341 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1342 ;
1343 else if (DECL_EXTERNAL (var))
1344 ;
1345 else if (DECL_HAS_VALUE_EXPR_P (var))
1346 ;
1347 else if (TREE_STATIC (var))
1348 ;
1349 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1350 ;
1351 else if (TREE_TYPE (var) == error_mark_node)
1352 {
1353 if (really_expand)
1354 expand_one_error_var (var);
1355 }
1356 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1357 {
1358 if (really_expand)
1359 {
1360 expand_one_hard_reg_var (var);
1361 if (!DECL_HARD_REGISTER (var))
1362 /* Invalid register specification. */
1363 expand_one_error_var (var);
1364 }
1365 }
1366 else if (use_register_for_decl (var))
1367 {
1368 if (really_expand)
1369 expand_one_register_var (origvar);
1370 }
1371 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1372 {
1373 /* Reject variables which cover more than half of the address-space. */
1374 if (really_expand)
1375 {
1376 error ("size of variable %q+D is too large", var);
1377 expand_one_error_var (var);
1378 }
1379 }
1380 else if (defer_stack_allocation (var, toplevel))
1381 add_stack_var (origvar);
1382 else
1383 {
1384 if (really_expand)
1385 expand_one_stack_var (origvar);
1386 return tree_to_uhwi (DECL_SIZE_UNIT (var));
1387 }
1388 return 0;
1389 }
1390
1391 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1392 expanding variables. Those variables that can be put into registers
1393 are allocated pseudos; those that can't are put on the stack.
1394
1395 TOPLEVEL is true if this is the outermost BLOCK. */
1396
1397 static void
1398 expand_used_vars_for_block (tree block, bool toplevel)
1399 {
1400 tree t;
1401
1402 /* Expand all variables at this level. */
1403 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1404 if (TREE_USED (t)
1405 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1406 || !DECL_NONSHAREABLE (t)))
1407 expand_one_var (t, toplevel, true);
1408
1409 /* Expand all variables at nested levels. */
1410 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1411 expand_used_vars_for_block (t, false);
1412 }
1413
1414 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1415 and clear TREE_USED on all local variables. */
1416
1417 static void
1418 clear_tree_used (tree block)
1419 {
1420 tree t;
1421
1422 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1423 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1424 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1425 || !DECL_NONSHAREABLE (t))
1426 TREE_USED (t) = 0;
1427
1428 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1429 clear_tree_used (t);
1430 }
1431
1432 enum {
1433 SPCT_FLAG_DEFAULT = 1,
1434 SPCT_FLAG_ALL = 2,
1435 SPCT_FLAG_STRONG = 3,
1436 SPCT_FLAG_EXPLICIT = 4
1437 };
1438
1439 /* Examine TYPE and determine a bit mask of the following features. */
1440
1441 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1442 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1443 #define SPCT_HAS_ARRAY 4
1444 #define SPCT_HAS_AGGREGATE 8
1445
1446 static unsigned int
1447 stack_protect_classify_type (tree type)
1448 {
1449 unsigned int ret = 0;
1450 tree t;
1451
1452 switch (TREE_CODE (type))
1453 {
1454 case ARRAY_TYPE:
1455 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1456 if (t == char_type_node
1457 || t == signed_char_type_node
1458 || t == unsigned_char_type_node)
1459 {
1460 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1461 unsigned HOST_WIDE_INT len;
1462
1463 if (!TYPE_SIZE_UNIT (type)
1464 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1465 len = max;
1466 else
1467 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1468
1469 if (len < max)
1470 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1471 else
1472 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1473 }
1474 else
1475 ret = SPCT_HAS_ARRAY;
1476 break;
1477
1478 case UNION_TYPE:
1479 case QUAL_UNION_TYPE:
1480 case RECORD_TYPE:
1481 ret = SPCT_HAS_AGGREGATE;
1482 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1483 if (TREE_CODE (t) == FIELD_DECL)
1484 ret |= stack_protect_classify_type (TREE_TYPE (t));
1485 break;
1486
1487 default:
1488 break;
1489 }
1490
1491 return ret;
1492 }
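
/* Illustrative classifications, assuming the default
   --param ssp-buffer-size=8: "char buf[4]" yields
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, "char buf[64]" yields
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, "int a[4]" yields just
   SPCT_HAS_ARRAY, and "struct { char c[64]; }" yields
   SPCT_HAS_AGGREGATE plus the flags of its field.  */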
1493
1494 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1495 part of the local stack frame. Remember if we ever return nonzero for
1496 any variable in this function. The return value is the phase number in
1497 which the variable should be allocated. */
1498
1499 static int
1500 stack_protect_decl_phase (tree decl)
1501 {
1502 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1503 int ret = 0;
1504
1505 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1506 has_short_buffer = true;
1507
1508 if (flag_stack_protect == SPCT_FLAG_ALL
1509 || flag_stack_protect == SPCT_FLAG_STRONG
1510 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1511 && lookup_attribute ("stack_protect",
1512 DECL_ATTRIBUTES (current_function_decl))))
1513 {
1514 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1515 && !(bits & SPCT_HAS_AGGREGATE))
1516 ret = 1;
1517 else if (bits & SPCT_HAS_ARRAY)
1518 ret = 2;
1519 }
1520 else
1521 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1522
1523 if (ret)
1524 has_protected_decls = true;
1525
1526 return ret;
1527 }
1528
1529 /* Two helper routines that check for phase 1 and phase 2. These are used
1530 as callbacks for expand_stack_vars. */
1531
1532 static bool
1533 stack_protect_decl_phase_1 (size_t i)
1534 {
1535 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1536 }
1537
1538 static bool
1539 stack_protect_decl_phase_2 (size_t i)
1540 {
1541 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1542 }
1543
1544 /* A helper function that checks for the asan phase (with stack protector
1545 it is phase 3). This is used as a callback for expand_stack_vars.
1546 Returns true if any of the vars in the partition need to be protected. */
1547
1548 static bool
1549 asan_decl_phase_3 (size_t i)
1550 {
1551 while (i != EOC)
1552 {
1553 if (asan_protect_stack_decl (stack_vars[i].decl))
1554 return true;
1555 i = stack_vars[i].next;
1556 }
1557 return false;
1558 }
1559
1560 /* Ensure that variables in different stack protection phases conflict
1561 so that they are not merged and share the same stack slot. */
1562
1563 static void
1564 add_stack_protection_conflicts (void)
1565 {
1566 size_t i, j, n = stack_vars_num;
1567 unsigned char *phase;
1568
1569 phase = XNEWVEC (unsigned char, n);
1570 for (i = 0; i < n; ++i)
1571 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1572
1573 for (i = 0; i < n; ++i)
1574 {
1575 unsigned char ph_i = phase[i];
1576 for (j = i + 1; j < n; ++j)
1577 if (ph_i != phase[j])
1578 add_stack_var_conflict (i, j);
1579 }
1580
1581 XDELETEVEC (phase);
1582 }
1583
1584 /* Create a decl for the guard at the top of the stack frame. */
1585
1586 static void
1587 create_stack_guard (void)
1588 {
1589 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1590 VAR_DECL, NULL, ptr_type_node);
1591 TREE_THIS_VOLATILE (guard) = 1;
1592 TREE_USED (guard) = 1;
1593 expand_one_stack_var (guard);
1594 crtl->stack_protect_guard = guard;
1595 }
1596
1597 /* Prepare for expanding variables. */
1598 static void
1599 init_vars_expansion (void)
1600 {
1601 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1602 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1603
1604 /* A map from decl to stack partition. */
1605 decl_to_stack_part = new hash_map<tree, size_t>;
1606
1607 /* Initialize local stack smashing state. */
1608 has_protected_decls = false;
1609 has_short_buffer = false;
1610 }
1611
1612 /* Free up stack variable graph data. */
1613 static void
1614 fini_vars_expansion (void)
1615 {
1616 bitmap_obstack_release (&stack_var_bitmap_obstack);
1617 if (stack_vars)
1618 XDELETEVEC (stack_vars);
1619 if (stack_vars_sorted)
1620 XDELETEVEC (stack_vars_sorted);
1621 stack_vars = NULL;
1622 stack_vars_sorted = NULL;
1623 stack_vars_alloc = stack_vars_num = 0;
1624 delete decl_to_stack_part;
1625 decl_to_stack_part = NULL;
1626 }
1627
1628 /* Make a fair guess for the size of the stack frame of the function
1629 in NODE. This doesn't have to be exact, the result is only used in
1630 the inline heuristics. So we don't want to run the full stack var
1631 packing algorithm (which is quadratic in the number of stack vars).
1632 Instead, we calculate the total size of all stack vars. This turns
1633 out to be a pretty fair estimate -- packing of stack vars doesn't
1634 happen very often. */
1635
1636 HOST_WIDE_INT
1637 estimated_stack_frame_size (struct cgraph_node *node)
1638 {
1639 HOST_WIDE_INT size = 0;
1640 size_t i;
1641 tree var;
1642 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1643
1644 push_cfun (fn);
1645
1646 init_vars_expansion ();
1647
1648 FOR_EACH_LOCAL_DECL (fn, i, var)
1649 if (auto_var_in_fn_p (var, fn->decl))
1650 size += expand_one_var (var, true, false);
1651
1652 if (stack_vars_num > 0)
1653 {
1654 /* Fake sorting the stack vars for account_stack_vars (). */
1655 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1656 for (i = 0; i < stack_vars_num; ++i)
1657 stack_vars_sorted[i] = i;
1658 size += account_stack_vars ();
1659 }
1660
1661 fini_vars_expansion ();
1662 pop_cfun ();
1663 return size;
1664 }
1665
1666 /* Helper routine to check if a record or union contains an array field. */
1667
1668 static int
1669 record_or_union_type_has_array_p (const_tree tree_type)
1670 {
1671 tree fields = TYPE_FIELDS (tree_type);
1672 tree f;
1673
1674 for (f = fields; f; f = DECL_CHAIN (f))
1675 if (TREE_CODE (f) == FIELD_DECL)
1676 {
1677 tree field_type = TREE_TYPE (f);
1678 if (RECORD_OR_UNION_TYPE_P (field_type)
1679 && record_or_union_type_has_array_p (field_type))
1680 return 1;
1681 if (TREE_CODE (field_type) == ARRAY_TYPE)
1682 return 1;
1683 }
1684 return 0;
1685 }
1686
1687 /* Check if the current function has local referenced variables that
1688 have their addresses taken, contain an array, or are arrays. */
1689
1690 static bool
1691 stack_protect_decl_p ()
1692 {
1693 unsigned i;
1694 tree var;
1695
1696 FOR_EACH_LOCAL_DECL (cfun, i, var)
1697 if (!is_global_var (var))
1698 {
1699 tree var_type = TREE_TYPE (var);
1700 if (TREE_CODE (var) == VAR_DECL
1701 && (TREE_CODE (var_type) == ARRAY_TYPE
1702 || TREE_ADDRESSABLE (var)
1703 || (RECORD_OR_UNION_TYPE_P (var_type)
1704 && record_or_union_type_has_array_p (var_type))))
1705 return true;
1706 }
1707 return false;
1708 }
1709
1710 /* Check if the current function has calls that use a return slot. */
1711
1712 static bool
1713 stack_protect_return_slot_p ()
1714 {
1715 basic_block bb;
1716
1717 FOR_ALL_BB_FN (bb, cfun)
1718 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1719 !gsi_end_p (gsi); gsi_next (&gsi))
1720 {
1721 gimple stmt = gsi_stmt (gsi);
1722 /* This assumes that calls to internal-only functions never
1723 use a return slot. */
1724 if (is_gimple_call (stmt)
1725 && !gimple_call_internal_p (stmt)
1726 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
1727 gimple_call_fndecl (stmt)))
1728 return true;
1729 }
1730 return false;
1731 }
1732
1733 /* Expand all variables used in the function. */
1734
1735 static rtx_insn *
1736 expand_used_vars (void)
1737 {
1738 tree var, outer_block = DECL_INITIAL (current_function_decl);
1739 vec<tree> maybe_local_decls = vNULL;
1740 rtx_insn *var_end_seq = NULL;
1741 unsigned i;
1742 unsigned len;
1743 bool gen_stack_protect_signal = false;
1744
1745 /* Compute the phase of the stack frame for this function. */
1746 {
1747 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1748 int off = STARTING_FRAME_OFFSET % align;
1749 frame_phase = off ? align - off : 0;
1750 }
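
  /* Illustrative numbers for the phase computation above: with
     PREFERRED_STACK_BOUNDARY == 128 (a 16-byte boundary) and a
     hypothetical STARTING_FRAME_OFFSET of 12, align == 16 and off == 12,
     giving frame_phase == 4; alloc_stack_frame_space then aligns slots
     relative to that known misalignment.  */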
1751
1752 /* Set TREE_USED on all variables in the local_decls. */
1753 FOR_EACH_LOCAL_DECL (cfun, i, var)
1754 TREE_USED (var) = 1;
1755 /* Clear TREE_USED on all variables associated with a block scope. */
1756 clear_tree_used (DECL_INITIAL (current_function_decl));
1757
1758 init_vars_expansion ();
1759
1760 if (targetm.use_pseudo_pic_reg ())
1761 pic_offset_table_rtx = gen_reg_rtx (Pmode);
1762
1763 hash_map<tree, tree> ssa_name_decls;
1764 for (i = 0; i < SA.map->num_partitions; i++)
1765 {
1766 tree var = partition_to_var (SA.map, i);
1767
1768 gcc_assert (!virtual_operand_p (var));
1769
1770 /* Assign decls to each SSA name partition, share decls for partitions
1771 we could have coalesced (those with the same type). */
1772 if (SSA_NAME_VAR (var) == NULL_TREE)
1773 {
1774 tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
1775 if (!*slot)
1776 *slot = create_tmp_reg (TREE_TYPE (var));
1777 replace_ssa_name_symbol (var, *slot);
1778 }
1779
1780 /* Always allocate space for partitions based on VAR_DECLs. But for
1781 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1782 debug info, there is no need to do so if optimization is disabled
1783 because all the SSA_NAMEs based on these DECLs have been coalesced
1784 into a single partition, which is thus assigned the canonical RTL
1785 location of the DECLs. If in_lto_p, we can't rely on optimize,
1786 a function could be compiled with -O1 -flto first and only the
1787 link performed at -O0. */
1788 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1789 expand_one_var (var, true, true);
1790 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
1791 {
1792 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1793 contain the default def (representing the parm or result itself)
1794 we don't do anything here. But those which don't contain the
1795 default def (representing a temporary based on the parm/result)
1796 we need to allocate space just like for normal VAR_DECLs. */
1797 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1798 {
1799 expand_one_var (var, true, true);
1800 gcc_assert (SA.partition_to_pseudo[i]);
1801 }
1802 }
1803 }
1804
1805 if (flag_stack_protect == SPCT_FLAG_STRONG)
1806 gen_stack_protect_signal
1807 = stack_protect_decl_p () || stack_protect_return_slot_p ();
1808
1809 /* At this point all variables on the local_decls with TREE_USED
1810 set are not associated with any block scope. Lay them out. */
1811
1812 len = vec_safe_length (cfun->local_decls);
1813 FOR_EACH_LOCAL_DECL (cfun, i, var)
1814 {
1815 bool expand_now = false;
1816
1817 /* Expanded above already. */
1818 if (is_gimple_reg (var))
1819 {
1820 TREE_USED (var) = 0;
1821 goto next;
1822 }
1823 /* We didn't set a block for static or extern because it's hard
1824 to tell the difference between a global variable (re)declared
1825 in a local scope, and one that's really declared there to
1826 begin with. And it doesn't really matter much, since we're
1827 not giving them stack space. Expand them now. */
1828 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1829 expand_now = true;
1830
1831 /* Expand variables not associated with any block now. Those created by
1832 the optimizers could be live anywhere in the function. Those that
1833 could possibly have been scoped originally and detached from their
1834 block will have their allocation deferred so we coalesce them with
1835 others when optimization is enabled. */
1836 else if (TREE_USED (var))
1837 expand_now = true;
1838
1839 /* Finally, mark all variables on the list as used. We'll use
1840 this in a moment when we expand those associated with scopes. */
1841 TREE_USED (var) = 1;
1842
1843 if (expand_now)
1844 expand_one_var (var, true, true);
1845
1846 next:
1847 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1848 {
1849 rtx rtl = DECL_RTL_IF_SET (var);
1850
1851 /* Keep artificial non-ignored vars in cfun->local_decls
1852 chain until instantiate_decls. */
1853 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1854 add_local_decl (cfun, var);
1855 else if (rtl == NULL_RTX)
1856 /* If rtl isn't set yet, which can happen e.g. with
1857 -fstack-protector, retry before returning from this
1858 function. */
1859 maybe_local_decls.safe_push (var);
1860 }
1861 }
1862
1863 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1864
1865 +-----------------+-----------------+
1866 | ...processed... | ...duplicates...|
1867 +-----------------+-----------------+
1868 ^
1869 +-- LEN points here.
1870
1871 We just want the duplicates, as those are the artificial
1872 non-ignored vars that we want to keep until instantiate_decls.
1873 Move them down and truncate the array. */
1874 if (!vec_safe_is_empty (cfun->local_decls))
1875 cfun->local_decls->block_remove (0, len);
1876
1877 /* At this point, all variables within the block tree with TREE_USED
1878 set are actually used by the optimized function. Lay them out. */
1879 expand_used_vars_for_block (outer_block, true);
1880
1881 if (stack_vars_num > 0)
1882 {
1883 add_scope_conflicts ();
1884
1885 /* If stack protection is enabled, we don't share space between
1886 vulnerable data and non-vulnerable data. */
1887 if (flag_stack_protect != 0
1888 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
1889 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1890 && lookup_attribute ("stack_protect",
1891 DECL_ATTRIBUTES (current_function_decl)))))
1892 add_stack_protection_conflicts ();
1893
1894 /* Now that we have collected all stack variables, and have computed a
1895 minimal interference graph, attempt to save some stack space. */
1896 partition_stack_vars ();
1897 if (dump_file)
1898 dump_stack_var_partition ();
1899 }
1900
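  /* As a reminder (assuming the usual option mapping): SPCT_FLAG_ALL,
     SPCT_FLAG_STRONG, SPCT_FLAG_DEFAULT and SPCT_FLAG_EXPLICIT correspond
     to -fstack-protector-all, -fstack-protector-strong, -fstack-protector
     and -fstack-protector-explicit respectively.  */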
1901 switch (flag_stack_protect)
1902 {
1903 case SPCT_FLAG_ALL:
1904 create_stack_guard ();
1905 break;
1906
1907 case SPCT_FLAG_STRONG:
1908 if (gen_stack_protect_signal
1909 || cfun->calls_alloca || has_protected_decls
1910 || lookup_attribute ("stack_protect",
1911 DECL_ATTRIBUTES (current_function_decl)))
1912 create_stack_guard ();
1913 break;
1914
1915 case SPCT_FLAG_DEFAULT:
1916 if (cfun->calls_alloca || has_protected_decls
1917 || lookup_attribute ("stack_protect",
1918 DECL_ATTRIBUTES (current_function_decl)))
1919 create_stack_guard ();
1920 break;
1921
1922 case SPCT_FLAG_EXPLICIT:
1923 if (lookup_attribute ("stack_protect",
1924 DECL_ATTRIBUTES (current_function_decl)))
1925 create_stack_guard ();
1926 break;
1927 default:
1928 ;
1929 }
1930
1931 /* Assign rtl to each variable based on these partitions. */
1932 if (stack_vars_num > 0)
1933 {
1934 struct stack_vars_data data;
1935
1936 data.asan_vec = vNULL;
1937 data.asan_decl_vec = vNULL;
1938 data.asan_base = NULL_RTX;
1939 data.asan_alignb = 0;
1940
1941 /* Reorder decls to be protected by iterating over the variables
1942 array multiple times, and allocating out of each phase in turn. */
1943 /* ??? We could probably integrate this into the qsort we did
1944 earlier, such that we naturally see these variables first,
1945 and thus naturally allocate things in the right order. */
1946 if (has_protected_decls)
1947 {
1948 /* Phase 1 contains only character arrays. */
1949 expand_stack_vars (stack_protect_decl_phase_1, &data);
1950
1951 /* Phase 2 contains other kinds of arrays. */
1952 if (flag_stack_protect == SPCT_FLAG_ALL
1953 || flag_stack_protect == SPCT_FLAG_STRONG
1954 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1955 && lookup_attribute ("stack_protect",
1956 DECL_ATTRIBUTES (current_function_decl))))
1957 expand_stack_vars (stack_protect_decl_phase_2, &data);
1958 }
1959
1960 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
1961 /* Phase 3, any partitions that need asan protection
1962 in addition to phase 1 and 2. */
1963 expand_stack_vars (asan_decl_phase_3, &data);
1964
1965 if (!data.asan_vec.is_empty ())
1966 {
1967 HOST_WIDE_INT prev_offset = frame_offset;
1968 HOST_WIDE_INT offset, sz, redzonesz;
1969 redzonesz = ASAN_RED_ZONE_SIZE;
1970 sz = data.asan_vec[0] - prev_offset;
1971 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1972 && data.asan_alignb <= 4096
1973 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
1974 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1975 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
1976 offset
1977 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1978 data.asan_vec.safe_push (prev_offset);
1979 data.asan_vec.safe_push (offset);
1980 /* Leave space for alignment if STRICT_ALIGNMENT. */
1981 if (STRICT_ALIGNMENT)
1982 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1983 << ASAN_SHADOW_SHIFT)
1984 / BITS_PER_UNIT, 1);
1985
1986 var_end_seq
1987 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1988 data.asan_base,
1989 data.asan_alignb,
1990 data.asan_vec.address (),
1991 data.asan_decl_vec.address (),
1992 data.asan_vec.length ());
1993 }
1994
1995 expand_stack_vars (NULL, &data);
1996
1997 data.asan_vec.release ();
1998 data.asan_decl_vec.release ();
1999 }
2000
2001 fini_vars_expansion ();
2002
2003 /* If there were any artificial non-ignored vars without rtl
2004 found earlier, see if deferred stack allocation hasn't assigned
2005 rtl to them. */
2006 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2007 {
2008 rtx rtl = DECL_RTL_IF_SET (var);
2009
2010 /* Keep artificial non-ignored vars in cfun->local_decls
2011 chain until instantiate_decls. */
2012 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2013 add_local_decl (cfun, var);
2014 }
2015 maybe_local_decls.release ();
2016
2017 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2018 if (STACK_ALIGNMENT_NEEDED)
2019 {
2020 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2021 if (!FRAME_GROWS_DOWNWARD)
2022 frame_offset += align - 1;
2023 frame_offset &= -align;
2024 }
2025
2026 return var_end_seq;
2027 }
2028
2029
2030 /* If we need to produce a detailed dump, print the tree representation
2031 for STMT to the dump file. SINCE is the last RTX after which the RTL
2032 generated for STMT should have been appended. */
2033
2034 static void
2035 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
2036 {
2037 if (dump_file && (dump_flags & TDF_DETAILS))
2038 {
2039 fprintf (dump_file, "\n;; ");
2040 print_gimple_stmt (dump_file, stmt, 0,
2041 TDF_SLIM | (dump_flags & TDF_LINENO));
2042 fprintf (dump_file, "\n");
2043
2044 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2045 }
2046 }
2047
2048 /* Maps the blocks that do not contain tree labels to rtx labels. */
2049
2050 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2051
2052 /* Returns the label_rtx expression for a label starting basic block BB. */
2053
2054 static rtx
2055 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2056 {
2057 gimple_stmt_iterator gsi;
2058 tree lab;
2059
2060 if (bb->flags & BB_RTL)
2061 return block_label (bb);
2062
2063 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2064 if (elt)
2065 return *elt;
2066
2067 /* Find the tree label if it is present. */
2068
2069 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2070 {
2071 glabel *lab_stmt;
2072
2073 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2074 if (!lab_stmt)
2075 break;
2076
2077 lab = gimple_label_label (lab_stmt);
2078 if (DECL_NONLOCAL (lab))
2079 break;
2080
2081 return label_rtx (lab);
2082 }
2083
2084 rtx_code_label *l = gen_label_rtx ();
2085 lab_rtx_for_bb->put (bb, l);
2086 return l;
2087 }
2088
2089
2090 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2091 of a basic block where we just expanded the conditional at the end,
2092 possibly clean up the CFG and instruction sequence. LAST is the
2093 last instruction before the just emitted jump sequence. */
2094
2095 static void
2096 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2097 {
2098 /* Special case: when jumpif decides that the condition is
2099 trivial it emits an unconditional jump (and the necessary
2100 barrier). But we still have two edges, the fallthru one is
2101 wrong. purge_dead_edges would clean this up later. Unfortunately
2102 we have to insert insns (and split edges) before
2103 find_many_sub_basic_blocks and hence before purge_dead_edges.
2104 But splitting edges might create new blocks which depend on the
2105 fact that if there are two edges there's no barrier. So the
2106 barrier would get lost and verify_flow_info would ICE. Instead
2107 of auditing all edge splitters to care for the barrier (which
2108 normally isn't there in a cleaned CFG), fix it here. */
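  /* A sketch of the insn stream being repaired (illustrative shape, not
     dumped from a real compilation):

	 ... LAST ...
	 (jump_insn ... conditional jump to L1 ...)
	 (jump_insn ... unconditional jump to L1 ...)
	 (barrier)

     The loop below walks backwards from the final unconditional jump and
     deletes the redundant earlier jumps together with their barriers.  */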
2109 if (BARRIER_P (get_last_insn ()))
2110 {
2111 rtx_insn *insn;
2112 remove_edge (e);
2113 /* Now we have a single successor block; if we have insns to
2114 insert on the remaining edge, we will potentially insert
2115 them at the end of this block (if the dest block isn't feasible)
2116 in order to avoid splitting the edge. This insertion will take
2117 place in front of the last jump. But we might have emitted
2118 multiple jumps (conditional and one unconditional) to the
2119 same destination. Inserting in front of the last one then
2120 is a problem. See PR 40021. We fix this by deleting all
2121 jumps except the last unconditional one. */
2122 insn = PREV_INSN (get_last_insn ());
2123 /* Make sure we have an unconditional jump. Otherwise we're
2124 confused. */
2125 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2126 for (insn = PREV_INSN (insn); insn != last;)
2127 {
2128 insn = PREV_INSN (insn);
2129 if (JUMP_P (NEXT_INSN (insn)))
2130 {
2131 if (!any_condjump_p (NEXT_INSN (insn)))
2132 {
2133 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2134 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2135 }
2136 delete_insn (NEXT_INSN (insn));
2137 }
2138 }
2139 }
2140 }
2141
2142 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2143 Returns a new basic block if we've terminated the current basic
2144 block and created a new one. */
2145
2146 static basic_block
2147 expand_gimple_cond (basic_block bb, gcond *stmt)
2148 {
2149 basic_block new_bb, dest;
2150 edge new_edge;
2151 edge true_edge;
2152 edge false_edge;
2153 rtx_insn *last2, *last;
2154 enum tree_code code;
2155 tree op0, op1;
2156
2157 code = gimple_cond_code (stmt);
2158 op0 = gimple_cond_lhs (stmt);
2159 op1 = gimple_cond_rhs (stmt);
2160 /* We're sometimes presented with such code:
2161 D.123_1 = x < y;
2162 if (D.123_1 != 0)
2163 ...
2164 This would expand to two comparisons which then later might
2165 be cleaned up by combine. But some pattern matchers like if-conversion
2166 work better when there's only one compare, so make up for this
2167 here as special exception if TER would have made the same change. */
2168 if (SA.values
2169 && TREE_CODE (op0) == SSA_NAME
2170 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2171 && TREE_CODE (op1) == INTEGER_CST
2172 && ((gimple_cond_code (stmt) == NE_EXPR
2173 && integer_zerop (op1))
2174 || (gimple_cond_code (stmt) == EQ_EXPR
2175 && integer_onep (op1)))
2176 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2177 {
2178 gimple second = SSA_NAME_DEF_STMT (op0);
2179 if (gimple_code (second) == GIMPLE_ASSIGN)
2180 {
2181 enum tree_code code2 = gimple_assign_rhs_code (second);
2182 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2183 {
2184 code = code2;
2185 op0 = gimple_assign_rhs1 (second);
2186 op1 = gimple_assign_rhs2 (second);
2187 }
2188 /* If jumps are cheap and the target does not support conditional
2189 compare, turn some more codes into jumpy sequences. */
2190 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2191 && targetm.gen_ccmp_first == NULL)
2192 {
2193 if ((code2 == BIT_AND_EXPR
2194 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2195 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2196 || code2 == TRUTH_AND_EXPR)
2197 {
2198 code = TRUTH_ANDIF_EXPR;
2199 op0 = gimple_assign_rhs1 (second);
2200 op1 = gimple_assign_rhs2 (second);
2201 }
2202 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2203 {
2204 code = TRUTH_ORIF_EXPR;
2205 op0 = gimple_assign_rhs1 (second);
2206 op1 = gimple_assign_rhs2 (second);
2207 }
2208 }
2209 }
2210 }
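  /* For example (an illustrative GIMPLE sketch, assuming 1-bit operands):

	 _1 = a_2 & b_3;
	 if (_1 != 0) goto <then>; else goto <else>;

     may now be expanded as if it were "if (a_2 && b_3)", i.e. as two
     cheap jumps, instead of materializing the bitwise AND first.  */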
2211
2212 last2 = last = get_last_insn ();
2213
2214 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2215 set_curr_insn_location (gimple_location (stmt));
2216
2217 /* These flags have no purpose in RTL land. */
2218 true_edge->flags &= ~EDGE_TRUE_VALUE;
2219 false_edge->flags &= ~EDGE_FALSE_VALUE;
2220
2221 /* We can either have a pure conditional jump with one fallthru edge or
2222 a two-way jump that needs to be decomposed into two basic blocks. */
2223 if (false_edge->dest == bb->next_bb)
2224 {
2225 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2226 true_edge->probability);
2227 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2228 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2229 set_curr_insn_location (true_edge->goto_locus);
2230 false_edge->flags |= EDGE_FALLTHRU;
2231 maybe_cleanup_end_of_block (false_edge, last);
2232 return NULL;
2233 }
2234 if (true_edge->dest == bb->next_bb)
2235 {
2236 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2237 false_edge->probability);
2238 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2239 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2240 set_curr_insn_location (false_edge->goto_locus);
2241 true_edge->flags |= EDGE_FALLTHRU;
2242 maybe_cleanup_end_of_block (true_edge, last);
2243 return NULL;
2244 }
2245
2246 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2247 true_edge->probability);
2248 last = get_last_insn ();
2249 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2250 set_curr_insn_location (false_edge->goto_locus);
2251 emit_jump (label_rtx_for_bb (false_edge->dest));
2252
2253 BB_END (bb) = last;
2254 if (BARRIER_P (BB_END (bb)))
2255 BB_END (bb) = PREV_INSN (BB_END (bb));
2256 update_bb_for_insn (bb);
2257
2258 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2259 dest = false_edge->dest;
2260 redirect_edge_succ (false_edge, new_bb);
2261 false_edge->flags |= EDGE_FALLTHRU;
2262 new_bb->count = false_edge->count;
2263 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2264 add_bb_to_loop (new_bb, bb->loop_father);
2265 new_edge = make_edge (new_bb, dest, 0);
2266 new_edge->probability = REG_BR_PROB_BASE;
2267 new_edge->count = new_bb->count;
2268 if (BARRIER_P (BB_END (new_bb)))
2269 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2270 update_bb_for_insn (new_bb);
2271
2272 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2273
2274 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2275 {
2276 set_curr_insn_location (true_edge->goto_locus);
2277 true_edge->goto_locus = curr_insn_location ();
2278 }
2279
2280 return new_bb;
2281 }
2282
2283 /* Mark all calls that can have a transaction restart. */
2284
2285 static void
2286 mark_transaction_restart_calls (gimple stmt)
2287 {
2288 struct tm_restart_node dummy;
2289 tm_restart_node **slot;
2290
2291 if (!cfun->gimple_df->tm_restart)
2292 return;
2293
2294 dummy.stmt = stmt;
2295 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2296 if (slot)
2297 {
2298 struct tm_restart_node *n = *slot;
2299 tree list = n->label_or_list;
2300 rtx_insn *insn;
2301
2302 for (insn = next_real_insn (get_last_insn ());
2303 !CALL_P (insn);
2304 insn = next_real_insn (insn))
2305 continue;
2306
2307 if (TREE_CODE (list) == LABEL_DECL)
2308 add_reg_note (insn, REG_TM, label_rtx (list));
2309 else
2310 for (; list ; list = TREE_CHAIN (list))
2311 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2312 }
2313 }
2314
2315 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2316 statement STMT. */
2317
2318 static void
2319 expand_call_stmt (gcall *stmt)
2320 {
2321 tree exp, decl, lhs;
2322 bool builtin_p;
2323 size_t i;
2324
2325 if (gimple_call_internal_p (stmt))
2326 {
2327 expand_internal_call (stmt);
2328 return;
2329 }
2330
2331 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2332
2333 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2334 decl = gimple_call_fndecl (stmt);
2335 builtin_p = decl && DECL_BUILT_IN (decl);
2336
2337 /* If this is not a builtin function, the function type through which the
2338 call is made may be different from the type of the function. */
2339 if (!builtin_p)
2340 CALL_EXPR_FN (exp)
2341 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2342 CALL_EXPR_FN (exp));
2343
2344 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2345 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2346
2347 for (i = 0; i < gimple_call_num_args (stmt); i++)
2348 {
2349 tree arg = gimple_call_arg (stmt, i);
2350 gimple def;
2351 /* TER forwards addresses into arguments of builtin functions so we
2352 have a chance to infer more correct alignment information. See PR39954. */
2353 if (builtin_p
2354 && TREE_CODE (arg) == SSA_NAME
2355 && (def = get_gimple_for_ssa_name (arg))
2356 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2357 arg = gimple_assign_rhs1 (def);
2358 CALL_EXPR_ARG (exp, i) = arg;
2359 }
2360
2361 if (gimple_has_side_effects (stmt))
2362 TREE_SIDE_EFFECTS (exp) = 1;
2363
2364 if (gimple_call_nothrow_p (stmt))
2365 TREE_NOTHROW (exp) = 1;
2366
2367 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2368 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2369 if (decl
2370 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2371 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2372 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2373 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2374 else
2375 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2376 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2377 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2378 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2379
2380 /* Ensure RTL is created for debug args. */
2381 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2382 {
2383 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2384 unsigned int ix;
2385 tree dtemp;
2386
2387 if (debug_args)
2388 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2389 {
2390 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2391 expand_debug_expr (dtemp);
2392 }
2393 }
2394
2395 lhs = gimple_call_lhs (stmt);
2396 if (lhs)
2397 expand_assignment (lhs, exp, false);
2398 else
2399 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2400
2401 mark_transaction_restart_calls (stmt);
2402 }
2403
2404
2405 /* Generate RTL for an asm statement (explicit assembler code).
2406 STRING is a STRING_CST node containing the assembler code text,
2407 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2408 insn is volatile; don't optimize it. */
2409
2410 static void
2411 expand_asm_loc (tree string, int vol, location_t locus)
2412 {
2413 rtx body;
2414
2415 if (TREE_CODE (string) == ADDR_EXPR)
2416 string = TREE_OPERAND (string, 0);
2417
2418 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2419 ggc_strdup (TREE_STRING_POINTER (string)),
2420 locus);
2421
2422 MEM_VOLATILE_P (body) = vol;
2423
2424 emit_insn (body);
2425 }
2426
2427 /* Return the number of times character C occurs in string S. */
2428 static int
2429 n_occurrences (int c, const char *s)
2430 {
2431 int n = 0;
2432 while (*s)
2433 n += (*s++ == c);
2434 return n;
2435 }
2436
2437 /* A subroutine of expand_asm_operands. Check that all operands have
2438 the same number of alternatives. Return true if so. */
2439
2440 static bool
2441 check_operand_nalternatives (tree outputs, tree inputs)
2442 {
2443 if (outputs || inputs)
2444 {
2445 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2446 int nalternatives
2447 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2448 tree next = inputs;
2449
2450 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2451 {
2452 error ("too many alternatives in %<asm%>");
2453 return false;
2454 }
2455
2456 tmp = outputs;
2457 while (tmp)
2458 {
2459 const char *constraint
2460 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2461
2462 if (n_occurrences (',', constraint) != nalternatives)
2463 {
2464 error ("operand constraints for %<asm%> differ "
2465 "in number of alternatives");
2466 return false;
2467 }
2468
2469 if (TREE_CHAIN (tmp))
2470 tmp = TREE_CHAIN (tmp);
2471 else
2472 tmp = next, next = 0;
2473 }
2474 }
2475
2476 return true;
2477 }
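/* For example, the constraint strings "=r,m" and "r,r" each contain one
   comma and hence two alternatives, so they are consistent with each
   other; mixing "=r,m" with a plain "r" would be diagnosed above.  */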
2478
2479 /* Check for overlap between registers marked in CLOBBERED_REGS and
2480 anything inappropriate in T. Emit an error and return TRUE for a
2481 conflict, FALSE if OK. */
2482
2483 static bool
2484 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2485 {
2486 /* Conflicts between asm-declared register variables and the clobber
2487 list are not allowed. */
2488 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2489
2490 if (overlap)
2491 {
2492 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2493 DECL_NAME (overlap));
2494
2495 /* Reset registerness to stop multiple errors emitted for a single
2496 variable. */
2497 DECL_REGISTER (overlap) = 0;
2498 return true;
2499 }
2500
2501 return false;
2502 }
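/* An illustrative conflict (a sketch; the register name is target
   specific):

       register int x asm ("r10");
       asm ("..." : "=r" (x) : : "r10");

   Here the variable's declared register overlaps the clobber list, so
   the error above fires and DECL_REGISTER is cleared to suppress
   duplicate diagnostics.  */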
2503
2504 /* Generate RTL for an asm statement with arguments.
2505 STRING is the instruction template.
2506 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2507 Each output or input has an expression in the TREE_VALUE and
2508 a tree list in TREE_PURPOSE which in turn contains a constraint
2509 name in TREE_VALUE (or NULL_TREE) and a constraint string
2510 in TREE_PURPOSE.
2511 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2512 that is clobbered by this insn.
2513
2514 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2515 should be the fallthru basic block of the asm goto.
2516
2517 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2518 Some elements of OUTPUTS may be replaced with trees representing temporary
2519 values. The caller should copy those temporary values to the originally
2520 specified lvalues.
2521
2522 VOL nonzero means the insn is volatile; don't optimize it. */
2523
2524 static void
2525 expand_asm_operands (tree string, tree outputs, tree inputs,
2526 tree clobbers, tree labels, basic_block fallthru_bb,
2527 int vol, location_t locus)
2528 {
2529 rtvec argvec, constraintvec, labelvec;
2530 rtx body;
2531 int ninputs = list_length (inputs);
2532 int noutputs = list_length (outputs);
2533 int nlabels = list_length (labels);
2534 int ninout;
2535 int nclobbers;
2536 HARD_REG_SET clobbered_regs;
2537 int clobber_conflict_found = 0;
2538 tree tail;
2539 tree t;
2540 int i;
2541 /* Vector of RTX's of evaluated output operands. */
2542 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2543 int *inout_opnum = XALLOCAVEC (int, noutputs);
2544 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2545 machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
2546 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2547 int old_generating_concat_p = generating_concat_p;
2548 rtx_code_label *fallthru_label = NULL;
2549
2550 /* An ASM with no outputs needs to be treated as volatile, for now. */
2551 if (noutputs == 0)
2552 vol = 1;
2553
2554 if (! check_operand_nalternatives (outputs, inputs))
2555 return;
2556
2557 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2558
2559 /* Collect constraints. */
2560 i = 0;
2561 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2562 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2563 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2564 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2565
2566 /* Sometimes we wish to automatically clobber registers across an asm.
2567 Case in point is when the i386 backend moved from cc0 to a hard reg --
2568 maintaining source-level compatibility means automatically clobbering
2569 the flags register. */
2570 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2571
2572 /* Count the number of meaningful clobbered registers, ignoring what
2573 we would ignore later. */
2574 nclobbers = 0;
2575 CLEAR_HARD_REG_SET (clobbered_regs);
2576 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2577 {
2578 const char *regname;
2579 int nregs;
2580
2581 if (TREE_VALUE (tail) == error_mark_node)
2582 return;
2583 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2584
2585 i = decode_reg_name_and_count (regname, &nregs);
2586 if (i == -4)
2587 ++nclobbers;
2588 else if (i == -2)
2589 error ("unknown register name %qs in %<asm%>", regname);
2590
2591 /* Mark clobbered registers. */
2592 if (i >= 0)
2593 {
2594 int reg;
2595
2596 for (reg = i; reg < i + nregs; reg++)
2597 {
2598 ++nclobbers;
2599
2600 /* Clobbering the PIC register is an error. */
2601 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2602 {
2603 error ("PIC register clobbered by %qs in %<asm%>", regname);
2604 return;
2605 }
2606
2607 SET_HARD_REG_BIT (clobbered_regs, reg);
2608 }
2609 }
2610 }
2611
2612 /* First pass over inputs and outputs checks validity and sets
2613 mark_addressable if needed. */
2614
2615 ninout = 0;
2616 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2617 {
2618 tree val = TREE_VALUE (tail);
2619 tree type = TREE_TYPE (val);
2620 const char *constraint;
2621 bool is_inout;
2622 bool allows_reg;
2623 bool allows_mem;
2624
2625 /* If there's an erroneous arg, emit no insn. */
2626 if (type == error_mark_node)
2627 return;
2628
2629 /* Try to parse the output constraint. If that fails, there's
2630 no point in going further. */
2631 constraint = constraints[i];
2632 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2633 &allows_mem, &allows_reg, &is_inout))
2634 return;
2635
2636 if (! allows_reg
2637 && (allows_mem
2638 || is_inout
2639 || (DECL_P (val)
2640 && REG_P (DECL_RTL (val))
2641 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2642 mark_addressable (val);
2643
2644 if (is_inout)
2645 ninout++;
2646 }
2647
2648 ninputs += ninout;
2649 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2650 {
2651 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2652 return;
2653 }
2654
2655 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2656 {
2657 bool allows_reg, allows_mem;
2658 const char *constraint;
2659
2660 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2661 would get VOIDmode and that could cause a crash in reload. */
2662 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2663 return;
2664
2665 constraint = constraints[i + noutputs];
2666 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2667 constraints, &allows_mem, &allows_reg))
2668 return;
2669
2670 if (! allows_reg && allows_mem)
2671 mark_addressable (TREE_VALUE (tail));
2672 }
2673
2674 /* Second pass evaluates arguments. */
2675
2676 /* Make sure stack is consistent for asm goto. */
2677 if (nlabels > 0)
2678 do_pending_stack_adjust ();
2679
2680 ninout = 0;
2681 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2682 {
2683 tree val = TREE_VALUE (tail);
2684 tree type = TREE_TYPE (val);
2685 bool is_inout;
2686 bool allows_reg;
2687 bool allows_mem;
2688 rtx op;
2689 bool ok;
2690
2691 ok = parse_output_constraint (&constraints[i], i, ninputs,
2692 noutputs, &allows_mem, &allows_reg,
2693 &is_inout);
2694 gcc_assert (ok);
2695
2696 /* If an output operand is not a decl or indirect ref and our constraint
2697 allows a register, make a temporary to act as an intermediate.
2698 Make the asm insn write into that, then our caller will copy it to
2699 the real output operand. Likewise for promoted variables. */
2700
2701 generating_concat_p = 0;
2702
2703 real_output_rtx[i] = NULL_RTX;
2704 if ((TREE_CODE (val) == INDIRECT_REF
2705 && allows_mem)
2706 || (DECL_P (val)
2707 && (allows_mem || REG_P (DECL_RTL (val)))
2708 && ! (REG_P (DECL_RTL (val))
2709 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2710 || ! allows_reg
2711 || is_inout)
2712 {
2713 op = expand_expr (val, NULL_RTX, VOIDmode,
2714 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2715 if (MEM_P (op))
2716 op = validize_mem (op);
2717
2718 if (! allows_reg && !MEM_P (op))
2719 error ("output number %d not directly addressable", i);
2720 if ((! allows_mem && MEM_P (op))
2721 || GET_CODE (op) == CONCAT)
2722 {
2723 real_output_rtx[i] = op;
2724 op = gen_reg_rtx (GET_MODE (op));
2725 if (is_inout)
2726 emit_move_insn (op, real_output_rtx[i]);
2727 }
2728 }
2729 else
2730 {
2731 op = assign_temp (type, 0, 1);
2732 op = validize_mem (op);
2733 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2734 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2735 TREE_VALUE (tail) = make_tree (type, op);
2736 }
2737 output_rtx[i] = op;
2738
2739 generating_concat_p = old_generating_concat_p;
2740
2741 if (is_inout)
2742 {
2743 inout_mode[ninout] = TYPE_MODE (type);
2744 inout_opnum[ninout++] = i;
2745 }
2746
2747 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2748 clobber_conflict_found = 1;
2749 }
2750
2751 /* Make vectors for the expression-rtx, constraint strings,
2752 and named operands. */
2753
2754 argvec = rtvec_alloc (ninputs);
2755 constraintvec = rtvec_alloc (ninputs);
2756 labelvec = rtvec_alloc (nlabels);
2757
2758 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2759 : GET_MODE (output_rtx[0])),
2760 ggc_strdup (TREE_STRING_POINTER (string)),
2761 empty_string, 0, argvec, constraintvec,
2762 labelvec, locus);
2763
2764 MEM_VOLATILE_P (body) = vol;
2765
2766 /* Eval the inputs and put them into ARGVEC.
2767 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2768
2769 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2770 {
2771 bool allows_reg, allows_mem;
2772 const char *constraint;
2773 tree val, type;
2774 rtx op;
2775 bool ok;
2776
2777 constraint = constraints[i + noutputs];
2778 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2779 constraints, &allows_mem, &allows_reg);
2780 gcc_assert (ok);
2781
2782 generating_concat_p = 0;
2783
2784 val = TREE_VALUE (tail);
2785 type = TREE_TYPE (val);
2786 /* EXPAND_INITIALIZER will not generate code for valid initializer
2787 constants, but will still generate code for other types of operand.
2788 This is the behavior we want for constant constraints. */
2789 op = expand_expr (val, NULL_RTX, VOIDmode,
2790 allows_reg ? EXPAND_NORMAL
2791 : allows_mem ? EXPAND_MEMORY
2792 : EXPAND_INITIALIZER);
2793
2794 /* Never pass a CONCAT to an ASM. */
2795 if (GET_CODE (op) == CONCAT)
2796 op = force_reg (GET_MODE (op), op);
2797 else if (MEM_P (op))
2798 op = validize_mem (op);
2799
2800 if (asm_operand_ok (op, constraint, NULL) <= 0)
2801 {
2802 if (allows_reg && TYPE_MODE (type) != BLKmode)
2803 op = force_reg (TYPE_MODE (type), op);
2804 else if (!allows_mem)
2805 warning (0, "asm operand %d probably doesn%'t match constraints",
2806 i + noutputs);
2807 else if (MEM_P (op))
2808 {
2809 /* We won't recognize either volatile memory or memory
2810 with a queued address as an available memory_operand
2811 at this point. Ignore it: clearly this *is* a memory. */
2812 }
2813 else
2814 gcc_unreachable ();
2815 }
2816
2817 generating_concat_p = old_generating_concat_p;
2818 ASM_OPERANDS_INPUT (body, i) = op;
2819
2820 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2821 = gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2822 ggc_strdup (constraints[i + noutputs]),
2823 locus);
2824
2825 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2826 clobber_conflict_found = 1;
2827 }
2828
2829 /* Protect all the operands from the queue now that they have all been
2830 evaluated. */
2831
2832 generating_concat_p = 0;
2833
2834 /* For in-out operands, copy output rtx to input rtx. */
2835 for (i = 0; i < ninout; i++)
2836 {
2837 int j = inout_opnum[i];
2838 char buffer[16];
2839
2840 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2841 = output_rtx[j];
2842
2843 sprintf (buffer, "%d", j);
2844 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2845 = gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
2846 }
2847
2848 /* Copy labels to the vector. */
2849 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2850 {
2851 rtx r;
2852 /* If asm goto has any labels in the fallthru basic block, use
2853 a label that we emit immediately after the asm goto. Expansion
2854 may insert further instructions into the same basic block after
2855 asm goto and if we don't do this, insertion of instructions on
2856 the fallthru edge might misbehave. See PR58670. */
2857 if (fallthru_bb
2858 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2859 {
2860 if (fallthru_label == NULL_RTX)
2861 fallthru_label = gen_label_rtx ();
2862 r = fallthru_label;
2863 }
2864 else
2865 r = label_rtx (TREE_VALUE (tail));
2866 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2867 }
2868
2869 generating_concat_p = old_generating_concat_p;
2870
2871 /* Now, for each output, construct an rtx
2872 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2873 ARGVEC CONSTRAINTS OPNAMES))
2874 If there is more than one, put them inside a PARALLEL. */
2875
2876 if (nlabels > 0 && nclobbers == 0)
2877 {
2878 gcc_assert (noutputs == 0);
2879 emit_jump_insn (body);
2880 }
2881 else if (noutputs == 0 && nclobbers == 0)
2882 {
2883 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2884 emit_insn (body);
2885 }
2886 else if (noutputs == 1 && nclobbers == 0)
2887 {
2888 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2889 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2890 }
2891 else
2892 {
2893 rtx obody = body;
2894 int num = noutputs;
2895
2896 if (num == 0)
2897 num = 1;
2898
2899 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2900
2901 /* For each output operand, store a SET. */
2902 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2903 {
2904 XVECEXP (body, 0, i)
2905 = gen_rtx_SET (VOIDmode,
2906 output_rtx[i],
2907 gen_rtx_ASM_OPERANDS
2908 (GET_MODE (output_rtx[i]),
2909 ggc_strdup (TREE_STRING_POINTER (string)),
2910 ggc_strdup (constraints[i]),
2911 i, argvec, constraintvec, labelvec, locus));
2912
2913 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2914 }
2915
2916 /* If there are no outputs (but there are some clobbers)
2917 store the bare ASM_OPERANDS into the PARALLEL. */
2918
2919 if (i == 0)
2920 XVECEXP (body, 0, i++) = obody;
2921
2922 /* Store (clobber REG) for each clobbered register specified. */
2923
2924 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2925 {
2926 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2927 int reg, nregs;
2928 int j = decode_reg_name_and_count (regname, &nregs);
2929 rtx clobbered_reg;
2930
2931 if (j < 0)
2932 {
2933 if (j == -3) /* `cc', which is not a register */
2934 continue;
2935
2936 if (j == -4) /* `memory', don't cache memory across asm */
2937 {
2938 XVECEXP (body, 0, i++)
2939 = gen_rtx_CLOBBER (VOIDmode,
2940 gen_rtx_MEM
2941 (BLKmode,
2942 gen_rtx_SCRATCH (VOIDmode)));
2943 continue;
2944 }
2945
2946 /* Ignore unknown register, error already signaled. */
2947 continue;
2948 }
2949
2950 for (reg = j; reg < j + nregs; reg++)
2951 {
2952 /* Use QImode since that's guaranteed to clobber just
2953 one reg. */
2954 clobbered_reg = gen_rtx_REG (QImode, reg);
2955
2956 /* Do sanity check for overlap between clobbers and
2957 respectively input and outputs that hasn't been
2958 handled. Such overlap should have been detected and
2959 reported above. */
2960 if (!clobber_conflict_found)
2961 {
2962 int opno;
2963
2964 /* We test the old body (obody) contents to avoid
2965 tripping over the under-construction body. */
2966 for (opno = 0; opno < noutputs; opno++)
2967 if (reg_overlap_mentioned_p (clobbered_reg,
2968 output_rtx[opno]))
2969 internal_error
2970 ("asm clobber conflict with output operand");
2971
2972 for (opno = 0; opno < ninputs - ninout; opno++)
2973 if (reg_overlap_mentioned_p (clobbered_reg,
2974 ASM_OPERANDS_INPUT (obody,
2975 opno)))
2976 internal_error
2977 ("asm clobber conflict with input operand");
2978 }
2979
2980 XVECEXP (body, 0, i++)
2981 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2982 }
2983 }
2984
2985 if (nlabels > 0)
2986 emit_jump_insn (body);
2987 else
2988 emit_insn (body);
2989 }
2990
2991 if (fallthru_label)
2992 emit_label (fallthru_label);
2993
2994 /* For any outputs that needed reloading into registers, spill them
2995 back to where they belong. */
2996 for (i = 0; i < noutputs; ++i)
2997 if (real_output_rtx[i])
2998 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2999
3000 crtl->has_asm_statement = 1;
3001 free_temp_slots ();
3002 }
3003
3004
3005 static void
3006 expand_asm_stmt (gasm *stmt)
3007 {
3008 int noutputs;
3009 tree outputs, tail, t;
3010 tree *o;
3011 size_t i, n;
3012 const char *s;
3013 tree str, out, in, cl, labels;
3014 location_t locus = gimple_location (stmt);
3015 basic_block fallthru_bb = NULL;
3016
3017 /* Meh... convert the gimple asm operands into real tree lists.
3018 Eventually we should make all routines work on the vectors instead
3019 of relying on TREE_CHAIN. */
3020 out = NULL_TREE;
3021 n = gimple_asm_noutputs (stmt);
3022 if (n > 0)
3023 {
3024 t = out = gimple_asm_output_op (stmt, 0);
3025 for (i = 1; i < n; i++)
3026 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
3027 }
3028
3029 in = NULL_TREE;
3030 n = gimple_asm_ninputs (stmt);
3031 if (n > 0)
3032 {
3033 t = in = gimple_asm_input_op (stmt, 0);
3034 for (i = 1; i < n; i++)
3035 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
3036 }
3037
3038 cl = NULL_TREE;
3039 n = gimple_asm_nclobbers (stmt);
3040 if (n > 0)
3041 {
3042 t = cl = gimple_asm_clobber_op (stmt, 0);
3043 for (i = 1; i < n; i++)
3044 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
3045 }
3046
3047 labels = NULL_TREE;
3048 n = gimple_asm_nlabels (stmt);
3049 if (n > 0)
3050 {
3051 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3052 if (fallthru)
3053 fallthru_bb = fallthru->dest;
3054 t = labels = gimple_asm_label_op (stmt, 0);
3055 for (i = 1; i < n; i++)
3056 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
3057 }
3058
3059 s = gimple_asm_string (stmt);
3060 str = build_string (strlen (s), s);
3061
3062 if (gimple_asm_input_p (stmt))
3063 {
3064 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
3065 return;
3066 }
3067
3068 outputs = out;
3069 noutputs = gimple_asm_noutputs (stmt);
3070 /* o[I] is the place where output number I should be written. */
3071 o = (tree *) alloca (noutputs * sizeof (tree));
3072
3073 /* Record the contents of OUTPUTS before it is modified. */
3074 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3075 o[i] = TREE_VALUE (tail);
3076
3077 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
3078 OUTPUTS some trees for where the values were actually stored. */
3079 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
3080 gimple_asm_volatile_p (stmt), locus);
3081
3082 /* Copy all the intermediate outputs into the specified outputs. */
3083 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3084 {
3085 if (o[i] != TREE_VALUE (tail))
3086 {
3087 expand_assignment (o[i], TREE_VALUE (tail), false);
3088 free_temp_slots ();
3089
3090 /* Restore the original value so that it's correct the next
3091 time we expand this function. */
3092 TREE_VALUE (tail) = o[i];
3093 }
3094 }
3095 }
3096
3097 /* Emit code to jump to the address
3098 specified by the pointer expression EXP. */
3099
3100 static void
3101 expand_computed_goto (tree exp)
3102 {
3103 rtx x = expand_normal (exp);
3104
3105 do_pending_stack_adjust ();
3106 emit_indirect_jump (x);
3107 }
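/* Illustrative source form (the GNU C computed goto extension):

       static void *targets[] = { &&l0, &&l1 };
       goto *targets[i];

   EXP above is the pointer expression, e.g. "targets[i]".  */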
3108
3109 /* Generate RTL code for a `goto' statement with target label LABEL.
3110 LABEL should be a LABEL_DECL tree node that was or will later be
3111 defined with `expand_label'. */
3112
3113 static void
3114 expand_goto (tree label)
3115 {
3116 #ifdef ENABLE_CHECKING
3117 /* Check for a nonlocal goto to a containing function. Should have
3118 gotten translated to __builtin_nonlocal_goto. */
3119 tree context = decl_function_context (label);
3120 gcc_assert (!context || context == current_function_decl);
3121 #endif
3122
3123 emit_jump (label_rtx (label));
3124 }
3125
3126 /* Output a return with no value. */
3127
3128 static void
3129 expand_null_return_1 (void)
3130 {
3131 clear_pending_stack_adjust ();
3132 do_pending_stack_adjust ();
3133 emit_jump (return_label);
3134 }
3135
3136 /* Generate RTL to return from the current function, with no value.
3137 (That is, we do not do anything about returning any value.) */
3138
3139 void
3140 expand_null_return (void)
3141 {
3142 /* If this function was declared to return a value, but we
3143 didn't, clobber the return registers so that they are not
3144 propagated live to the rest of the function. */
3145 clobber_return_register ();
3146
3147 expand_null_return_1 ();
3148 }
3149
3150 /* Generate RTL to return from the current function, with value VAL. */
3151
3152 static void
3153 expand_value_return (rtx val)
3154 {
3155 /* Copy the value to the return location unless it's already there. */
3156
3157 tree decl = DECL_RESULT (current_function_decl);
3158 rtx return_reg = DECL_RTL (decl);
3159 if (return_reg != val)
3160 {
3161 tree funtype = TREE_TYPE (current_function_decl);
3162 tree type = TREE_TYPE (decl);
3163 int unsignedp = TYPE_UNSIGNED (type);
3164 machine_mode old_mode = DECL_MODE (decl);
3165 machine_mode mode;
3166 if (DECL_BY_REFERENCE (decl))
3167 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3168 else
3169 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3170
3171 if (mode != old_mode)
3172 val = convert_modes (mode, old_mode, val, unsignedp);
3173
3174 if (GET_CODE (return_reg) == PARALLEL)
3175 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3176 else
3177 emit_move_insn (return_reg, val);
3178 }
3179
3180 expand_null_return_1 ();
3181 }
3182
3183 /* Generate RTL to evaluate the expression RETVAL and return it
3184 from the current function. */
3185
3186 static void
3187 expand_return (tree retval, tree bounds)
3188 {
3189 rtx result_rtl;
3190 rtx val = 0;
3191 tree retval_rhs;
3192 rtx bounds_rtl;
3193
3194 /* If function wants no value, give it none. */
3195 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3196 {
3197 expand_normal (retval);
3198 expand_null_return ();
3199 return;
3200 }
3201
3202 if (retval == error_mark_node)
3203 {
3204 /* Treat this like a return of no value from a function that
3205 returns a value. */
3206 expand_null_return ();
3207 return;
3208 }
3209 else if ((TREE_CODE (retval) == MODIFY_EXPR
3210 || TREE_CODE (retval) == INIT_EXPR)
3211 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3212 retval_rhs = TREE_OPERAND (retval, 1);
3213 else
3214 retval_rhs = retval;
3215
3216 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3217
3218 /* Put returned bounds to the right place. */
3219 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3220 if (bounds_rtl)
3221 {
3222 rtx addr, bnd;
3223
3224 if (bounds)
3225 {
3226 bnd = expand_normal (bounds);
3227 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3228 }
3229 else if (REG_P (bounds_rtl))
3230 {
3231 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3232 addr = gen_rtx_MEM (Pmode, addr);
3233 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3234 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3235 }
3236 else
3237 {
3238 int n;
3239
3240 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3241
3242 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3243 addr = gen_rtx_MEM (Pmode, addr);
3244
3245 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3246 {
3247 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3248 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3249 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3250 rtx bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3251 targetm.calls.store_returned_bounds (slot, bnd);
3252 }
3253 }
3254 }
3255 else if (chkp_function_instrumented_p (current_function_decl)
3256 && !BOUNDED_P (retval_rhs)
3257 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3258 && TREE_CODE (retval_rhs) != RESULT_DECL)
3259 {
3260 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3261 addr = gen_rtx_MEM (Pmode, addr);
3262
3263 gcc_assert (MEM_P (result_rtl));
3264
3265 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3266 }
3267
3268 /* If we are returning the RESULT_DECL, then the value has already
3269 been stored into it, so we don't have to do anything special. */
3270 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3271 expand_value_return (result_rtl);
3272
3273 /* If the result is an aggregate that is being returned in one (or more)
3274 registers, load the registers here. */
3275
3276 else if (retval_rhs != 0
3277 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3278 && REG_P (result_rtl))
3279 {
3280 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3281 if (val)
3282 {
3283 /* Use the mode of the result value on the return register. */
3284 PUT_MODE (result_rtl, GET_MODE (val));
3285 expand_value_return (val);
3286 }
3287 else
3288 expand_null_return ();
3289 }
3290 else if (retval_rhs != 0
3291 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3292 && (REG_P (result_rtl)
3293 || (GET_CODE (result_rtl) == PARALLEL)))
3294 {
3295 /* Compute the return value into a temporary (usually a pseudo reg). */
3296 val
3297 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3298 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3299 val = force_not_mem (val);
3300 expand_value_return (val);
3301 }
3302 else
3303 {
3304 /* No hard reg used; calculate value into hard return reg. */
3305 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3306 expand_value_return (result_rtl);
3307 }
3308 }
3309
3310 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3311 STMT that doesn't require special handling for outgoing edges. That
3312 is, no tailcalls and no GIMPLE_COND. */
3313
3314 static void
3315 expand_gimple_stmt_1 (gimple stmt)
3316 {
3317 tree op0;
3318
3319 set_curr_insn_location (gimple_location (stmt));
3320
3321 switch (gimple_code (stmt))
3322 {
3323 case GIMPLE_GOTO:
3324 op0 = gimple_goto_dest (stmt);
3325 if (TREE_CODE (op0) == LABEL_DECL)
3326 expand_goto (op0);
3327 else
3328 expand_computed_goto (op0);
3329 break;
3330 case GIMPLE_LABEL:
3331 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3332 break;
3333 case GIMPLE_NOP:
3334 case GIMPLE_PREDICT:
3335 break;
3336 case GIMPLE_SWITCH:
3337 expand_case (as_a <gswitch *> (stmt));
3338 break;
3339 case GIMPLE_ASM:
3340 expand_asm_stmt (as_a <gasm *> (stmt));
3341 break;
3342 case GIMPLE_CALL:
3343 expand_call_stmt (as_a <gcall *> (stmt));
3344 break;
3345
3346 case GIMPLE_RETURN:
3347 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3348
3349 if (op0 && op0 != error_mark_node)
3350 {
3351 tree result = DECL_RESULT (current_function_decl);
3352
3353 /* If we are not returning the current function's RESULT_DECL,
3354 build an assignment to it. */
3355 if (op0 != result)
3356 {
3357 /* I believe that a function's RESULT_DECL is unique. */
3358 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3359
3360 /* ??? We'd like to use simply expand_assignment here,
3361 but this fails if the value is of BLKmode but the return
3362 decl is a register. expand_return has special handling
3363 for this combination, which eventually should move
3364 to common code. See comments there. Until then, let's
3365 build a modify expression :-/ */
3366 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3367 result, op0);
3368 }
3369 }
3370 if (!op0)
3371 expand_null_return ();
3372 else
3373 expand_return (op0, gimple_return_retbnd (stmt));
3374 break;
3375
3376 case GIMPLE_ASSIGN:
3377 {
3378 gassign *assign_stmt = as_a <gassign *> (stmt);
3379 tree lhs = gimple_assign_lhs (assign_stmt);
3380
3381 /* Tree expand used to fiddle with |= and &= of two bitfield
3382 COMPONENT_REFs here. This can't happen with gimple: the LHS
3383 of binary assigns must be a gimple reg. */
3384
3385 if (TREE_CODE (lhs) != SSA_NAME
3386 || get_gimple_rhs_class (gimple_expr_code (stmt))
3387 == GIMPLE_SINGLE_RHS)
3388 {
3389 tree rhs = gimple_assign_rhs1 (assign_stmt);
3390 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3391 == GIMPLE_SINGLE_RHS);
3392 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3393 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3394 if (TREE_CLOBBER_P (rhs))
3395 /* This is a clobber to mark the going out of scope for
3396 this LHS. */
3397 ;
3398 else
3399 expand_assignment (lhs, rhs,
3400 gimple_assign_nontemporal_move_p (
3401 assign_stmt));
3402 }
3403 else
3404 {
3405 rtx target, temp;
3406 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3407 struct separate_ops ops;
3408 bool promoted = false;
3409
3410 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3411 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3412 promoted = true;
3413
3414 ops.code = gimple_assign_rhs_code (assign_stmt);
3415 ops.type = TREE_TYPE (lhs);
3416 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3417 {
3418 case GIMPLE_TERNARY_RHS:
3419 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3420 /* Fallthru */
3421 case GIMPLE_BINARY_RHS:
3422 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3423 /* Fallthru */
3424 case GIMPLE_UNARY_RHS:
3425 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3426 break;
3427 default:
3428 gcc_unreachable ();
3429 }
3430 ops.location = gimple_location (stmt);
3431
3432 /* If we want to use a nontemporal store, force the value to
3433 register first. If we store into a promoted register,
3434 don't directly expand to target. */
3435 temp = nontemporal || promoted ? NULL_RTX : target;
3436 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3437 EXPAND_NORMAL);
3438
3439 if (temp == target)
3440 ;
3441 else if (promoted)
3442 {
3443 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3444 /* If TEMP is a VOIDmode constant, use convert_modes to make
3445 sure that we properly convert it. */
3446 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3447 {
3448 temp = convert_modes (GET_MODE (target),
3449 TYPE_MODE (ops.type),
3450 temp, unsignedp);
3451 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3452 GET_MODE (target), temp, unsignedp);
3453 }
3454
3455 convert_move (SUBREG_REG (target), temp, unsignedp);
3456 }
3457 else if (nontemporal && emit_storent_insn (target, temp))
3458 ;
3459 else
3460 {
3461 temp = force_operand (temp, target);
3462 if (temp != target)
3463 emit_move_insn (target, temp);
3464 }
3465 }
3466 }
3467 break;
3468
3469 default:
3470 gcc_unreachable ();
3471 }
3472 }
3473
3474 /* Expand one gimple statement STMT and return the last RTL instruction
3475 before any of the newly generated ones.
3476
3477 In addition to generating the necessary RTL instructions this also
3478 sets REG_EH_REGION notes if necessary and sets the current source
3479 location for diagnostics. */
3480
3481 static rtx_insn *
3482 expand_gimple_stmt (gimple stmt)
3483 {
3484 location_t saved_location = input_location;
3485 rtx_insn *last = get_last_insn ();
3486 int lp_nr;
3487
3488 gcc_assert (cfun);
3489
3490 /* We need to save and restore the current source location so that errors
3491 discovered during expansion are emitted with the right location. But
3492 it would be better if the diagnostic routines used the source location
3493 embedded in the tree nodes rather than globals. */
3494 if (gimple_has_location (stmt))
3495 input_location = gimple_location (stmt);
3496
3497 expand_gimple_stmt_1 (stmt);
3498
3499 /* Free any temporaries used to evaluate this statement. */
3500 free_temp_slots ();
3501
3502 input_location = saved_location;
3503
3504 /* Mark all insns that may trap. */
3505 lp_nr = lookup_stmt_eh_lp (stmt);
3506 if (lp_nr)
3507 {
3508 rtx_insn *insn;
3509 for (insn = next_real_insn (last); insn;
3510 insn = next_real_insn (insn))
3511 {
3512 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3513 /* If we want exceptions for non-call insns, any
3514 may_trap_p instruction may throw. */
3515 && GET_CODE (PATTERN (insn)) != CLOBBER
3516 && GET_CODE (PATTERN (insn)) != USE
3517 && insn_could_throw_p (insn))
3518 make_reg_eh_region_note (insn, 0, lp_nr);
3519 }
3520 }
3521
3522 return last;
3523 }
3524
3525 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3526 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3527 generated a tail call (something that might be denied by the ABI
3528 rules governing the call; see calls.c).
3529
3530 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3531 can still reach the rest of BB. The case here is __builtin_sqrt,
3532 where the NaN result goes through the external function (with a
3533 tailcall) and the normal result happens via a sqrt instruction. */
3534
3535 static basic_block
3536 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3537 {
3538 rtx_insn *last2, *last;
3539 edge e;
3540 edge_iterator ei;
3541 int probability;
3542 gcov_type count;
3543
3544 last2 = last = expand_gimple_stmt (stmt);
3545
3546 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3547 if (CALL_P (last) && SIBLING_CALL_P (last))
3548 goto found;
3549
3550 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3551
3552 *can_fallthru = true;
3553 return NULL;
3554
3555 found:
3556 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3557 Any instructions emitted here are about to be deleted. */
3558 do_pending_stack_adjust ();
3559
3560 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3561 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3562 EH or abnormal edges, we shouldn't have created a tail call in
3563 the first place. So it seems to me we should just be removing
3564 all edges here, or redirecting the existing fallthru edge to
3565 the exit block. */
3566
3567 probability = 0;
3568 count = 0;
3569
3570 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3571 {
3572 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3573 {
3574 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3575 {
3576 e->dest->count -= e->count;
3577 e->dest->frequency -= EDGE_FREQUENCY (e);
3578 if (e->dest->count < 0)
3579 e->dest->count = 0;
3580 if (e->dest->frequency < 0)
3581 e->dest->frequency = 0;
3582 }
3583 count += e->count;
3584 probability += e->probability;
3585 remove_edge (e);
3586 }
3587 else
3588 ei_next (&ei);
3589 }
3590
3591 /* This is somewhat ugly: the call_expr expander often emits instructions
3592 after the sibcall (to perform the function return). These confuse the
3593 find_many_sub_basic_blocks code, so we need to get rid of them. */
3594 last = NEXT_INSN (last);
3595 gcc_assert (BARRIER_P (last));
3596
3597 *can_fallthru = false;
3598 while (NEXT_INSN (last))
3599 {
3600 /* For instance, the sqrt builtin expander expands an if with a
3601 sibcall on the then-arm and a label for the else-arm. */
3602 if (LABEL_P (NEXT_INSN (last)))
3603 {
3604 *can_fallthru = true;
3605 break;
3606 }
3607 delete_insn (NEXT_INSN (last));
3608 }
3609
3610 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3611 | EDGE_SIBCALL);
3612 e->probability += probability;
3613 e->count += count;
3614 BB_END (bb) = last;
3615 update_bb_for_insn (bb);
3616
3617 if (NEXT_INSN (last))
3618 {
3619 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3620
3621 last = BB_END (bb);
3622 if (BARRIER_P (last))
3623 BB_END (bb) = PREV_INSN (last);
3624 }
3625
3626 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3627
3628 return bb;
3629 }
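/* Illustrative note (added commentary, not part of the original source):
   in the __builtin_sqrt case described in the function comment, the
   expansion contains a conditional branch with the sibcall on one arm
   and a label starting the inline-sqrt arm.  The cleanup loop above
   therefore stops deleting insns as soon as it reaches a label and
   sets *can_fallthru, since the insns after the sibcall are still
   reachable in that case.  */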
3630
3631 /* Return the difference between the floor and the truncated result of
3632 a signed division by OP1 with remainder MOD. */
3633 static rtx
3634 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3635 {
3636 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3637 return gen_rtx_IF_THEN_ELSE
3638 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3639 gen_rtx_IF_THEN_ELSE
3640 (mode, gen_rtx_LT (BImode,
3641 gen_rtx_DIV (mode, op1, mod),
3642 const0_rtx),
3643 constm1_rtx, const0_rtx),
3644 const0_rtx);
3645 }
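/* Worked example (added commentary, not part of the original source):
   for a dividend of -7 and OP1 = 2, truncating division gives -3 with
   MOD = -1.  MOD != 0 and op1 / mod = 2 / -1 = -2 < 0, so the
   adjustment is -1, and -3 + -1 = -4 = floor (-7 / 2).  When dividend
   and divisor have the same sign, op1 / mod is nonnegative (or MOD is
   0), the adjustment is 0, and truncation already equals the floor.  */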
3646
3647 /* Return the difference between the ceil and the truncated result of
3648 a signed division by OP1 with remainder MOD. */
3649 static rtx
3650 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3651 {
3652 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3653 return gen_rtx_IF_THEN_ELSE
3654 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3655 gen_rtx_IF_THEN_ELSE
3656 (mode, gen_rtx_GT (BImode,
3657 gen_rtx_DIV (mode, op1, mod),
3658 const0_rtx),
3659 const1_rtx, const0_rtx),
3660 const0_rtx);
3661 }
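/* Worked example (added commentary, not part of the original source):
   for a dividend of 7 and OP1 = 2, truncation gives 3 with MOD = 1;
   op1 / mod = 2 > 0, so the adjustment is +1 and 3 + 1 = 4
   = ceil (7 / 2).  For -7 / 2, op1 / mod = 2 / -1 < 0, the adjustment
   is 0, and the truncated -3 is already the ceiling.  */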
3662
3663 /* Return the difference between the ceil and the truncated result of
3664 an unsigned division by OP1 with remainder MOD. */
3665 static rtx
3666 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3667 {
3668 /* (mod != 0 ? 1 : 0) */
3669 return gen_rtx_IF_THEN_ELSE
3670 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3671 const1_rtx, const0_rtx);
3672 }
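/* Worked example (added commentary, not part of the original source):
   unsigned 7 / 2 truncates to 3 with MOD = 1; any nonzero remainder
   means the ceiling is one above the truncated quotient, since
   unsigned truncation is always a floor.  OP1 itself is not needed,
   hence ATTRIBUTE_UNUSED.  */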
3673
3674 /* Return the difference between the rounded and the truncated result
3675 of a signed division by OP1 with remainder MOD. Halfway cases are
3676 rounded away from zero, rather than to the nearest even number. */
3677 static rtx
3678 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3679 {
3680 /* (abs (mod) >= abs (op1) - abs (mod)
3681 ? (op1 / mod > 0 ? 1 : -1)
3682 : 0) */
3683 return gen_rtx_IF_THEN_ELSE
3684 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3685 gen_rtx_MINUS (mode,
3686 gen_rtx_ABS (mode, op1),
3687 gen_rtx_ABS (mode, mod))),
3688 gen_rtx_IF_THEN_ELSE
3689 (mode, gen_rtx_GT (BImode,
3690 gen_rtx_DIV (mode, op1, mod),
3691 const0_rtx),
3692 const1_rtx, constm1_rtx),
3693 const0_rtx);
3694 }
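/* Worked example (added commentary, not part of the original source):
   7 / 2: MOD = 1 and abs (op1) - abs (mod) = 1, so the halfway test
   holds; op1 / mod = 2 > 0 gives +1, and 3 + 1 = 4 (3.5 rounds away
   from zero).  7 / 3: MOD = 1 < 3 - 1 = 2, so the adjustment is 0 and
   the result stays 2.  -7 / 2: the test holds and op1 / mod < 0 gives
   -1, so -3 - 1 = -4.  */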
3695
3696 /* Return the difference between the rounded and the truncated result
3697 of an unsigned division by OP1 with remainder MOD. Halfway cases
3698 are rounded away from zero, rather than to the nearest even
3699 number. */
3700 static rtx
3701 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3702 {
3703 /* (mod >= op1 - mod ? 1 : 0) */
3704 return gen_rtx_IF_THEN_ELSE
3705 (mode, gen_rtx_GE (BImode, mod,
3706 gen_rtx_MINUS (mode, op1, mod)),
3707 const1_rtx, const0_rtx);
3708 }
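/* Worked example (added commentary, not part of the original source):
   unsigned 7 / 2: MOD = 1 and op1 - mod = 1, so mod >= op1 - mod holds
   and the adjustment is +1 (3.5 rounds up to 4).  Unsigned 7 / 3:
   MOD = 1 < 2, so the adjustment is 0 and the result stays 2.  */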
3709
3710 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3711 any rtl. */
3712
3713 static rtx
3714 convert_debug_memory_address (machine_mode mode, rtx x,
3715 addr_space_t as)
3716 {
3717 machine_mode xmode = GET_MODE (x);
3718
3719 #ifndef POINTERS_EXTEND_UNSIGNED
3720 gcc_assert (mode == Pmode
3721 || mode == targetm.addr_space.address_mode (as));
3722 gcc_assert (xmode == mode || xmode == VOIDmode);
3723 #else
3724 rtx temp;
3725
3726 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3727
3728 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3729 return x;
3730
3731 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3732 x = simplify_gen_subreg (mode, x, xmode,
3733 subreg_lowpart_offset
3734 (mode, xmode));
3735 else if (POINTERS_EXTEND_UNSIGNED > 0)
3736 x = gen_rtx_ZERO_EXTEND (mode, x);
3737 else if (!POINTERS_EXTEND_UNSIGNED)
3738 x = gen_rtx_SIGN_EXTEND (mode, x);
3739 else
3740 {
3741 switch (GET_CODE (x))
3742 {
3743 case SUBREG:
3744 if ((SUBREG_PROMOTED_VAR_P (x)
3745 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3746 || (GET_CODE (SUBREG_REG (x)) == PLUS
3747 && REG_P (XEXP (SUBREG_REG (x), 0))
3748 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3749 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3750 && GET_MODE (SUBREG_REG (x)) == mode)
3751 return SUBREG_REG (x);
3752 break;
3753 case LABEL_REF:
3754 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3755 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3756 return temp;
3757 case SYMBOL_REF:
3758 temp = shallow_copy_rtx (x);
3759 PUT_MODE (temp, mode);
3760 return temp;
3761 case CONST:
3762 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3763 if (temp)
3764 temp = gen_rtx_CONST (mode, temp);
3765 return temp;
3766 case PLUS:
3767 case MINUS:
3768 if (CONST_INT_P (XEXP (x, 1)))
3769 {
3770 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3771 if (temp)
3772 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3773 }
3774 break;
3775 default:
3776 break;
3777 }
3778 /* Don't know how to express ptr_extend as an operation in debug info. */
3779 return NULL;
3780 }
3781 #endif /* POINTERS_EXTEND_UNSIGNED */
3782
3783 return x;
3784 }
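/* Illustrative note (added commentary, not part of the original source):
   on a hypothetical target where ptr_mode is SImode, Pmode is DImode
   and POINTERS_EXTEND_UNSIGNED is 1, widening an SImode address X for
   debug info yields (zero_extend:DI X), while narrowing a DImode
   address takes the lowpart subreg.  Either way no insns are emitted,
   which is what debug expressions require.  */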
3785
3786 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
3787 by avoid_deep_ter_for_debug. */
3788
3789 static hash_map<tree, tree> *deep_ter_debug_map;
3790
3791 /* Split overly deep TER chains for debug stmts using debug temporaries. */
3792
3793 static void
3794 avoid_deep_ter_for_debug (gimple stmt, int depth)
3795 {
3796 use_operand_p use_p;
3797 ssa_op_iter iter;
3798 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
3799 {
3800 tree use = USE_FROM_PTR (use_p);
3801 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
3802 continue;
3803 gimple g = get_gimple_for_ssa_name (use);
3804 if (g == NULL)
3805 continue;
3806 if (depth > 6 && !stmt_ends_bb_p (g))
3807 {
3808 if (deep_ter_debug_map == NULL)
3809 deep_ter_debug_map = new hash_map<tree, tree>;
3810
3811 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
3812 if (vexpr != NULL)
3813 continue;
3814 vexpr = make_node (DEBUG_EXPR_DECL);
3815 gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
3816 DECL_ARTIFICIAL (vexpr) = 1;
3817 TREE_TYPE (vexpr) = TREE_TYPE (use);
3818 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
3819 gimple_stmt_iterator gsi = gsi_for_stmt (g);
3820 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
3821 avoid_deep_ter_for_debug (def_temp, 0);
3822 }
3823 else
3824 avoid_deep_ter_for_debug (g, depth + 1);
3825 }
3826 }
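/* Illustrative note (added commentary, not part of the original source):
   given a replaceable chain such as
     a_1 = x_0 + 1;  a_2 = a_1 * 2;  ...  a_8 = a_7 - 3;
   expanding a debug use of a_8 would recurse through every definition.
   Once the recursion depth exceeds 6, a DEBUG_EXPR_DECL is bound to
   the intermediate SSA name right after its defining statement
   (#DEBUG D => a_i), capping the size of any single debug
   expression.  */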
3827
3828 /* Return an RTX equivalent to the value of the parameter DECL. */
3829
3830 static rtx
3831 expand_debug_parm_decl (tree decl)
3832 {
3833 rtx incoming = DECL_INCOMING_RTL (decl);
3834
3835 if (incoming
3836 && GET_MODE (incoming) != BLKmode
3837 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3838 || (MEM_P (incoming)
3839 && REG_P (XEXP (incoming, 0))
3840 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3841 {
3842 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3843
3844 #ifdef HAVE_window_save
3845 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3846 If the target machine has an explicit window save instruction, the
3847 actual entry value is the corresponding OUTGOING_REGNO instead. */
3848 if (REG_P (incoming)
3849 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3850 incoming
3851 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3852 OUTGOING_REGNO (REGNO (incoming)), 0);
3853 else if (MEM_P (incoming))
3854 {
3855 rtx reg = XEXP (incoming, 0);
3856 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3857 {
3858 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3859 incoming = replace_equiv_address_nv (incoming, reg);
3860 }
3861 else
3862 incoming = copy_rtx (incoming);
3863 }
3864 #endif
3865
3866 ENTRY_VALUE_EXP (rtl) = incoming;
3867 return rtl;
3868 }
3869
3870 if (incoming
3871 && GET_MODE (incoming) != BLKmode
3872 && !TREE_ADDRESSABLE (decl)
3873 && MEM_P (incoming)
3874 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3875 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3876 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3877 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3878 return copy_rtx (incoming);
3879
3880 return NULL_RTX;
3881 }
3882
3883 /* Return an RTX equivalent to the value of the tree expression EXP. */
3884
3885 static rtx
3886 expand_debug_expr (tree exp)
3887 {
3888 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3889 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3890 machine_mode inner_mode = VOIDmode;
3891 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3892 addr_space_t as;
3893
3894 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3895 {
3896 case tcc_expression:
3897 switch (TREE_CODE (exp))
3898 {
3899 case COND_EXPR:
3900 case DOT_PROD_EXPR:
3901 case SAD_EXPR:
3902 case WIDEN_MULT_PLUS_EXPR:
3903 case WIDEN_MULT_MINUS_EXPR:
3904 case FMA_EXPR:
3905 goto ternary;
3906
3907 case TRUTH_ANDIF_EXPR:
3908 case TRUTH_ORIF_EXPR:
3909 case TRUTH_AND_EXPR:
3910 case TRUTH_OR_EXPR:
3911 case TRUTH_XOR_EXPR:
3912 goto binary;
3913
3914 case TRUTH_NOT_EXPR:
3915 goto unary;
3916
3917 default:
3918 break;
3919 }
3920 break;
3921
3922 ternary:
3923 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3924 if (!op2)
3925 return NULL_RTX;
3926 /* Fall through. */
3927
3928 binary:
3929 case tcc_binary:
3930 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3931 if (!op1)
3932 return NULL_RTX;
3933 switch (TREE_CODE (exp))
3934 {
3935 case LSHIFT_EXPR:
3936 case RSHIFT_EXPR:
3937 case LROTATE_EXPR:
3938 case RROTATE_EXPR:
3939 case WIDEN_LSHIFT_EXPR:
3940 /* Ensure second operand isn't wider than the first one. */
3941 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
3942 if (SCALAR_INT_MODE_P (inner_mode))
3943 {
3944 machine_mode opmode = mode;
3945 if (VECTOR_MODE_P (mode))
3946 opmode = GET_MODE_INNER (mode);
3947 if (SCALAR_INT_MODE_P (opmode)
3948 && (GET_MODE_PRECISION (opmode)
3949 < GET_MODE_PRECISION (inner_mode)))
3950 op1 = simplify_gen_subreg (opmode, op1, inner_mode,
3951 subreg_lowpart_offset (opmode,
3952 inner_mode));
3953 }
3954 break;
3955 default:
3956 break;
3957 }
3958 /* Fall through. */
3959
3960 unary:
3961 case tcc_unary:
3962 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3963 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3964 if (!op0)
3965 return NULL_RTX;
3966 break;
3967
3968 case tcc_comparison:
3969 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
3970 goto binary;
3971
3972 case tcc_type:
3973 case tcc_statement:
3974 gcc_unreachable ();
3975
3976 case tcc_constant:
3977 case tcc_exceptional:
3978 case tcc_declaration:
3979 case tcc_reference:
3980 case tcc_vl_exp:
3981 break;
3982 }
3983
3984 switch (TREE_CODE (exp))
3985 {
3986 case STRING_CST:
3987 if (!lookup_constant_def (exp))
3988 {
3989 if (strlen (TREE_STRING_POINTER (exp)) + 1
3990 != (size_t) TREE_STRING_LENGTH (exp))
3991 return NULL_RTX;
3992 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3993 op0 = gen_rtx_MEM (BLKmode, op0);
3994 set_mem_attributes (op0, exp, 0);
3995 return op0;
3996 }
3997 /* Fall through... */
3998
3999 case INTEGER_CST:
4000 case REAL_CST:
4001 case FIXED_CST:
4002 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4003 return op0;
4004
4005 case COMPLEX_CST:
4006 gcc_assert (COMPLEX_MODE_P (mode));
4007 op0 = expand_debug_expr (TREE_REALPART (exp));
4008 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4009 return gen_rtx_CONCAT (mode, op0, op1);
4010
4011 case DEBUG_EXPR_DECL:
4012 op0 = DECL_RTL_IF_SET (exp);
4013
4014 if (op0)
4015 return op0;
4016
4017 op0 = gen_rtx_DEBUG_EXPR (mode);
4018 DEBUG_EXPR_TREE_DECL (op0) = exp;
4019 SET_DECL_RTL (exp, op0);
4020
4021 return op0;
4022
4023 case VAR_DECL:
4024 case PARM_DECL:
4025 case FUNCTION_DECL:
4026 case LABEL_DECL:
4027 case CONST_DECL:
4028 case RESULT_DECL:
4029 op0 = DECL_RTL_IF_SET (exp);
4030
4031 /* This decl was probably optimized away. */
4032 if (!op0)
4033 {
4034 if (TREE_CODE (exp) != VAR_DECL
4035 || DECL_EXTERNAL (exp)
4036 || !TREE_STATIC (exp)
4037 || !DECL_NAME (exp)
4038 || DECL_HARD_REGISTER (exp)
4039 || DECL_IN_CONSTANT_POOL (exp)
4040 || mode == VOIDmode)
4041 return NULL;
4042
4043 op0 = make_decl_rtl_for_debug (exp);
4044 if (!MEM_P (op0)
4045 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4046 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4047 return NULL;
4048 }
4049 else
4050 op0 = copy_rtx (op0);
4051
4052 if (GET_MODE (op0) == BLKmode
4053 /* If op0 is not BLKmode, but mode is, adjust_mode
4054 below would ICE. While it is likely a front-end bug,
4055 try to be robust here. See PR43166. */
4056 || mode == BLKmode
4057 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4058 {
4059 gcc_assert (MEM_P (op0));
4060 op0 = adjust_address_nv (op0, mode, 0);
4061 return op0;
4062 }
4063
4064 /* Fall through. */
4065
4066 adjust_mode:
4067 case PAREN_EXPR:
4068 CASE_CONVERT:
4069 {
4070 inner_mode = GET_MODE (op0);
4071
4072 if (mode == inner_mode)
4073 return op0;
4074
4075 if (inner_mode == VOIDmode)
4076 {
4077 if (TREE_CODE (exp) == SSA_NAME)
4078 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4079 else
4080 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4081 if (mode == inner_mode)
4082 return op0;
4083 }
4084
4085 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4086 {
4087 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4088 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4089 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4090 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4091 else
4092 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4093 }
4094 else if (FLOAT_MODE_P (mode))
4095 {
4096 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4097 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4098 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4099 else
4100 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4101 }
4102 else if (FLOAT_MODE_P (inner_mode))
4103 {
4104 if (unsignedp)
4105 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4106 else
4107 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4108 }
4109 else if (CONSTANT_P (op0)
4110 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4111 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4112 subreg_lowpart_offset (mode,
4113 inner_mode));
4114 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
4115 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4116 : unsignedp)
4117 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4118 else
4119 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4120
4121 return op0;
4122 }
4123
4124 case MEM_REF:
4125 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4126 {
4127 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4128 TREE_OPERAND (exp, 0),
4129 TREE_OPERAND (exp, 1));
4130 if (newexp)
4131 return expand_debug_expr (newexp);
4132 }
4133 /* FALLTHROUGH */
4134 case INDIRECT_REF:
4135 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4136 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4137 if (!op0)
4138 return NULL;
4139
4140 if (TREE_CODE (exp) == MEM_REF)
4141 {
4142 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4143 || (GET_CODE (op0) == PLUS
4144 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4145 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4146 Instead just use get_inner_reference. */
4147 goto component_ref;
4148
4149 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4150 if (!op1 || !CONST_INT_P (op1))
4151 return NULL;
4152
4153 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4154 }
4155
4156 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4157
4158 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4159 op0, as);
4160 if (op0 == NULL_RTX)
4161 return NULL;
4162
4163 op0 = gen_rtx_MEM (mode, op0);
4164 set_mem_attributes (op0, exp, 0);
4165 if (TREE_CODE (exp) == MEM_REF
4166 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4167 set_mem_expr (op0, NULL_TREE);
4168 set_mem_addr_space (op0, as);
4169
4170 return op0;
4171
4172 case TARGET_MEM_REF:
4173 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4174 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4175 return NULL;
4176
4177 op0 = expand_debug_expr
4178 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4179 if (!op0)
4180 return NULL;
4181
4182 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4183 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4184 op0, as);
4185 if (op0 == NULL_RTX)
4186 return NULL;
4187
4188 op0 = gen_rtx_MEM (mode, op0);
4189
4190 set_mem_attributes (op0, exp, 0);
4191 set_mem_addr_space (op0, as);
4192
4193 return op0;
4194
4195 component_ref:
4196 case ARRAY_REF:
4197 case ARRAY_RANGE_REF:
4198 case COMPONENT_REF:
4199 case BIT_FIELD_REF:
4200 case REALPART_EXPR:
4201 case IMAGPART_EXPR:
4202 case VIEW_CONVERT_EXPR:
4203 {
4204 machine_mode mode1;
4205 HOST_WIDE_INT bitsize, bitpos;
4206 tree offset;
4207 int volatilep = 0;
4208 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4209 &mode1, &unsignedp, &volatilep, false);
4210 rtx orig_op0;
4211
4212 if (bitsize == 0)
4213 return NULL;
4214
4215 orig_op0 = op0 = expand_debug_expr (tem);
4216
4217 if (!op0)
4218 return NULL;
4219
4220 if (offset)
4221 {
4222 machine_mode addrmode, offmode;
4223
4224 if (!MEM_P (op0))
4225 return NULL;
4226
4227 op0 = XEXP (op0, 0);
4228 addrmode = GET_MODE (op0);
4229 if (addrmode == VOIDmode)
4230 addrmode = Pmode;
4231
4232 op1 = expand_debug_expr (offset);
4233 if (!op1)
4234 return NULL;
4235
4236 offmode = GET_MODE (op1);
4237 if (offmode == VOIDmode)
4238 offmode = TYPE_MODE (TREE_TYPE (offset));
4239
4240 if (addrmode != offmode)
4241 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4242 subreg_lowpart_offset (addrmode,
4243 offmode));
4244
4245 /* Don't use offset_address here; we don't need a
4246 recognizable address, and we don't want to generate
4247 code. */
4248 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4249 op0, op1));
4250 }
4251
4252 if (MEM_P (op0))
4253 {
4254 if (mode1 == VOIDmode)
4255 /* Bitfield. */
4256 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4257 if (bitpos >= BITS_PER_UNIT)
4258 {
4259 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4260 bitpos %= BITS_PER_UNIT;
4261 }
4262 else if (bitpos < 0)
4263 {
4264 HOST_WIDE_INT units
4265 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4266 op0 = adjust_address_nv (op0, mode1, units);
4267 bitpos += units * BITS_PER_UNIT;
4268 }
4269 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4270 op0 = adjust_address_nv (op0, mode, 0);
4271 else if (GET_MODE (op0) != mode1)
4272 op0 = adjust_address_nv (op0, mode1, 0);
4273 else
4274 op0 = copy_rtx (op0);
4275 if (op0 == orig_op0)
4276 op0 = shallow_copy_rtx (op0);
4277 set_mem_attributes (op0, exp, 0);
4278 }
4279
4280 if (bitpos == 0 && mode == GET_MODE (op0))
4281 return op0;
4282
4283 if (bitpos < 0)
4284 return NULL;
4285
4286 if (GET_MODE (op0) == BLKmode)
4287 return NULL;
4288
4289 if ((bitpos % BITS_PER_UNIT) == 0
4290 && bitsize == GET_MODE_BITSIZE (mode1))
4291 {
4292 machine_mode opmode = GET_MODE (op0);
4293
4294 if (opmode == VOIDmode)
4295 opmode = TYPE_MODE (TREE_TYPE (tem));
4296
4297 /* This condition may hold if we're expanding the address
4298 right past the end of an array that turned out not to
4299 be addressable (i.e., the address was only computed in
4300 debug stmts). The gen_subreg below would rightfully
4301 crash, and the address doesn't really exist, so just
4302 drop it. */
4303 if (bitpos >= GET_MODE_BITSIZE (opmode))
4304 return NULL;
4305
4306 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4307 return simplify_gen_subreg (mode, op0, opmode,
4308 bitpos / BITS_PER_UNIT);
4309 }
4310
4311 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4312 && TYPE_UNSIGNED (TREE_TYPE (exp))
4313 ? SIGN_EXTRACT
4314 : ZERO_EXTRACT, mode,
4315 GET_MODE (op0) != VOIDmode
4316 ? GET_MODE (op0)
4317 : TYPE_MODE (TREE_TYPE (tem)),
4318 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4319 }
4320
4321 case ABS_EXPR:
4322 return simplify_gen_unary (ABS, mode, op0, mode);
4323
4324 case NEGATE_EXPR:
4325 return simplify_gen_unary (NEG, mode, op0, mode);
4326
4327 case BIT_NOT_EXPR:
4328 return simplify_gen_unary (NOT, mode, op0, mode);
4329
4330 case FLOAT_EXPR:
4331 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4332 0)))
4333 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4334 inner_mode);
4335
4336 case FIX_TRUNC_EXPR:
4337 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4338 inner_mode);
4339
4340 case POINTER_PLUS_EXPR:
4341 /* For the rare target where pointers are not the same size as
4342 size_t, we need to check for mismatched modes and correct
4343 the addend. */
4344 if (op0 && op1
4345 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4346 && GET_MODE (op0) != GET_MODE (op1))
4347 {
4348 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4349 /* If OP0 is a partial mode, then we must truncate, even if it has
4350 the same bitsize as OP1, because GCC's representation of partial
4351 modes is opaque. */
4352 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4353 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4354 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4355 GET_MODE (op1));
4356 else
4357 /* We always sign-extend, regardless of the signedness of
4358 the operand, because the operand is always unsigned
4359 here even if the original C expression is signed. */
4360 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4361 GET_MODE (op1));
4362 }
4363 /* Fall through. */
4364 case PLUS_EXPR:
4365 return simplify_gen_binary (PLUS, mode, op0, op1);
4366
4367 case MINUS_EXPR:
4368 return simplify_gen_binary (MINUS, mode, op0, op1);
4369
4370 case MULT_EXPR:
4371 return simplify_gen_binary (MULT, mode, op0, op1);
4372
4373 case RDIV_EXPR:
4374 case TRUNC_DIV_EXPR:
4375 case EXACT_DIV_EXPR:
4376 if (unsignedp)
4377 return simplify_gen_binary (UDIV, mode, op0, op1);
4378 else
4379 return simplify_gen_binary (DIV, mode, op0, op1);
4380
4381 case TRUNC_MOD_EXPR:
4382 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4383
4384 case FLOOR_DIV_EXPR:
4385 if (unsignedp)
4386 return simplify_gen_binary (UDIV, mode, op0, op1);
4387 else
4388 {
4389 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4390 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4391 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4392 return simplify_gen_binary (PLUS, mode, div, adj);
4393 }
4394
4395 case FLOOR_MOD_EXPR:
4396 if (unsignedp)
4397 return simplify_gen_binary (UMOD, mode, op0, op1);
4398 else
4399 {
4400 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4401 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4402 adj = simplify_gen_unary (NEG, mode,
4403 simplify_gen_binary (MULT, mode, adj, op1),
4404 mode);
4405 return simplify_gen_binary (PLUS, mode, mod, adj);
4406 }
4407
4408 case CEIL_DIV_EXPR:
4409 if (unsignedp)
4410 {
4411 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4412 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4413 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4414 return simplify_gen_binary (PLUS, mode, div, adj);
4415 }
4416 else
4417 {
4418 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4419 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4420 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4421 return simplify_gen_binary (PLUS, mode, div, adj);
4422 }
4423
4424 case CEIL_MOD_EXPR:
4425 if (unsignedp)
4426 {
4427 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4428 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4429 adj = simplify_gen_unary (NEG, mode,
4430 simplify_gen_binary (MULT, mode, adj, op1),
4431 mode);
4432 return simplify_gen_binary (PLUS, mode, mod, adj);
4433 }
4434 else
4435 {
4436 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4437 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4438 adj = simplify_gen_unary (NEG, mode,
4439 simplify_gen_binary (MULT, mode, adj, op1),
4440 mode);
4441 return simplify_gen_binary (PLUS, mode, mod, adj);
4442 }
4443
4444 case ROUND_DIV_EXPR:
4445 if (unsignedp)
4446 {
4447 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4448 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4449 rtx adj = round_udiv_adjust (mode, mod, op1);
4450 return simplify_gen_binary (PLUS, mode, div, adj);
4451 }
4452 else
4453 {
4454 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4455 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4456 rtx adj = round_sdiv_adjust (mode, mod, op1);
4457 return simplify_gen_binary (PLUS, mode, div, adj);
4458 }
4459
4460 case ROUND_MOD_EXPR:
4461 if (unsignedp)
4462 {
4463 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4464 rtx adj = round_udiv_adjust (mode, mod, op1);
4465 adj = simplify_gen_unary (NEG, mode,
4466 simplify_gen_binary (MULT, mode, adj, op1),
4467 mode);
4468 return simplify_gen_binary (PLUS, mode, mod, adj);
4469 }
4470 else
4471 {
4472 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4473 rtx adj = round_sdiv_adjust (mode, mod, op1);
4474 adj = simplify_gen_unary (NEG, mode,
4475 simplify_gen_binary (MULT, mode, adj, op1),
4476 mode);
4477 return simplify_gen_binary (PLUS, mode, mod, adj);
4478 }
4479
4480 case LSHIFT_EXPR:
4481 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4482
4483 case RSHIFT_EXPR:
4484 if (unsignedp)
4485 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4486 else
4487 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4488
4489 case LROTATE_EXPR:
4490 return simplify_gen_binary (ROTATE, mode, op0, op1);
4491
4492 case RROTATE_EXPR:
4493 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4494
4495 case MIN_EXPR:
4496 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4497
4498 case MAX_EXPR:
4499 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4500
4501 case BIT_AND_EXPR:
4502 case TRUTH_AND_EXPR:
4503 return simplify_gen_binary (AND, mode, op0, op1);
4504
4505 case BIT_IOR_EXPR:
4506 case TRUTH_OR_EXPR:
4507 return simplify_gen_binary (IOR, mode, op0, op1);
4508
4509 case BIT_XOR_EXPR:
4510 case TRUTH_XOR_EXPR:
4511 return simplify_gen_binary (XOR, mode, op0, op1);
4512
4513 case TRUTH_ANDIF_EXPR:
4514 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4515
4516 case TRUTH_ORIF_EXPR:
4517 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4518
4519 case TRUTH_NOT_EXPR:
4520 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4521
4522 case LT_EXPR:
4523 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4524 op0, op1);
4525
4526 case LE_EXPR:
4527 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4528 op0, op1);
4529
4530 case GT_EXPR:
4531 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4532 op0, op1);
4533
4534 case GE_EXPR:
4535 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4536 op0, op1);
4537
4538 case EQ_EXPR:
4539 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4540
4541 case NE_EXPR:
4542 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4543
4544 case UNORDERED_EXPR:
4545 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4546
4547 case ORDERED_EXPR:
4548 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4549
4550 case UNLT_EXPR:
4551 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4552
4553 case UNLE_EXPR:
4554 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4555
4556 case UNGT_EXPR:
4557 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4558
4559 case UNGE_EXPR:
4560 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4561
4562 case UNEQ_EXPR:
4563 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4564
4565 case LTGT_EXPR:
4566 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4567
4568 case COND_EXPR:
4569 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4570
4571 case COMPLEX_EXPR:
4572 gcc_assert (COMPLEX_MODE_P (mode));
4573 if (GET_MODE (op0) == VOIDmode)
4574 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4575 if (GET_MODE (op1) == VOIDmode)
4576 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4577 return gen_rtx_CONCAT (mode, op0, op1);
4578
4579 case CONJ_EXPR:
4580 if (GET_CODE (op0) == CONCAT)
4581 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4582 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4583 XEXP (op0, 1),
4584 GET_MODE_INNER (mode)));
4585 else
4586 {
4587 machine_mode imode = GET_MODE_INNER (mode);
4588 rtx re, im;
4589
4590 if (MEM_P (op0))
4591 {
4592 re = adjust_address_nv (op0, imode, 0);
4593 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4594 }
4595 else
4596 {
4597 machine_mode ifmode = int_mode_for_mode (mode);
4598 machine_mode ihmode = int_mode_for_mode (imode);
4599 rtx halfsize;
4600 if (ifmode == BLKmode || ihmode == BLKmode)
4601 return NULL;
4602 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4603 re = op0;
4604 if (mode != ifmode)
4605 re = gen_rtx_SUBREG (ifmode, re, 0);
4606 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4607 if (imode != ihmode)
4608 re = gen_rtx_SUBREG (imode, re, 0);
4609 im = copy_rtx (op0);
4610 if (mode != ifmode)
4611 im = gen_rtx_SUBREG (ifmode, im, 0);
4612 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4613 if (imode != ihmode)
4614 im = gen_rtx_SUBREG (imode, im, 0);
4615 }
4616 im = gen_rtx_NEG (imode, im);
4617 return gen_rtx_CONCAT (mode, re, im);
4618 }
4619
4620 case ADDR_EXPR:
4621 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4622 if (!op0 || !MEM_P (op0))
4623 {
4624 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4625 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4626 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4627 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4628 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4629 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4630
4631 if (handled_component_p (TREE_OPERAND (exp, 0)))
4632 {
4633 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4634 tree decl
4635 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4636 &bitoffset, &bitsize, &maxsize);
4637 if ((TREE_CODE (decl) == VAR_DECL
4638 || TREE_CODE (decl) == PARM_DECL
4639 || TREE_CODE (decl) == RESULT_DECL)
4640 && (!TREE_ADDRESSABLE (decl)
4641 || target_for_debug_bind (decl))
4642 && (bitoffset % BITS_PER_UNIT) == 0
4643 && bitsize > 0
4644 && bitsize == maxsize)
4645 {
4646 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4647 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4648 }
4649 }
4650
4651 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4652 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4653 == ADDR_EXPR)
4654 {
4655 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4656 0));
4657 if (op0 != NULL
4658 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4659 || (GET_CODE (op0) == PLUS
4660 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4661 && CONST_INT_P (XEXP (op0, 1)))))
4662 {
4663 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4664 1));
4665 if (!op1 || !CONST_INT_P (op1))
4666 return NULL;
4667
4668 return plus_constant (mode, op0, INTVAL (op1));
4669 }
4670 }
4671
4672 return NULL;
4673 }
4674
4675 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4676 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4677
4678 return op0;
4679
4680 case VECTOR_CST:
4681 {
4682 unsigned i;
4683
4684 op0 = gen_rtx_CONCATN
4685 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4686
4687 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4688 {
4689 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4690 if (!op1)
4691 return NULL;
4692 XVECEXP (op0, 0, i) = op1;
4693 }
4694
4695 return op0;
4696 }
4697
4698 case CONSTRUCTOR:
4699 if (TREE_CLOBBER_P (exp))
4700 return NULL;
4701 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4702 {
4703 unsigned i;
4704 tree val;
4705
4706 op0 = gen_rtx_CONCATN
4707 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4708
4709 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4710 {
4711 op1 = expand_debug_expr (val);
4712 if (!op1)
4713 return NULL;
4714 XVECEXP (op0, 0, i) = op1;
4715 }
4716
4717 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4718 {
4719 op1 = expand_debug_expr
4720 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4721
4722 if (!op1)
4723 return NULL;
4724
4725 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4726 XVECEXP (op0, 0, i) = op1;
4727 }
4728
4729 return op0;
4730 }
4731 else
4732 goto flag_unsupported;
4733
4734 case CALL_EXPR:
4735 /* ??? Maybe handle some builtins? */
4736 return NULL;
4737
4738 case SSA_NAME:
4739 {
4740 gimple g = get_gimple_for_ssa_name (exp);
4741 if (g)
4742 {
4743 tree t = NULL_TREE;
4744 if (deep_ter_debug_map)
4745 {
4746 tree *slot = deep_ter_debug_map->get (exp);
4747 if (slot)
4748 t = *slot;
4749 }
4750 if (t == NULL_TREE)
4751 t = gimple_assign_rhs_to_tree (g);
4752 op0 = expand_debug_expr (t);
4753 if (!op0)
4754 return NULL;
4755 }
4756 else
4757 {
4758 int part = var_to_partition (SA.map, exp);
4759
4760 if (part == NO_PARTITION)
4761 {
4762 /* If this is a reference to the incoming value of a parameter
4763 that is never used in the code, or whose incoming value
4764 is otherwise unused, use the PARM_DECL's
4765 DECL_RTL if set. */
4766 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4767 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4768 {
4769 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4770 if (op0)
4771 goto adjust_mode;
4772 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4773 if (op0)
4774 goto adjust_mode;
4775 }
4776 return NULL;
4777 }
4778
4779 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4780
4781 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4782 }
4783 goto adjust_mode;
4784 }
4785
4786 case ERROR_MARK:
4787 return NULL;
4788
4789 /* Vector stuff. For most of these tree codes we have no rtl codes. */
4790 case REALIGN_LOAD_EXPR:
4791 case REDUC_MAX_EXPR:
4792 case REDUC_MIN_EXPR:
4793 case REDUC_PLUS_EXPR:
4794 case VEC_COND_EXPR:
4795 case VEC_PACK_FIX_TRUNC_EXPR:
4796 case VEC_PACK_SAT_EXPR:
4797 case VEC_PACK_TRUNC_EXPR:
4798 case VEC_UNPACK_FLOAT_HI_EXPR:
4799 case VEC_UNPACK_FLOAT_LO_EXPR:
4800 case VEC_UNPACK_HI_EXPR:
4801 case VEC_UNPACK_LO_EXPR:
4802 case VEC_WIDEN_MULT_HI_EXPR:
4803 case VEC_WIDEN_MULT_LO_EXPR:
4804 case VEC_WIDEN_MULT_EVEN_EXPR:
4805 case VEC_WIDEN_MULT_ODD_EXPR:
4806 case VEC_WIDEN_LSHIFT_HI_EXPR:
4807 case VEC_WIDEN_LSHIFT_LO_EXPR:
4808 case VEC_PERM_EXPR:
4809 return NULL;
4810
4811 /* Misc codes. */
4812 case ADDR_SPACE_CONVERT_EXPR:
4813 case FIXED_CONVERT_EXPR:
4814 case OBJ_TYPE_REF:
4815 case WITH_SIZE_EXPR:
4816 return NULL;
4817
4818 case DOT_PROD_EXPR:
4819 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4820 && SCALAR_INT_MODE_P (mode))
4821 {
4822 op0
4823 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4824 0)))
4825 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4826 inner_mode);
4827 op1
4828 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4829 1)))
4830 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4831 inner_mode);
4832 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4833 return simplify_gen_binary (PLUS, mode, op0, op2);
4834 }
4835 return NULL;
4836
4837 case WIDEN_MULT_EXPR:
4838 case WIDEN_MULT_PLUS_EXPR:
4839 case WIDEN_MULT_MINUS_EXPR:
4840 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4841 && SCALAR_INT_MODE_P (mode))
4842 {
4843 inner_mode = GET_MODE (op0);
4844 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4845 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4846 else
4847 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4848 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4849 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4850 else
4851 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4852 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4853 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4854 return op0;
4855 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4856 return simplify_gen_binary (PLUS, mode, op0, op2);
4857 else
4858 return simplify_gen_binary (MINUS, mode, op2, op0);
4859 }
4860 return NULL;
4861
4862 case MULT_HIGHPART_EXPR:
4863 /* ??? Similar to the above. */
4864 return NULL;
4865
4866 case WIDEN_SUM_EXPR:
4867 case WIDEN_LSHIFT_EXPR:
4868 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4869 && SCALAR_INT_MODE_P (mode))
4870 {
4871 op0
4872 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4873 0)))
4874 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4875 inner_mode);
4876 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4877 ? ASHIFT : PLUS, mode, op0, op1);
4878 }
4879 return NULL;
4880
4881 case FMA_EXPR:
4882 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4883
4884 default:
4885 flag_unsupported:
4886 #ifdef ENABLE_CHECKING
4887 debug_tree (exp);
4888 gcc_unreachable ();
4889 #else
4890 return NULL;
4891 #endif
4892 }
4893 }
4894
4895 /* Return an RTX equivalent to the source bind value of the tree expression
4896 EXP. */
4897
4898 static rtx
4899 expand_debug_source_expr (tree exp)
4900 {
4901 rtx op0 = NULL_RTX;
4902 machine_mode mode = VOIDmode, inner_mode;
4903
4904 switch (TREE_CODE (exp))
4905 {
4906 case PARM_DECL:
4907 {
4908 mode = DECL_MODE (exp);
4909 op0 = expand_debug_parm_decl (exp);
4910 if (op0)
4911 break;
4912 /* See whether this is an argument that has been completely
4913 optimized out. */
4914 if (!DECL_RTL_SET_P (exp)
4915 && !DECL_INCOMING_RTL (exp)
4916 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4917 {
4918 tree aexp = DECL_ORIGIN (exp);
4919 if (DECL_CONTEXT (aexp)
4920 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4921 {
4922 vec<tree, va_gc> **debug_args;
4923 unsigned int ix;
4924 tree ddecl;
4925 debug_args = decl_debug_args_lookup (current_function_decl);
4926 if (debug_args != NULL)
4927 {
4928 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4929 ix += 2)
4930 if (ddecl == aexp)
4931 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4932 }
4933 }
4934 }
4935 break;
4936 }
4937 default:
4938 break;
4939 }
4940
4941 if (op0 == NULL_RTX)
4942 return NULL_RTX;
4943
4944 inner_mode = GET_MODE (op0);
4945 if (mode == inner_mode)
4946 return op0;
4947
4948 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4949 {
4950 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4951 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4952 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4953 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4954 else
4955 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4956 }
4957 else if (FLOAT_MODE_P (mode))
4958 gcc_unreachable ();
4959 else if (FLOAT_MODE_P (inner_mode))
4960 {
4961 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4962 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4963 else
4964 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4965 }
4966 else if (CONSTANT_P (op0)
4967 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4968 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4969 subreg_lowpart_offset (mode, inner_mode));
4970 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4971 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4972 else
4973 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4974
4975 return op0;
4976 }
4977
4978 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4979 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4980 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4981
4982 static void
4983 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
4984 {
4985 rtx exp = *exp_p;
4986
4987 if (exp == NULL_RTX)
4988 return;
4989
4990 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4991 return;
4992
4993 if (depth == 4)
4994 {
4995 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4996 rtx dval = make_debug_expr_from_rtl (exp);
4997
4998 /* Emit a debug bind insn before INSN. */
4999 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5000 DEBUG_EXPR_TREE_DECL (dval), exp,
5001 VAR_INIT_STATUS_INITIALIZED);
5002
5003 emit_debug_insn_before (bind, insn);
5004 *exp_p = dval;
5005 return;
5006 }
5007
5008 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5009 int i, j;
5010 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5011 switch (*format_ptr++)
5012 {
5013 case 'e':
5014 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5015 break;
5016
5017 case 'E':
5018 case 'V':
5019 for (j = 0; j < XVECLEN (exp, i); j++)
5020 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5021 break;
5022
5023 default:
5024 break;
5025 }
5026 }
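/* Illustrative note (added commentary, not part of the original source):
   when the recursion reaches depth 4 on a subexpression that is not a
   plain register or constant, that whole subexpression is split off
   into a fresh DEBUG_EXPR whose value is bound by a separate debug
   insn emitted just before INSN, and the operand is replaced by the
   DEBUG_EXPR, so no location tree nests more than a few levels.  */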
5027
5028 /* Expand the _LOCs in debug insns. We run this after expanding all
5029 regular insns, so that any variables referenced in the function
5030 will have their DECL_RTLs set. */
5031
5032 static void
5033 expand_debug_locations (void)
5034 {
5035 rtx_insn *insn;
5036 rtx_insn *last = get_last_insn ();
5037 int save_strict_alias = flag_strict_aliasing;
5038
5039 /* New alias sets while setting up memory attributes cause
5040 -fcompare-debug failures, even though they don't bring about any
5041 codegen changes. */
5042 flag_strict_aliasing = 0;
5043
5044 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5045 if (DEBUG_INSN_P (insn))
5046 {
5047 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5048 rtx val;
5049 rtx_insn *prev_insn, *insn2;
5050 machine_mode mode;
5051
5052 if (value == NULL_TREE)
5053 val = NULL_RTX;
5054 else
5055 {
5056 if (INSN_VAR_LOCATION_STATUS (insn)
5057 == VAR_INIT_STATUS_UNINITIALIZED)
5058 val = expand_debug_source_expr (value);
5059 /* The avoid_deep_ter_for_debug function inserts
5060 debug bind stmts after SSA_NAME definition, with the
5061 SSA_NAME as the whole bind location. Disable temporarily
5062 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5063 being defined in this DEBUG_INSN. */
5064 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5065 {
5066 tree *slot = deep_ter_debug_map->get (value);
5067 if (slot)
5068 {
5069 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5070 *slot = NULL_TREE;
5071 else
5072 slot = NULL;
5073 }
5074 val = expand_debug_expr (value);
5075 if (slot)
5076 *slot = INSN_VAR_LOCATION_DECL (insn);
5077 }
5078 else
5079 val = expand_debug_expr (value);
5080 gcc_assert (last == get_last_insn ());
5081 }
5082
5083 if (!val)
5084 val = gen_rtx_UNKNOWN_VAR_LOC ();
5085 else
5086 {
5087 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5088
5089 gcc_assert (mode == GET_MODE (val)
5090 || (GET_MODE (val) == VOIDmode
5091 && (CONST_SCALAR_INT_P (val)
5092 || GET_CODE (val) == CONST_FIXED
5093 || GET_CODE (val) == LABEL_REF)));
5094 }
5095
5096 INSN_VAR_LOCATION_LOC (insn) = val;
5097 prev_insn = PREV_INSN (insn);
5098 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5099 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5100 }
5101
5102 flag_strict_aliasing = save_strict_alias;
5103 }
5104
5105 /* Swap the operands of commutative operations so that the more
5106 expensive operand is expanded first. */
5107
5108 static void
5109 reorder_operands (basic_block bb)
5110 {
5111 unsigned int *lattice; /* Hold cost of each statement. */
5112 unsigned int i = 0, n = 0;
5113 gimple_stmt_iterator gsi;
5114 gimple_seq stmts;
5115 gimple stmt;
5116 bool swap;
5117 tree op0, op1;
5118 ssa_op_iter iter;
5119 use_operand_p use_p;
5120 gimple def0, def1;
5121
5122 /* Compute cost of each statement using estimate_num_insns. */
5123 stmts = bb_seq (bb);
5124 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5125 {
5126 stmt = gsi_stmt (gsi);
5127 if (!is_gimple_debug (stmt))
5128 gimple_set_uid (stmt, n++);
5129 }
5130 lattice = XNEWVEC (unsigned int, n);
5131 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5132 {
5133 unsigned cost;
5134 stmt = gsi_stmt (gsi);
5135 if (is_gimple_debug (stmt))
5136 continue;
5137 cost = estimate_num_insns (stmt, &eni_size_weights);
5138 lattice[i] = cost;
5139 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5140 {
5141 tree use = USE_FROM_PTR (use_p);
5142 gimple def_stmt;
5143 if (TREE_CODE (use) != SSA_NAME)
5144 continue;
5145 def_stmt = get_gimple_for_ssa_name (use);
5146 if (!def_stmt)
5147 continue;
5148 lattice[i] += lattice[gimple_uid (def_stmt)];
5149 }
5150 i++;
5151 if (!is_gimple_assign (stmt)
5152 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5153 continue;
5154 op0 = gimple_op (stmt, 1);
5155 op1 = gimple_op (stmt, 2);
5156 if (TREE_CODE (op0) != SSA_NAME
5157 || TREE_CODE (op1) != SSA_NAME)
5158 continue;
5159 /* Swap operands if the second one is more expensive. */
5160 def0 = get_gimple_for_ssa_name (op0);
5161 def1 = get_gimple_for_ssa_name (op1);
5162 if (!def1)
5163 continue;
5164 swap = false;
5165 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5166 swap = true;
5167 if (swap)
5168 {
5169 if (dump_file && (dump_flags & TDF_DETAILS))
5170 {
5171 fprintf (dump_file, "Swap operands in stmt:\n");
5172 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5173 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5174 def0 ? lattice[gimple_uid (def0)] : 0,
5175 lattice[gimple_uid (def1)]);
5176 }
5177 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5178 gimple_assign_rhs2_ptr (stmt));
5179 }
5180 }
5181 XDELETE (lattice);
5182 }
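/* Illustrative note (added commentary, not part of the original source):
   for c_1 = a_1 + b_1, where a_1 is defined by a cheap statement
   (lattice cost 1) and b_1 by a long TERed chain (accumulated cost,
   say, 12), the operands are swapped to c_1 = b_1 + a_1 so the
   expensive subtree is expanded first, the heuristic being that this
   tends to produce better RTL during expansion.  */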
5183
5184 /* Expand basic block BB from GIMPLE trees to RTL. */
5185
5186 static basic_block
5187 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5188 {
5189 gimple_stmt_iterator gsi;
5190 gimple_seq stmts;
5191 gimple stmt = NULL;
5192 rtx_note *note;
5193 rtx_insn *last;
5194 edge e;
5195 edge_iterator ei;
5196
5197 if (dump_file)
5198 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5199 bb->index);
5200
5201 /* Note that since we are now transitioning from GIMPLE to RTL, we
5202 cannot use the gsi_*_bb() routines because they expect the basic
5203 block to be in GIMPLE, instead of RTL. Therefore, we need to
5204 access the BB sequence directly. */
5205 if (optimize)
5206 reorder_operands (bb);
5207 stmts = bb_seq (bb);
5208 bb->il.gimple.seq = NULL;
5209 bb->il.gimple.phi_nodes = NULL;
5210 rtl_profile_for_bb (bb);
5211 init_rtl_bb_info (bb);
5212 bb->flags |= BB_RTL;
5213
5214 /* Remove the RETURN_EXPR if we may fall through to the exit
5215 instead. */
5216 gsi = gsi_last (stmts);
5217 if (!gsi_end_p (gsi)
5218 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5219 {
5220 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5221
5222 gcc_assert (single_succ_p (bb));
5223 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5224
5225 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5226 && !gimple_return_retval (ret_stmt))
5227 {
5228 gsi_remove (&gsi, false);
5229 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5230 }
5231 }
5232
5233 gsi = gsi_start (stmts);
5234 if (!gsi_end_p (gsi))
5235 {
5236 stmt = gsi_stmt (gsi);
5237 if (gimple_code (stmt) != GIMPLE_LABEL)
5238 stmt = NULL;
5239 }
5240
5241 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5242
5243 if (stmt || elt)
5244 {
5245 last = get_last_insn ();
5246
5247 if (stmt)
5248 {
5249 expand_gimple_stmt (stmt);
5250 gsi_next (&gsi);
5251 }
5252
5253 if (elt)
5254 emit_label (*elt);
5255
5256 /* Java emits line number notes at the top of labels.
5257 ??? Make this go away once line number notes are obsoleted. */
5258 BB_HEAD (bb) = NEXT_INSN (last);
5259 if (NOTE_P (BB_HEAD (bb)))
5260 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5261 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5262
5263 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5264 }
5265 else
5266 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5267
5268 NOTE_BASIC_BLOCK (note) = bb;
5269
5270 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5271 {
5272 basic_block new_bb;
5273
5274 stmt = gsi_stmt (gsi);
5275
5276 /* If this statement is a non-debug one, and we generate debug
5277 insns, then this one might be the last real use of a TERed
5278 SSA_NAME, but where there are still some debug uses further
5279 down. Expanding the current SSA name in such further debug
5280 uses by their RHS might lead to wrong debug info, as coalescing
5281 might make the operands of such RHS be placed into the same
5282 pseudo as something else. Like so:
5283 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5284 use(a_1);
5285 a_2 = ...
5286 #DEBUG ... => a_1
5287 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5288 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5289 the write to a_2 would actually have clobbered the place which
5290 formerly held a_0.
5291
5292 So, instead of that, we recognize the situation, and generate
5293 debug temporaries at the last real use of TERed SSA names:
5294 a_1 = a_0 + 1;
5295 #DEBUG #D1 => a_1
5296 use(a_1);
5297 a_2 = ...
5298 #DEBUG ... => #D1
5299 */
5300 if (MAY_HAVE_DEBUG_INSNS
5301 && SA.values
5302 && !is_gimple_debug (stmt))
5303 {
5304 ssa_op_iter iter;
5305 tree op;
5306 gimple def;
5307
5308 location_t sloc = curr_insn_location ();
5309
5310 /* Look for SSA names that have their last use here (TERed
5311 names always have only one real use). */
5312 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5313 if ((def = get_gimple_for_ssa_name (op)))
5314 {
5315 imm_use_iterator imm_iter;
5316 use_operand_p use_p;
5317 bool have_debug_uses = false;
5318
5319 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5320 {
5321 if (gimple_debug_bind_p (USE_STMT (use_p)))
5322 {
5323 have_debug_uses = true;
5324 break;
5325 }
5326 }
5327
5328 if (have_debug_uses)
5329 {
5330 /* OP is a TERed SSA name, with DEF its defining
5331 statement, and where OP is used in further debug
5332 instructions. Generate a debug temporary, and
5333 replace all uses of OP in debug insns with that
5334 temporary. */
5335 gimple debugstmt;
5336 tree value = gimple_assign_rhs_to_tree (def);
5337 tree vexpr = make_node (DEBUG_EXPR_DECL);
5338 rtx val;
5339 machine_mode mode;
5340
5341 set_curr_insn_location (gimple_location (def));
5342
5343 DECL_ARTIFICIAL (vexpr) = 1;
5344 TREE_TYPE (vexpr) = TREE_TYPE (value);
5345 if (DECL_P (value))
5346 mode = DECL_MODE (value);
5347 else
5348 mode = TYPE_MODE (TREE_TYPE (value));
5349 DECL_MODE (vexpr) = mode;
5350
5351 val = gen_rtx_VAR_LOCATION
5352 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5353
5354 emit_debug_insn (val);
5355
5356 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5357 {
5358 if (!gimple_debug_bind_p (debugstmt))
5359 continue;
5360
5361 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5362 SET_USE (use_p, vexpr);
5363
5364 update_stmt (debugstmt);
5365 }
5366 }
5367 }
5368 set_curr_insn_location (sloc);
5369 }
5370
5371 currently_expanding_gimple_stmt = stmt;
5372
5373 /* Expand this statement, then evaluate the resulting RTL and
5374 fixup the CFG accordingly. */
5375 if (gimple_code (stmt) == GIMPLE_COND)
5376 {
5377 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5378 if (new_bb)
5379 return new_bb;
5380 }
5381 else if (gimple_debug_bind_p (stmt))
5382 {
5383 location_t sloc = curr_insn_location ();
5384 gimple_stmt_iterator nsi = gsi;
5385
5386 for (;;)
5387 {
5388 tree var = gimple_debug_bind_get_var (stmt);
5389 tree value;
5390 rtx val;
5391 machine_mode mode;
5392
5393 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5394 && TREE_CODE (var) != LABEL_DECL
5395 && !target_for_debug_bind (var))
5396 goto delink_debug_stmt;
5397
5398 if (gimple_debug_bind_has_value_p (stmt))
5399 value = gimple_debug_bind_get_value (stmt);
5400 else
5401 value = NULL_TREE;
5402
5403 last = get_last_insn ();
5404
5405 set_curr_insn_location (gimple_location (stmt));
5406
5407 if (DECL_P (var))
5408 mode = DECL_MODE (var);
5409 else
5410 mode = TYPE_MODE (TREE_TYPE (var));
5411
5412 val = gen_rtx_VAR_LOCATION
5413 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5414
5415 emit_debug_insn (val);
5416
5417 if (dump_file && (dump_flags & TDF_DETAILS))
5418 {
5419 /* We can't dump the insn with a TREE where an RTX
5420 is expected. */
5421 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5422 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5423 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5424 }
5425
5426 delink_debug_stmt:
5427 /* In order not to generate too many debug temporaries,
5428 we delink all uses of debug statements we already expanded.
5429 Therefore debug statements between definition and real
5430 use of TERed SSA names will continue to use the SSA name,
5431 and not be replaced with debug temps. */
5432 delink_stmt_imm_use (stmt);
5433
5434 gsi = nsi;
5435 gsi_next (&nsi);
5436 if (gsi_end_p (nsi))
5437 break;
5438 stmt = gsi_stmt (nsi);
5439 if (!gimple_debug_bind_p (stmt))
5440 break;
5441 }
5442
5443 set_curr_insn_location (sloc);
5444 }
5445 else if (gimple_debug_source_bind_p (stmt))
5446 {
5447 location_t sloc = curr_insn_location ();
5448 tree var = gimple_debug_source_bind_get_var (stmt);
5449 tree value = gimple_debug_source_bind_get_value (stmt);
5450 rtx val;
5451 machine_mode mode;
5452
5453 last = get_last_insn ();
5454
5455 set_curr_insn_location (gimple_location (stmt));
5456
5457 mode = DECL_MODE (var);
5458
5459 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5460 VAR_INIT_STATUS_UNINITIALIZED);
5461
5462 emit_debug_insn (val);
5463
5464 if (dump_file && (dump_flags & TDF_DETAILS))
5465 {
5466 /* We can't dump the insn with a TREE where an RTX
5467 is expected. */
5468 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5469 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5470 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5471 }
5472
5473 set_curr_insn_location (sloc);
5474 }
5475 else
5476 {
5477 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5478 if (call_stmt
5479 && gimple_call_tail_p (call_stmt)
5480 && disable_tail_calls)
5481 gimple_call_set_tail (call_stmt, false);
5482
5483 if (call_stmt && gimple_call_tail_p (call_stmt))
5484 {
5485 bool can_fallthru;
5486 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5487 if (new_bb)
5488 {
5489 if (can_fallthru)
5490 bb = new_bb;
5491 else
5492 return new_bb;
5493 }
5494 }
5495 else
5496 {
5497 def_operand_p def_p;
5498 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5499
5500 if (def_p != NULL)
5501 {
5502 /* Ignore this stmt if it is in the list of
5503 replaceable expressions. */
5504 if (SA.values
5505 && bitmap_bit_p (SA.values,
5506 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5507 continue;
5508 }
5509 last = expand_gimple_stmt (stmt);
5510 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5511 }
5512 }
5513 }
5514
5515 currently_expanding_gimple_stmt = NULL;
5516
5517 /* Expand implicit goto and convert goto_locus. */
5518 FOR_EACH_EDGE (e, ei, bb->succs)
5519 {
5520 if (e->goto_locus != UNKNOWN_LOCATION)
5521 set_curr_insn_location (e->goto_locus);
5522 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5523 {
5524 emit_jump (label_rtx_for_bb (e->dest));
5525 e->flags &= ~EDGE_FALLTHRU;
5526 }
5527 }
5528
5529 /* Expanded RTL can create a jump as the last instruction of the block.
5530 This might later be assumed to be a jump to the successor and break edge insertion.
5531 We need to insert a dummy move to prevent this. PR41440. */
5532 if (single_succ_p (bb)
5533 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5534 && (last = get_last_insn ())
5535 && JUMP_P (last))
5536 {
5537 rtx dummy = gen_reg_rtx (SImode);
5538 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5539 }
5540
5541 do_pending_stack_adjust ();
5542
5543 /* Find the block tail. The last insn in the block is the insn
5544 before a barrier and/or table jump insn. */
5545 last = get_last_insn ();
5546 if (BARRIER_P (last))
5547 last = PREV_INSN (last);
5548 if (JUMP_TABLE_DATA_P (last))
5549 last = PREV_INSN (PREV_INSN (last));
5550 BB_END (bb) = last;
5551
5552 update_bb_for_insn (bb);
5553
5554 return bb;
5555 }
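
/* Illustrative sketch (hypothetical, not from the original sources) of the
   implicit goto expansion above: given a GIMPLE block whose fallthrough
   successor is not the next block in layout order,

     <bb 3>:
     x_1 = a_2 + b_3;
     ;; falls through to <bb 5>, but <bb 4> follows in layout order

   the loop over bb->succs emits an explicit jump to the RTL label of
   <bb 5> and clears EDGE_FALLTHRU, since RTL fallthrough only reaches the
   physically next block.  */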
5556
5557
5558 /* Create a basic block for initialization code. */
5559
5560 static basic_block
5561 construct_init_block (void)
5562 {
5563 basic_block init_block, first_block;
5564 edge e = NULL;
5565 int flags;
5566
5567 /* Multiple entry points not supported yet. */
5568 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5569 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5570 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5571 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5572 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5573
5574 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5575
5576 /* When the entry edge points to the first basic block, we don't need a
5577 jump; otherwise we have to jump to the proper target. */
5578 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5579 {
5580 tree label = gimple_block_label (e->dest);
5581
5582 emit_jump (label_rtx (label));
5583 flags = 0;
5584 }
5585 else
5586 flags = EDGE_FALLTHRU;
5587
5588 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5589 get_last_insn (),
5590 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5591 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5592 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5593 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5594 if (e)
5595 {
5596 first_block = e->dest;
5597 redirect_edge_succ (e, init_block);
5598 e = make_edge (init_block, first_block, flags);
5599 }
5600 else
5601 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5602 e->probability = REG_BR_PROB_BASE;
5603 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5604
5605 update_bb_for_insn (init_block);
5606 return init_block;
5607 }
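
/* Schematically (illustration only, single-successor ENTRY assumed):

     ENTRY --e--> first_block    becomes    ENTRY --> init_block --> first_block

   where init_block absorbs the insns emitted so far and the new edge to
   first_block inherits ENTRY's execution count.  */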
5608
5609 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5610 found in the block tree. */
5611
5612 static void
5613 set_block_levels (tree block, int level)
5614 {
5615 while (block)
5616 {
5617 BLOCK_NUMBER (block) = level;
5618 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5619 block = BLOCK_CHAIN (block);
5620 }
5621 }
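
/* A worked example (hypothetical block tree) of the levels assigned:

     BLOCK A                       -> BLOCK_NUMBER 0
       BLOCK B  (subblock of A)    -> BLOCK_NUMBER 1
         BLOCK C  (subblock of B)  -> BLOCK_NUMBER 2
       BLOCK D  (sibling of B)     -> BLOCK_NUMBER 1

   Siblings reached through BLOCK_CHAIN share a level; each step into
   BLOCK_SUBBLOCKS adds one.  */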
5622
5623 /* Create a block containing landing pads and similar stuff. */
5624
5625 static void
5626 construct_exit_block (void)
5627 {
5628 rtx_insn *head = get_last_insn ();
5629 rtx_insn *end;
5630 basic_block exit_block;
5631 edge e, e2;
5632 unsigned ix;
5633 edge_iterator ei;
5634 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5635 rtx_insn *orig_end = BB_END (prev_bb);
5636
5637 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5638
5639 /* Make sure the locus is set to the end of the function, so that
5640 epilogue line numbers and warnings are set properly. */
5641 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5642 input_location = cfun->function_end_locus;
5643
5644 /* Generate rtl for function exit. */
5645 expand_function_end ();
5646
5647 end = get_last_insn ();
5648 if (head == end)
5649 return;
5650 /* While emitting the function end we may have moved the end of the
5651 last basic block. */
5652 BB_END (prev_bb) = orig_end;
5653 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5654 head = NEXT_INSN (head);
5655 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5656 bb frequency counting will be confused. Any instructions before that
5657 label are emitted for the case where PREV_BB falls through into the
5658 exit block, so append those instructions to prev_bb in that case. */
5659 if (NEXT_INSN (head) != return_label)
5660 {
5661 while (NEXT_INSN (head) != return_label)
5662 {
5663 if (!NOTE_P (NEXT_INSN (head)))
5664 BB_END (prev_bb) = NEXT_INSN (head);
5665 head = NEXT_INSN (head);
5666 }
5667 }
5668 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5669 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5670 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5671 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5672
5673 ix = 0;
5674 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5675 {
5676 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5677 if (!(e->flags & EDGE_ABNORMAL))
5678 redirect_edge_succ (e, exit_block);
5679 else
5680 ix++;
5681 }
5682
5683 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5684 e->probability = REG_BR_PROB_BASE;
5685 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5686 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5687 if (e2 != e)
5688 {
5689 e->count -= e2->count;
5690 exit_block->count -= e2->count;
5691 exit_block->frequency -= EDGE_FREQUENCY (e2);
5692 }
5693 if (e->count < 0)
5694 e->count = 0;
5695 if (exit_block->count < 0)
5696 exit_block->count = 0;
5697 if (exit_block->frequency < 0)
5698 exit_block->frequency = 0;
5699 update_bb_for_insn (exit_block);
5700 }
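
/* Schematically (illustration only): every non-abnormal predecessor edge
   of EXIT is redirected to the new exit_block holding the epilogue RTL,
   and a single fallthru edge from exit_block to EXIT carries the
   remaining count; abnormal predecessors keep pointing at EXIT itself.  */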
5701
5702 /* Helper function for discover_nonconstant_array_refs.
5703 Look for ARRAY_REF nodes with non-constant indexes and mark them
5704 addressable. */
5705
5706 static tree
5707 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5708 void *data ATTRIBUTE_UNUSED)
5709 {
5710 tree t = *tp;
5711
5712 if (IS_TYPE_OR_DECL_P (t))
5713 *walk_subtrees = 0;
5714 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5715 {
5716 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5717 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5718 && (!TREE_OPERAND (t, 2)
5719 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5720 || (TREE_CODE (t) == COMPONENT_REF
5721 && (!TREE_OPERAND (t,2)
5722 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5723 || TREE_CODE (t) == BIT_FIELD_REF
5724 || TREE_CODE (t) == REALPART_EXPR
5725 || TREE_CODE (t) == IMAGPART_EXPR
5726 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5727 || CONVERT_EXPR_P (t))
5728 t = TREE_OPERAND (t, 0);
5729
5730 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5731 {
5732 t = get_base_address (t);
5733 if (t && DECL_P (t)
5734 && DECL_MODE (t) != BLKmode)
5735 TREE_ADDRESSABLE (t) = 1;
5736 }
5737
5738 *walk_subtrees = 0;
5739 }
5740
5741 return NULL_TREE;
5742 }
5743
5744 /* RTL expansion cannot compile array references with variable
5745 offsets for arrays stored in a single register.  Discover such
5746 expressions and mark the variables as addressable to avoid this
5747 scenario. */
5748
5749 static void
5750 discover_nonconstant_array_refs (void)
5751 {
5752 basic_block bb;
5753 gimple_stmt_iterator gsi;
5754
5755 FOR_EACH_BB_FN (bb, cfun)
5756 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5757 {
5758 gimple stmt = gsi_stmt (gsi);
5759 if (!is_gimple_debug (stmt))
5760 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5761 }
5762 }
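
/* A hypothetical example of the scenario described above:

     int
     f (int i)
     {
       int a[2] = { 1, 2 };
       return a[i];
     }

   Without this marking, "a" could be allocated to a single non-BLKmode
   register, leaving the variable index "i" with no way to address into
   it; TREE_ADDRESSABLE forces "a" into memory instead.  */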
5763
5764 /* This function sets crtl->args.internal_arg_pointer to a virtual
5765 register if DRAP is needed.  The local register allocator will replace
5766 virtual_incoming_args_rtx with the virtual register. */
5767
5768 static void
5769 expand_stack_alignment (void)
5770 {
5771 rtx drap_rtx;
5772 unsigned int preferred_stack_boundary;
5773
5774 if (! SUPPORTS_STACK_ALIGNMENT)
5775 return;
5776
5777 if (cfun->calls_alloca
5778 || cfun->has_nonlocal_label
5779 || crtl->has_nonlocal_goto)
5780 crtl->need_drap = true;
5781
5782 /* Call update_stack_boundary here again to update incoming stack
5783 boundary. It may set incoming stack alignment to a different
5784 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5785 use the minimum incoming stack alignment to check if it is OK
5786 to perform sibcall optimization since sibcall optimization will
5787 only align the outgoing stack to incoming stack boundary. */
5788 if (targetm.calls.update_stack_boundary)
5789 targetm.calls.update_stack_boundary ();
5790
5791 /* The incoming stack frame has to be aligned at least at
5792 parm_stack_boundary. */
5793 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5794
5795 /* Update crtl->stack_alignment_estimated and use it later to align
5796 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5797 exceptions since callgraph doesn't collect incoming stack alignment
5798 in this case. */
5799 if (cfun->can_throw_non_call_exceptions
5800 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5801 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5802 else
5803 preferred_stack_boundary = crtl->preferred_stack_boundary;
5804 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5805 crtl->stack_alignment_estimated = preferred_stack_boundary;
5806 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5807 crtl->stack_alignment_needed = preferred_stack_boundary;
5808
5809 gcc_assert (crtl->stack_alignment_needed
5810 <= crtl->stack_alignment_estimated);
5811
5812 crtl->stack_realign_needed
5813 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5814 crtl->stack_realign_tried = crtl->stack_realign_needed;
5815
5816 crtl->stack_realign_processed = true;
5817
5818 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5819 alignment. */
5820 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5821 drap_rtx = targetm.calls.get_drap_rtx ();
5822
5823 /* stack_realign_drap and drap_rtx must match. */
5824 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5825
5826 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5827 if (NULL != drap_rtx)
5828 {
5829 crtl->args.internal_arg_pointer = drap_rtx;
5830
5831 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5832 needed. */
5833 fixup_tail_calls ();
5834 }
5835 }
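
/* Illustration (hypothetical, x86-flavored): a function with a local
   requiring 32-byte alignment while the incoming stack boundary is only
   16 bytes sets stack_realign_needed here.  If the prologue then realigns
   the stack pointer, incoming arguments can no longer be addressed
   through it, so the target's get_drap_rtx returns a dynamic realign
   argument pointer (DRAP) register and crtl->args.internal_arg_pointer is
   redirected to it.  */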
5836 \f
5837
5838 static void
5839 expand_main_function (void)
5840 {
5841 #if (defined(INVOKE__main) \
5842 || (!defined(HAS_INIT_SECTION) \
5843 && !defined(INIT_SECTION_ASM_OP) \
5844 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5845 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5846 #endif
5847 }
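
/* On targets taking this path the effect is roughly (illustration only):

     int
     main (void)
     {
       __main ();	/* libgcc hook that runs global constructors.  */
       ...user code...
     }

   Targets with .init/.init_array support run constructors from startup
   code instead, so no call is emitted there.  */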
5848 \f
5849
5850 /* Expand code to initialize the stack_protect_guard. This is invoked at
5851 the beginning of a function to be protected. */
5852
5853 #ifndef HAVE_stack_protect_set
5854 # define HAVE_stack_protect_set 0
5855 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5856 #endif
5857
5858 static void
5859 stack_protect_prologue (void)
5860 {
5861 tree guard_decl = targetm.stack_protect_guard ();
5862 rtx x, y;
5863
5864 x = expand_normal (crtl->stack_protect_guard);
5865 y = expand_normal (guard_decl);
5866
5867 /* Allow the target to copy from Y to X without leaking Y into a
5868 register. */
5869 if (HAVE_stack_protect_set)
5870 {
5871 rtx insn = gen_stack_protect_set (x, y);
5872 if (insn)
5873 {
5874 emit_insn (insn);
5875 return;
5876 }
5877 }
5878
5879 /* Otherwise do a straight move. */
5880 emit_move_insn (x, y);
5881 }
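
/* Roughly, and only as an illustration, the prologue emitted here behaves
   like

     local_guard = global_guard;

   where local_guard is the stack slot behind crtl->stack_protect_guard
   and global_guard is the target's guard_decl (commonly the
   __stack_chk_guard symbol).  The optional stack_protect_set pattern lets
   a target perform the copy without the guard value surviving in a
   scratch register where an overflow could later read it.  */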
5882
5883 /* Translate the intermediate representation contained in the CFG
5884 from GIMPLE trees to RTL.
5885
5886 We do conversion per basic block and preserve/update the tree CFG.
5887 This implies we have to do some magic as the CFG can simultaneously
5888 consist of basic blocks containing RTL and GIMPLE trees. This can
5889 confuse the CFG hooks, so be careful not to manipulate the CFG during
5890 the expansion. */
5891
5892 namespace {
5893
5894 const pass_data pass_data_expand =
5895 {
5896 RTL_PASS, /* type */
5897 "expand", /* name */
5898 OPTGROUP_NONE, /* optinfo_flags */
5899 TV_EXPAND, /* tv_id */
5900 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5901 | PROP_gimple_lcx
5902 | PROP_gimple_lvec
5903 | PROP_gimple_lva), /* properties_required */
5904 PROP_rtl, /* properties_provided */
5905 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5906 0, /* todo_flags_start */
5907 0, /* todo_flags_finish */
5908 };
5909
5910 class pass_expand : public rtl_opt_pass
5911 {
5912 public:
5913 pass_expand (gcc::context *ctxt)
5914 : rtl_opt_pass (pass_data_expand, ctxt)
5915 {}
5916
5917 /* opt_pass methods: */
5918 virtual unsigned int execute (function *);
5919
5920 }; // class pass_expand
5921
5922 unsigned int
5923 pass_expand::execute (function *fun)
5924 {
5925 basic_block bb, init_block;
5926 sbitmap blocks;
5927 edge_iterator ei;
5928 edge e;
5929 rtx_insn *var_seq, *var_ret_seq;
5930 unsigned i;
5931
5932 timevar_push (TV_OUT_OF_SSA);
5933 rewrite_out_of_ssa (&SA);
5934 timevar_pop (TV_OUT_OF_SSA);
5935 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5936
5937 if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
5938 {
5939 gimple_stmt_iterator gsi;
5940 FOR_EACH_BB_FN (bb, cfun)
5941 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5942 if (gimple_debug_bind_p (gsi_stmt (gsi)))
5943 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
5944 }
5945
5946 /* Make sure all values used by the optimization passes have sane
5947 defaults. */
5948 reg_renumber = 0;
5949
5950 /* Some backends want to know that we are expanding to RTL. */
5951 currently_expanding_to_rtl = 1;
5952 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5953 free_dominance_info (CDI_DOMINATORS);
5954
5955 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5956
5957 if (chkp_function_instrumented_p (current_function_decl))
5958 chkp_reset_rtl_bounds ();
5959
5960 insn_locations_init ();
5961 if (!DECL_IS_BUILTIN (current_function_decl))
5962 {
5963 /* Eventually, all FEs should explicitly set function_start_locus. */
5964 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5965 set_curr_insn_location
5966 (DECL_SOURCE_LOCATION (current_function_decl));
5967 else
5968 set_curr_insn_location (fun->function_start_locus);
5969 }
5970 else
5971 set_curr_insn_location (UNKNOWN_LOCATION);
5972 prologue_location = curr_insn_location ();
5973
5974 #ifdef INSN_SCHEDULING
5975 init_sched_attrs ();
5976 #endif
5977
5978 /* Make sure first insn is a note even if we don't want linenums.
5979 This makes sure the first insn will never be deleted.
5980 Also, final expects a note to appear there. */
5981 emit_note (NOTE_INSN_DELETED);
5982
5983 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5984 discover_nonconstant_array_refs ();
5985
5986 targetm.expand_to_rtl_hook ();
5987 crtl->stack_alignment_needed = STACK_BOUNDARY;
5988 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5989 crtl->stack_alignment_estimated = 0;
5990 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5991 fun->cfg->max_jumptable_ents = 0;
5992
5993 /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
5994 of the function section at expansion time to predict the distance of calls. */
5995 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5996
5997 /* Expand the variables recorded during gimple lowering. */
5998 timevar_push (TV_VAR_EXPAND);
5999 start_sequence ();
6000
6001 var_ret_seq = expand_used_vars ();
6002
6003 var_seq = get_insns ();
6004 end_sequence ();
6005 timevar_pop (TV_VAR_EXPAND);
6006
6007 /* Honor stack protection warnings. */
6008 if (warn_stack_protect)
6009 {
6010 if (fun->calls_alloca)
6011 warning (OPT_Wstack_protector,
6012 "stack protector not protecting local variables: "
6013 "variable length buffer");
6014 if (has_short_buffer && !crtl->stack_protect_guard)
6015 warning (OPT_Wstack_protector,
6016 "stack protector not protecting function: "
6017 "all local arrays are less than %d bytes long",
6018 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6019 }
6020
6021 /* Set up parameters and prepare for return, for the function. */
6022 expand_function_start (current_function_decl);
6023
6024 /* If we emitted any instructions for setting up the variables,
6025 emit them before the FUNCTION_START note. */
6026 if (var_seq)
6027 {
6028 emit_insn_before (var_seq, parm_birth_insn);
6029
6030 /* In expand_function_end we'll insert the alloca save/restore
6031 before parm_birth_insn.  We've just inserted an alloca call.
6032 Adjust the pointer to match. */
6033 parm_birth_insn = var_seq;
6034 }
6035
6036 /* Now that we also have the parameter RTXs, copy them over to our
6037 partitions. */
6038 for (i = 0; i < SA.map->num_partitions; i++)
6039 {
6040 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
6041
6042 if (TREE_CODE (var) != VAR_DECL
6043 && !SA.partition_to_pseudo[i])
6044 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
6045 gcc_assert (SA.partition_to_pseudo[i]);
6046
6047 /* If this decl was marked as living in multiple places, reset
6048 this now to NULL. */
6049 if (DECL_RTL_IF_SET (var) == pc_rtx)
6050 SET_DECL_RTL (var, NULL);
6051
6052 /* Some RTL parts really want to look at DECL_RTL(x) when x
6053 was a decl marked in REG_ATTR or MEM_ATTR. We could use
6054 SET_DECL_RTL here making this available, but that would mean
6055 selecting one of the potentially many RTLs for one DECL.  Instead
6056 of doing that we simply reset the MEM_EXPR of the RTL in question,
6057 then nobody can get at it and hence nobody can call DECL_RTL on it. */
6058 if (!DECL_RTL_SET_P (var))
6059 {
6060 if (MEM_P (SA.partition_to_pseudo[i]))
6061 set_mem_expr (SA.partition_to_pseudo[i], NULL);
6062 }
6063 }
6064
6065 /* If we have a class containing differently aligned pointers
6066 we need to merge those into the corresponding RTL pointer
6067 alignment. */
6068 for (i = 1; i < num_ssa_names; i++)
6069 {
6070 tree name = ssa_name (i);
6071 int part;
6072 rtx r;
6073
6074 if (!name
6075 /* We might have generated new SSA names in
6076 update_alias_info_with_stack_vars. They will have a NULL
6077 defining statement, and won't be part of the partitioning,
6078 so ignore those. */
6079 || !SSA_NAME_DEF_STMT (name))
6080 continue;
6081 part = var_to_partition (SA.map, name);
6082 if (part == NO_PARTITION)
6083 continue;
6084
6085 /* Adjust all partition members to get the underlying decl of
6086 the representative which we might have created in expand_one_var. */
6087 if (SSA_NAME_VAR (name) == NULL_TREE)
6088 {
6089 tree leader = partition_to_var (SA.map, part);
6090 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
6091 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
6092 }
6093 if (!POINTER_TYPE_P (TREE_TYPE (name)))
6094 continue;
6095
6096 r = SA.partition_to_pseudo[part];
6097 if (REG_P (r))
6098 mark_reg_pointer (r, get_pointer_alignment (name));
6099 }
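
/* For example (hypothetical): if pointers p_1 (known 16-byte aligned) and
   p_2 (known 4-byte aligned) share a partition, the two mark_reg_pointer
   calls on the shared pseudo leave only the weaker 4-byte alignment
   recorded, since mark_reg_pointer lowers the recorded alignment on
   subsequent calls.  */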
6100
6101 /* If this function is `main', emit a call to `__main'
6102 to run global initializers, etc. */
6103 if (DECL_NAME (current_function_decl)
6104 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6105 && DECL_FILE_SCOPE_P (current_function_decl))
6106 expand_main_function ();
6107
6108 /* Initialize the stack_protect_guard field. This must happen after the
6109 call to __main (if any) so that the external decl is initialized. */
6110 if (crtl->stack_protect_guard)
6111 stack_protect_prologue ();
6112
6113 expand_phi_nodes (&SA);
6114
6115 /* Register rtl specific functions for cfg. */
6116 rtl_register_cfg_hooks ();
6117
6118 init_block = construct_init_block ();
6119
6120 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6121 remaining edges later. */
6122 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6123 e->flags &= ~EDGE_EXECUTABLE;
6124
6125 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6126 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6127 next_bb)
6128 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6129
6130 if (MAY_HAVE_DEBUG_INSNS)
6131 expand_debug_locations ();
6132
6133 if (deep_ter_debug_map)
6134 {
6135 delete deep_ter_debug_map;
6136 deep_ter_debug_map = NULL;
6137 }
6138
6139 /* Free stuff we no longer need after GIMPLE optimizations. */
6140 free_dominance_info (CDI_DOMINATORS);
6141 free_dominance_info (CDI_POST_DOMINATORS);
6142 delete_tree_cfg_annotations ();
6143
6144 timevar_push (TV_OUT_OF_SSA);
6145 finish_out_of_ssa (&SA);
6146 timevar_pop (TV_OUT_OF_SSA);
6147
6148 timevar_push (TV_POST_EXPAND);
6149 /* We are no longer in SSA form. */
6150 fun->gimple_df->in_ssa_p = false;
6151 loops_state_clear (LOOP_CLOSED_SSA);
6152
6153 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6154 conservatively to true until they are all profile aware. */
6155 delete lab_rtx_for_bb;
6156 free_histograms ();
6157
6158 construct_exit_block ();
6159 insn_locations_finalize ();
6160
6161 if (var_ret_seq)
6162 {
6163 rtx_insn *after = return_label;
6164 rtx_insn *next = NEXT_INSN (after);
6165 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6166 after = next;
6167 emit_insn_after (var_ret_seq, after);
6168 }
6169
6170 /* Zap the tree EH table. */
6171 set_eh_throw_stmt_table (fun, NULL);
6172
6173 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6174 to split edges, which edge insertion might do. */
6175 rebuild_jump_labels (get_insns ());
6176
6177 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6178 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6179 {
6180 edge e;
6181 edge_iterator ei;
6182 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6183 {
6184 if (e->insns.r)
6185 {
6186 rebuild_jump_labels_chain (e->insns.r);
6187 /* Put insns after parm birth, but before
6188 NOTE_INSN_FUNCTION_BEG. */
6189 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6190 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6191 {
6192 rtx_insn *insns = e->insns.r;
6193 e->insns.r = NULL;
6194 if (NOTE_P (parm_birth_insn)
6195 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6196 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6197 else
6198 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6199 }
6200 else
6201 commit_one_edge_insertion (e);
6202 }
6203 else
6204 ei_next (&ei);
6205 }
6206 }
6207
6208 /* We're done expanding trees to RTL. */
6209 currently_expanding_to_rtl = 0;
6210
6211 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6212 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6213 {
6214 edge e;
6215 edge_iterator ei;
6216 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6217 {
6218 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6219 e->flags &= ~EDGE_EXECUTABLE;
6220
6221 /* At the moment not all abnormal edges match the RTL
6222 representation. It is safe to remove them here as
6223 find_many_sub_basic_blocks will rediscover them.
6224 In the future we should get this fixed properly. */
6225 if ((e->flags & EDGE_ABNORMAL)
6226 && !(e->flags & EDGE_SIBCALL))
6227 remove_edge (e);
6228 else
6229 ei_next (&ei);
6230 }
6231 }
6232
6233 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6234 bitmap_ones (blocks);
6235 find_many_sub_basic_blocks (blocks);
6236 sbitmap_free (blocks);
6237 purge_all_dead_edges ();
6238
6239 expand_stack_alignment ();
6240
6241 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6242 function. */
6243 if (crtl->tail_call_emit)
6244 fixup_tail_calls ();
6245
6246 /* After initial rtl generation, call back to finish generating
6247 exception support code. We need to do this before cleaning up
6248 the CFG as the code does not expect dead landing pads. */
6249 if (fun->eh->region_tree != NULL)
6250 finish_eh_generation ();
6251
6252 /* Remove unreachable blocks, otherwise we cannot compute dominators
6253 which are needed for loop state verification. As a side-effect
6254 this also compacts blocks.
6255 ??? We cannot remove trivially dead insns here as for example
6256 the DRAP reg on i?86 is not magically live at this point.
6257 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6258 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6259
6260 #ifdef ENABLE_CHECKING
6261 verify_flow_info ();
6262 #endif
6263
6264 /* Initialize pseudos allocated for hard registers. */
6265 emit_initial_value_sets ();
6266
6267 /* And finally unshare all RTL. */
6268 unshare_all_rtl ();
6269
6270 /* There's no need to defer outputting this function any more; we
6271 know we want to output it. */
6272 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6273
6274 /* Now that we're done expanding trees to RTL, we shouldn't have any
6275 more CONCATs anywhere. */
6276 generating_concat_p = 0;
6277
6278 if (dump_file)
6279 {
6280 fprintf (dump_file,
6281 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6282 /* And the pass manager will dump RTL for us. */
6283 }
6284
6285 /* If we're emitting a nested function, make sure its parent gets
6286 emitted as well. Doing otherwise confuses debug info. */
6287 {
6288 tree parent;
6289 for (parent = DECL_CONTEXT (current_function_decl);
6290 parent != NULL_TREE;
6291 parent = get_containing_scope (parent))
6292 if (TREE_CODE (parent) == FUNCTION_DECL)
6293 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6294 }
6295
6296 /* We are now committed to emitting code for this function. Do any
6297 preparation, such as emitting abstract debug info for the inline function
6298 before it gets mangled by optimization. */
6299 if (cgraph_function_possibly_inlined_p (current_function_decl))
6300 (*debug_hooks->outlining_inline_function) (current_function_decl);
6301
6302 TREE_ASM_WRITTEN (current_function_decl) = 1;
6303
6304 /* After expanding, the return labels are no longer needed. */
6305 return_label = NULL;
6306 naked_return_label = NULL;
6307
6308 /* After expanding, the tm_restart map is no longer needed. */
6309 if (fun->gimple_df->tm_restart)
6310 fun->gimple_df->tm_restart = NULL;
6311
6312 /* Tag the blocks with a depth number so that change_scope can find
6313 the common parent easily. */
6314 set_block_levels (DECL_INITIAL (fun->decl), 0);
6315 default_rtl_profile ();
6316
6317 timevar_pop (TV_POST_EXPAND);
6318
6319 return 0;
6320 }
6321
6322 } // anon namespace
6323
6324 rtl_opt_pass *
6325 make_pass_expand (gcc::context *ctxt)
6326 {
6327 return new pass_expand (ctxt);
6328 }
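
/* For orientation only: the pass manager constructs this pass through
   make_pass_expand from its NEXT_PASS (pass_expand) entry in passes.def
   and calls execute () once per function; that call drives the whole
   GIMPLE-to-RTL translation implemented above.  */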