gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "hard-reg-set.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "stringpool.h"
38 #include "varasm.h"
39 #include "stor-layout.h"
40 #include "stmt.h"
41 #include "print-tree.h"
42 #include "tm_p.h"
43 #include "predict.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "dominance.h"
47 #include "cfg.h"
48 #include "cfgrtl.h"
49 #include "cfganal.h"
50 #include "cfgbuild.h"
51 #include "cfgcleanup.h"
52 #include "basic-block.h"
53 #include "insn-codes.h"
54 #include "optabs.h"
55 #include "flags.h"
56 #include "statistics.h"
57 #include "real.h"
58 #include "fixed-value.h"
59 #include "insn-config.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "calls.h"
64 #include "emit-rtl.h"
65 #include "expr.h"
66 #include "langhooks.h"
67 #include "bitmap.h"
68 #include "tree-ssa-alias.h"
69 #include "internal-fn.h"
70 #include "tree-eh.h"
71 #include "gimple-expr.h"
72 #include "is-a.h"
73 #include "gimple.h"
74 #include "gimple-iterator.h"
75 #include "gimple-walk.h"
76 #include "gimple-ssa.h"
77 #include "hash-map.h"
78 #include "plugin-api.h"
79 #include "ipa-ref.h"
80 #include "cgraph.h"
81 #include "tree-cfg.h"
82 #include "tree-phinodes.h"
83 #include "ssa-iterators.h"
84 #include "tree-ssanames.h"
85 #include "tree-dfa.h"
86 #include "tree-ssa.h"
87 #include "tree-pass.h"
88 #include "except.h"
89 #include "diagnostic.h"
90 #include "gimple-pretty-print.h"
91 #include "toplev.h"
92 #include "debug.h"
93 #include "params.h"
94 #include "tree-inline.h"
95 #include "value-prof.h"
96 #include "target.h"
97 #include "tree-ssa-live.h"
98 #include "tree-outof-ssa.h"
99 #include "sbitmap.h"
100 #include "cfgloop.h"
101 #include "regs.h" /* For reg_renumber. */
102 #include "insn-attr.h" /* For INSN_SCHEDULING. */
103 #include "asan.h"
104 #include "tree-ssa-address.h"
105 #include "recog.h"
106 #include "output.h"
107 #include "builtins.h"
108 #include "tree-chkp.h"
109 #include "rtl-chkp.h"
110
111 /* Some systems use __main in a way incompatible with its use in gcc; in these
112 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
113 give the same symbol without quotes for an alternative entry point. You
114 must define both, or neither. */
115 #ifndef NAME__MAIN
116 #define NAME__MAIN "__main"
117 #endif
118
119 /* This variable holds information helping the rewriting of SSA trees
120 into RTL. */
121 struct ssaexpand SA;
122
123 /* This variable holds the currently expanded gimple statement for purposes
124 of communicating the profile info to the builtin expanders. */
125 gimple currently_expanding_gimple_stmt;
126
127 static rtx expand_debug_expr (tree);
128
129 /* Return an expression tree corresponding to the RHS of GIMPLE
130 statement STMT. */
131
132 tree
133 gimple_assign_rhs_to_tree (gimple stmt)
134 {
135 tree t;
136 enum gimple_rhs_class grhs_class;
137
138 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
139
140 if (grhs_class == GIMPLE_TERNARY_RHS)
141 t = build3 (gimple_assign_rhs_code (stmt),
142 TREE_TYPE (gimple_assign_lhs (stmt)),
143 gimple_assign_rhs1 (stmt),
144 gimple_assign_rhs2 (stmt),
145 gimple_assign_rhs3 (stmt));
146 else if (grhs_class == GIMPLE_BINARY_RHS)
147 t = build2 (gimple_assign_rhs_code (stmt),
148 TREE_TYPE (gimple_assign_lhs (stmt)),
149 gimple_assign_rhs1 (stmt),
150 gimple_assign_rhs2 (stmt));
151 else if (grhs_class == GIMPLE_UNARY_RHS)
152 t = build1 (gimple_assign_rhs_code (stmt),
153 TREE_TYPE (gimple_assign_lhs (stmt)),
154 gimple_assign_rhs1 (stmt));
155 else if (grhs_class == GIMPLE_SINGLE_RHS)
156 {
157 t = gimple_assign_rhs1 (stmt);
158 /* Avoid modifying this tree in place below. */
159 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
160 && gimple_location (stmt) != EXPR_LOCATION (t))
161 || (gimple_block (stmt)
162 && currently_expanding_to_rtl
163 && EXPR_P (t)))
164 t = copy_node (t);
165 }
166 else
167 gcc_unreachable ();
168
169 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
170 SET_EXPR_LOCATION (t, gimple_location (stmt));
171
172 return t;
173 }
174
175
176 #ifndef STACK_ALIGNMENT_NEEDED
177 #define STACK_ALIGNMENT_NEEDED 1
178 #endif
179
180 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
181
182 /* Associate declaration T with storage space X. If T is not an
183 SSA name, this is exactly SET_DECL_RTL; otherwise associate the
184 partition containing T with X. */
185 static inline void
186 set_rtl (tree t, rtx x)
187 {
188 if (TREE_CODE (t) == SSA_NAME)
189 {
190 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
191 if (x && !MEM_P (x))
192 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
193 /* For the benefit of debug information at -O0 (where vartracking
194 doesn't run) record the place also in the base DECL if it's
195 a normal variable (not a parameter). */
196 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
197 {
198 tree var = SSA_NAME_VAR (t);
199 /* If we don't yet have something recorded, just record it now. */
200 if (!DECL_RTL_SET_P (var))
201 SET_DECL_RTL (var, x);
202 /* If we have it set already to "multiple places" don't
203 change this. */
204 else if (DECL_RTL (var) == pc_rtx)
205 ;
206 /* If we have something recorded and it's not the same place
207 as we want to record now, we have multiple partitions for the
208 same base variable, with different places. We can't just
209 randomly choose one, hence we have to say that we don't know. This
210 This only happens with optimization, and there var-tracking
211 will figure out the right thing. */
212 else if (DECL_RTL (var) != x)
213 SET_DECL_RTL (var, pc_rtx);
214 }
215 }
216 else
217 SET_DECL_RTL (t, x);
218 }
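/* An illustrative sketch of the pc_rtx protocol above, assuming SSA names
   a_1 and a_2 both have the VAR_DECL a as their base:

     set_rtl (a_1, reg1);   DECL_RTL (a) is unset -> recorded as reg1
     set_rtl (a_2, reg2);   reg2 != reg1 -> DECL_RTL (a) becomes pc_rtx

   pc_rtx here is a sentinel meaning "multiple places"; with optimization,
   var-tracking later recovers the per-range locations.  */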
219
220 /* This structure holds data relevant to one variable that will be
221 placed in a stack slot. */
222 struct stack_var
223 {
224 /* The variable. */
225 tree decl;
226
227 /* Initially, the size of the variable. Later, the size of the partition,
228 if this variable becomes its partition's representative. */
229 HOST_WIDE_INT size;
230
231 /* The *byte* alignment required for this variable. Or, as with the
232 size, the alignment for this partition. */
233 unsigned int alignb;
234
235 /* The partition representative. */
236 size_t representative;
237
238 /* The next stack variable in the partition, or EOC. */
239 size_t next;
240
241 /* The indices of conflicting stack variables. */
242 bitmap conflicts;
243 };
244
245 #define EOC ((size_t)-1)
246
247 /* We have an array of such objects while deciding allocation. */
248 static struct stack_var *stack_vars;
249 static size_t stack_vars_alloc;
250 static size_t stack_vars_num;
251 static hash_map<tree, size_t> *decl_to_stack_part;
252
253 /* Conflict bitmaps go on this obstack. This allows us to destroy
254 all of them in one big sweep. */
255 static bitmap_obstack stack_var_bitmap_obstack;
256
257 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
258 is non-increasing. */
259 static size_t *stack_vars_sorted;
260
261 /* The phase of the stack frame. This is the known misalignment of
262 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
263 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
264 static int frame_phase;
265
266 /* Used during expand_used_vars to remember if we saw any decls for
267 which we'd like to enable stack smashing protection. */
268 static bool has_protected_decls;
269
270 /* Used during expand_used_vars. Remember if we saw a character buffer
271 smaller than our cutoff threshold. Used for -Wstack-protector. */
272 static bool has_short_buffer;
273
274 /* Compute the byte alignment to use for DECL. Ignore alignment
275 we can't honor given the expected alignment of the stack boundary. */
276
277 static unsigned int
278 align_local_variable (tree decl)
279 {
280 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
281 DECL_ALIGN (decl) = align;
282 return align / BITS_PER_UNIT;
283 }
284
285 /* Align given offset BASE with ALIGN. Round up if ALIGN_UP is true,
286 down otherwise. Return the aligned BASE value. */
287
288 static inline unsigned HOST_WIDE_INT
289 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
290 {
291 return align_up ? (base + align - 1) & -align : base & -align;
292 }
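/* A worked example, assuming ALIGN is a power of two as all callers pass:

     align_base (37, 16, true)  == 48    ((37 + 15) & -16)
     align_base (37, 16, false) == 32    (37 & -16)

   -16 is the two's-complement mask ~15, so the AND clears the low bits.  */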
293
294 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
295 Return the frame offset. */
296
297 static HOST_WIDE_INT
298 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
299 {
300 HOST_WIDE_INT offset, new_frame_offset;
301
302 if (FRAME_GROWS_DOWNWARD)
303 {
304 new_frame_offset
305 = align_base (frame_offset - frame_phase - size,
306 align, false) + frame_phase;
307 offset = new_frame_offset;
308 }
309 else
310 {
311 new_frame_offset
312 = align_base (frame_offset - frame_phase, align, true) + frame_phase;
313 offset = new_frame_offset;
314 new_frame_offset += size;
315 }
316 frame_offset = new_frame_offset;
317
318 if (frame_offset_overflow (frame_offset, cfun->decl))
319 frame_offset = offset = 0;
320
321 return offset;
322 }
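/* A worked example, assuming frame_phase == 0.  With FRAME_GROWS_DOWNWARD
   and frame_offset == -32, allocating 40 bytes at byte alignment 8 gives

     new_frame_offset = align_base (-32 - 40, 8, false) == -72

   so both the returned offset and the new frame_offset are -72.  Growing
   upward from frame_offset == 32 with alignment 16, the returned offset
   is align_base (32, 16, true) == 32 and frame_offset becomes 72.  */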
323
324 /* Accumulate DECL into STACK_VARS. */
325
326 static void
327 add_stack_var (tree decl)
328 {
329 struct stack_var *v;
330
331 if (stack_vars_num >= stack_vars_alloc)
332 {
333 if (stack_vars_alloc)
334 stack_vars_alloc = stack_vars_alloc * 3 / 2;
335 else
336 stack_vars_alloc = 32;
337 stack_vars
338 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
339 }
340 if (!decl_to_stack_part)
341 decl_to_stack_part = new hash_map<tree, size_t>;
342
343 v = &stack_vars[stack_vars_num];
344 decl_to_stack_part->put (decl, stack_vars_num);
345
346 v->decl = decl;
347 v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
348 /* Ensure that all variables have size, so that &a != &b for any two
349 variables that are simultaneously live. */
350 if (v->size == 0)
351 v->size = 1;
352 v->alignb = align_local_variable (SSAVAR (decl));
353 /* An alignment of zero can mightily confuse us later. */
354 gcc_assert (v->alignb != 0);
355
356 /* All variables are initially in their own partition. */
357 v->representative = stack_vars_num;
358 v->next = EOC;
359
360 /* All variables initially conflict with no other. */
361 v->conflicts = NULL;
362
363 /* Ensure that this decl doesn't get put onto the list twice. */
364 set_rtl (decl, pc_rtx);
365
366 stack_vars_num++;
367 }
368
369 /* Make the decls associated with luids X and Y conflict. */
370
371 static void
372 add_stack_var_conflict (size_t x, size_t y)
373 {
374 struct stack_var *a = &stack_vars[x];
375 struct stack_var *b = &stack_vars[y];
376 if (!a->conflicts)
377 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
378 if (!b->conflicts)
379 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
380 bitmap_set_bit (a->conflicts, y);
381 bitmap_set_bit (b->conflicts, x);
382 }
383
384 /* Check whether the decls associated with luids X and Y conflict. */
385
386 static bool
387 stack_var_conflict_p (size_t x, size_t y)
388 {
389 struct stack_var *a = &stack_vars[x];
390 struct stack_var *b = &stack_vars[y];
391 if (x == y)
392 return false;
393 /* Partitions containing an SSA name result from gimple registers
394 with things like unsupported modes. They are top-level and
395 hence conflict with everything else. */
396 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
397 return true;
398
399 if (!a->conflicts || !b->conflicts)
400 return false;
401 return bitmap_bit_p (a->conflicts, y);
402 }
403
404 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
405 enter its partition number into bitmap DATA. */
406
407 static bool
408 visit_op (gimple, tree op, tree, void *data)
409 {
410 bitmap active = (bitmap)data;
411 op = get_base_address (op);
412 if (op
413 && DECL_P (op)
414 && DECL_RTL_IF_SET (op) == pc_rtx)
415 {
416 size_t *v = decl_to_stack_part->get (op);
417 if (v)
418 bitmap_set_bit (active, *v);
419 }
420 return false;
421 }
422
423 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
424 record conflicts between it and all currently active other partitions
425 from bitmap DATA. */
426
427 static bool
428 visit_conflict (gimple, tree op, tree, void *data)
429 {
430 bitmap active = (bitmap)data;
431 op = get_base_address (op);
432 if (op
433 && DECL_P (op)
434 && DECL_RTL_IF_SET (op) == pc_rtx)
435 {
436 size_t *v = decl_to_stack_part->get (op);
437 if (v && bitmap_set_bit (active, *v))
438 {
439 size_t num = *v;
440 bitmap_iterator bi;
441 unsigned i;
442 gcc_assert (num < stack_vars_num);
443 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
444 add_stack_var_conflict (num, i);
445 }
446 }
447 return false;
448 }
449
450 /* Helper routine for add_scope_conflicts, calculating the active partitions
451 at the end of BB, leaving the result in WORK. We're called to generate
452 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
453 liveness. */
454
455 static void
456 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
457 {
458 edge e;
459 edge_iterator ei;
460 gimple_stmt_iterator gsi;
461 walk_stmt_load_store_addr_fn visit;
462
463 bitmap_clear (work);
464 FOR_EACH_EDGE (e, ei, bb->preds)
465 bitmap_ior_into (work, (bitmap)e->src->aux);
466
467 visit = visit_op;
468
469 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
470 {
471 gimple stmt = gsi_stmt (gsi);
472 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
473 }
474 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
475 {
476 gimple stmt = gsi_stmt (gsi);
477
478 if (gimple_clobber_p (stmt))
479 {
480 tree lhs = gimple_assign_lhs (stmt);
481 size_t *v;
482 /* Nested function lowering might introduce LHSs
483 that are COMPONENT_REFs. */
484 if (TREE_CODE (lhs) != VAR_DECL)
485 continue;
486 if (DECL_RTL_IF_SET (lhs) == pc_rtx
487 && (v = decl_to_stack_part->get (lhs)))
488 bitmap_clear_bit (work, *v);
489 }
490 else if (!is_gimple_debug (stmt))
491 {
492 if (for_conflict
493 && visit == visit_op)
494 {
495 /* If this is the first real instruction in this BB we need
496 to add conflicts for everything live at this point now.
497 Unlike classical liveness for named objects we can't
498 rely on seeing a def/use of the names we're interested in.
499 There might merely be indirect loads/stores. We'd not add any
500 conflicts for such partitions. */
501 bitmap_iterator bi;
502 unsigned i;
503 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
504 {
505 struct stack_var *a = &stack_vars[i];
506 if (!a->conflicts)
507 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
508 bitmap_ior_into (a->conflicts, work);
509 }
510 visit = visit_conflict;
511 }
512 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
513 }
514 }
515 }
516
517 /* Generate stack partition conflicts between all partitions that are
518 simultaneously live. */
519
520 static void
521 add_scope_conflicts (void)
522 {
523 basic_block bb;
524 bool changed;
525 bitmap work = BITMAP_ALLOC (NULL);
526 int *rpo;
527 int n_bbs;
528
529 /* We approximate the live range of a stack variable by taking the first
530 mention of its name as starting point(s), and by the end-of-scope
531 death clobber added by gimplify as ending point(s) of the range.
532 This overapproximates if we, for instance, moved an address-taken
533 operation upward without also moving a dereference to it upward.
534 But it's conservatively correct, as a variable can never hold values
535 before its name is mentioned at least once.
536
537 We then do a mostly classical bitmap liveness algorithm. */
538
539 FOR_ALL_BB_FN (bb, cfun)
540 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
541
542 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
543 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
544
545 changed = true;
546 while (changed)
547 {
548 int i;
549 changed = false;
550 for (i = 0; i < n_bbs; i++)
551 {
552 bitmap active;
553 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
554 active = (bitmap)bb->aux;
555 add_scope_conflicts_1 (bb, work, false);
556 if (bitmap_ior_into (active, work))
557 changed = true;
558 }
559 }
560
561 FOR_EACH_BB_FN (bb, cfun)
562 add_scope_conflicts_1 (bb, work, true);
563
564 free (rpo);
565 BITMAP_FREE (work);
566 FOR_ALL_BB_FN (bb, cfun)
567 BITMAP_FREE (bb->aux);
568 }
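/* An illustrative example of the liveness approximation, assuming two
   disjoint scopes in one function:

     { char a[100]; use (&a); }   gimplify adds "a = {CLOBBER};" at exit
     { char b[100]; use (&b); }

   The clobber of a clears its bit from the live set before b is first
   mentioned, so no conflict is recorded between them and the partitioner
   below may give a and b the same stack slot.  */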
569
570 /* A subroutine of partition_stack_vars. A comparison function for qsort,
571 sorting an array of indices by the properties of the object. */
572
573 static int
574 stack_var_cmp (const void *a, const void *b)
575 {
576 size_t ia = *(const size_t *)a;
577 size_t ib = *(const size_t *)b;
578 unsigned int aligna = stack_vars[ia].alignb;
579 unsigned int alignb = stack_vars[ib].alignb;
580 HOST_WIDE_INT sizea = stack_vars[ia].size;
581 HOST_WIDE_INT sizeb = stack_vars[ib].size;
582 tree decla = stack_vars[ia].decl;
583 tree declb = stack_vars[ib].decl;
584 bool largea, largeb;
585 unsigned int uida, uidb;
586
587 /* Primary compare on "large" alignment. Large comes first. */
588 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
589 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
590 if (largea != largeb)
591 return (int)largeb - (int)largea;
592
593 /* Secondary compare on size, decreasing. */
594 if (sizea > sizeb)
595 return -1;
596 if (sizea < sizeb)
597 return 1;
598
599 /* Tertiary compare on true alignment, increasing. */
600 if (aligna < alignb)
601 return -1;
602 if (aligna > alignb)
603 return 1;
604
605 /* Final compare on ID for sort stability, decreasing.
606 Two SSA names are compared by their version, SSA names come before
607 non-SSA names, and two normal decls are compared by their DECL_UID. */
608 if (TREE_CODE (decla) == SSA_NAME)
609 {
610 if (TREE_CODE (declb) == SSA_NAME)
611 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
612 else
613 return -1;
614 }
615 else if (TREE_CODE (declb) == SSA_NAME)
616 return 1;
617 else
618 uida = DECL_UID (decla), uidb = DECL_UID (declb);
619 if (uida < uidb)
620 return 1;
621 if (uida > uidb)
622 return -1;
623 return 0;
624 }
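/* An illustrative example: for decls A (size 100), B (size 64) and
   C (size 16), none with "large" alignment, the sorted order is A, B, C.
   Any "large"-alignment decls would precede all three, and ties are
   broken by alignment and finally by SSA version / DECL_UID so the
   order is deterministic across runs.  */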
625
626 struct part_traits : default_hashmap_traits
627 {
628 template<typename T>
629 static bool
630 is_deleted (T &e)
631 { return e.m_value == reinterpret_cast<void *> (1); }
632
633 template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
634 template<typename T>
635 static void
636 mark_deleted (T &e)
637 { e.m_value = reinterpret_cast<T> (1); }
638
639 template<typename T>
640 static void
641 mark_empty (T &e)
642 { e.m_value = NULL; }
643 };
644
645 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
646
647 /* If the points-to solution *PT points to variables that are in a partition
648 together with other variables, add all partition members to the pointed-to
649 variables bitmap. */
650
651 static void
652 add_partitioned_vars_to_ptset (struct pt_solution *pt,
653 part_hashmap *decls_to_partitions,
654 hash_set<bitmap> *visited, bitmap temp)
655 {
656 bitmap_iterator bi;
657 unsigned i;
658 bitmap *part;
659
660 if (pt->anything
661 || pt->vars == NULL
662 /* The pointed-to vars bitmap is shared, it is enough to
663 visit it once. */
664 || visited->add (pt->vars))
665 return;
666
667 bitmap_clear (temp);
668
669 /* By using a temporary bitmap to store all members of the partitions
670 we have to add we make sure to visit each of the partitions only
671 once. */
672 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
673 if ((!temp
674 || !bitmap_bit_p (temp, i))
675 && (part = decls_to_partitions->get (i)))
676 bitmap_ior_into (temp, *part);
677 if (!bitmap_empty_p (temp))
678 bitmap_ior_into (pt->vars, temp);
679 }
680
681 /* Update points-to sets based on partition info, so we can use them on RTL.
682 The bitmaps representing stack partitions will be saved until expand,
683 where partitioned decls used as bases in memory expressions will be
684 rewritten. */
685
686 static void
687 update_alias_info_with_stack_vars (void)
688 {
689 part_hashmap *decls_to_partitions = NULL;
690 size_t i, j;
691 tree var = NULL_TREE;
692
693 for (i = 0; i < stack_vars_num; i++)
694 {
695 bitmap part = NULL;
696 tree name;
697 struct ptr_info_def *pi;
698
699 /* Not interested in partitions with a single variable. */
700 if (stack_vars[i].representative != i
701 || stack_vars[i].next == EOC)
702 continue;
703
704 if (!decls_to_partitions)
705 {
706 decls_to_partitions = new part_hashmap;
707 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
708 }
709
710 /* Create an SSA_NAME that points to the partition for use
711 as base during alias-oracle queries on RTL for bases that
712 have been partitioned. */
713 if (var == NULL_TREE)
714 var = create_tmp_var (ptr_type_node);
715 name = make_ssa_name (var);
716
717 /* Create bitmaps representing partitions. They will be used for
718 points-to sets later, so use GGC alloc. */
719 part = BITMAP_GGC_ALLOC ();
720 for (j = i; j != EOC; j = stack_vars[j].next)
721 {
722 tree decl = stack_vars[j].decl;
723 unsigned int uid = DECL_PT_UID (decl);
724 bitmap_set_bit (part, uid);
725 decls_to_partitions->put (uid, part);
726 cfun->gimple_df->decls_to_pointers->put (decl, name);
727 if (TREE_ADDRESSABLE (decl))
728 TREE_ADDRESSABLE (name) = 1;
729 }
730
731 /* Make the SSA name point to all partition members. */
732 pi = get_ptr_info (name);
733 pt_solution_set (&pi->pt, part, false);
734 }
735
736 /* Make all points-to sets that contain one member of a partition
737 contain all members of the partition. */
738 if (decls_to_partitions)
739 {
740 unsigned i;
741 hash_set<bitmap> visited;
742 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
743
744 for (i = 1; i < num_ssa_names; i++)
745 {
746 tree name = ssa_name (i);
747 struct ptr_info_def *pi;
748
749 if (name
750 && POINTER_TYPE_P (TREE_TYPE (name))
751 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
752 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
753 &visited, temp);
754 }
755
756 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
757 decls_to_partitions, &visited, temp);
758
759 delete decls_to_partitions;
760 BITMAP_FREE (temp);
761 }
762 }
763
764 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
765 partitioning algorithm. Partitions A and B are known to be non-conflicting.
766 Merge them into a single partition A. */
767
768 static void
769 union_stack_vars (size_t a, size_t b)
770 {
771 struct stack_var *vb = &stack_vars[b];
772 bitmap_iterator bi;
773 unsigned u;
774
775 gcc_assert (stack_vars[b].next == EOC);
776 /* Add B to A's partition. */
777 stack_vars[b].next = stack_vars[a].next;
778 stack_vars[b].representative = a;
779 stack_vars[a].next = b;
780
781 /* Update the required alignment of partition A to account for B. */
782 if (stack_vars[a].alignb < stack_vars[b].alignb)
783 stack_vars[a].alignb = stack_vars[b].alignb;
784
785 /* Update the interference graph and merge the conflicts. */
786 if (vb->conflicts)
787 {
788 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
789 add_stack_var_conflict (a, stack_vars[u].representative);
790 BITMAP_FREE (vb->conflicts);
791 }
792 }
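/* An illustrative example: merging B into A splices B in right after
   the representative:

     before:  A -> n1 -> ... -> EOC        B -> EOC
     after:   A -> B -> n1 -> ... -> EOC   (B's representative is now A)

   A's alignment and conflict set absorb B's, and B's own conflict bitmap
   is freed because only representatives are consulted afterwards.  */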
793
794 /* A subroutine of expand_used_vars. Binpack the variables into
795 partitions constrained by the interference graph. The overall
796 algorithm used is as follows:
797
798 Sort the objects by size in descending order.
799 For each object A {
800 S = size(A)
801 O = 0
802 loop {
803 Look for the largest non-conflicting object B with size <= S.
804 UNION (A, B)
805 }
806 }
807 */
808
809 static void
810 partition_stack_vars (void)
811 {
812 size_t si, sj, n = stack_vars_num;
813
814 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
815 for (si = 0; si < n; ++si)
816 stack_vars_sorted[si] = si;
817
818 if (n == 1)
819 return;
820
821 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
822
823 for (si = 0; si < n; ++si)
824 {
825 size_t i = stack_vars_sorted[si];
826 unsigned int ialign = stack_vars[i].alignb;
827 HOST_WIDE_INT isize = stack_vars[i].size;
828
829 /* Ignore objects that aren't partition representatives. If we
830 see a var that is not a partition representative, it must
831 have been merged earlier. */
832 if (stack_vars[i].representative != i)
833 continue;
834
835 for (sj = si + 1; sj < n; ++sj)
836 {
837 size_t j = stack_vars_sorted[sj];
838 unsigned int jalign = stack_vars[j].alignb;
839 HOST_WIDE_INT jsize = stack_vars[j].size;
840
841 /* Ignore objects that aren't partition representatives. */
842 if (stack_vars[j].representative != j)
843 continue;
844
845 /* Do not mix objects of "small" (supported) alignment
846 and "large" (unsupported) alignment. */
847 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
848 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
849 break;
850
851 /* For Address Sanitizer do not mix objects with different
852 sizes, as the shorter vars wouldn't be adequately protected.
853 Don't do that for "large" (unsupported) alignment objects,
854 those aren't protected anyway. */
855 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
856 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
857 break;
858
859 /* Ignore conflicting objects. */
860 if (stack_var_conflict_p (i, j))
861 continue;
862
863 /* UNION the objects, placing J at OFFSET. */
864 union_stack_vars (i, j);
865 }
866 }
867
868 update_alias_info_with_stack_vars ();
869 }
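/* An illustrative example: with sorted representatives A (100 bytes),
   B (64) and C (16), and a lone conflict between A and B, the scan
   unions C into A and leaves B alone, yielding partitions {A, C} and
   {B}.  All members of a partition later share a single frame offset,
   which is why only non-conflicting (never simultaneously live)
   variables may be merged.  */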
870
871 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
872
873 static void
874 dump_stack_var_partition (void)
875 {
876 size_t si, i, j, n = stack_vars_num;
877
878 for (si = 0; si < n; ++si)
879 {
880 i = stack_vars_sorted[si];
881
882 /* Skip variables that aren't partition representatives, for now. */
883 if (stack_vars[i].representative != i)
884 continue;
885
886 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
887 " align %u\n", (unsigned long) i, stack_vars[i].size,
888 stack_vars[i].alignb);
889
890 for (j = i; j != EOC; j = stack_vars[j].next)
891 {
892 fputc ('\t', dump_file);
893 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
894 }
895 fputc ('\n', dump_file);
896 }
897 }
898
899 /* Assign rtl to DECL at BASE + OFFSET. */
900
901 static void
902 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
903 HOST_WIDE_INT offset)
904 {
905 unsigned align;
906 rtx x;
907
908 /* If this fails, we've overflowed the stack frame. Error nicely? */
909 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
910
911 x = plus_constant (Pmode, base, offset);
912 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
913
914 if (TREE_CODE (decl) != SSA_NAME)
915 {
916 /* Set the alignment we actually gave this decl if it isn't an SSA name.
917 If it is, we generate stack slots only accidentally, so it isn't as
918 important; we'll simply use the alignment that is already set. */
919 if (base == virtual_stack_vars_rtx)
920 offset -= frame_phase;
921 align = offset & -offset;
922 align *= BITS_PER_UNIT;
923 if (align == 0 || align > base_align)
924 align = base_align;
925
926 /* One would think that we could assert that we're not decreasing
927 alignment here, but (at least) the i386 port does exactly this
928 via the MINIMUM_ALIGNMENT hook. */
929
930 DECL_ALIGN (decl) = align;
931 DECL_USER_ALIGN (decl) = 0;
932 }
933
934 set_mem_attributes (x, SSAVAR (decl), true);
935 set_rtl (decl, x);
936 }
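/* A worked example for the alignment recovery above, assuming 8-bit
   units: offset & -offset isolates the lowest set bit, so an offset of
   24 bytes gives 24 & -24 == 8, i.e. 64-bit alignment after scaling by
   BITS_PER_UNIT; the result is then capped by BASE_ALIGN, the alignment
   actually guaranteed for BASE.  */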
937
938 struct stack_vars_data
939 {
940 /* Vector of offset pairs, always end of some padding followed
941 by start of the padding that needs Address Sanitizer protection.
942 The vector is reversed; highest-offset pairs come first. */
943 vec<HOST_WIDE_INT> asan_vec;
944
945 /* Vector of partition representative decls in between the paddings. */
946 vec<tree> asan_decl_vec;
947
948 /* Base pseudo register for Address Sanitizer protected automatic vars. */
949 rtx asan_base;
950
951 /* Alignment needed for the Address Sanitizer protected automatic vars. */
952 unsigned int asan_alignb;
953 };
954
955 /* A subroutine of expand_used_vars. Give each partition representative
956 a unique location within the stack frame. Update each partition member
957 with that location. */
958
959 static void
960 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
961 {
962 size_t si, i, j, n = stack_vars_num;
963 HOST_WIDE_INT large_size = 0, large_alloc = 0;
964 rtx large_base = NULL;
965 unsigned large_align = 0;
966 tree decl;
967
968 /* Determine if there are any variables requiring "large" alignment.
969 Since these are dynamically allocated, we only process these if
970 no predicate is involved. */
971 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
972 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
973 {
974 /* Find the total size of these variables. */
975 for (si = 0; si < n; ++si)
976 {
977 unsigned alignb;
978
979 i = stack_vars_sorted[si];
980 alignb = stack_vars[i].alignb;
981
982 /* All "large" alignment decls come before all "small" alignment
983 decls, but "large" alignment decls are not sorted based on
984 their alignment. Increase large_align to track the largest
985 required alignment. */
986 if ((alignb * BITS_PER_UNIT) > large_align)
987 large_align = alignb * BITS_PER_UNIT;
988
989 /* Stop when we get to the first decl with "small" alignment. */
990 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
991 break;
992
993 /* Skip variables that aren't partition representatives. */
994 if (stack_vars[i].representative != i)
995 continue;
996
997 /* Skip variables that have already had rtl assigned. See also
998 add_stack_var where we perpetrate this pc_rtx hack. */
999 decl = stack_vars[i].decl;
1000 if ((TREE_CODE (decl) == SSA_NAME
1001 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1002 : DECL_RTL (decl)) != pc_rtx)
1003 continue;
1004
1005 large_size += alignb - 1;
1006 large_size &= -(HOST_WIDE_INT)alignb;
1007 large_size += stack_vars[i].size;
1008 }
1009
1010 /* If there were any, allocate space. */
1011 if (large_size > 0)
1012 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
1013 large_align, true);
1014 }
1015
1016 for (si = 0; si < n; ++si)
1017 {
1018 rtx base;
1019 unsigned base_align, alignb;
1020 HOST_WIDE_INT offset;
1021
1022 i = stack_vars_sorted[si];
1023
1024 /* Skip variables that aren't partition representatives, for now. */
1025 if (stack_vars[i].representative != i)
1026 continue;
1027
1028 /* Skip variables that have already had rtl assigned. See also
1029 add_stack_var where we perpetrate this pc_rtx hack. */
1030 decl = stack_vars[i].decl;
1031 if ((TREE_CODE (decl) == SSA_NAME
1032 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1033 : DECL_RTL (decl)) != pc_rtx)
1034 continue;
1035
1036 /* Check the predicate to see whether this variable should be
1037 allocated in this pass. */
1038 if (pred && !pred (i))
1039 continue;
1040
1041 alignb = stack_vars[i].alignb;
1042 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1043 {
1044 base = virtual_stack_vars_rtx;
1045 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
1046 {
1047 HOST_WIDE_INT prev_offset
1048 = align_base (frame_offset,
1049 MAX (alignb, ASAN_RED_ZONE_SIZE),
1050 FRAME_GROWS_DOWNWARD);
1051 tree repr_decl = NULL_TREE;
1052 offset
1053 = alloc_stack_frame_space (stack_vars[i].size
1054 + ASAN_RED_ZONE_SIZE,
1055 MAX (alignb, ASAN_RED_ZONE_SIZE));
1056
1057 data->asan_vec.safe_push (prev_offset);
1058 data->asan_vec.safe_push (offset + stack_vars[i].size);
1059 /* Find the best representative of the partition.
1060 Prefer those with DECL_NAME, and better yet those
1061 satisfying the asan_protect_stack_decl predicate. */
1062 for (j = i; j != EOC; j = stack_vars[j].next)
1063 if (asan_protect_stack_decl (stack_vars[j].decl)
1064 && DECL_NAME (stack_vars[j].decl))
1065 {
1066 repr_decl = stack_vars[j].decl;
1067 break;
1068 }
1069 else if (repr_decl == NULL_TREE
1070 && DECL_P (stack_vars[j].decl)
1071 && DECL_NAME (stack_vars[j].decl))
1072 repr_decl = stack_vars[j].decl;
1073 if (repr_decl == NULL_TREE)
1074 repr_decl = stack_vars[i].decl;
1075 data->asan_decl_vec.safe_push (repr_decl);
1076 data->asan_alignb = MAX (data->asan_alignb, alignb);
1077 if (data->asan_base == NULL)
1078 data->asan_base = gen_reg_rtx (Pmode);
1079 base = data->asan_base;
1080
1081 if (!STRICT_ALIGNMENT)
1082 base_align = crtl->max_used_stack_slot_alignment;
1083 else
1084 base_align = MAX (crtl->max_used_stack_slot_alignment,
1085 GET_MODE_ALIGNMENT (SImode)
1086 << ASAN_SHADOW_SHIFT);
1087 }
1088 else
1089 {
1090 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1091 base_align = crtl->max_used_stack_slot_alignment;
1092 }
1093 }
1094 else
1095 {
1096 /* Large alignment is only processed in the last pass. */
1097 if (pred)
1098 continue;
1099 gcc_assert (large_base != NULL);
1100
1101 large_alloc += alignb - 1;
1102 large_alloc &= -(HOST_WIDE_INT)alignb;
1103 offset = large_alloc;
1104 large_alloc += stack_vars[i].size;
1105
1106 base = large_base;
1107 base_align = large_align;
1108 }
1109
1110 /* Create rtl for each variable based on their location within the
1111 partition. */
1112 for (j = i; j != EOC; j = stack_vars[j].next)
1113 {
1114 expand_one_stack_var_at (stack_vars[j].decl,
1115 base, base_align,
1116 offset);
1117 }
1118 }
1119
1120 gcc_assert (large_alloc == large_size);
1121 }
1122
1123 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1124 static HOST_WIDE_INT
1125 account_stack_vars (void)
1126 {
1127 size_t si, j, i, n = stack_vars_num;
1128 HOST_WIDE_INT size = 0;
1129
1130 for (si = 0; si < n; ++si)
1131 {
1132 i = stack_vars_sorted[si];
1133
1134 /* Skip variables that aren't partition representatives, for now. */
1135 if (stack_vars[i].representative != i)
1136 continue;
1137
1138 size += stack_vars[i].size;
1139 for (j = i; j != EOC; j = stack_vars[j].next)
1140 set_rtl (stack_vars[j].decl, NULL);
1141 }
1142 return size;
1143 }
1144
1145 /* A subroutine of expand_one_var. Called to immediately assign rtl
1146 to a variable to be allocated in the stack frame. */
1147
1148 static void
1149 expand_one_stack_var (tree var)
1150 {
1151 HOST_WIDE_INT size, offset;
1152 unsigned byte_align;
1153
1154 size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
1155 byte_align = align_local_variable (SSAVAR (var));
1156
1157 /* We handle highly aligned variables in expand_stack_vars. */
1158 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1159
1160 offset = alloc_stack_frame_space (size, byte_align);
1161
1162 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1163 crtl->max_used_stack_slot_alignment, offset);
1164 }
1165
1166 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1167 that will reside in a hard register. */
1168
1169 static void
1170 expand_one_hard_reg_var (tree var)
1171 {
1172 rest_of_decl_compilation (var, 0, 0);
1173 }
1174
1175 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1176 that will reside in a pseudo register. */
1177
1178 static void
1179 expand_one_register_var (tree var)
1180 {
1181 tree decl = SSAVAR (var);
1182 tree type = TREE_TYPE (decl);
1183 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1184 rtx x = gen_reg_rtx (reg_mode);
1185
1186 set_rtl (var, x);
1187
1188 /* Note if the object is a user variable. */
1189 if (!DECL_ARTIFICIAL (decl))
1190 mark_user_reg (x);
1191
1192 if (POINTER_TYPE_P (type))
1193 mark_reg_pointer (x, get_pointer_alignment (var));
1194 }
1195
1196 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1197 has some associated error, e.g. its type is error-mark. We just need
1198 to pick something that won't crash the rest of the compiler. */
1199
1200 static void
1201 expand_one_error_var (tree var)
1202 {
1203 machine_mode mode = DECL_MODE (var);
1204 rtx x;
1205
1206 if (mode == BLKmode)
1207 x = gen_rtx_MEM (BLKmode, const0_rtx);
1208 else if (mode == VOIDmode)
1209 x = const0_rtx;
1210 else
1211 x = gen_reg_rtx (mode);
1212
1213 SET_DECL_RTL (var, x);
1214 }
1215
1216 /* A subroutine of expand_one_var. VAR is a variable that will be
1217 allocated to the local stack frame. Return true if we wish to
1218 add VAR to STACK_VARS so that it will be coalesced with other
1219 variables. Return false to allocate VAR immediately.
1220
1221 This function is used to reduce the number of variables considered
1222 for coalescing, which reduces the size of the quadratic problem. */
1223
1224 static bool
1225 defer_stack_allocation (tree var, bool toplevel)
1226 {
1227 /* Whether the variable is small enough for immediate allocation not to be
1228 a problem with regard to the frame size. */
1229 bool smallish
1230 = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
1231 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1232
1233 /* If stack protection is enabled, *all* stack variables must be deferred,
1234 so that we can re-order the strings to the top of the frame.
1235 Similarly for Address Sanitizer. */
1236 if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
1237 return true;
1238
1239 /* We handle "large" alignment via dynamic allocation. We want to handle
1240 this extra complication in only one place, so defer them. */
1241 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1242 return true;
1243
1244 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1245 might be detached from their block and appear at toplevel when we reach
1246 here. We want to coalesce them with variables from other blocks when
1247 the immediate contribution to the frame size would be noticeable. */
1248 if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
1249 return true;
1250
1251 /* Variables declared in the outermost scope automatically conflict
1252 with every other variable. The only reason to want to defer them
1253 at all is that, after sorting, we can more efficiently pack
1254 small variables in the stack frame. Continue to defer at -O2. */
1255 if (toplevel && optimize < 2)
1256 return false;
1257
1258 /* Without optimization, *most* variables are allocated from the
1259 stack, which makes the quadratic problem large exactly when we
1260 want compilation to proceed as quickly as possible. On the
1261 other hand, we don't want the function's stack frame size to
1262 get completely out of hand. So we avoid adding scalars and
1263 "small" aggregates to the list at all. */
1264 if (optimize == 0 && smallish)
1265 return false;
1266
1267 return true;
1268 }
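/* An illustrative summary, assuming default parameters
   (--param min-size-for-stack-sharing=32) and no sanitizers:

     -fstack-protector enabled, any var       -> deferred
     DECL_ALIGN beyond the supported maximum  -> deferred (dynamic alloc)
     -O0/-O1, var in the outermost scope      -> allocated immediately
     -O0, 16-byte var in an inner scope       -> allocated immediately
     -O2, anything not caught above           -> deferred for coalescing  */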
1269
1270 /* A subroutine of expand_used_vars. Expand one variable according to
1271 its flavor. Variables to be placed on the stack are not actually
1272 expanded yet, merely recorded.
1273 When REALLY_EXPAND is false, only add stack values to be allocated.
1274 Return the stack usage this variable is supposed to take.
1275 */
1276
1277 static HOST_WIDE_INT
1278 expand_one_var (tree var, bool toplevel, bool really_expand)
1279 {
1280 unsigned int align = BITS_PER_UNIT;
1281 tree origvar = var;
1282
1283 var = SSAVAR (var);
1284
1285 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1286 {
1287 /* Because we don't know if VAR will be in register or on stack,
1288 we conservatively assume it will be on stack even if VAR is
1289 eventually put into register after RA pass. For non-automatic
1290 variables, which won't be on stack, we collect alignment of
1291 type and ignore user specified alignment. Similarly for
1292 SSA_NAMEs for which use_register_for_decl returns true. */
1293 if (TREE_STATIC (var)
1294 || DECL_EXTERNAL (var)
1295 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1296 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1297 TYPE_MODE (TREE_TYPE (var)),
1298 TYPE_ALIGN (TREE_TYPE (var)));
1299 else if (DECL_HAS_VALUE_EXPR_P (var)
1300 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1301 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1302 or variables which were assigned a stack slot already by
1303 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1304 changed from the offset chosen to it. */
1305 align = crtl->stack_alignment_estimated;
1306 else
1307 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1308
1309 /* If the variable alignment is very large we'll dynamically allocate
1310 it, which means the in-frame portion is just a pointer. */
1311 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1312 align = POINTER_SIZE;
1313 }
1314
1315 if (SUPPORTS_STACK_ALIGNMENT
1316 && crtl->stack_alignment_estimated < align)
1317 {
1318 /* stack_alignment_estimated shouldn't change after stack
1319 realign decision has been made. */
1320 gcc_assert (!crtl->stack_realign_processed);
1321 crtl->stack_alignment_estimated = align;
1322 }
1323
1324 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1325 So here we only make sure stack_alignment_needed >= align. */
1326 if (crtl->stack_alignment_needed < align)
1327 crtl->stack_alignment_needed = align;
1328 if (crtl->max_used_stack_slot_alignment < align)
1329 crtl->max_used_stack_slot_alignment = align;
1330
1331 if (TREE_CODE (origvar) == SSA_NAME)
1332 {
1333 gcc_assert (TREE_CODE (var) != VAR_DECL
1334 || (!DECL_EXTERNAL (var)
1335 && !DECL_HAS_VALUE_EXPR_P (var)
1336 && !TREE_STATIC (var)
1337 && TREE_TYPE (var) != error_mark_node
1338 && !DECL_HARD_REGISTER (var)
1339 && really_expand));
1340 }
1341 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1342 ;
1343 else if (DECL_EXTERNAL (var))
1344 ;
1345 else if (DECL_HAS_VALUE_EXPR_P (var))
1346 ;
1347 else if (TREE_STATIC (var))
1348 ;
1349 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1350 ;
1351 else if (TREE_TYPE (var) == error_mark_node)
1352 {
1353 if (really_expand)
1354 expand_one_error_var (var);
1355 }
1356 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1357 {
1358 if (really_expand)
1359 {
1360 expand_one_hard_reg_var (var);
1361 if (!DECL_HARD_REGISTER (var))
1362 /* Invalid register specification. */
1363 expand_one_error_var (var);
1364 }
1365 }
1366 else if (use_register_for_decl (var))
1367 {
1368 if (really_expand)
1369 expand_one_register_var (origvar);
1370 }
1371 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1372 {
1373 /* Reject variables which cover more than half of the address-space. */
1374 if (really_expand)
1375 {
1376 error ("size of variable %q+D is too large", var);
1377 expand_one_error_var (var);
1378 }
1379 }
1380 else if (defer_stack_allocation (var, toplevel))
1381 add_stack_var (origvar);
1382 else
1383 {
1384 if (really_expand)
1385 expand_one_stack_var (origvar);
1386 return tree_to_uhwi (DECL_SIZE_UNIT (var));
1387 }
1388 return 0;
1389 }
1390
1391 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1392 expanding variables. Those variables that can be put into registers
1393 are allocated pseudos; those that can't are put on the stack.
1394
1395 TOPLEVEL is true if this is the outermost BLOCK. */
1396
1397 static void
1398 expand_used_vars_for_block (tree block, bool toplevel)
1399 {
1400 tree t;
1401
1402 /* Expand all variables at this level. */
1403 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1404 if (TREE_USED (t)
1405 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1406 || !DECL_NONSHAREABLE (t)))
1407 expand_one_var (t, toplevel, true);
1408
1409 /* Expand all variables in contained sub-blocks. */
1410 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1411 expand_used_vars_for_block (t, false);
1412 }
1413
1414 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1415 and clear TREE_USED on all local variables. */
1416
1417 static void
1418 clear_tree_used (tree block)
1419 {
1420 tree t;
1421
1422 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1423 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1424 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1425 || !DECL_NONSHAREABLE (t))
1426 TREE_USED (t) = 0;
1427
1428 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1429 clear_tree_used (t);
1430 }
1431
1432 enum {
1433 SPCT_FLAG_DEFAULT = 1,
1434 SPCT_FLAG_ALL = 2,
1435 SPCT_FLAG_STRONG = 3,
1436 SPCT_FLAG_EXPLICIT = 4
1437 };
1438
1439 /* Examine TYPE and determine a bit mask of the following features. */
1440
1441 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1442 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1443 #define SPCT_HAS_ARRAY 4
1444 #define SPCT_HAS_AGGREGATE 8
1445
1446 static unsigned int
1447 stack_protect_classify_type (tree type)
1448 {
1449 unsigned int ret = 0;
1450 tree t;
1451
1452 switch (TREE_CODE (type))
1453 {
1454 case ARRAY_TYPE:
1455 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1456 if (t == char_type_node
1457 || t == signed_char_type_node
1458 || t == unsigned_char_type_node)
1459 {
1460 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1461 unsigned HOST_WIDE_INT len;
1462
1463 if (!TYPE_SIZE_UNIT (type)
1464 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1465 len = max;
1466 else
1467 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1468
1469 if (len < max)
1470 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1471 else
1472 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1473 }
1474 else
1475 ret = SPCT_HAS_ARRAY;
1476 break;
1477
1478 case UNION_TYPE:
1479 case QUAL_UNION_TYPE:
1480 case RECORD_TYPE:
1481 ret = SPCT_HAS_AGGREGATE;
1482 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1483 if (TREE_CODE (t) == FIELD_DECL)
1484 ret |= stack_protect_classify_type (TREE_TYPE (t));
1485 break;
1486
1487 default:
1488 break;
1489 }
1490
1491 return ret;
1492 }
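/* Illustrative classifications, assuming the default
   --param ssp-buffer-size=8:

     char buf[4]             -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char buf[64]            -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int  v[4]               -> SPCT_HAS_ARRAY
     struct { char c[64]; }  -> SPCT_HAS_AGGREGATE plus the bits of its
                                fields OR'd in.  */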
1493
1494 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1495 part of the local stack frame. Remember if we ever return nonzero for
1496 any variable in this function. The return value is the phase number in
1497 which the variable should be allocated. */
1498
1499 static int
1500 stack_protect_decl_phase (tree decl)
1501 {
1502 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1503 int ret = 0;
1504
1505 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1506 has_short_buffer = true;
1507
1508 if (flag_stack_protect == SPCT_FLAG_ALL
1509 || flag_stack_protect == SPCT_FLAG_STRONG
1510 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1511 && lookup_attribute ("stack_protect",
1512 DECL_ATTRIBUTES (current_function_decl))))
1513 {
1514 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1515 && !(bits & SPCT_HAS_AGGREGATE))
1516 ret = 1;
1517 else if (bits & SPCT_HAS_ARRAY)
1518 ret = 2;
1519 }
1520 else
1521 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1522
1523 if (ret)
1524 has_protected_decls = true;
1525
1526 return ret;
1527 }
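/* An illustrative summary: phase 1 decls (character arrays) end up
   nearest the guard in the vulnerable part of the frame, phase 2 (other
   arrays, and aggregates containing arrays) come next, and phase 0 is
   not segregated.  Under plain -fstack-protector only large character
   arrays reach phase 1; -fstack-protector-all and -strong also send
   small character arrays to phase 1 and remaining arrays to phase 2.  */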
1528
1529 /* Two helper routines that check for phase 1 and phase 2. These are used
1530 as callbacks for expand_stack_vars. */
1531
1532 static bool
1533 stack_protect_decl_phase_1 (size_t i)
1534 {
1535 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1536 }
1537
1538 static bool
1539 stack_protect_decl_phase_2 (size_t i)
1540 {
1541 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1542 }
1543
1544 /* A helper function that checks for the asan phase (with stack protector
1545 it is phase 3). This is used as a callback for expand_stack_vars.
1546 Returns true if any of the vars in the partition need to be protected. */
1547
1548 static bool
1549 asan_decl_phase_3 (size_t i)
1550 {
1551 while (i != EOC)
1552 {
1553 if (asan_protect_stack_decl (stack_vars[i].decl))
1554 return true;
1555 i = stack_vars[i].next;
1556 }
1557 return false;
1558 }
1559
1560 /* Ensure that variables in different stack protection phases conflict
1561 so that they are not merged and share the same stack slot. */
1562
1563 static void
1564 add_stack_protection_conflicts (void)
1565 {
1566 size_t i, j, n = stack_vars_num;
1567 unsigned char *phase;
1568
1569 phase = XNEWVEC (unsigned char, n);
1570 for (i = 0; i < n; ++i)
1571 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1572
1573 for (i = 0; i < n; ++i)
1574 {
1575 unsigned char ph_i = phase[i];
1576 for (j = i + 1; j < n; ++j)
1577 if (ph_i != phase[j])
1578 add_stack_var_conflict (i, j);
1579 }
1580
1581 XDELETEVEC (phase);
1582 }
1583
1584 /* Create a decl for the guard at the top of the stack frame. */
1585
1586 static void
1587 create_stack_guard (void)
1588 {
1589 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1590 VAR_DECL, NULL, ptr_type_node);
1591 TREE_THIS_VOLATILE (guard) = 1;
1592 TREE_USED (guard) = 1;
1593 expand_one_stack_var (guard);
1594 crtl->stack_protect_guard = guard;
1595 }
1596
1597 /* Prepare for expanding variables. */
1598 static void
1599 init_vars_expansion (void)
1600 {
1601 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1602 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1603
1604 /* A map from decl to stack partition. */
1605 decl_to_stack_part = new hash_map<tree, size_t>;
1606
1607 /* Initialize local stack smashing state. */
1608 has_protected_decls = false;
1609 has_short_buffer = false;
1610 }
1611
1612 /* Free up stack variable graph data. */
1613 static void
1614 fini_vars_expansion (void)
1615 {
1616 bitmap_obstack_release (&stack_var_bitmap_obstack);
1617 if (stack_vars)
1618 XDELETEVEC (stack_vars);
1619 if (stack_vars_sorted)
1620 XDELETEVEC (stack_vars_sorted);
1621 stack_vars = NULL;
1622 stack_vars_sorted = NULL;
1623 stack_vars_alloc = stack_vars_num = 0;
1624 delete decl_to_stack_part;
1625 decl_to_stack_part = NULL;
1626 }
1627
1628 /* Make a fair guess for the size of the stack frame of the function
1629 in NODE. This doesn't have to be exact, the result is only used in
1630 the inline heuristics. So we don't want to run the full stack var
1631 packing algorithm (which is quadratic in the number of stack vars).
1632 Instead, we calculate the total size of all stack vars. This turns
1633 out to be a pretty fair estimate -- packing of stack vars doesn't
1634 happen very often. */
1635
1636 HOST_WIDE_INT
1637 estimated_stack_frame_size (struct cgraph_node *node)
1638 {
1639 HOST_WIDE_INT size = 0;
1640 size_t i;
1641 tree var;
1642 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1643
1644 push_cfun (fn);
1645
1646 init_vars_expansion ();
1647
1648 FOR_EACH_LOCAL_DECL (fn, i, var)
1649 if (auto_var_in_fn_p (var, fn->decl))
1650 size += expand_one_var (var, true, false);
1651
1652 if (stack_vars_num > 0)
1653 {
1654 /* Fake sorting the stack vars for account_stack_vars (). */
1655 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1656 for (i = 0; i < stack_vars_num; ++i)
1657 stack_vars_sorted[i] = i;
1658 size += account_stack_vars ();
1659 }
1660
1661 fini_vars_expansion ();
1662 pop_cfun ();
1663 return size;
1664 }
1665
1666 /* Helper routine to check if a record or union contains an array field. */
1667
1668 static int
1669 record_or_union_type_has_array_p (const_tree tree_type)
1670 {
1671 tree fields = TYPE_FIELDS (tree_type);
1672 tree f;
1673
1674 for (f = fields; f; f = DECL_CHAIN (f))
1675 if (TREE_CODE (f) == FIELD_DECL)
1676 {
1677 tree field_type = TREE_TYPE (f);
1678 if (RECORD_OR_UNION_TYPE_P (field_type)
1679 && record_or_union_type_has_array_p (field_type))
1680 return 1;
1681 if (TREE_CODE (field_type) == ARRAY_TYPE)
1682 return 1;
1683 }
1684 return 0;
1685 }
1686
1687 /* Check if the current function has local referenced variables that
1688 have their addresses taken, contain an array, or are arrays. */
1689
1690 static bool
1691 stack_protect_decl_p ()
1692 {
1693 unsigned i;
1694 tree var;
1695
1696 FOR_EACH_LOCAL_DECL (cfun, i, var)
1697 if (!is_global_var (var))
1698 {
1699 tree var_type = TREE_TYPE (var);
1700 if (TREE_CODE (var) == VAR_DECL
1701 && (TREE_CODE (var_type) == ARRAY_TYPE
1702 || TREE_ADDRESSABLE (var)
1703 || (RECORD_OR_UNION_TYPE_P (var_type)
1704 && record_or_union_type_has_array_p (var_type))))
1705 return true;
1706 }
1707 return false;
1708 }
1709
1710 /* Check if the current function has calls that use a return slot. */
1711
1712 static bool
1713 stack_protect_return_slot_p ()
1714 {
1715 basic_block bb;
1716
1717 FOR_ALL_BB_FN (bb, cfun)
1718 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1719 !gsi_end_p (gsi); gsi_next (&gsi))
1720 {
1721 gimple stmt = gsi_stmt (gsi);
1722 /* This assumes that calls to internal-only functions never
1723 use a return slot. */
1724 if (is_gimple_call (stmt)
1725 && !gimple_call_internal_p (stmt)
1726 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
1727 gimple_call_fndecl (stmt)))
1728 return true;
1729 }
1730 return false;
1731 }
1732
1733 /* Expand all variables used in the function. */
1734
1735 static rtx_insn *
1736 expand_used_vars (void)
1737 {
1738 tree var, outer_block = DECL_INITIAL (current_function_decl);
1739 vec<tree> maybe_local_decls = vNULL;
1740 rtx_insn *var_end_seq = NULL;
1741 unsigned i;
1742 unsigned len;
1743 bool gen_stack_protect_signal = false;
1744
1745 /* Compute the phase of the stack frame for this function. */
1746 {
1747 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1748 int off = STARTING_FRAME_OFFSET % align;
1749 frame_phase = off ? align - off : 0;
1750 }
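/* A worked example: assuming PREFERRED_STACK_BOUNDARY == 128 bits and
   STARTING_FRAME_OFFSET == 8, align == 16 bytes and off == 8, so
   frame_phase == 8 -- the known misalignment of virtual_stack_vars_rtx
   that alloc_stack_frame_space compensates for when rounding offsets.  */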
1751
1752 /* Set TREE_USED on all variables in the local_decls. */
1753 FOR_EACH_LOCAL_DECL (cfun, i, var)
1754 TREE_USED (var) = 1;
1755 /* Clear TREE_USED on all variables associated with a block scope. */
1756 clear_tree_used (DECL_INITIAL (current_function_decl));
1757
1758 init_vars_expansion ();
1759
1760 if (targetm.use_pseudo_pic_reg ())
1761 pic_offset_table_rtx = gen_reg_rtx (Pmode);
1762
1763 hash_map<tree, tree> ssa_name_decls;
1764 for (i = 0; i < SA.map->num_partitions; i++)
1765 {
1766 tree var = partition_to_var (SA.map, i);
1767
1768 gcc_assert (!virtual_operand_p (var));
1769
1770 /* Assign decls to each SSA name partition, share decls for partitions
1771 we could have coalesced (those with the same type). */
1772 if (SSA_NAME_VAR (var) == NULL_TREE)
1773 {
1774 tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
1775 if (!*slot)
1776 *slot = create_tmp_reg (TREE_TYPE (var));
1777 replace_ssa_name_symbol (var, *slot);
1778 }
1779
1780 /* Always allocate space for partitions based on VAR_DECLs. But for
1781 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1782 debug info, there is no need to do so if optimization is disabled
1783 because all the SSA_NAMEs based on these DECLs have been coalesced
1784 into a single partition, which is thus assigned the canonical RTL
1785 location of the DECLs. If in_lto_p, we can't rely on optimize,
1786 a function could be compiled with -O1 -flto first and only the
1787 link performed at -O0. */
1788 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1789 expand_one_var (var, true, true);
1790 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
1791 {
1792 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1793 contain the default def (representing the parm or result itself)
1794 we don't do anything here. But those which don't contain the
1795 default def (representing a temporary based on the parm/result)
1796 we need to allocate space just like for normal VAR_DECLs. */
1797 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1798 {
1799 expand_one_var (var, true, true);
1800 gcc_assert (SA.partition_to_pseudo[i]);
1801 }
1802 }
1803 }
1804
1805 if (flag_stack_protect == SPCT_FLAG_STRONG)
1806 gen_stack_protect_signal
1807 = stack_protect_decl_p () || stack_protect_return_slot_p ();
1808
1809 /* At this point all variables on the local_decls with TREE_USED
1810 set are not associated with any block scope. Lay them out. */
1811
1812 len = vec_safe_length (cfun->local_decls);
1813 FOR_EACH_LOCAL_DECL (cfun, i, var)
1814 {
1815 bool expand_now = false;
1816
1817 /* Expanded above already. */
1818 if (is_gimple_reg (var))
1819 {
1820 TREE_USED (var) = 0;
1821 goto next;
1822 }
1823 /* We didn't set a block for static or extern because it's hard
1824 to tell the difference between a global variable (re)declared
1825 in a local scope, and one that's really declared there to
1826 begin with. And it doesn't really matter much, since we're
1827 not giving them stack space. Expand them now. */
1828 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1829 expand_now = true;
1830
1831 /* Expand variables not associated with any block now. Those created by
1832 the optimizers could be live anywhere in the function. Those that
1833 could possibly have been scoped originally and detached from their
1834 block will have their allocation deferred so we coalesce them with
1835 others when optimization is enabled. */
1836 else if (TREE_USED (var))
1837 expand_now = true;
1838
1839 /* Finally, mark all variables on the list as used. We'll use
1840 this in a moment when we expand those associated with scopes. */
1841 TREE_USED (var) = 1;
1842
1843 if (expand_now)
1844 expand_one_var (var, true, true);
1845
1846 next:
1847 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1848 {
1849 rtx rtl = DECL_RTL_IF_SET (var);
1850
1851 /* Keep artificial non-ignored vars in cfun->local_decls
1852 chain until instantiate_decls. */
1853 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1854 add_local_decl (cfun, var);
1855 else if (rtl == NULL_RTX)
1856 /* If rtl isn't set yet, which can happen e.g. with
1857 -fstack-protector, retry before returning from this
1858 function. */
1859 maybe_local_decls.safe_push (var);
1860 }
1861 }
1862
1863 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1864
1865 +-----------------+-----------------+
1866 | ...processed... | ...duplicates...|
1867 +-----------------+-----------------+
1868 ^
1869 +-- LEN points here.
1870
1871 We just want the duplicates, as those are the artificial
1872 non-ignored vars that we want to keep until instantiate_decls.
1873 Move them down and truncate the array. */
1874 if (!vec_safe_is_empty (cfun->local_decls))
1875 cfun->local_decls->block_remove (0, len);
1876
1877 /* At this point, all variables within the block tree with TREE_USED
1878 set are actually used by the optimized function. Lay them out. */
1879 expand_used_vars_for_block (outer_block, true);
1880
1881 if (stack_vars_num > 0)
1882 {
1883 add_scope_conflicts ();
1884
1885 /* If stack protection is enabled, we don't share space between
1886 vulnerable data and non-vulnerable data. */
1887 if (flag_stack_protect != 0
1888 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
1889 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1890 && lookup_attribute ("stack_protect",
1891 DECL_ATTRIBUTES (current_function_decl)))))
1892 add_stack_protection_conflicts ();
1893
1894 /* Now that we have collected all stack variables, and have computed a
1895 minimal interference graph, attempt to save some stack space. */
1896 partition_stack_vars ();
1897 if (dump_file)
1898 dump_stack_var_partition ();
1899 }
1900
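/* Create the stack guard depending on the protection level:
SPCT_FLAG_ALL always does, SPCT_FLAG_STRONG additionally honours
the signal computed above, SPCT_FLAG_DEFAULT reacts to alloca
calls, protected decls and the stack_protect attribute, and
SPCT_FLAG_EXPLICIT reacts to the attribute alone. */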
1901 switch (flag_stack_protect)
1902 {
1903 case SPCT_FLAG_ALL:
1904 create_stack_guard ();
1905 break;
1906
1907 case SPCT_FLAG_STRONG:
1908 if (gen_stack_protect_signal
1909 || cfun->calls_alloca || has_protected_decls
1910 || lookup_attribute ("stack_protect",
1911 DECL_ATTRIBUTES (current_function_decl)))
1912 create_stack_guard ();
1913 break;
1914
1915 case SPCT_FLAG_DEFAULT:
1916 if (cfun->calls_alloca || has_protected_decls
1917 || lookup_attribute ("stack_protect",
1918 DECL_ATTRIBUTES (current_function_decl)))
1919 create_stack_guard ();
1920 break;
1921
1922 case SPCT_FLAG_EXPLICIT:
1923 if (lookup_attribute ("stack_protect",
1924 DECL_ATTRIBUTES (current_function_decl)))
1925 create_stack_guard ();
1926 break;
1927 default:
1928 ;
1929 }
1930
1931 /* Assign rtl to each variable based on these partitions. */
1932 if (stack_vars_num > 0)
1933 {
1934 struct stack_vars_data data;
1935
1936 data.asan_vec = vNULL;
1937 data.asan_decl_vec = vNULL;
1938 data.asan_base = NULL_RTX;
1939 data.asan_alignb = 0;
1940
1941 /* Reorder decls to be protected by iterating over the variables
1942 array multiple times, and allocating out of each phase in turn. */
1943 /* ??? We could probably integrate this into the qsort we did
1944 earlier, such that we naturally see these variables first,
1945 and thus naturally allocate things in the right order. */
1946 if (has_protected_decls)
1947 {
1948 /* Phase 1 contains only character arrays. */
1949 expand_stack_vars (stack_protect_decl_phase_1, &data);
1950
1951 /* Phase 2 contains other kinds of arrays. */
1952 if (flag_stack_protect == SPCT_FLAG_ALL
1953 || flag_stack_protect == SPCT_FLAG_STRONG
1954 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1955 && lookup_attribute ("stack_protect",
1956 DECL_ATTRIBUTES (current_function_decl))))
1957 expand_stack_vars (stack_protect_decl_phase_2, &data);
1958 }
1959
1960 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
1961 /* Phase 3, any partitions that need asan protection
1962 in addition to phase 1 and 2. */
1963 expand_stack_vars (asan_decl_phase_3, &data);
1964
1965 if (!data.asan_vec.is_empty ())
1966 {
1967 HOST_WIDE_INT prev_offset = frame_offset;
1968 HOST_WIDE_INT offset, sz, redzonesz;
1969 redzonesz = ASAN_RED_ZONE_SIZE;
1970 sz = data.asan_vec[0] - prev_offset;
1971 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1972 && data.asan_alignb <= 4096
1973 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
1974 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1975 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
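/* With illustrative numbers sz == 40, ASAN_RED_ZONE_SIZE == 32 and
asan_alignb == 64, this rounds 40 + 32 up to 128 and yields
redzonesz == 88, so that sz + redzonesz is a multiple of the
required alignment. */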
1976 offset
1977 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1978 data.asan_vec.safe_push (prev_offset);
1979 data.asan_vec.safe_push (offset);
1980 /* Leave space for alignment if STRICT_ALIGNMENT. */
1981 if (STRICT_ALIGNMENT)
1982 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1983 << ASAN_SHADOW_SHIFT)
1984 / BITS_PER_UNIT, 1);
1985
1986 var_end_seq
1987 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1988 data.asan_base,
1989 data.asan_alignb,
1990 data.asan_vec.address (),
1991 data.asan_decl_vec.address (),
1992 data.asan_vec.length ());
1993 }
1994
1995 expand_stack_vars (NULL, &data);
1996
1997 data.asan_vec.release ();
1998 data.asan_decl_vec.release ();
1999 }
2000
2001 fini_vars_expansion ();
2002
2003 /* If there were any artificial non-ignored vars without rtl
2004 found earlier, see if deferred stack allocation hasn't assigned
2005 rtl to them. */
2006 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2007 {
2008 rtx rtl = DECL_RTL_IF_SET (var);
2009
2010 /* Keep artificial non-ignored vars in cfun->local_decls
2011 chain until instantiate_decls. */
2012 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2013 add_local_decl (cfun, var);
2014 }
2015 maybe_local_decls.release ();
2016
2017 /* If the target requires that FRAME_OFFSET be aligned, do it. */
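/* For instance, with align == 16 a downward-growing frame_offset
of -20 is masked to -32, while an upward-growing offset of 20 is
first bumped to 35 and then masked down to 32. */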
2018 if (STACK_ALIGNMENT_NEEDED)
2019 {
2020 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2021 if (!FRAME_GROWS_DOWNWARD)
2022 frame_offset += align - 1;
2023 frame_offset &= -align;
2024 }
2025
2026 return var_end_seq;
2027 }
2028
2029
2030 /* If we need to produce a detailed dump, print the tree representation
2031 for STMT to the dump file. SINCE is the last RTX after which the RTL
2032 generated for STMT should have been appended. */
2033
2034 static void
2035 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
2036 {
2037 if (dump_file && (dump_flags & TDF_DETAILS))
2038 {
2039 fprintf (dump_file, "\n;; ");
2040 print_gimple_stmt (dump_file, stmt, 0,
2041 TDF_SLIM | (dump_flags & TDF_LINENO));
2042 fprintf (dump_file, "\n");
2043
2044 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2045 }
2046 }
2047
2048 /* Maps the blocks that do not contain tree labels to rtx labels. */
2049
2050 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2051
2052 /* Returns the label_rtx expression for a label starting basic block BB. */
2053
2054 static rtx
2055 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2056 {
2057 gimple_stmt_iterator gsi;
2058 tree lab;
2059
2060 if (bb->flags & BB_RTL)
2061 return block_label (bb);
2062
2063 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2064 if (elt)
2065 return *elt;
2066
2067 /* Find the tree label if it is present. */
2068
2069 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2070 {
2071 glabel *lab_stmt;
2072
2073 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2074 if (!lab_stmt)
2075 break;
2076
2077 lab = gimple_label_label (lab_stmt);
2078 if (DECL_NONLOCAL (lab))
2079 break;
2080
2081 return label_rtx (lab);
2082 }
2083
2084 rtx_code_label *l = gen_label_rtx ();
2085 lab_rtx_for_bb->put (bb, l);
2086 return l;
2087 }
2088
2089
2090 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2091 of a basic block where we just expanded the conditional at the end,
2092 possibly clean up the CFG and instruction sequence. LAST is the
2093 last instruction before the just emitted jump sequence. */
2094
2095 static void
2096 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2097 {
2098 /* Special case: when jumpif decides that the condition is
2099 trivial it emits an unconditional jump (and the necessary
2100 barrier). But we still have two edges, the fallthru one is
2101 wrong. purge_dead_edges would clean this up later. Unfortunately
2102 we have to insert insns (and split edges) before
2103 find_many_sub_basic_blocks and hence before purge_dead_edges.
2104 But splitting edges might create new blocks which depend on the
2105 fact that if there are two edges there's no barrier. So the
2106 barrier would get lost and verify_flow_info would ICE. Instead
2107 of auditing all edge splitters to care for the barrier (which
2108 normally isn't there in a cleaned CFG), fix it here. */
2109 if (BARRIER_P (get_last_insn ()))
2110 {
2111 rtx_insn *insn;
2112 remove_edge (e);
2113 /* Now we have a single successor block; if we have insns to
2114 insert on the remaining edge, we will potentially insert
2115 them at the end of this block (if the dest block isn't feasible)
2116 in order to avoid splitting the edge. This insertion will take
2117 place in front of the last jump. But we might have emitted
2118 multiple jumps (conditional and one unconditional) to the
2119 same destination. Inserting in front of the last one then
2120 is a problem. See PR 40021. We fix this by deleting all
2121 jumps except the last unconditional one. */
2122 insn = PREV_INSN (get_last_insn ());
2123 /* Make sure we have an unconditional jump. Otherwise we're
2124 confused. */
2125 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2126 for (insn = PREV_INSN (insn); insn != last;)
2127 {
2128 insn = PREV_INSN (insn);
2129 if (JUMP_P (NEXT_INSN (insn)))
2130 {
2131 if (!any_condjump_p (NEXT_INSN (insn)))
2132 {
2133 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2134 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2135 }
2136 delete_insn (NEXT_INSN (insn));
2137 }
2138 }
2139 }
2140 }
2141
2142 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2143 Returns a new basic block if we've terminated the current basic
2144 block and created a new one. */
2145
2146 static basic_block
2147 expand_gimple_cond (basic_block bb, gcond *stmt)
2148 {
2149 basic_block new_bb, dest;
2150 edge new_edge;
2151 edge true_edge;
2152 edge false_edge;
2153 rtx_insn *last2, *last;
2154 enum tree_code code;
2155 tree op0, op1;
2156
2157 code = gimple_cond_code (stmt);
2158 op0 = gimple_cond_lhs (stmt);
2159 op1 = gimple_cond_rhs (stmt);
2160 /* We're sometimes presented with such code:
2161 D.123_1 = x < y;
2162 if (D.123_1 != 0)
2163 ...
2164 This would expand to two comparisons which then later might
2165 be cleaned up by combine. But some pattern matchers like if-conversion
2166 work better when there's only one compare, so make up for this
2167 here as special exception if TER would have made the same change. */
2168 if (SA.values
2169 && TREE_CODE (op0) == SSA_NAME
2170 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2171 && TREE_CODE (op1) == INTEGER_CST
2172 && ((gimple_cond_code (stmt) == NE_EXPR
2173 && integer_zerop (op1))
2174 || (gimple_cond_code (stmt) == EQ_EXPR
2175 && integer_onep (op1)))
2176 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2177 {
2178 gimple second = SSA_NAME_DEF_STMT (op0);
2179 if (gimple_code (second) == GIMPLE_ASSIGN)
2180 {
2181 enum tree_code code2 = gimple_assign_rhs_code (second);
2182 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2183 {
2184 code = code2;
2185 op0 = gimple_assign_rhs1 (second);
2186 op1 = gimple_assign_rhs2 (second);
2187 }
2188 /* If jumps are cheap and the target does not support conditional
2189 compare, turn some more codes into jumpy sequences. */
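/* E.g. for _1 = a & b; if (_1 != 0) with single-bit operands this
branches on a and then on b (TRUTH_ANDIF_EXPR) instead of
materializing the AND first. */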
2190 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2191 && targetm.gen_ccmp_first == NULL)
2192 {
2193 if ((code2 == BIT_AND_EXPR
2194 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2195 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2196 || code2 == TRUTH_AND_EXPR)
2197 {
2198 code = TRUTH_ANDIF_EXPR;
2199 op0 = gimple_assign_rhs1 (second);
2200 op1 = gimple_assign_rhs2 (second);
2201 }
2202 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2203 {
2204 code = TRUTH_ORIF_EXPR;
2205 op0 = gimple_assign_rhs1 (second);
2206 op1 = gimple_assign_rhs2 (second);
2207 }
2208 }
2209 }
2210 }
2211
2212 last2 = last = get_last_insn ();
2213
2214 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2215 set_curr_insn_location (gimple_location (stmt));
2216
2217 /* These flags have no purpose in RTL land. */
2218 true_edge->flags &= ~EDGE_TRUE_VALUE;
2219 false_edge->flags &= ~EDGE_FALSE_VALUE;
2220
2221 /* We can either have a pure conditional jump with one fallthru edge or
2222 a two-way jump that needs to be decomposed into two basic blocks. */
2223 if (false_edge->dest == bb->next_bb)
2224 {
2225 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2226 true_edge->probability);
2227 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2228 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2229 set_curr_insn_location (true_edge->goto_locus);
2230 false_edge->flags |= EDGE_FALLTHRU;
2231 maybe_cleanup_end_of_block (false_edge, last);
2232 return NULL;
2233 }
2234 if (true_edge->dest == bb->next_bb)
2235 {
2236 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2237 false_edge->probability);
2238 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2239 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2240 set_curr_insn_location (false_edge->goto_locus);
2241 true_edge->flags |= EDGE_FALLTHRU;
2242 maybe_cleanup_end_of_block (true_edge, last);
2243 return NULL;
2244 }
2245
2246 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2247 true_edge->probability);
2248 last = get_last_insn ();
2249 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2250 set_curr_insn_location (false_edge->goto_locus);
2251 emit_jump (label_rtx_for_bb (false_edge->dest));
2252
2253 BB_END (bb) = last;
2254 if (BARRIER_P (BB_END (bb)))
2255 BB_END (bb) = PREV_INSN (BB_END (bb));
2256 update_bb_for_insn (bb);
2257
2258 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2259 dest = false_edge->dest;
2260 redirect_edge_succ (false_edge, new_bb);
2261 false_edge->flags |= EDGE_FALLTHRU;
2262 new_bb->count = false_edge->count;
2263 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2264 add_bb_to_loop (new_bb, bb->loop_father);
2265 new_edge = make_edge (new_bb, dest, 0);
2266 new_edge->probability = REG_BR_PROB_BASE;
2267 new_edge->count = new_bb->count;
2268 if (BARRIER_P (BB_END (new_bb)))
2269 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2270 update_bb_for_insn (new_bb);
2271
2272 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2273
2274 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2275 {
2276 set_curr_insn_location (true_edge->goto_locus);
2277 true_edge->goto_locus = curr_insn_location ();
2278 }
2279
2280 return new_bb;
2281 }
2282
2283 /* Mark all calls that can have a transaction restart. */
2284
2285 static void
2286 mark_transaction_restart_calls (gimple stmt)
2287 {
2288 struct tm_restart_node dummy;
2289 tm_restart_node **slot;
2290
2291 if (!cfun->gimple_df->tm_restart)
2292 return;
2293
2294 dummy.stmt = stmt;
2295 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2296 if (slot)
2297 {
2298 struct tm_restart_node *n = *slot;
2299 tree list = n->label_or_list;
2300 rtx_insn *insn;
2301
2302 for (insn = next_real_insn (get_last_insn ());
2303 !CALL_P (insn);
2304 insn = next_real_insn (insn))
2305 continue;
2306
2307 if (TREE_CODE (list) == LABEL_DECL)
2308 add_reg_note (insn, REG_TM, label_rtx (list));
2309 else
2310 for (; list ; list = TREE_CHAIN (list))
2311 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2312 }
2313 }
2314
2315 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2316 statement STMT. */
2317
2318 static void
2319 expand_call_stmt (gcall *stmt)
2320 {
2321 tree exp, decl, lhs;
2322 bool builtin_p;
2323 size_t i;
2324
2325 if (gimple_call_internal_p (stmt))
2326 {
2327 expand_internal_call (stmt);
2328 return;
2329 }
2330
2331 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2332
2333 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2334 decl = gimple_call_fndecl (stmt);
2335 builtin_p = decl && DECL_BUILT_IN (decl);
2336
2337 /* If this is not a builtin function, the function type through which the
2338 call is made may be different from the type of the function. */
2339 if (!builtin_p)
2340 CALL_EXPR_FN (exp)
2341 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2342 CALL_EXPR_FN (exp));
2343
2344 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2345 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2346
2347 for (i = 0; i < gimple_call_num_args (stmt); i++)
2348 {
2349 tree arg = gimple_call_arg (stmt, i);
2350 gimple def;
2351 /* TER forwards ADDR_EXPRs into the arguments of builtin functions so we
2352 have a chance to infer more correct alignment information. See PR39954. */
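/* E.g. if ARG is an SSA name defined by _1 = &s.f, passing &s.f
itself lets the expander see the alignment of s.f. */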
2353 if (builtin_p
2354 && TREE_CODE (arg) == SSA_NAME
2355 && (def = get_gimple_for_ssa_name (arg))
2356 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2357 arg = gimple_assign_rhs1 (def);
2358 CALL_EXPR_ARG (exp, i) = arg;
2359 }
2360
2361 if (gimple_has_side_effects (stmt))
2362 TREE_SIDE_EFFECTS (exp) = 1;
2363
2364 if (gimple_call_nothrow_p (stmt))
2365 TREE_NOTHROW (exp) = 1;
2366
2367 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2368 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2369 if (decl
2370 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2371 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2372 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2373 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2374 else
2375 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2376 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2377 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2378 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2379
2380 /* Ensure RTL is created for debug args. */
2381 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2382 {
2383 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2384 unsigned int ix;
2385 tree dtemp;
2386
2387 if (debug_args)
2388 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2389 {
2390 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2391 expand_debug_expr (dtemp);
2392 }
2393 }
2394
2395 lhs = gimple_call_lhs (stmt);
2396 if (lhs)
2397 expand_assignment (lhs, exp, false);
2398 else
2399 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2400
2401 mark_transaction_restart_calls (stmt);
2402 }
2403
2404
2405 /* Generate RTL for an asm statement (explicit assembler code).
2406 STRING is a STRING_CST node containing the assembler code text,
2407 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2408 insn is volatile; don't optimize it. */
2409
2410 static void
2411 expand_asm_loc (tree string, int vol, location_t locus)
2412 {
2413 rtx body;
2414
2415 if (TREE_CODE (string) == ADDR_EXPR)
2416 string = TREE_OPERAND (string, 0);
2417
2418 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2419 ggc_strdup (TREE_STRING_POINTER (string)),
2420 locus);
2421
2422 MEM_VOLATILE_P (body) = vol;
2423
2424 emit_insn (body);
2425 }
2426
2427 /* Return the number of times character C occurs in string S. */
2428 static int
2429 n_occurrences (int c, const char *s)
2430 {
2431 int n = 0;
2432 while (*s)
2433 n += (*s++ == c);
2434 return n;
2435 }
2436
2437 /* A subroutine of expand_asm_operands. Check that all operands have
2438 the same number of alternatives. Return true if so. */
2439
2440 static bool
2441 check_operand_nalternatives (tree outputs, tree inputs)
2442 {
2443 if (outputs || inputs)
2444 {
2445 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2446 int nalternatives
2447 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2448 tree next = inputs;
2449
2450 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2451 {
2452 error ("too many alternatives in %<asm%>");
2453 return false;
2454 }
2455
2456 tmp = outputs;
2457 while (tmp)
2458 {
2459 const char *constraint
2460 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2461
2462 if (n_occurrences (',', constraint) != nalternatives)
2463 {
2464 error ("operand constraints for %<asm%> differ "
2465 "in number of alternatives");
2466 return false;
2467 }
2468
2469 if (TREE_CHAIN (tmp))
2470 tmp = TREE_CHAIN (tmp);
2471 else
2472 tmp = next, next = 0;
2473 }
2474 }
2475
2476 return true;
2477 }
2478
2479 /* Check for overlap between registers marked in CLOBBERED_REGS and
2480 anything inappropriate in T. Emit an error and return true for a
2481 conflict, false for ok. */
2482
2483 static bool
2484 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2485 {
2486 /* Conflicts between asm-declared register variables and the clobber
2487 list are not allowed. */
2488 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2489
2490 if (overlap)
2491 {
2492 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2493 DECL_NAME (overlap));
2494
2495 /* Reset registerness to stop multiple errors emitted for a single
2496 variable. */
2497 DECL_REGISTER (overlap) = 0;
2498 return true;
2499 }
2500
2501 return false;
2502 }
2503
2504 /* Generate RTL for an asm statement with arguments.
2505 STRING is the instruction template.
2506 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2507 Each output or input has an expression in the TREE_VALUE and
2508 a tree list in TREE_PURPOSE which in turn contains a constraint
2509 name in TREE_VALUE (or NULL_TREE) and a constraint string
2510 in TREE_PURPOSE.
2511 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2512 that is clobbered by this insn.
2513
2514 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2515 should be the fallthru basic block of the asm goto.
2516
2517 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2518 Some elements of OUTPUTS may be replaced with trees representing temporary
2519 values. The caller should copy those temporary values to the originally
2520 specified lvalues.
2521
2522 VOL nonzero means the insn is volatile; don't optimize it. */
2523
2524 static void
2525 expand_asm_operands (tree string, tree outputs, tree inputs,
2526 tree clobbers, tree labels, basic_block fallthru_bb,
2527 int vol, location_t locus)
2528 {
2529 rtvec argvec, constraintvec, labelvec;
2530 rtx body;
2531 int ninputs = list_length (inputs);
2532 int noutputs = list_length (outputs);
2533 int nlabels = list_length (labels);
2534 int ninout;
2535 int nclobbers;
2536 HARD_REG_SET clobbered_regs;
2537 int clobber_conflict_found = 0;
2538 tree tail;
2539 tree t;
2540 int i;
2541 /* Vector of RTX's of evaluated output operands. */
2542 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2543 int *inout_opnum = XALLOCAVEC (int, noutputs);
2544 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2545 machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
2546 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2547 int old_generating_concat_p = generating_concat_p;
2548 rtx_code_label *fallthru_label = NULL;
2549
2550 /* An ASM with no outputs needs to be treated as volatile, for now. */
2551 if (noutputs == 0)
2552 vol = 1;
2553
2554 if (! check_operand_nalternatives (outputs, inputs))
2555 return;
2556
2557 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2558
2559 /* Collect constraints. */
2560 i = 0;
2561 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2562 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2563 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2564 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2565
2566 /* Sometimes we wish to automatically clobber registers across an asm.
2567 Case in point is when the i386 backend moved from cc0 to a hard reg --
2568 maintaining source-level compatibility means automatically clobbering
2569 the flags register. */
2570 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2571
2572 /* Count the number of meaningful clobbered registers, ignoring what
2573 we would ignore later. */
2574 nclobbers = 0;
2575 CLEAR_HARD_REG_SET (clobbered_regs);
2576 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2577 {
2578 const char *regname;
2579 int nregs;
2580
2581 if (TREE_VALUE (tail) == error_mark_node)
2582 return;
2583 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2584
2585 i = decode_reg_name_and_count (regname, &nregs);
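/* decode_reg_name_and_count yields a hard register number for a
known register name, or a negative code: the ones handled here
and below are -2 (unknown name), -3 ("cc") and -4 ("memory"). */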
2586 if (i == -4)
2587 ++nclobbers;
2588 else if (i == -2)
2589 error ("unknown register name %qs in %<asm%>", regname);
2590
2591 /* Mark clobbered registers. */
2592 if (i >= 0)
2593 {
2594 int reg;
2595
2596 for (reg = i; reg < i + nregs; reg++)
2597 {
2598 ++nclobbers;
2599
2600 /* Clobbering the PIC register is an error. */
2601 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2602 {
2603 error ("PIC register clobbered by %qs in %<asm%>", regname);
2604 return;
2605 }
2606
2607 SET_HARD_REG_BIT (clobbered_regs, reg);
2608 }
2609 }
2610 }
2611
2612 /* First pass over inputs and outputs checks validity and sets
2613 mark_addressable if needed. */
2614
2615 ninout = 0;
2616 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2617 {
2618 tree val = TREE_VALUE (tail);
2619 tree type = TREE_TYPE (val);
2620 const char *constraint;
2621 bool is_inout;
2622 bool allows_reg;
2623 bool allows_mem;
2624
2625 /* If there's an erroneous arg, emit no insn. */
2626 if (type == error_mark_node)
2627 return;
2628
2629 /* Try to parse the output constraint. If that fails, there's
2630 no point in going further. */
2631 constraint = constraints[i];
2632 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2633 &allows_mem, &allows_reg, &is_inout))
2634 return;
2635
2636 if (! allows_reg
2637 && (allows_mem
2638 || is_inout
2639 || (DECL_P (val)
2640 && REG_P (DECL_RTL (val))
2641 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2642 mark_addressable (val);
2643
2644 if (is_inout)
2645 ninout++;
2646 }
2647
2648 ninputs += ninout;
2649 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2650 {
2651 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2652 return;
2653 }
2654
2655 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2656 {
2657 bool allows_reg, allows_mem;
2658 const char *constraint;
2659
2660 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2661 would get VOIDmode and that could cause a crash in reload. */
2662 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2663 return;
2664
2665 constraint = constraints[i + noutputs];
2666 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2667 constraints, &allows_mem, &allows_reg))
2668 return;
2669
2670 if (! allows_reg && allows_mem)
2671 mark_addressable (TREE_VALUE (tail));
2672 }
2673
2674 /* Second pass evaluates arguments. */
2675
2676 /* Make sure stack is consistent for asm goto. */
2677 if (nlabels > 0)
2678 do_pending_stack_adjust ();
2679
2680 ninout = 0;
2681 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2682 {
2683 tree val = TREE_VALUE (tail);
2684 tree type = TREE_TYPE (val);
2685 bool is_inout;
2686 bool allows_reg;
2687 bool allows_mem;
2688 rtx op;
2689 bool ok;
2690
2691 ok = parse_output_constraint (&constraints[i], i, ninputs,
2692 noutputs, &allows_mem, &allows_reg,
2693 &is_inout);
2694 gcc_assert (ok);
2695
2696 /* If an output operand is not a decl or indirect ref and our constraint
2697 allows a register, make a temporary to act as an intermediate.
2698 Make the asm insn write into that, then our caller will copy it to
2699 the real output operand. Likewise for promoted variables. */
2700
2701 generating_concat_p = 0;
2702
2703 real_output_rtx[i] = NULL_RTX;
2704 if ((TREE_CODE (val) == INDIRECT_REF
2705 && allows_mem)
2706 || (DECL_P (val)
2707 && (allows_mem || REG_P (DECL_RTL (val)))
2708 && ! (REG_P (DECL_RTL (val))
2709 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2710 || ! allows_reg
2711 || is_inout)
2712 {
2713 op = expand_expr (val, NULL_RTX, VOIDmode,
2714 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2715 if (MEM_P (op))
2716 op = validize_mem (op);
2717
2718 if (! allows_reg && !MEM_P (op))
2719 error ("output number %d not directly addressable", i);
2720 if ((! allows_mem && MEM_P (op))
2721 || GET_CODE (op) == CONCAT)
2722 {
2723 real_output_rtx[i] = op;
2724 op = gen_reg_rtx (GET_MODE (op));
2725 if (is_inout)
2726 emit_move_insn (op, real_output_rtx[i]);
2727 }
2728 }
2729 else
2730 {
2731 op = assign_temp (type, 0, 1);
2732 op = validize_mem (op);
2733 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2734 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2735 TREE_VALUE (tail) = make_tree (type, op);
2736 }
2737 output_rtx[i] = op;
2738
2739 generating_concat_p = old_generating_concat_p;
2740
2741 if (is_inout)
2742 {
2743 inout_mode[ninout] = TYPE_MODE (type);
2744 inout_opnum[ninout++] = i;
2745 }
2746
2747 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2748 clobber_conflict_found = 1;
2749 }
2750
2751 /* Make vectors for the expression-rtx, constraint strings,
2752 and named operands. */
2753
2754 argvec = rtvec_alloc (ninputs);
2755 constraintvec = rtvec_alloc (ninputs);
2756 labelvec = rtvec_alloc (nlabels);
2757
2758 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2759 : GET_MODE (output_rtx[0])),
2760 ggc_strdup (TREE_STRING_POINTER (string)),
2761 empty_string, 0, argvec, constraintvec,
2762 labelvec, locus);
2763
2764 MEM_VOLATILE_P (body) = vol;
2765
2766 /* Eval the inputs and put them into ARGVEC.
2767 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2768
2769 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2770 {
2771 bool allows_reg, allows_mem;
2772 const char *constraint;
2773 tree val, type;
2774 rtx op;
2775 bool ok;
2776
2777 constraint = constraints[i + noutputs];
2778 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2779 constraints, &allows_mem, &allows_reg);
2780 gcc_assert (ok);
2781
2782 generating_concat_p = 0;
2783
2784 val = TREE_VALUE (tail);
2785 type = TREE_TYPE (val);
2786 /* EXPAND_INITIALIZER will not generate code for valid initializer
2787 constants, but will still generate code for other types of operand.
2788 This is the behavior we want for constant constraints. */
2789 op = expand_expr (val, NULL_RTX, VOIDmode,
2790 allows_reg ? EXPAND_NORMAL
2791 : allows_mem ? EXPAND_MEMORY
2792 : EXPAND_INITIALIZER);
2793
2794 /* Never pass a CONCAT to an ASM. */
2795 if (GET_CODE (op) == CONCAT)
2796 op = force_reg (GET_MODE (op), op);
2797 else if (MEM_P (op))
2798 op = validize_mem (op);
2799
2800 if (asm_operand_ok (op, constraint, NULL) <= 0)
2801 {
2802 if (allows_reg && TYPE_MODE (type) != BLKmode)
2803 op = force_reg (TYPE_MODE (type), op);
2804 else if (!allows_mem)
2805 warning (0, "asm operand %d probably doesn%'t match constraints",
2806 i + noutputs);
2807 else if (MEM_P (op))
2808 {
2809 /* We won't recognize either volatile memory or memory
2810 with a queued address as an available memory_operand
2811 at this point. Ignore it: clearly this *is* a memory. */
2812 }
2813 else
2814 gcc_unreachable ();
2815 }
2816
2817 generating_concat_p = old_generating_concat_p;
2818 ASM_OPERANDS_INPUT (body, i) = op;
2819
2820 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2821 = gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2822 ggc_strdup (constraints[i + noutputs]),
2823 locus);
2824
2825 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2826 clobber_conflict_found = 1;
2827 }
2828
2829 /* Protect all the operands from the queue now that they have all been
2830 evaluated. */
2831
2832 generating_concat_p = 0;
2833
2834 /* For in-out operands, copy output rtx to input rtx. */
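/* An in-out operand ("+r") at output position J becomes an extra
input whose constraint is the digit string "J", i.e. a matching
constraint tying the input to that output. */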
2835 for (i = 0; i < ninout; i++)
2836 {
2837 int j = inout_opnum[i];
2838 char buffer[16];
2839
2840 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2841 = output_rtx[j];
2842
2843 sprintf (buffer, "%d", j);
2844 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2845 = gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
2846 }
2847
2848 /* Copy labels to the vector. */
2849 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2850 {
2851 rtx r;
2852 /* If asm goto has any labels in the fallthru basic block, use
2853 a label that we emit immediately after the asm goto. Expansion
2854 may insert further instructions into the same basic block after
2855 asm goto and if we don't do this, insertion of instructions on
2856 the fallthru edge might misbehave. See PR58670. */
2857 if (fallthru_bb
2858 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2859 {
2860 if (fallthru_label == NULL_RTX)
2861 fallthru_label = gen_label_rtx ();
2862 r = fallthru_label;
2863 }
2864 else
2865 r = label_rtx (TREE_VALUE (tail));
2866 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2867 }
2868
2869 generating_concat_p = old_generating_concat_p;
2870
2871 /* Now, for each output, construct an rtx
2872 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2873 ARGVEC CONSTRAINTS OPNAMES))
2874 If there is more than one, put them inside a PARALLEL. */
2875
2876 if (nlabels > 0 && nclobbers == 0)
2877 {
2878 gcc_assert (noutputs == 0);
2879 emit_jump_insn (body);
2880 }
2881 else if (noutputs == 0 && nclobbers == 0)
2882 {
2883 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2884 emit_insn (body);
2885 }
2886 else if (noutputs == 1 && nclobbers == 0)
2887 {
2888 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2889 emit_insn (gen_rtx_SET (output_rtx[0], body));
2890 }
2891 else
2892 {
2893 rtx obody = body;
2894 int num = noutputs;
2895
2896 if (num == 0)
2897 num = 1;
2898
2899 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2900
2901 /* For each output operand, store a SET. */
2902 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2903 {
2904 XVECEXP (body, 0, i)
2905 = gen_rtx_SET (output_rtx[i],
2906 gen_rtx_ASM_OPERANDS
2907 (GET_MODE (output_rtx[i]),
2908 ggc_strdup (TREE_STRING_POINTER (string)),
2909 ggc_strdup (constraints[i]),
2910 i, argvec, constraintvec, labelvec, locus));
2911
2912 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2913 }
2914
2915 /* If there are no outputs (but there are some clobbers)
2916 store the bare ASM_OPERANDS into the PARALLEL. */
2917
2918 if (i == 0)
2919 XVECEXP (body, 0, i++) = obody;
2920
2921 /* Store (clobber REG) for each clobbered register specified. */
2922
2923 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2924 {
2925 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2926 int reg, nregs;
2927 int j = decode_reg_name_and_count (regname, &nregs);
2928 rtx clobbered_reg;
2929
2930 if (j < 0)
2931 {
2932 if (j == -3) /* `cc', which is not a register */
2933 continue;
2934
2935 if (j == -4) /* `memory', don't cache memory across asm */
2936 {
2937 XVECEXP (body, 0, i++)
2938 = gen_rtx_CLOBBER (VOIDmode,
2939 gen_rtx_MEM
2940 (BLKmode,
2941 gen_rtx_SCRATCH (VOIDmode)));
2942 continue;
2943 }
2944
2945 /* Ignore unknown register, error already signaled. */
2946 continue;
2947 }
2948
2949 for (reg = j; reg < j + nregs; reg++)
2950 {
2951 /* Use QImode since that's guaranteed to clobber just
2952 one reg. */
2953 clobbered_reg = gen_rtx_REG (QImode, reg);
2954
2955 /* Do sanity check for overlap between clobbers and
2956 respectively input and outputs that hasn't been
2957 handled. Such overlap should have been detected and
2958 reported above. */
2959 if (!clobber_conflict_found)
2960 {
2961 int opno;
2962
2963 /* We test the old body (obody) contents to avoid
2964 tripping over the under-construction body. */
2965 for (opno = 0; opno < noutputs; opno++)
2966 if (reg_overlap_mentioned_p (clobbered_reg,
2967 output_rtx[opno]))
2968 internal_error
2969 ("asm clobber conflict with output operand");
2970
2971 for (opno = 0; opno < ninputs - ninout; opno++)
2972 if (reg_overlap_mentioned_p (clobbered_reg,
2973 ASM_OPERANDS_INPUT (obody,
2974 opno)))
2975 internal_error
2976 ("asm clobber conflict with input operand");
2977 }
2978
2979 XVECEXP (body, 0, i++)
2980 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2981 }
2982 }
2983
2984 if (nlabels > 0)
2985 emit_jump_insn (body);
2986 else
2987 emit_insn (body);
2988 }
2989
2990 if (fallthru_label)
2991 emit_label (fallthru_label);
2992
2993 /* For any outputs that needed reloading into registers, spill them
2994 back to where they belong. */
2995 for (i = 0; i < noutputs; ++i)
2996 if (real_output_rtx[i])
2997 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2998
2999 crtl->has_asm_statement = 1;
3000 free_temp_slots ();
3001 }
3002
3003
3004 static void
3005 expand_asm_stmt (gasm *stmt)
3006 {
3007 int noutputs;
3008 tree outputs, tail, t;
3009 tree *o;
3010 size_t i, n;
3011 const char *s;
3012 tree str, out, in, cl, labels;
3013 location_t locus = gimple_location (stmt);
3014 basic_block fallthru_bb = NULL;
3015
3016 /* Meh... convert the gimple asm operands into real tree lists.
3017 Eventually we should make all routines work on the vectors instead
3018 of relying on TREE_CHAIN. */
3019 out = NULL_TREE;
3020 n = gimple_asm_noutputs (stmt);
3021 if (n > 0)
3022 {
3023 t = out = gimple_asm_output_op (stmt, 0);
3024 for (i = 1; i < n; i++)
3025 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
3026 }
3027
3028 in = NULL_TREE;
3029 n = gimple_asm_ninputs (stmt);
3030 if (n > 0)
3031 {
3032 t = in = gimple_asm_input_op (stmt, 0);
3033 for (i = 1; i < n; i++)
3034 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
3035 }
3036
3037 cl = NULL_TREE;
3038 n = gimple_asm_nclobbers (stmt);
3039 if (n > 0)
3040 {
3041 t = cl = gimple_asm_clobber_op (stmt, 0);
3042 for (i = 1; i < n; i++)
3043 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
3044 }
3045
3046 labels = NULL_TREE;
3047 n = gimple_asm_nlabels (stmt);
3048 if (n > 0)
3049 {
3050 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3051 if (fallthru)
3052 fallthru_bb = fallthru->dest;
3053 t = labels = gimple_asm_label_op (stmt, 0);
3054 for (i = 1; i < n; i++)
3055 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
3056 }
3057
3058 s = gimple_asm_string (stmt);
3059 str = build_string (strlen (s), s);
3060
3061 if (gimple_asm_input_p (stmt))
3062 {
3063 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
3064 return;
3065 }
3066
3067 outputs = out;
3068 noutputs = gimple_asm_noutputs (stmt);
3069 /* o[I] is the place that output number I should be written. */
3070 o = (tree *) alloca (noutputs * sizeof (tree));
3071
3072 /* Record the contents of OUTPUTS before it is modified. */
3073 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3074 o[i] = TREE_VALUE (tail);
3075
3076 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
3077 OUTPUTS some trees for where the values were actually stored. */
3078 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
3079 gimple_asm_volatile_p (stmt), locus);
3080
3081 /* Copy all the intermediate outputs into the specified outputs. */
3082 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3083 {
3084 if (o[i] != TREE_VALUE (tail))
3085 {
3086 expand_assignment (o[i], TREE_VALUE (tail), false);
3087 free_temp_slots ();
3088
3089 /* Restore the original value so that it's correct the next
3090 time we expand this function. */
3091 TREE_VALUE (tail) = o[i];
3092 }
3093 }
3094 }
3095
3096 /* Emit code to jump to the address
3097 specified by the pointer expression EXP. */
3098
3099 static void
3100 expand_computed_goto (tree exp)
3101 {
3102 rtx x = expand_normal (exp);
3103
3104 do_pending_stack_adjust ();
3105 emit_indirect_jump (x);
3106 }
3107
3108 /* Generate RTL code for a `goto' statement with target label LABEL.
3109 LABEL should be a LABEL_DECL tree node that was or will later be
3110 defined with `expand_label'. */
3111
3112 static void
3113 expand_goto (tree label)
3114 {
3115 #ifdef ENABLE_CHECKING
3116 /* Check for a nonlocal goto to a containing function. Should have
3117 gotten translated to __builtin_nonlocal_goto. */
3118 tree context = decl_function_context (label);
3119 gcc_assert (!context || context == current_function_decl);
3120 #endif
3121
3122 emit_jump (label_rtx (label));
3123 }
3124
3125 /* Output a return with no value. */
3126
3127 static void
3128 expand_null_return_1 (void)
3129 {
3130 clear_pending_stack_adjust ();
3131 do_pending_stack_adjust ();
3132 emit_jump (return_label);
3133 }
3134
3135 /* Generate RTL to return from the current function, with no value.
3136 (That is, we do not do anything about returning any value.) */
3137
3138 void
3139 expand_null_return (void)
3140 {
3141 /* If this function was declared to return a value, but we
3142 didn't, clobber the return registers so that they are not
3143 propagated live to the rest of the function. */
3144 clobber_return_register ();
3145
3146 expand_null_return_1 ();
3147 }
3148
3149 /* Generate RTL to return from the current function, with value VAL. */
3150
3151 static void
3152 expand_value_return (rtx val)
3153 {
3154 /* Copy the value to the return location unless it's already there. */
3155
3156 tree decl = DECL_RESULT (current_function_decl);
3157 rtx return_reg = DECL_RTL (decl);
3158 if (return_reg != val)
3159 {
3160 tree funtype = TREE_TYPE (current_function_decl);
3161 tree type = TREE_TYPE (decl);
3162 int unsignedp = TYPE_UNSIGNED (type);
3163 machine_mode old_mode = DECL_MODE (decl);
3164 machine_mode mode;
3165 if (DECL_BY_REFERENCE (decl))
3166 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3167 else
3168 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3169
3170 if (mode != old_mode)
3171 val = convert_modes (mode, old_mode, val, unsignedp);
3172
3173 if (GET_CODE (return_reg) == PARALLEL)
3174 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3175 else
3176 emit_move_insn (return_reg, val);
3177 }
3178
3179 expand_null_return_1 ();
3180 }
3181
3182 /* Generate RTL to evaluate the expression RETVAL and return it
3183 from the current function. */
3184
3185 static void
3186 expand_return (tree retval, tree bounds)
3187 {
3188 rtx result_rtl;
3189 rtx val = 0;
3190 tree retval_rhs;
3191 rtx bounds_rtl;
3192
3193 /* If function wants no value, give it none. */
3194 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3195 {
3196 expand_normal (retval);
3197 expand_null_return ();
3198 return;
3199 }
3200
3201 if (retval == error_mark_node)
3202 {
3203 /* Treat this like a return of no value from a function that
3204 returns a value. */
3205 expand_null_return ();
3206 return;
3207 }
3208 else if ((TREE_CODE (retval) == MODIFY_EXPR
3209 || TREE_CODE (retval) == INIT_EXPR)
3210 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3211 retval_rhs = TREE_OPERAND (retval, 1);
3212 else
3213 retval_rhs = retval;
3214
3215 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3216
3217 /* Put returned bounds to the right place. */
3218 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3219 if (bounds_rtl)
3220 {
3221 rtx addr, bnd;
3222
3223 if (bounds)
3224 {
3225 bnd = expand_normal (bounds);
3226 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3227 }
3228 else if (REG_P (bounds_rtl))
3229 {
3230 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3231 addr = gen_rtx_MEM (Pmode, addr);
3232 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3233 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3234 }
3235 else
3236 {
3237 int n;
3238
3239 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3240
3241 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3242 addr = gen_rtx_MEM (Pmode, addr);
3243
3244 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3245 {
3246 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3247 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3248 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3249 rtx bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3250 targetm.calls.store_returned_bounds (slot, bnd);
3251 }
3252 }
3253 }
3254 else if (chkp_function_instrumented_p (current_function_decl)
3255 && !BOUNDED_P (retval_rhs)
3256 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3257 && TREE_CODE (retval_rhs) != RESULT_DECL)
3258 {
3259 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3260 addr = gen_rtx_MEM (Pmode, addr);
3261
3262 gcc_assert (MEM_P (result_rtl));
3263
3264 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3265 }
3266
3267 /* If we are returning the RESULT_DECL, then the value has already
3268 been stored into it, so we don't have to do anything special. */
3269 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3270 expand_value_return (result_rtl);
3271
3272 /* If the result is an aggregate that is being returned in one (or more)
3273 registers, load the registers here. */
3274
3275 else if (retval_rhs != 0
3276 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3277 && REG_P (result_rtl))
3278 {
3279 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3280 if (val)
3281 {
3282 /* Use the mode of the result value on the return register. */
3283 PUT_MODE (result_rtl, GET_MODE (val));
3284 expand_value_return (val);
3285 }
3286 else
3287 expand_null_return ();
3288 }
3289 else if (retval_rhs != 0
3290 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3291 && (REG_P (result_rtl)
3292 || (GET_CODE (result_rtl) == PARALLEL)))
3293 {
3294 /* Compute the return value into a temporary (usually a pseudo reg). */
3295 val
3296 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3297 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3298 val = force_not_mem (val);
3299 expand_value_return (val);
3300 }
3301 else
3302 {
3303 /* No hard reg used; calculate value into hard return reg. */
3304 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3305 expand_value_return (result_rtl);
3306 }
3307 }
3308
3309 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3310 STMT that doesn't require special handling for outgoing edges. That
3311 is, no tailcalls and no GIMPLE_COND. */
3312
3313 static void
3314 expand_gimple_stmt_1 (gimple stmt)
3315 {
3316 tree op0;
3317
3318 set_curr_insn_location (gimple_location (stmt));
3319
3320 switch (gimple_code (stmt))
3321 {
3322 case GIMPLE_GOTO:
3323 op0 = gimple_goto_dest (stmt);
3324 if (TREE_CODE (op0) == LABEL_DECL)
3325 expand_goto (op0);
3326 else
3327 expand_computed_goto (op0);
3328 break;
3329 case GIMPLE_LABEL:
3330 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3331 break;
3332 case GIMPLE_NOP:
3333 case GIMPLE_PREDICT:
3334 break;
3335 case GIMPLE_SWITCH:
3336 expand_case (as_a <gswitch *> (stmt));
3337 break;
3338 case GIMPLE_ASM:
3339 expand_asm_stmt (as_a <gasm *> (stmt));
3340 break;
3341 case GIMPLE_CALL:
3342 expand_call_stmt (as_a <gcall *> (stmt));
3343 break;
3344
3345 case GIMPLE_RETURN:
3346 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3347
3348 if (op0 && op0 != error_mark_node)
3349 {
3350 tree result = DECL_RESULT (current_function_decl);
3351
3352 /* If we are not returning the current function's RESULT_DECL,
3353 build an assignment to it. */
3354 if (op0 != result)
3355 {
3356 /* I believe that a function's RESULT_DECL is unique. */
3357 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3358
3359 /* ??? We'd like to use simply expand_assignment here,
3360 but this fails if the value is of BLKmode but the return
3361 decl is a register. expand_return has special handling
3362 for this combination, which eventually should move
3363 to common code. See comments there. Until then, let's
3364 build a modify expression :-/ */
3365 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3366 result, op0);
3367 }
3368 }
3369 if (!op0)
3370 expand_null_return ();
3371 else
3372 expand_return (op0, gimple_return_retbnd (stmt));
3373 break;
3374
3375 case GIMPLE_ASSIGN:
3376 {
3377 gassign *assign_stmt = as_a <gassign *> (stmt);
3378 tree lhs = gimple_assign_lhs (assign_stmt);
3379
3380 /* Tree expand used to fiddle with |= and &= of two bitfield
3381 COMPONENT_REFs here. This can't happen with gimple; the LHS
3382 of binary assigns must be a gimple reg. */
3383
3384 if (TREE_CODE (lhs) != SSA_NAME
3385 || get_gimple_rhs_class (gimple_expr_code (stmt))
3386 == GIMPLE_SINGLE_RHS)
3387 {
3388 tree rhs = gimple_assign_rhs1 (assign_stmt);
3389 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3390 == GIMPLE_SINGLE_RHS);
3391 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3392 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3393 if (TREE_CLOBBER_P (rhs))
3394 /* This is a clobber to mark the going out of scope for
3395 this LHS. */
3396 ;
3397 else
3398 expand_assignment (lhs, rhs,
3399 gimple_assign_nontemporal_move_p (
3400 assign_stmt));
3401 }
3402 else
3403 {
3404 rtx target, temp;
3405 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3406 struct separate_ops ops;
3407 bool promoted = false;
3408
3409 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3410 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3411 promoted = true;
3412
3413 ops.code = gimple_assign_rhs_code (assign_stmt);
3414 ops.type = TREE_TYPE (lhs);
3415 switch (get_gimple_rhs_class (ops.code))
3416 {
3417 case GIMPLE_TERNARY_RHS:
3418 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3419 /* Fallthru */
3420 case GIMPLE_BINARY_RHS:
3421 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3422 /* Fallthru */
3423 case GIMPLE_UNARY_RHS:
3424 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3425 break;
3426 default:
3427 gcc_unreachable ();
3428 }
3429 ops.location = gimple_location (stmt);
3430
3431 /* If we want to use a nontemporal store, force the value to
3432 a register first. If we store into a promoted register,
3433 don't directly expand to target. */
3434 temp = nontemporal || promoted ? NULL_RTX : target;
3435 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3436 EXPAND_NORMAL);
3437
3438 if (temp == target)
3439 ;
3440 else if (promoted)
3441 {
3442 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3443 /* If TEMP is a VOIDmode constant, use convert_modes to make
3444 sure that we properly convert it. */
3445 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3446 {
3447 temp = convert_modes (GET_MODE (target),
3448 TYPE_MODE (ops.type),
3449 temp, unsignedp);
3450 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3451 GET_MODE (target), temp, unsignedp);
3452 }
3453
3454 convert_move (SUBREG_REG (target), temp, unsignedp);
3455 }
3456 else if (nontemporal && emit_storent_insn (target, temp))
3457 ;
3458 else
3459 {
3460 temp = force_operand (temp, target);
3461 if (temp != target)
3462 emit_move_insn (target, temp);
3463 }
3464 }
3465 }
3466 break;
3467
3468 default:
3469 gcc_unreachable ();
3470 }
3471 }
3472
3473 /* Expand one gimple statement STMT and return the last RTL instruction
3474 before any of the newly generated ones.
3475
3476 In addition to generating the necessary RTL instructions this also
3477 sets REG_EH_REGION notes if necessary and sets the current source
3478 location for diagnostics. */
3479
3480 static rtx_insn *
3481 expand_gimple_stmt (gimple stmt)
3482 {
3483 location_t saved_location = input_location;
3484 rtx_insn *last = get_last_insn ();
3485 int lp_nr;
3486
3487 gcc_assert (cfun);
3488
3489 /* We need to save and restore the current source location so that errors
3490 discovered during expansion are emitted with the right location. But
3491 it would be better if the diagnostic routines used the source location
3492 embedded in the tree nodes rather than globals. */
3493 if (gimple_has_location (stmt))
3494 input_location = gimple_location (stmt);
3495
3496 expand_gimple_stmt_1 (stmt);
3497
3498 /* Free any temporaries used to evaluate this statement. */
3499 free_temp_slots ();
3500
3501 input_location = saved_location;
3502
3503 /* Mark all insns that may trap. */
3504 lp_nr = lookup_stmt_eh_lp (stmt);
3505 if (lp_nr)
3506 {
3507 rtx_insn *insn;
3508 for (insn = next_real_insn (last); insn;
3509 insn = next_real_insn (insn))
3510 {
3511 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3512 /* If we want exceptions for non-call insns, any
3513 may_trap_p instruction may throw. */
3514 && GET_CODE (PATTERN (insn)) != CLOBBER
3515 && GET_CODE (PATTERN (insn)) != USE
3516 && insn_could_throw_p (insn))
3517 make_reg_eh_region_note (insn, 0, lp_nr);
3518 }
3519 }
3520
3521 return last;
3522 }
3523
3524 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3525 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3526 generated a tail call (something that might be denied by the ABI
3527 rules governing the call; see calls.c).
3528
3529 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3530 can still reach the rest of BB. The case here is __builtin_sqrt,
3531 where the NaN result goes through the external function (with a
3532 tailcall) and the normal result happens via a sqrt instruction. */
3533
3534 static basic_block
3535 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3536 {
3537 rtx_insn *last2, *last;
3538 edge e;
3539 edge_iterator ei;
3540 int probability;
3541 gcov_type count;
3542
3543 last2 = last = expand_gimple_stmt (stmt);
3544
3545 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3546 if (CALL_P (last) && SIBLING_CALL_P (last))
3547 goto found;
3548
3549 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3550
3551 *can_fallthru = true;
3552 return NULL;
3553
3554 found:
3555 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3556 Any instructions emitted here are about to be deleted. */
3557 do_pending_stack_adjust ();
3558
3559 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3560 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3561 EH or abnormal edges, we shouldn't have created a tail call in
3562 the first place. So it seems to me we should just be removing
3563 all edges here, or redirecting the existing fallthru edge to
3564 the exit block. */
3565
3566 probability = 0;
3567 count = 0;
3568
3569 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3570 {
3571 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3572 {
3573 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3574 {
3575 e->dest->count -= e->count;
3576 e->dest->frequency -= EDGE_FREQUENCY (e);
3577 if (e->dest->count < 0)
3578 e->dest->count = 0;
3579 if (e->dest->frequency < 0)
3580 e->dest->frequency = 0;
3581 }
3582 count += e->count;
3583 probability += e->probability;
3584 remove_edge (e);
3585 }
3586 else
3587 ei_next (&ei);
3588 }
3589
3590 /* This is somewhat ugly: the call_expr expander often emits instructions
3591 after the sibcall (to perform the function return). These confuse the
3592 find_many_sub_basic_blocks code, so we need to get rid of these. */
3593 last = NEXT_INSN (last);
3594 gcc_assert (BARRIER_P (last));
3595
3596 *can_fallthru = false;
3597 while (NEXT_INSN (last))
3598 {
3599 /* For instance, the sqrt builtin expander expands an if with a
3600 sibcall in the then-branch and a label for the else-branch. */
3601 if (LABEL_P (NEXT_INSN (last)))
3602 {
3603 *can_fallthru = true;
3604 break;
3605 }
3606 delete_insn (NEXT_INSN (last));
3607 }
3608
3609 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3610 | EDGE_SIBCALL);
3611 e->probability += probability;
3612 e->count += count;
3613 BB_END (bb) = last;
3614 update_bb_for_insn (bb);
3615
3616 if (NEXT_INSN (last))
3617 {
3618 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3619
3620 last = BB_END (bb);
3621 if (BARRIER_P (last))
3622 BB_END (bb) = PREV_INSN (last);
3623 }
3624
3625 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3626
3627 return bb;
3628 }
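
/* Illustrative sketch (not compiled; the exact shapes are assumptions, for
   exposition only): for x = __builtin_sqrt (y) expanded with a conditional
   tail call, the insn stream this function scans looks roughly like

     (insn ... (set (reg:DF r) (sqrt:DF (reg:DF y))))    ; inline path
     (call_insn/j ... (call (mem (symbol_ref "sqrt"))))  ; SIBLING_CALL_P
     (barrier)
     (code_label L)                                      ; fallthru path

   Finding a label after the barrier is what makes *CAN_FALLTHRU true
   in the deletion loop above.  */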
3629
3630 /* Return the difference between the floor and the truncated result of
3631 a signed division by OP1 with remainder MOD. */
3632 static rtx
3633 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3634 {
3635 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3636 return gen_rtx_IF_THEN_ELSE
3637 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3638 gen_rtx_IF_THEN_ELSE
3639 (mode, gen_rtx_LT (BImode,
3640 gen_rtx_DIV (mode, op1, mod),
3641 const0_rtx),
3642 constm1_rtx, const0_rtx),
3643 const0_rtx);
3644 }
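
/* Worked example (for exposition only): with op0 = -7 and op1 = 2,
   truncating division gives -7 / 2 = -3 with remainder mod = -1, while
   floor division gives -4.  The adjustment is therefore -1, and the
   expression above agrees: mod != 0, and op1 / mod = 2 / -1 = -2 < 0,
   so it selects constm1_rtx.  */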
3645
3646 /* Return the difference between the ceil and the truncated result of
3647 a signed division by OP1 with remainder MOD. */
3648 static rtx
3649 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3650 {
3651 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3652 return gen_rtx_IF_THEN_ELSE
3653 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3654 gen_rtx_IF_THEN_ELSE
3655 (mode, gen_rtx_GT (BImode,
3656 gen_rtx_DIV (mode, op1, mod),
3657 const0_rtx),
3658 const1_rtx, const0_rtx),
3659 const0_rtx);
3660 }
3661
3662 /* Return the difference between the ceil and the truncated result of
3663 an unsigned division by OP1 with remainder MOD. */
3664 static rtx
3665 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3666 {
3667 /* (mod != 0 ? 1 : 0) */
3668 return gen_rtx_IF_THEN_ELSE
3669 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3670 const1_rtx, const0_rtx);
3671 }
3672
3673 /* Return the difference between the rounded and the truncated result
3674 of a signed division by OP1 with remainder MOD. Halfway cases are
3675 rounded away from zero, rather than to the nearest even number. */
3676 static rtx
3677 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3678 {
3679 /* (abs (mod) >= abs (op1) - abs (mod)
3680 ? (op1 / mod > 0 ? 1 : -1)
3681 : 0) */
3682 return gen_rtx_IF_THEN_ELSE
3683 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3684 gen_rtx_MINUS (mode,
3685 gen_rtx_ABS (mode, op1),
3686 gen_rtx_ABS (mode, mod))),
3687 gen_rtx_IF_THEN_ELSE
3688 (mode, gen_rtx_GT (BImode,
3689 gen_rtx_DIV (mode, op1, mod),
3690 const0_rtx),
3691 const1_rtx, constm1_rtx),
3692 const0_rtx);
3693 }
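
/* Worked example (for exposition only): with op0 = 7 and op1 = 2, we get
   7 / 2 = 3 with mod = 1.  Rounding 3.5 away from zero yields 4, so the
   adjustment is +1; and indeed abs (mod) = 1 >= abs (op1) - abs (mod) = 1,
   with op1 / mod = 2 > 0 selecting const1_rtx above.  */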
3694
3695 /* Return the difference between the rounded and the truncated result
3696 of an unsigned division by OP1 with remainder MOD. Halfway cases
3697 are rounded away from zero, rather than to the nearest even
3698 number. */
3699 static rtx
3700 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3701 {
3702 /* (mod >= op1 - mod ? 1 : 0) */
3703 return gen_rtx_IF_THEN_ELSE
3704 (mode, gen_rtx_GE (BImode, mod,
3705 gen_rtx_MINUS (mode, op1, mod)),
3706 const1_rtx, const0_rtx);
3707 }
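
/* Usage sketch, mirroring the ROUND_DIV_EXPR handling in
   expand_debug_expr further below:

     rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
     rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
     rtx adj = round_udiv_adjust (mode, mod, op1);
     rtx res = simplify_gen_binary (PLUS, mode, div, adj);

   builds a debug-only expression for a rounded unsigned division
   without emitting any insns.  */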
3708
3709 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3710 any rtl. */
3711
3712 static rtx
3713 convert_debug_memory_address (machine_mode mode, rtx x,
3714 addr_space_t as)
3715 {
3716 machine_mode xmode = GET_MODE (x);
3717
3718 #ifndef POINTERS_EXTEND_UNSIGNED
3719 gcc_assert (mode == Pmode
3720 || mode == targetm.addr_space.address_mode (as));
3721 gcc_assert (xmode == mode || xmode == VOIDmode);
3722 #else
3723 rtx temp;
3724
3725 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3726
3727 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3728 return x;
3729
3730 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3731 x = simplify_gen_subreg (mode, x, xmode,
3732 subreg_lowpart_offset
3733 (mode, xmode));
3734 else if (POINTERS_EXTEND_UNSIGNED > 0)
3735 x = gen_rtx_ZERO_EXTEND (mode, x);
3736 else if (!POINTERS_EXTEND_UNSIGNED)
3737 x = gen_rtx_SIGN_EXTEND (mode, x);
3738 else
3739 {
3740 switch (GET_CODE (x))
3741 {
3742 case SUBREG:
3743 if ((SUBREG_PROMOTED_VAR_P (x)
3744 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3745 || (GET_CODE (SUBREG_REG (x)) == PLUS
3746 && REG_P (XEXP (SUBREG_REG (x), 0))
3747 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3748 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3749 && GET_MODE (SUBREG_REG (x)) == mode)
3750 return SUBREG_REG (x);
3751 break;
3752 case LABEL_REF:
3753 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3754 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3755 return temp;
3756 case SYMBOL_REF:
3757 temp = shallow_copy_rtx (x);
3758 PUT_MODE (temp, mode);
3759 return temp;
3760 case CONST:
3761 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3762 if (temp)
3763 temp = gen_rtx_CONST (mode, temp);
3764 return temp;
3765 case PLUS:
3766 case MINUS:
3767 if (CONST_INT_P (XEXP (x, 1)))
3768 {
3769 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3770 if (temp)
3771 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3772 }
3773 break;
3774 default:
3775 break;
3776 }
3777 /* Don't know how to express ptr_extend as an operation in debug info. */
3778 return NULL;
3779 }
3780 #endif /* POINTERS_EXTEND_UNSIGNED */
3781
3782 return x;
3783 }
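
/* Usage sketch (the modes are assumptions, for illustration): on a target
   where ptr_mode is SImode but Pmode is DImode with
   POINTERS_EXTEND_UNSIGNED > 0, a debug address held in a 32-bit
   register would be rewritten as

     x = convert_debug_memory_address (Pmode, x, ADDR_SPACE_GENERIC);
     /* yields (zero_extend:DI (reg:SI n)) without emitting any insn,
	i.e. the gen_rtx_ZERO_EXTEND branch above.  */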
3784
3785 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
3786 by avoid_deep_ter_for_debug. */
3787
3788 static hash_map<tree, tree> *deep_ter_debug_map;
3789
3790 /* Split too deep TER chains for debug stmts using debug temporaries. */
3791
3792 static void
3793 avoid_deep_ter_for_debug (gimple stmt, int depth)
3794 {
3795 use_operand_p use_p;
3796 ssa_op_iter iter;
3797 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
3798 {
3799 tree use = USE_FROM_PTR (use_p);
3800 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
3801 continue;
3802 gimple g = get_gimple_for_ssa_name (use);
3803 if (g == NULL)
3804 continue;
3805 if (depth > 6 && !stmt_ends_bb_p (g))
3806 {
3807 if (deep_ter_debug_map == NULL)
3808 deep_ter_debug_map = new hash_map<tree, tree>;
3809
3810 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
3811 if (vexpr != NULL)
3812 continue;
3813 vexpr = make_node (DEBUG_EXPR_DECL);
3814 gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
3815 DECL_ARTIFICIAL (vexpr) = 1;
3816 TREE_TYPE (vexpr) = TREE_TYPE (use);
3817 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
3818 gimple_stmt_iterator gsi = gsi_for_stmt (g);
3819 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
3820 avoid_deep_ter_for_debug (def_temp, 0);
3821 }
3822 else
3823 avoid_deep_ter_for_debug (g, depth + 1);
3824 }
3825 }
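
/* Schematic illustration (assumed GIMPLE, for exposition): given a chain
   of single-use definitions

     t1_1 = x_1 + 1;  t2_2 = t1_1 + 1;  ...  t8_8 = t7_7 + 1;

   TER would substitute the whole chain into one expression.  Once the
   recursion is more than 6 levels deep, a bind such as

     # DEBUG D#1 => t1_1

   is inserted right after t1_1's definition, and debug expansion later
   refers to D#1 instead of re-expanding the chain.  */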
3826
3827 /* Return an RTX equivalent to the value of the parameter DECL. */
3828
3829 static rtx
3830 expand_debug_parm_decl (tree decl)
3831 {
3832 rtx incoming = DECL_INCOMING_RTL (decl);
3833
3834 if (incoming
3835 && GET_MODE (incoming) != BLKmode
3836 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3837 || (MEM_P (incoming)
3838 && REG_P (XEXP (incoming, 0))
3839 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3840 {
3841 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3842
3843 #ifdef HAVE_window_save
3844 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3845 If the target machine has an explicit window save instruction, the
3846 actual entry value is the corresponding OUTGOING_REGNO instead. */
3847 if (REG_P (incoming)
3848 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3849 incoming
3850 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3851 OUTGOING_REGNO (REGNO (incoming)), 0);
3852 else if (MEM_P (incoming))
3853 {
3854 rtx reg = XEXP (incoming, 0);
3855 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3856 {
3857 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3858 incoming = replace_equiv_address_nv (incoming, reg);
3859 }
3860 else
3861 incoming = copy_rtx (incoming);
3862 }
3863 #endif
3864
3865 ENTRY_VALUE_EXP (rtl) = incoming;
3866 return rtl;
3867 }
3868
3869 if (incoming
3870 && GET_MODE (incoming) != BLKmode
3871 && !TREE_ADDRESSABLE (decl)
3872 && MEM_P (incoming)
3873 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3874 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3875 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3876 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3877 return copy_rtx (incoming);
3878
3879 return NULL_RTX;
3880 }
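
/* Illustration (the register choice is an assumption): for an x86_64
   parameter arriving in the hard register %rdi, DECL_INCOMING_RTL is
   (reg:DI di), and the function above wraps it as

     (entry_value:DI (reg:DI di))

   which debug info consumers resolve to the value the register held on
   entry to the function.  */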
3881
3882 /* Return an RTX equivalent to the value of the tree expression EXP. */
3883
3884 static rtx
3885 expand_debug_expr (tree exp)
3886 {
3887 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3888 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3889 machine_mode inner_mode = VOIDmode;
3890 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3891 addr_space_t as;
3892
3893 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3894 {
3895 case tcc_expression:
3896 switch (TREE_CODE (exp))
3897 {
3898 case COND_EXPR:
3899 case DOT_PROD_EXPR:
3900 case SAD_EXPR:
3901 case WIDEN_MULT_PLUS_EXPR:
3902 case WIDEN_MULT_MINUS_EXPR:
3903 case FMA_EXPR:
3904 goto ternary;
3905
3906 case TRUTH_ANDIF_EXPR:
3907 case TRUTH_ORIF_EXPR:
3908 case TRUTH_AND_EXPR:
3909 case TRUTH_OR_EXPR:
3910 case TRUTH_XOR_EXPR:
3911 goto binary;
3912
3913 case TRUTH_NOT_EXPR:
3914 goto unary;
3915
3916 default:
3917 break;
3918 }
3919 break;
3920
3921 ternary:
3922 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3923 if (!op2)
3924 return NULL_RTX;
3925 /* Fall through. */
3926
3927 binary:
3928 case tcc_binary:
3929 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3930 if (!op1)
3931 return NULL_RTX;
3932 switch (TREE_CODE (exp))
3933 {
3934 case LSHIFT_EXPR:
3935 case RSHIFT_EXPR:
3936 case LROTATE_EXPR:
3937 case RROTATE_EXPR:
3938 case WIDEN_LSHIFT_EXPR:
3939 /* Ensure the second operand isn't wider than the first one. */
3940 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
3941 if (SCALAR_INT_MODE_P (inner_mode))
3942 {
3943 machine_mode opmode = mode;
3944 if (VECTOR_MODE_P (mode))
3945 opmode = GET_MODE_INNER (mode);
3946 if (SCALAR_INT_MODE_P (opmode)
3947 && (GET_MODE_PRECISION (opmode)
3948 < GET_MODE_PRECISION (inner_mode)))
3949 op1 = simplify_gen_subreg (opmode, op1, inner_mode,
3950 subreg_lowpart_offset (opmode,
3951 inner_mode));
3952 }
3953 break;
3954 default:
3955 break;
3956 }
3957 /* Fall through. */
3958
3959 unary:
3960 case tcc_unary:
3961 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3962 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3963 if (!op0)
3964 return NULL_RTX;
3965 break;
3966
3967 case tcc_comparison:
3968 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
3969 goto binary;
3970
3971 case tcc_type:
3972 case tcc_statement:
3973 gcc_unreachable ();
3974
3975 case tcc_constant:
3976 case tcc_exceptional:
3977 case tcc_declaration:
3978 case tcc_reference:
3979 case tcc_vl_exp:
3980 break;
3981 }
3982
3983 switch (TREE_CODE (exp))
3984 {
3985 case STRING_CST:
3986 if (!lookup_constant_def (exp))
3987 {
3988 if (strlen (TREE_STRING_POINTER (exp)) + 1
3989 != (size_t) TREE_STRING_LENGTH (exp))
3990 return NULL_RTX;
3991 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3992 op0 = gen_rtx_MEM (BLKmode, op0);
3993 set_mem_attributes (op0, exp, 0);
3994 return op0;
3995 }
3996 /* Fall through... */
3997
3998 case INTEGER_CST:
3999 case REAL_CST:
4000 case FIXED_CST:
4001 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4002 return op0;
4003
4004 case COMPLEX_CST:
4005 gcc_assert (COMPLEX_MODE_P (mode));
4006 op0 = expand_debug_expr (TREE_REALPART (exp));
4007 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4008 return gen_rtx_CONCAT (mode, op0, op1);
4009
4010 case DEBUG_EXPR_DECL:
4011 op0 = DECL_RTL_IF_SET (exp);
4012
4013 if (op0)
4014 return op0;
4015
4016 op0 = gen_rtx_DEBUG_EXPR (mode);
4017 DEBUG_EXPR_TREE_DECL (op0) = exp;
4018 SET_DECL_RTL (exp, op0);
4019
4020 return op0;
4021
4022 case VAR_DECL:
4023 case PARM_DECL:
4024 case FUNCTION_DECL:
4025 case LABEL_DECL:
4026 case CONST_DECL:
4027 case RESULT_DECL:
4028 op0 = DECL_RTL_IF_SET (exp);
4029
4030 /* This decl was probably optimized away. */
4031 if (!op0)
4032 {
4033 if (TREE_CODE (exp) != VAR_DECL
4034 || DECL_EXTERNAL (exp)
4035 || !TREE_STATIC (exp)
4036 || !DECL_NAME (exp)
4037 || DECL_HARD_REGISTER (exp)
4038 || DECL_IN_CONSTANT_POOL (exp)
4039 || mode == VOIDmode)
4040 return NULL;
4041
4042 op0 = make_decl_rtl_for_debug (exp);
4043 if (!MEM_P (op0)
4044 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4045 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4046 return NULL;
4047 }
4048 else
4049 op0 = copy_rtx (op0);
4050
4051 if (GET_MODE (op0) == BLKmode
4052 /* If op0 is not BLKmode, but mode is, adjust_mode
4053 below would ICE. While it is likely a FE bug,
4054 try to be robust here. See PR43166. */
4055 || mode == BLKmode
4056 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4057 {
4058 gcc_assert (MEM_P (op0));
4059 op0 = adjust_address_nv (op0, mode, 0);
4060 return op0;
4061 }
4062
4063 /* Fall through. */
4064
4065 adjust_mode:
4066 case PAREN_EXPR:
4067 CASE_CONVERT:
4068 {
4069 inner_mode = GET_MODE (op0);
4070
4071 if (mode == inner_mode)
4072 return op0;
4073
4074 if (inner_mode == VOIDmode)
4075 {
4076 if (TREE_CODE (exp) == SSA_NAME)
4077 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4078 else
4079 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4080 if (mode == inner_mode)
4081 return op0;
4082 }
4083
4084 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4085 {
4086 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4087 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4088 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4089 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4090 else
4091 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4092 }
4093 else if (FLOAT_MODE_P (mode))
4094 {
4095 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4096 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4097 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4098 else
4099 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4100 }
4101 else if (FLOAT_MODE_P (inner_mode))
4102 {
4103 if (unsignedp)
4104 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4105 else
4106 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4107 }
4108 else if (CONSTANT_P (op0)
4109 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4110 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4111 subreg_lowpart_offset (mode,
4112 inner_mode));
4113 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
4114 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4115 : unsignedp)
4116 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4117 else
4118 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4119
4120 return op0;
4121 }
4122
4123 case MEM_REF:
4124 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4125 {
4126 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4127 TREE_OPERAND (exp, 0),
4128 TREE_OPERAND (exp, 1));
4129 if (newexp)
4130 return expand_debug_expr (newexp);
4131 }
4132 /* FALLTHROUGH */
4133 case INDIRECT_REF:
4134 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4135 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4136 if (!op0)
4137 return NULL;
4138
4139 if (TREE_CODE (exp) == MEM_REF)
4140 {
4141 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4142 || (GET_CODE (op0) == PLUS
4143 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4144 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4145 Instead just use get_inner_reference. */
4146 goto component_ref;
4147
4148 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4149 if (!op1 || !CONST_INT_P (op1))
4150 return NULL;
4151
4152 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4153 }
4154
4155 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4156
4157 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4158 op0, as);
4159 if (op0 == NULL_RTX)
4160 return NULL;
4161
4162 op0 = gen_rtx_MEM (mode, op0);
4163 set_mem_attributes (op0, exp, 0);
4164 if (TREE_CODE (exp) == MEM_REF
4165 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4166 set_mem_expr (op0, NULL_TREE);
4167 set_mem_addr_space (op0, as);
4168
4169 return op0;
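
      /* E.g. (illustrative, modes assumed): MEM[(int *)p_1 + 4], with a
	 DImode pseudo holding p_1, becomes
	   (mem:SI (plus:DI (reg:DI n) (const_int 4)))
	 and no insns are emitted; only the debug location refers to it.  */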
4170
4171 case TARGET_MEM_REF:
4172 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4173 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4174 return NULL;
4175
4176 op0 = expand_debug_expr
4177 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4178 if (!op0)
4179 return NULL;
4180
4181 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4182 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4183 op0, as);
4184 if (op0 == NULL_RTX)
4185 return NULL;
4186
4187 op0 = gen_rtx_MEM (mode, op0);
4188
4189 set_mem_attributes (op0, exp, 0);
4190 set_mem_addr_space (op0, as);
4191
4192 return op0;
4193
4194 component_ref:
4195 case ARRAY_REF:
4196 case ARRAY_RANGE_REF:
4197 case COMPONENT_REF:
4198 case BIT_FIELD_REF:
4199 case REALPART_EXPR:
4200 case IMAGPART_EXPR:
4201 case VIEW_CONVERT_EXPR:
4202 {
4203 machine_mode mode1;
4204 HOST_WIDE_INT bitsize, bitpos;
4205 tree offset;
4206 int volatilep = 0;
4207 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4208 &mode1, &unsignedp, &volatilep, false);
4209 rtx orig_op0;
4210
4211 if (bitsize == 0)
4212 return NULL;
4213
4214 orig_op0 = op0 = expand_debug_expr (tem);
4215
4216 if (!op0)
4217 return NULL;
4218
4219 if (offset)
4220 {
4221 machine_mode addrmode, offmode;
4222
4223 if (!MEM_P (op0))
4224 return NULL;
4225
4226 op0 = XEXP (op0, 0);
4227 addrmode = GET_MODE (op0);
4228 if (addrmode == VOIDmode)
4229 addrmode = Pmode;
4230
4231 op1 = expand_debug_expr (offset);
4232 if (!op1)
4233 return NULL;
4234
4235 offmode = GET_MODE (op1);
4236 if (offmode == VOIDmode)
4237 offmode = TYPE_MODE (TREE_TYPE (offset));
4238
4239 if (addrmode != offmode)
4240 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4241 subreg_lowpart_offset (addrmode,
4242 offmode));
4243
4244 /* Don't use offset_address here; we don't need a
4245 recognizable address, and we don't want to generate
4246 code. */
4247 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4248 op0, op1));
4249 }
4250
4251 if (MEM_P (op0))
4252 {
4253 if (mode1 == VOIDmode)
4254 /* Bitfield. */
4255 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4256 if (bitpos >= BITS_PER_UNIT)
4257 {
4258 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4259 bitpos %= BITS_PER_UNIT;
4260 }
4261 else if (bitpos < 0)
4262 {
4263 HOST_WIDE_INT units
4264 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4265 op0 = adjust_address_nv (op0, mode1, units);
4266 bitpos += units * BITS_PER_UNIT;
4267 }
4268 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4269 op0 = adjust_address_nv (op0, mode, 0);
4270 else if (GET_MODE (op0) != mode1)
4271 op0 = adjust_address_nv (op0, mode1, 0);
4272 else
4273 op0 = copy_rtx (op0);
4274 if (op0 == orig_op0)
4275 op0 = shallow_copy_rtx (op0);
4276 set_mem_attributes (op0, exp, 0);
4277 }
4278
4279 if (bitpos == 0 && mode == GET_MODE (op0))
4280 return op0;
4281
4282 if (bitpos < 0)
4283 return NULL;
4284
4285 if (GET_MODE (op0) == BLKmode)
4286 return NULL;
4287
4288 if ((bitpos % BITS_PER_UNIT) == 0
4289 && bitsize == GET_MODE_BITSIZE (mode1))
4290 {
4291 machine_mode opmode = GET_MODE (op0);
4292
4293 if (opmode == VOIDmode)
4294 opmode = TYPE_MODE (TREE_TYPE (tem));
4295
4296 /* This condition may hold if we're expanding the address
4297 right past the end of an array that turned out not to
4298 be addressable (i.e., the address was only computed in
4299 debug stmts). The gen_subreg below would rightfully
4300 crash, and the address doesn't really exist, so just
4301 drop it. */
4302 if (bitpos >= GET_MODE_BITSIZE (opmode))
4303 return NULL;
4304
4305 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4306 return simplify_gen_subreg (mode, op0, opmode,
4307 bitpos / BITS_PER_UNIT);
4308 }
4309
4310 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4311 && TYPE_UNSIGNED (TREE_TYPE (exp))
4312 ? SIGN_EXTRACT
4313 : ZERO_EXTRACT, mode,
4314 GET_MODE (op0) != VOIDmode
4315 ? GET_MODE (op0)
4316 : TYPE_MODE (TREE_TYPE (tem)),
4317 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4318 }
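
      /* Illustration (the field layout is an assumption): for a 3-bit
	 bitfield read at bit offset 3 within a word, the result is an
	 extraction such as
	   (zero_extract:SI (reg:SI n) (const_int 3) (const_int 3))
	 i.e. the SIGN_EXTRACT/ZERO_EXTRACT return just above.  */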
4319
4320 case ABS_EXPR:
4321 return simplify_gen_unary (ABS, mode, op0, mode);
4322
4323 case NEGATE_EXPR:
4324 return simplify_gen_unary (NEG, mode, op0, mode);
4325
4326 case BIT_NOT_EXPR:
4327 return simplify_gen_unary (NOT, mode, op0, mode);
4328
4329 case FLOAT_EXPR:
4330 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4331 0)))
4332 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4333 inner_mode);
4334
4335 case FIX_TRUNC_EXPR:
4336 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4337 inner_mode);
4338
4339 case POINTER_PLUS_EXPR:
4340 /* For the rare target where pointers are not the same size as
4341 size_t, we need to check for mismatched modes and correct
4342 the addend. */
4343 if (op0 && op1
4344 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4345 && GET_MODE (op0) != GET_MODE (op1))
4346 {
4347 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4348 /* If OP0 is a partial mode, then we must truncate, even if it has
4349 the same bitsize as OP1 as GCC's representation of partial modes
4350 is opaque. */
4351 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4352 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4353 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4354 GET_MODE (op1));
4355 else
4356 /* We always sign-extend, regardless of the signedness of
4357 the operand, because the operand is always unsigned
4358 here even if the original C expression is signed. */
4359 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4360 GET_MODE (op1));
4361 }
4362 /* Fall through. */
4363 case PLUS_EXPR:
4364 return simplify_gen_binary (PLUS, mode, op0, op1);
4365
4366 case MINUS_EXPR:
4367 return simplify_gen_binary (MINUS, mode, op0, op1);
4368
4369 case MULT_EXPR:
4370 return simplify_gen_binary (MULT, mode, op0, op1);
4371
4372 case RDIV_EXPR:
4373 case TRUNC_DIV_EXPR:
4374 case EXACT_DIV_EXPR:
4375 if (unsignedp)
4376 return simplify_gen_binary (UDIV, mode, op0, op1);
4377 else
4378 return simplify_gen_binary (DIV, mode, op0, op1);
4379
4380 case TRUNC_MOD_EXPR:
4381 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4382
4383 case FLOOR_DIV_EXPR:
4384 if (unsignedp)
4385 return simplify_gen_binary (UDIV, mode, op0, op1);
4386 else
4387 {
4388 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4389 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4390 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4391 return simplify_gen_binary (PLUS, mode, div, adj);
4392 }
4393
4394 case FLOOR_MOD_EXPR:
4395 if (unsignedp)
4396 return simplify_gen_binary (UMOD, mode, op0, op1);
4397 else
4398 {
4399 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4400 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4401 adj = simplify_gen_unary (NEG, mode,
4402 simplify_gen_binary (MULT, mode, adj, op1),
4403 mode);
4404 return simplify_gen_binary (PLUS, mode, mod, adj);
4405 }
4406
4407 case CEIL_DIV_EXPR:
4408 if (unsignedp)
4409 {
4410 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4411 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4412 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4413 return simplify_gen_binary (PLUS, mode, div, adj);
4414 }
4415 else
4416 {
4417 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4418 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4419 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4420 return simplify_gen_binary (PLUS, mode, div, adj);
4421 }
4422
4423 case CEIL_MOD_EXPR:
4424 if (unsignedp)
4425 {
4426 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4427 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4428 adj = simplify_gen_unary (NEG, mode,
4429 simplify_gen_binary (MULT, mode, adj, op1),
4430 mode);
4431 return simplify_gen_binary (PLUS, mode, mod, adj);
4432 }
4433 else
4434 {
4435 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4436 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4437 adj = simplify_gen_unary (NEG, mode,
4438 simplify_gen_binary (MULT, mode, adj, op1),
4439 mode);
4440 return simplify_gen_binary (PLUS, mode, mod, adj);
4441 }
4442
4443 case ROUND_DIV_EXPR:
4444 if (unsignedp)
4445 {
4446 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4447 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4448 rtx adj = round_udiv_adjust (mode, mod, op1);
4449 return simplify_gen_binary (PLUS, mode, div, adj);
4450 }
4451 else
4452 {
4453 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4454 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4455 rtx adj = round_sdiv_adjust (mode, mod, op1);
4456 return simplify_gen_binary (PLUS, mode, div, adj);
4457 }
4458
4459 case ROUND_MOD_EXPR:
4460 if (unsignedp)
4461 {
4462 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4463 rtx adj = round_udiv_adjust (mode, mod, op1);
4464 adj = simplify_gen_unary (NEG, mode,
4465 simplify_gen_binary (MULT, mode, adj, op1),
4466 mode);
4467 return simplify_gen_binary (PLUS, mode, mod, adj);
4468 }
4469 else
4470 {
4471 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4472 rtx adj = round_sdiv_adjust (mode, mod, op1);
4473 adj = simplify_gen_unary (NEG, mode,
4474 simplify_gen_binary (MULT, mode, adj, op1),
4475 mode);
4476 return simplify_gen_binary (PLUS, mode, mod, adj);
4477 }
4478
4479 case LSHIFT_EXPR:
4480 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4481
4482 case RSHIFT_EXPR:
4483 if (unsignedp)
4484 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4485 else
4486 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4487
4488 case LROTATE_EXPR:
4489 return simplify_gen_binary (ROTATE, mode, op0, op1);
4490
4491 case RROTATE_EXPR:
4492 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4493
4494 case MIN_EXPR:
4495 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4496
4497 case MAX_EXPR:
4498 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4499
4500 case BIT_AND_EXPR:
4501 case TRUTH_AND_EXPR:
4502 return simplify_gen_binary (AND, mode, op0, op1);
4503
4504 case BIT_IOR_EXPR:
4505 case TRUTH_OR_EXPR:
4506 return simplify_gen_binary (IOR, mode, op0, op1);
4507
4508 case BIT_XOR_EXPR:
4509 case TRUTH_XOR_EXPR:
4510 return simplify_gen_binary (XOR, mode, op0, op1);
4511
4512 case TRUTH_ANDIF_EXPR:
4513 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4514
4515 case TRUTH_ORIF_EXPR:
4516 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4517
4518 case TRUTH_NOT_EXPR:
4519 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4520
4521 case LT_EXPR:
4522 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4523 op0, op1);
4524
4525 case LE_EXPR:
4526 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4527 op0, op1);
4528
4529 case GT_EXPR:
4530 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4531 op0, op1);
4532
4533 case GE_EXPR:
4534 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4535 op0, op1);
4536
4537 case EQ_EXPR:
4538 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4539
4540 case NE_EXPR:
4541 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4542
4543 case UNORDERED_EXPR:
4544 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4545
4546 case ORDERED_EXPR:
4547 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4548
4549 case UNLT_EXPR:
4550 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4551
4552 case UNLE_EXPR:
4553 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4554
4555 case UNGT_EXPR:
4556 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4557
4558 case UNGE_EXPR:
4559 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4560
4561 case UNEQ_EXPR:
4562 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4563
4564 case LTGT_EXPR:
4565 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4566
4567 case COND_EXPR:
4568 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4569
4570 case COMPLEX_EXPR:
4571 gcc_assert (COMPLEX_MODE_P (mode));
4572 if (GET_MODE (op0) == VOIDmode)
4573 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4574 if (GET_MODE (op1) == VOIDmode)
4575 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4576 return gen_rtx_CONCAT (mode, op0, op1);
4577
4578 case CONJ_EXPR:
4579 if (GET_CODE (op0) == CONCAT)
4580 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4581 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4582 XEXP (op0, 1),
4583 GET_MODE_INNER (mode)));
4584 else
4585 {
4586 machine_mode imode = GET_MODE_INNER (mode);
4587 rtx re, im;
4588
4589 if (MEM_P (op0))
4590 {
4591 re = adjust_address_nv (op0, imode, 0);
4592 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4593 }
4594 else
4595 {
4596 machine_mode ifmode = int_mode_for_mode (mode);
4597 machine_mode ihmode = int_mode_for_mode (imode);
4598 rtx halfsize;
4599 if (ifmode == BLKmode || ihmode == BLKmode)
4600 return NULL;
4601 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4602 re = op0;
4603 if (mode != ifmode)
4604 re = gen_rtx_SUBREG (ifmode, re, 0);
4605 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4606 if (imode != ihmode)
4607 re = gen_rtx_SUBREG (imode, re, 0);
4608 im = copy_rtx (op0);
4609 if (mode != ifmode)
4610 im = gen_rtx_SUBREG (ifmode, im, 0);
4611 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4612 if (imode != ihmode)
4613 im = gen_rtx_SUBREG (imode, im, 0);
4614 }
4615 im = gen_rtx_NEG (imode, im);
4616 return gen_rtx_CONCAT (mode, re, im);
4617 }
4618
4619 case ADDR_EXPR:
4620 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4621 if (!op0 || !MEM_P (op0))
4622 {
4623 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4624 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4625 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4626 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4627 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4628 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4629
4630 if (handled_component_p (TREE_OPERAND (exp, 0)))
4631 {
4632 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4633 tree decl
4634 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4635 &bitoffset, &bitsize, &maxsize);
4636 if ((TREE_CODE (decl) == VAR_DECL
4637 || TREE_CODE (decl) == PARM_DECL
4638 || TREE_CODE (decl) == RESULT_DECL)
4639 && (!TREE_ADDRESSABLE (decl)
4640 || target_for_debug_bind (decl))
4641 && (bitoffset % BITS_PER_UNIT) == 0
4642 && bitsize > 0
4643 && bitsize == maxsize)
4644 {
4645 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4646 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4647 }
4648 }
4649
4650 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4651 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4652 == ADDR_EXPR)
4653 {
4654 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4655 0));
4656 if (op0 != NULL
4657 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4658 || (GET_CODE (op0) == PLUS
4659 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4660 && CONST_INT_P (XEXP (op0, 1)))))
4661 {
4662 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4663 1));
4664 if (!op1 || !CONST_INT_P (op1))
4665 return NULL;
4666
4667 return plus_constant (mode, op0, INTVAL (op1));
4668 }
4669 }
4670
4671 return NULL;
4672 }
4673
4674 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4675 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4676
4677 return op0;
4678
4679 case VECTOR_CST:
4680 {
4681 unsigned i;
4682
4683 op0 = gen_rtx_CONCATN
4684 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4685
4686 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4687 {
4688 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4689 if (!op1)
4690 return NULL;
4691 XVECEXP (op0, 0, i) = op1;
4692 }
4693
4694 return op0;
4695 }
4696
4697 case CONSTRUCTOR:
4698 if (TREE_CLOBBER_P (exp))
4699 return NULL;
4700 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4701 {
4702 unsigned i;
4703 tree val;
4704
4705 op0 = gen_rtx_CONCATN
4706 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4707
4708 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4709 {
4710 op1 = expand_debug_expr (val);
4711 if (!op1)
4712 return NULL;
4713 XVECEXP (op0, 0, i) = op1;
4714 }
4715
4716 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4717 {
4718 op1 = expand_debug_expr
4719 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4720
4721 if (!op1)
4722 return NULL;
4723
4724 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4725 XVECEXP (op0, 0, i) = op1;
4726 }
4727
4728 return op0;
4729 }
4730 else
4731 goto flag_unsupported;
4732
4733 case CALL_EXPR:
4734 /* ??? Maybe handle some builtins? */
4735 return NULL;
4736
4737 case SSA_NAME:
4738 {
4739 gimple g = get_gimple_for_ssa_name (exp);
4740 if (g)
4741 {
4742 tree t = NULL_TREE;
4743 if (deep_ter_debug_map)
4744 {
4745 tree *slot = deep_ter_debug_map->get (exp);
4746 if (slot)
4747 t = *slot;
4748 }
4749 if (t == NULL_TREE)
4750 t = gimple_assign_rhs_to_tree (g);
4751 op0 = expand_debug_expr (t);
4752 if (!op0)
4753 return NULL;
4754 }
4755 else
4756 {
4757 int part = var_to_partition (SA.map, exp);
4758
4759 if (part == NO_PARTITION)
4760 {
4761 /* If this is a reference to an incoming value of a
4762 parameter that is never used in the code, or whose
4763 incoming value is never used in the code, use the
4764 PARM_DECL's DECL_RTL if set. */
4765 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4766 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4767 {
4768 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4769 if (op0)
4770 goto adjust_mode;
4771 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4772 if (op0)
4773 goto adjust_mode;
4774 }
4775 return NULL;
4776 }
4777
4778 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4779
4780 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4781 }
4782 goto adjust_mode;
4783 }
4784
4785 case ERROR_MARK:
4786 return NULL;
4787
4788 /* Vector stuff. For most of these tree codes there are no corresponding rtl codes. */
4789 case REALIGN_LOAD_EXPR:
4790 case REDUC_MAX_EXPR:
4791 case REDUC_MIN_EXPR:
4792 case REDUC_PLUS_EXPR:
4793 case VEC_COND_EXPR:
4794 case VEC_PACK_FIX_TRUNC_EXPR:
4795 case VEC_PACK_SAT_EXPR:
4796 case VEC_PACK_TRUNC_EXPR:
4797 case VEC_UNPACK_FLOAT_HI_EXPR:
4798 case VEC_UNPACK_FLOAT_LO_EXPR:
4799 case VEC_UNPACK_HI_EXPR:
4800 case VEC_UNPACK_LO_EXPR:
4801 case VEC_WIDEN_MULT_HI_EXPR:
4802 case VEC_WIDEN_MULT_LO_EXPR:
4803 case VEC_WIDEN_MULT_EVEN_EXPR:
4804 case VEC_WIDEN_MULT_ODD_EXPR:
4805 case VEC_WIDEN_LSHIFT_HI_EXPR:
4806 case VEC_WIDEN_LSHIFT_LO_EXPR:
4807 case VEC_PERM_EXPR:
4808 return NULL;
4809
4810 /* Misc codes. */
4811 case ADDR_SPACE_CONVERT_EXPR:
4812 case FIXED_CONVERT_EXPR:
4813 case OBJ_TYPE_REF:
4814 case WITH_SIZE_EXPR:
4815 return NULL;
4816
4817 case DOT_PROD_EXPR:
4818 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4819 && SCALAR_INT_MODE_P (mode))
4820 {
4821 op0
4822 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4823 0)))
4824 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4825 inner_mode);
4826 op1
4827 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4828 1)))
4829 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4830 inner_mode);
4831 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4832 return simplify_gen_binary (PLUS, mode, op0, op2);
4833 }
4834 return NULL;
4835
4836 case WIDEN_MULT_EXPR:
4837 case WIDEN_MULT_PLUS_EXPR:
4838 case WIDEN_MULT_MINUS_EXPR:
4839 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4840 && SCALAR_INT_MODE_P (mode))
4841 {
4842 inner_mode = GET_MODE (op0);
4843 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4844 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4845 else
4846 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4847 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4848 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4849 else
4850 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4851 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4852 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4853 return op0;
4854 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4855 return simplify_gen_binary (PLUS, mode, op0, op2);
4856 else
4857 return simplify_gen_binary (MINUS, mode, op2, op0);
4858 }
4859 return NULL;
4860
4861 case MULT_HIGHPART_EXPR:
4862 /* ??? Similar to the above. */
4863 return NULL;
4864
4865 case WIDEN_SUM_EXPR:
4866 case WIDEN_LSHIFT_EXPR:
4867 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4868 && SCALAR_INT_MODE_P (mode))
4869 {
4870 op0
4871 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4872 0)))
4873 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4874 inner_mode);
4875 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4876 ? ASHIFT : PLUS, mode, op0, op1);
4877 }
4878 return NULL;
4879
4880 case FMA_EXPR:
4881 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4882
4883 default:
4884 flag_unsupported:
4885 #ifdef ENABLE_CHECKING
4886 debug_tree (exp);
4887 gcc_unreachable ();
4888 #else
4889 return NULL;
4890 #endif
4891 }
4892 }
4893
4894 /* Return an RTX equivalent to the source bind value of the tree expression
4895 EXP. */
4896
4897 static rtx
4898 expand_debug_source_expr (tree exp)
4899 {
4900 rtx op0 = NULL_RTX;
4901 machine_mode mode = VOIDmode, inner_mode;
4902
4903 switch (TREE_CODE (exp))
4904 {
4905 case PARM_DECL:
4906 {
4907 mode = DECL_MODE (exp);
4908 op0 = expand_debug_parm_decl (exp);
4909 if (op0)
4910 break;
4911 /* Check whether this is an argument that has been completely
4912 optimized out. */
4913 if (!DECL_RTL_SET_P (exp)
4914 && !DECL_INCOMING_RTL (exp)
4915 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4916 {
4917 tree aexp = DECL_ORIGIN (exp);
4918 if (DECL_CONTEXT (aexp)
4919 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4920 {
4921 vec<tree, va_gc> **debug_args;
4922 unsigned int ix;
4923 tree ddecl;
4924 debug_args = decl_debug_args_lookup (current_function_decl);
4925 if (debug_args != NULL)
4926 {
4927 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4928 ix += 2)
4929 if (ddecl == aexp)
4930 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4931 }
4932 }
4933 }
4934 break;
4935 }
4936 default:
4937 break;
4938 }
4939
4940 if (op0 == NULL_RTX)
4941 return NULL_RTX;
4942
4943 inner_mode = GET_MODE (op0);
4944 if (mode == inner_mode)
4945 return op0;
4946
4947 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4948 {
4949 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4950 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4951 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4952 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4953 else
4954 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4955 }
4956 else if (FLOAT_MODE_P (mode))
4957 gcc_unreachable ();
4958 else if (FLOAT_MODE_P (inner_mode))
4959 {
4960 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4961 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4962 else
4963 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4964 }
4965 else if (CONSTANT_P (op0)
4966 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4967 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4968 subreg_lowpart_offset (mode, inner_mode));
4969 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4970 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4971 else
4972 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4973
4974 return op0;
4975 }
4976
4977 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4978 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4979 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4980
4981 static void
4982 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
4983 {
4984 rtx exp = *exp_p;
4985
4986 if (exp == NULL_RTX)
4987 return;
4988
4989 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4990 return;
4991
4992 if (depth == 4)
4993 {
4994 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4995 rtx dval = make_debug_expr_from_rtl (exp);
4996
4997 /* Emit a debug bind insn before INSN. */
4998 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4999 DEBUG_EXPR_TREE_DECL (dval), exp,
5000 VAR_INIT_STATUS_INITIALIZED);
5001
5002 emit_debug_insn_before (bind, insn);
5003 *exp_p = dval;
5004 return;
5005 }
5006
5007 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5008 int i, j;
5009 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5010 switch (*format_ptr++)
5011 {
5012 case 'e':
5013 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5014 break;
5015
5016 case 'E':
5017 case 'V':
5018 for (j = 0; j < XVECLEN (exp, i); j++)
5019 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5020 break;
5021
5022 default:
5023 break;
5024 }
5025 }
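
/* Schematic example (for exposition): if a debug insn's location is
   nested five levels deep, say

     (plus (mult (plus (mult (plus A B) C) D) E) F)

   the innermost (plus A B), reached at depth 4, is split out into a new
   DEBUG_EXPR, a bind for it is emitted before INSN, and the original
   location then refers to the DEBUG_EXPR instead.  */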
5026
5027 /* Expand the _LOCs in debug insns. We run this after expanding all
5028 regular insns, so that any variables referenced in the function
5029 will have their DECL_RTLs set. */
5030
5031 static void
5032 expand_debug_locations (void)
5033 {
5034 rtx_insn *insn;
5035 rtx_insn *last = get_last_insn ();
5036 int save_strict_alias = flag_strict_aliasing;
5037
5038 /* New alias sets while setting up memory attributes cause
5039 -fcompare-debug failures, even though they don't bring about any
5040 codegen changes. */
5041 flag_strict_aliasing = 0;
5042
5043 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5044 if (DEBUG_INSN_P (insn))
5045 {
5046 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5047 rtx val;
5048 rtx_insn *prev_insn, *insn2;
5049 machine_mode mode;
5050
5051 if (value == NULL_TREE)
5052 val = NULL_RTX;
5053 else
5054 {
5055 if (INSN_VAR_LOCATION_STATUS (insn)
5056 == VAR_INIT_STATUS_UNINITIALIZED)
5057 val = expand_debug_source_expr (value);
5058 /* The avoid_deep_ter_for_debug function inserts
5059 debug bind stmts after SSA_NAME definition, with the
5060 SSA_NAME as the whole bind location. Temporarily disable
5061 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5062 being defined in this DEBUG_INSN. */
5063 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5064 {
5065 tree *slot = deep_ter_debug_map->get (value);
5066 if (slot)
5067 {
5068 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5069 *slot = NULL_TREE;
5070 else
5071 slot = NULL;
5072 }
5073 val = expand_debug_expr (value);
5074 if (slot)
5075 *slot = INSN_VAR_LOCATION_DECL (insn);
5076 }
5077 else
5078 val = expand_debug_expr (value);
5079 gcc_assert (last == get_last_insn ());
5080 }
5081
5082 if (!val)
5083 val = gen_rtx_UNKNOWN_VAR_LOC ();
5084 else
5085 {
5086 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5087
5088 gcc_assert (mode == GET_MODE (val)
5089 || (GET_MODE (val) == VOIDmode
5090 && (CONST_SCALAR_INT_P (val)
5091 || GET_CODE (val) == CONST_FIXED
5092 || GET_CODE (val) == LABEL_REF)));
5093 }
5094
5095 INSN_VAR_LOCATION_LOC (insn) = val;
5096 prev_insn = PREV_INSN (insn);
5097 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5098 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5099 }
5100
5101 flag_strict_aliasing = save_strict_alias;
5102 }
5103
5104 /* Swap the operands of commutative operations so that the more
5105 expensive one is expanded first. */
5106
5107 static void
5108 reorder_operands (basic_block bb)
5109 {
5110 unsigned int *lattice; /* Holds the cost of each statement. */
5111 unsigned int i = 0, n = 0;
5112 gimple_stmt_iterator gsi;
5113 gimple_seq stmts;
5114 gimple stmt;
5115 bool swap;
5116 tree op0, op1;
5117 ssa_op_iter iter;
5118 use_operand_p use_p;
5119 gimple def0, def1;
5120
5121 /* Compute cost of each statement using estimate_num_insns. */
5122 stmts = bb_seq (bb);
5123 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5124 {
5125 stmt = gsi_stmt (gsi);
5126 if (!is_gimple_debug (stmt))
5127 gimple_set_uid (stmt, n++);
5128 }
5129 lattice = XNEWVEC (unsigned int, n);
5130 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5131 {
5132 unsigned cost;
5133 stmt = gsi_stmt (gsi);
5134 if (is_gimple_debug (stmt))
5135 continue;
5136 cost = estimate_num_insns (stmt, &eni_size_weights);
5137 lattice[i] = cost;
5138 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5139 {
5140 tree use = USE_FROM_PTR (use_p);
5141 gimple def_stmt;
5142 if (TREE_CODE (use) != SSA_NAME)
5143 continue;
5144 def_stmt = get_gimple_for_ssa_name (use);
5145 if (!def_stmt)
5146 continue;
5147 lattice[i] += lattice[gimple_uid (def_stmt)];
5148 }
5149 i++;
5150 if (!is_gimple_assign (stmt)
5151 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5152 continue;
5153 op0 = gimple_op (stmt, 1);
5154 op1 = gimple_op (stmt, 2);
5155 if (TREE_CODE (op0) != SSA_NAME
5156 || TREE_CODE (op1) != SSA_NAME)
5157 continue;
5158 /* Swap operands if the second one is more expensive. */
5159 def0 = get_gimple_for_ssa_name (op0);
5160 def1 = get_gimple_for_ssa_name (op1);
5161 if (!def1)
5162 continue;
5163 swap = false;
5164 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5165 swap = true;
5166 if (swap)
5167 {
5168 if (dump_file && (dump_flags & TDF_DETAILS))
5169 {
5170 fprintf (dump_file, "Swap operands in stmt:\n");
5171 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5172 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5173 def0 ? lattice[gimple_uid (def0)] : 0,
5174 lattice[gimple_uid (def1)]);
5175 }
5176 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5177 gimple_assign_rhs2_ptr (stmt));
5178 }
5179 }
5180 XDELETE (lattice);
5181 }
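
/* Illustration (the costs are made up): for  z_3 = a_1 + b_2  where a_1
   is defined by a cheap negation (cost 1) and b_2 by a TERed multiply
   chain (cost 12), the operands are swapped so that b_2 becomes rhs1
   and its expensive expression is expanded first.  */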
5182
5183 /* Expand basic block BB from GIMPLE trees to RTL. */
5184
5185 static basic_block
5186 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5187 {
5188 gimple_stmt_iterator gsi;
5189 gimple_seq stmts;
5190 gimple stmt = NULL;
5191 rtx_note *note;
5192 rtx_insn *last;
5193 edge e;
5194 edge_iterator ei;
5195
5196 if (dump_file)
5197 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5198 bb->index);
5199
5200 /* Note that since we are now transitioning from GIMPLE to RTL, we
5201 cannot use the gsi_*_bb() routines because they expect the basic
5202 block to be in GIMPLE, instead of RTL. Therefore, we need to
5203 access the BB sequence directly. */
5204 if (optimize)
5205 reorder_operands (bb);
5206 stmts = bb_seq (bb);
5207 bb->il.gimple.seq = NULL;
5208 bb->il.gimple.phi_nodes = NULL;
5209 rtl_profile_for_bb (bb);
5210 init_rtl_bb_info (bb);
5211 bb->flags |= BB_RTL;
5212
5213 /* Remove the RETURN_EXPR if we may fall through to the exit
5214 instead. */
5215 gsi = gsi_last (stmts);
5216 if (!gsi_end_p (gsi)
5217 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5218 {
5219 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5220
5221 gcc_assert (single_succ_p (bb));
5222 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5223
5224 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5225 && !gimple_return_retval (ret_stmt))
5226 {
5227 gsi_remove (&gsi, false);
5228 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5229 }
5230 }
5231
5232 gsi = gsi_start (stmts);
5233 if (!gsi_end_p (gsi))
5234 {
5235 stmt = gsi_stmt (gsi);
5236 if (gimple_code (stmt) != GIMPLE_LABEL)
5237 stmt = NULL;
5238 }
5239
5240 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5241
5242 if (stmt || elt)
5243 {
5244 last = get_last_insn ();
5245
5246 if (stmt)
5247 {
5248 expand_gimple_stmt (stmt);
5249 gsi_next (&gsi);
5250 }
5251
5252 if (elt)
5253 emit_label (*elt);
5254
5255 /* Java emits line number notes at the top of labels.
5256 ??? Make this go away once line number notes are obsoleted. */
5257 BB_HEAD (bb) = NEXT_INSN (last);
5258 if (NOTE_P (BB_HEAD (bb)))
5259 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5260 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5261
5262 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5263 }
5264 else
5265 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5266
5267 NOTE_BASIC_BLOCK (note) = bb;
5268
5269 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5270 {
5271 basic_block new_bb;
5272
5273 stmt = gsi_stmt (gsi);
5274
5275 /* If this statement is a non-debug one, and we generate debug
5276 insns, then this one might be the last real use of a TERed
5277 SSA_NAME, but where there are still some debug uses further
5278 down. Expanding the current SSA name in such further debug
5279 uses by their RHS might lead to wrong debug info, as coalescing
5280 might make the operands of such RHS be placed into the same
5281 pseudo as something else. Like so:
5282 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5283 use(a_1);
5284 a_2 = ...
5285 #DEBUG ... => a_1
5286 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5287 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5288 the write to a_2 would actually have clobbered the place which
5289 formerly held a_0.
5290
5291 So, instead of that, we recognize the situation, and generate
5292 debug temporaries at the last real use of TERed SSA names:
5293 a_1 = a_0 + 1;
5294 #DEBUG #D1 => a_1
5295 use(a_1);
5296 a_2 = ...
5297 #DEBUG ... => #D1
5298 */
5299 if (MAY_HAVE_DEBUG_INSNS
5300 && SA.values
5301 && !is_gimple_debug (stmt))
5302 {
5303 ssa_op_iter iter;
5304 tree op;
5305 gimple def;
5306
5307 location_t sloc = curr_insn_location ();
5308
5309 /* Look for SSA names that have their last use here (TERed
5310 names always have only one real use). */
5311 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5312 if ((def = get_gimple_for_ssa_name (op)))
5313 {
5314 imm_use_iterator imm_iter;
5315 use_operand_p use_p;
5316 bool have_debug_uses = false;
5317
5318 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5319 {
5320 if (gimple_debug_bind_p (USE_STMT (use_p)))
5321 {
5322 have_debug_uses = true;
5323 break;
5324 }
5325 }
5326
5327 if (have_debug_uses)
5328 {
5329 /* OP is a TERed SSA name, with DEF its defining
5330 statement, and where OP is used in further debug
5331 instructions. Generate a debug temporary, and
5332 replace all uses of OP in debug insns with that
5333 temporary. */
5334 gimple debugstmt;
5335 tree value = gimple_assign_rhs_to_tree (def);
5336 tree vexpr = make_node (DEBUG_EXPR_DECL);
5337 rtx val;
5338 machine_mode mode;
5339
5340 set_curr_insn_location (gimple_location (def));
5341
5342 DECL_ARTIFICIAL (vexpr) = 1;
5343 TREE_TYPE (vexpr) = TREE_TYPE (value);
5344 if (DECL_P (value))
5345 mode = DECL_MODE (value);
5346 else
5347 mode = TYPE_MODE (TREE_TYPE (value));
5348 DECL_MODE (vexpr) = mode;
5349
5350 val = gen_rtx_VAR_LOCATION
5351 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5352
5353 emit_debug_insn (val);
5354
5355 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5356 {
5357 if (!gimple_debug_bind_p (debugstmt))
5358 continue;
5359
5360 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5361 SET_USE (use_p, vexpr);
5362
5363 update_stmt (debugstmt);
5364 }
5365 }
5366 }
5367 set_curr_insn_location (sloc);
5368 }
5369
5370 currently_expanding_gimple_stmt = stmt;
5371
5372 /* Expand this statement, then evaluate the resulting RTL and
5373 fixup the CFG accordingly. */
5374 if (gimple_code (stmt) == GIMPLE_COND)
5375 {
5376 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5377 if (new_bb)
5378 return new_bb;
5379 }
5380 else if (gimple_debug_bind_p (stmt))
5381 {
5382 location_t sloc = curr_insn_location ();
5383 gimple_stmt_iterator nsi = gsi;
5384
5385 for (;;)
5386 {
5387 tree var = gimple_debug_bind_get_var (stmt);
5388 tree value;
5389 rtx val;
5390 machine_mode mode;
5391
5392 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5393 && TREE_CODE (var) != LABEL_DECL
5394 && !target_for_debug_bind (var))
5395 goto delink_debug_stmt;
5396
5397 if (gimple_debug_bind_has_value_p (stmt))
5398 value = gimple_debug_bind_get_value (stmt);
5399 else
5400 value = NULL_TREE;
5401
5402 last = get_last_insn ();
5403
5404 set_curr_insn_location (gimple_location (stmt));
5405
5406 if (DECL_P (var))
5407 mode = DECL_MODE (var);
5408 else
5409 mode = TYPE_MODE (TREE_TYPE (var));
5410
5411 val = gen_rtx_VAR_LOCATION
5412 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5413
5414 emit_debug_insn (val);
5415
5416 if (dump_file && (dump_flags & TDF_DETAILS))
5417 {
5418 /* We can't dump the insn with a TREE where an RTX
5419 is expected. */
5420 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5421 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5422 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5423 }
5424
5425 delink_debug_stmt:
5426 /* In order not to generate too many debug temporaries,
5427 we delink all uses of debug statements we already expanded.
5428 Therefore debug statements between definition and real
5429 use of TERed SSA names will continue to use the SSA name,
5430 and not be replaced with debug temps. */
5431 delink_stmt_imm_use (stmt);
5432
5433 gsi = nsi;
5434 gsi_next (&nsi);
5435 if (gsi_end_p (nsi))
5436 break;
5437 stmt = gsi_stmt (nsi);
5438 if (!gimple_debug_bind_p (stmt))
5439 break;
5440 }
5441
5442 set_curr_insn_location (sloc);
5443 }
5444 else if (gimple_debug_source_bind_p (stmt))
5445 {
5446 location_t sloc = curr_insn_location ();
5447 tree var = gimple_debug_source_bind_get_var (stmt);
5448 tree value = gimple_debug_source_bind_get_value (stmt);
5449 rtx val;
5450 machine_mode mode;
5451
5452 last = get_last_insn ();
5453
5454 set_curr_insn_location (gimple_location (stmt));
5455
5456 mode = DECL_MODE (var);
5457
5458 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5459 VAR_INIT_STATUS_UNINITIALIZED);
5460
5461 emit_debug_insn (val);
5462
5463 if (dump_file && (dump_flags & TDF_DETAILS))
5464 {
5465 /* We can't dump the insn with a TREE where an RTX
5466 is expected. */
5467 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5468 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5469 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5470 }
5471
5472 set_curr_insn_location (sloc);
5473 }
5474 else
5475 {
5476 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5477 if (call_stmt
5478 && gimple_call_tail_p (call_stmt)
5479 && disable_tail_calls)
5480 gimple_call_set_tail (call_stmt, false);
5481
5482 if (call_stmt && gimple_call_tail_p (call_stmt))
5483 {
5484 bool can_fallthru;
5485 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5486 if (new_bb)
5487 {
5488 if (can_fallthru)
5489 bb = new_bb;
5490 else
5491 return new_bb;
5492 }
5493 }
5494 else
5495 {
5496 def_operand_p def_p;
5497 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5498
5499 if (def_p != NULL)
5500 {
5501 /* Ignore this stmt if it is in the list of
5502 replaceable expressions. */
5503 if (SA.values
5504 && bitmap_bit_p (SA.values,
5505 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5506 continue;
5507 }
5508 last = expand_gimple_stmt (stmt);
5509 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5510 }
5511 }
5512 }
5513
5514 currently_expanding_gimple_stmt = NULL;
5515
5516 /* Expand implicit goto and convert goto_locus. */
5517 FOR_EACH_EDGE (e, ei, bb->succs)
5518 {
5519 if (e->goto_locus != UNKNOWN_LOCATION)
5520 set_curr_insn_location (e->goto_locus);
5521 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5522 {
5523 emit_jump (label_rtx_for_bb (e->dest));
5524 e->flags &= ~EDGE_FALLTHRU;
5525 }
5526 }
5527
5528 /* Expanded RTL can create a jump in the last instruction of the block.
5529 This might later be assumed to be a jump to the successor and break edge insertion.
5530 We need to insert a dummy move to prevent this. PR41440. */
5531 if (single_succ_p (bb)
5532 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5533 && (last = get_last_insn ())
5534 && JUMP_P (last))
5535 {
5536 rtx dummy = gen_reg_rtx (SImode);
5537 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5538 }
5539
5540 do_pending_stack_adjust ();
5541
5542 /* Find the block tail. The last insn in the block is the insn
5543 before a barrier and/or table jump insn. */
5544 last = get_last_insn ();
5545 if (BARRIER_P (last))
5546 last = PREV_INSN (last);
5547 if (JUMP_TABLE_DATA_P (last))
5548 last = PREV_INSN (PREV_INSN (last));
5549 BB_END (bb) = last;
5550
5551 update_bb_for_insn (bb);
5552
5553 return bb;
5554 }
5555
5556
5557 /* Create a basic block for initialization code. */
5558
5559 static basic_block
5560 construct_init_block (void)
5561 {
5562 basic_block init_block, first_block;
5563 edge e = NULL;
5564 int flags;
5565
5566 /* Multiple entry points not supported yet. */
5567 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5568 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5569 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5570 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5571 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5572
5573 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5574
5575 /* When the entry edge points to the first basic block, we don't need a jump;
5576 otherwise we have to jump to the proper target. */
5577 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5578 {
5579 tree label = gimple_block_label (e->dest);
5580
5581 emit_jump (label_rtx (label));
5582 flags = 0;
5583 }
5584 else
5585 flags = EDGE_FALLTHRU;
5586
5587 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5588 get_last_insn (),
5589 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5590 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5591 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5592 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5593 if (e)
5594 {
5595 first_block = e->dest;
5596 redirect_edge_succ (e, init_block);
5597 e = make_edge (init_block, first_block, flags);
5598 }
5599 else
5600 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5601 e->probability = REG_BR_PROB_BASE;
5602 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5603
5604 update_bb_for_insn (init_block);
5605 return init_block;
5606 }
5607
5608 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5609 found in the block tree. */
5610
5611 static void
5612 set_block_levels (tree block, int level)
5613 {
5614 while (block)
5615 {
5616 BLOCK_NUMBER (block) = level;
5617 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5618 block = BLOCK_CHAIN (block);
5619 }
5620 }
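
/* A worked example (hypothetical source):

     void f (void) { { int a; { int b; } } { int c; } }

   The outermost BLOCK (DECL_INITIAL of f) gets level 0; the two braced
   scopes at function level are its BLOCK_SUBBLOCKS, chained via
   BLOCK_CHAIN, at level 1; and the scope declaring B is at level 2. */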
5621
5622 /* Create a block containing landing pads and similar stuff. */
5623
5624 static void
5625 construct_exit_block (void)
5626 {
5627 rtx_insn *head = get_last_insn ();
5628 rtx_insn *end;
5629 basic_block exit_block;
5630 edge e, e2;
5631 unsigned ix;
5632 edge_iterator ei;
5633 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5634 rtx_insn *orig_end = BB_END (prev_bb);
5635
5636 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5637
5638 /* Make sure the locus is set to the end of the function, so that
5639 epilogue line numbers and warnings are set properly. */
5640 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5641 input_location = cfun->function_end_locus;
5642
5643 /* Generate rtl for function exit. */
5644 expand_function_end ();
5645
5646 end = get_last_insn ();
5647 if (head == end)
5648 return;
5649 /* While emitting the function end we could move the end of the last basic
5650 block. */
5651 BB_END (prev_bb) = orig_end;
5652 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5653 head = NEXT_INSN (head);
5654 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5655 bb frequency counting will be confused. Any instructions before that
5656 label are emitted for the case where PREV_BB falls through into the
5657 exit block, so append those instructions to prev_bb in that case. */
5658 if (NEXT_INSN (head) != return_label)
5659 {
5660 while (NEXT_INSN (head) != return_label)
5661 {
5662 if (!NOTE_P (NEXT_INSN (head)))
5663 BB_END (prev_bb) = NEXT_INSN (head);
5664 head = NEXT_INSN (head);
5665 }
5666 }
5667 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5668 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5669 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5670 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5671
5672 ix = 0;
5673 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5674 {
5675 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5676 if (!(e->flags & EDGE_ABNORMAL))
5677 redirect_edge_succ (e, exit_block);
5678 else
5679 ix++;
5680 }
5681
5682 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5683 e->probability = REG_BR_PROB_BASE;
5684 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5685 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5686 if (e2 != e)
5687 {
5688 e->count -= e2->count;
5689 exit_block->count -= e2->count;
5690 exit_block->frequency -= EDGE_FREQUENCY (e2);
5691 }
5692 if (e->count < 0)
5693 e->count = 0;
5694 if (exit_block->count < 0)
5695 exit_block->count = 0;
5696 if (exit_block->frequency < 0)
5697 exit_block->frequency = 0;
5698 update_bb_for_insn (exit_block);
5699 }
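
/* Profile bookkeeping sketch for the subtraction loop above (made-up
   numbers): if EXIT has count 1200 and one abnormal predecessor edge
   of count 200 stays attached to it, the new fallthru edge keeps
   1200 - 200 = 1000, and the same amount is subtracted from the
   constructed exit block's count and frequency, clamped at zero. */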
5700
5701 /* Helper function for discover_nonconstant_array_refs.
5702 Look for ARRAY_REF nodes with non-constant indexes and mark them
5703 addressable. */
5704
5705 static tree
5706 discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
5707 void *data ATTRIBUTE_UNUSED)
5708 {
5709 tree t = *tp;
5710
5711 if (IS_TYPE_OR_DECL_P (t))
5712 *walk_subtrees = 0;
5713 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5714 {
5715 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5716 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5717 && (!TREE_OPERAND (t, 2)
5718 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5719 || (TREE_CODE (t) == COMPONENT_REF
5720 && (!TREE_OPERAND (t, 2)
5721 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5722 || TREE_CODE (t) == BIT_FIELD_REF
5723 || TREE_CODE (t) == REALPART_EXPR
5724 || TREE_CODE (t) == IMAGPART_EXPR
5725 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5726 || CONVERT_EXPR_P (t))
5727 t = TREE_OPERAND (t, 0);
5728
5729 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5730 {
5731 t = get_base_address (t);
5732 if (t && DECL_P (t)
5733 && DECL_MODE (t) != BLKmode)
5734 TREE_ADDRESSABLE (t) = 1;
5735 }
5736
5737 *walk_subtrees = 0;
5738 }
5739
5740 return NULL_TREE;
5741 }
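
/* E.g. (a sketch of the walk): for s.a[2].b[i] the outer ARRAY_REF
   b[i] has a variable index, so the stripping loop above exits at
   once, get_base_address finds S, and S is marked addressable if it
   has a non-BLKmode DECL_MODE. A fully invariant chain such as
   s.a[2].b[3] is stripped completely and nothing gets marked. */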
5742
5743 /* RTL expansion is not able to compile array references with variable
5744 offsets for arrays stored in a single register. Discover such
5745 expressions and mark the variables as addressable to avoid this
5746 scenario. */
5747
5748 static void
5749 discover_nonconstant_array_refs (void)
5750 {
5751 basic_block bb;
5752 gimple_stmt_iterator gsi;
5753
5754 FOR_EACH_BB_FN (bb, cfun)
5755 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5756 {
5757 gimple stmt = gsi_stmt (gsi);
5758 if (!is_gimple_debug (stmt))
5759 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5760 }
5761 }
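
/* A sketch of the scenario this avoids (hypothetical example):

     int f (int i)
     {
       struct { int a[2]; } s = { { 1, 2 } };
       return s.a[i];
     }

   S is small enough to live in a single register, but the variable
   index I cannot be applied to a register, so S is marked addressable
   and kept in memory instead. */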
5762
5763 /* This function sets crtl->args.internal_arg_pointer to a virtual
5764 register if DRAP is needed. The local register allocator will replace
5765 virtual_incoming_args_rtx with the virtual register. */
5766
5767 static void
5768 expand_stack_alignment (void)
5769 {
5770 rtx drap_rtx;
5771 unsigned int preferred_stack_boundary;
5772
5773 if (! SUPPORTS_STACK_ALIGNMENT)
5774 return;
5775
5776 if (cfun->calls_alloca
5777 || cfun->has_nonlocal_label
5778 || crtl->has_nonlocal_goto)
5779 crtl->need_drap = true;
5780
5781 /* Call update_stack_boundary here again to update the incoming stack
5782 boundary. It may set the incoming stack alignment to a different
5783 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5784 use the minimum incoming stack alignment to check if it is OK
5785 to perform sibcall optimization, since sibcall optimization will
5786 only align the outgoing stack to the incoming stack boundary. */
5787 if (targetm.calls.update_stack_boundary)
5788 targetm.calls.update_stack_boundary ();
5789
5790 /* The incoming stack frame has to be aligned at least at
5791 parm_stack_boundary. */
5792 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5793
5794 /* Update crtl->stack_alignment_estimated and use it later to align
5795 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5796 exceptions since callgraph doesn't collect incoming stack alignment
5797 in this case. */
5798 if (cfun->can_throw_non_call_exceptions
5799 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5800 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5801 else
5802 preferred_stack_boundary = crtl->preferred_stack_boundary;
5803 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5804 crtl->stack_alignment_estimated = preferred_stack_boundary;
5805 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5806 crtl->stack_alignment_needed = preferred_stack_boundary;
5807
5808 gcc_assert (crtl->stack_alignment_needed
5809 <= crtl->stack_alignment_estimated);
5810
5811 crtl->stack_realign_needed
5812 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5813 crtl->stack_realign_tried = crtl->stack_realign_needed;
5814
5815 crtl->stack_realign_processed = true;
5816
5817 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5818 alignment. */
5819 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5820 drap_rtx = targetm.calls.get_drap_rtx ();
5821
5822 /* stack_realign_drap and drap_rtx must match. */
5823 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5824
5825 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5826 if (NULL != drap_rtx)
5827 {
5828 crtl->args.internal_arg_pointer = drap_rtx;
5829
5830 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5831 needed. */
5832 fixup_tail_calls ();
5833 }
5834 }
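
/* A hypothetical example that ends up needing DRAP on a target that
   supports stack realignment: the alloca call sets crtl->need_drap
   above, and the over-aligned local raises the estimated stack
   alignment past INCOMING_STACK_BOUNDARY during variable expansion.

     void g (int n)
     {
       float v[8] __attribute__ ((aligned (32)));
       char *p = __builtin_alloca (n);
       use (v, p);   (use () is a placeholder)
     }
 */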
5835 \f
5836
5837 static void
5838 expand_main_function (void)
5839 {
5840 #if (defined(INVOKE__main) \
5841 || (!defined(HAS_INIT_SECTION) \
5842 && !defined(INIT_SECTION_ASM_OP) \
5843 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5844 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5845 #endif
5846 }
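
/* Conceptually (a sketch, not what any particular target emits), on
   systems lacking an init section this behaves as if

     int main (void) { body (); }

   had been written as

     int main (void) { __main (); body (); }

   where __main runs the global constructors before user code. */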
5847 \f
5848
5849 /* Expand code to initialize the stack_protect_guard. This is invoked at
5850 the beginning of a function to be protected. */
5851
5852 #ifndef HAVE_stack_protect_set
5853 # define HAVE_stack_protect_set 0
5854 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5855 #endif
5856
5857 static void
5858 stack_protect_prologue (void)
5859 {
5860 tree guard_decl = targetm.stack_protect_guard ();
5861 rtx x, y;
5862
5863 x = expand_normal (crtl->stack_protect_guard);
5864 y = expand_normal (guard_decl);
5865
5866 /* Allow the target to copy from Y to X without leaking Y into a
5867 register. */
5868 if (HAVE_stack_protect_set)
5869 {
5870 rtx insn = gen_stack_protect_set (x, y);
5871 if (insn)
5872 {
5873 emit_insn (insn);
5874 return;
5875 }
5876 }
5877
5878 /* Otherwise do a straight move. */
5879 emit_move_insn (x, y);
5880 }
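
/* The net effect, sketched as source code (the matching check is
   emitted in the epilogue, not here):

     void protected_fn (void)
     {
       guard_copy = __stack_chk_guard;       (this prologue)
       ... function body ...
       if (guard_copy != __stack_chk_guard)  (checked before return)
         __stack_chk_fail ();
     }

   Here guard_copy stands for crtl->stack_protect_guard and
   __stack_chk_guard for the target's guard_decl. */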
5881
5882 /* Translate the intermediate representation contained in the CFG
5883 from GIMPLE trees to RTL.
5884
5885 We do conversion per basic block and preserve/update the tree CFG.
5886 This implies we have to do some magic as the CFG can simultaneously
5887 consist of basic blocks containing RTL and GIMPLE trees. This can
5888 confuse the CFG hooks, so be careful not to manipulate the CFG during
5889 expansion. */
5890
5891 namespace {
5892
5893 const pass_data pass_data_expand =
5894 {
5895 RTL_PASS, /* type */
5896 "expand", /* name */
5897 OPTGROUP_NONE, /* optinfo_flags */
5898 TV_EXPAND, /* tv_id */
5899 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5900 | PROP_gimple_lcx
5901 | PROP_gimple_lvec
5902 | PROP_gimple_lva), /* properties_required */
5903 PROP_rtl, /* properties_provided */
5904 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5905 0, /* todo_flags_start */
5906 0, /* todo_flags_finish */
5907 };
5908
5909 class pass_expand : public rtl_opt_pass
5910 {
5911 public:
5912 pass_expand (gcc::context *ctxt)
5913 : rtl_opt_pass (pass_data_expand, ctxt)
5914 {}
5915
5916 /* opt_pass methods: */
5917 virtual unsigned int execute (function *);
5918
5919 }; // class pass_expand
5920
5921 unsigned int
5922 pass_expand::execute (function *fun)
5923 {
5924 basic_block bb, init_block;
5925 sbitmap blocks;
5926 edge_iterator ei;
5927 edge e;
5928 rtx_insn *var_seq, *var_ret_seq;
5929 unsigned i;
5930
5931 timevar_push (TV_OUT_OF_SSA);
5932 rewrite_out_of_ssa (&SA);
5933 timevar_pop (TV_OUT_OF_SSA);
5934 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5935
5936 if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
5937 {
5938 gimple_stmt_iterator gsi;
5939 FOR_EACH_BB_FN (bb, cfun)
5940 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5941 if (gimple_debug_bind_p (gsi_stmt (gsi)))
5942 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
5943 }
5944
5945 /* Make sure all values used by the optimization passes have sane
5946 defaults. */
5947 reg_renumber = 0;
5948
5949 /* Some backends want to know that we are expanding to RTL. */
5950 currently_expanding_to_rtl = 1;
5951 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5952 free_dominance_info (CDI_DOMINATORS);
5953
5954 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5955
5956 if (chkp_function_instrumented_p (current_function_decl))
5957 chkp_reset_rtl_bounds ();
5958
5959 insn_locations_init ();
5960 if (!DECL_IS_BUILTIN (current_function_decl))
5961 {
5962 /* Eventually, all FEs should explicitly set function_start_locus. */
5963 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5964 set_curr_insn_location
5965 (DECL_SOURCE_LOCATION (current_function_decl));
5966 else
5967 set_curr_insn_location (fun->function_start_locus);
5968 }
5969 else
5970 set_curr_insn_location (UNKNOWN_LOCATION);
5971 prologue_location = curr_insn_location ();
5972
5973 #ifdef INSN_SCHEDULING
5974 init_sched_attrs ();
5975 #endif
5976
5977 /* Make sure the first insn is a note even if we don't want linenums.
5978 This guarantees the first insn will never be deleted.
5979 Also, final expects a note to appear there. */
5980 emit_note (NOTE_INSN_DELETED);
5981
5982 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5983 discover_nonconstant_array_refs ();
5984
5985 targetm.expand_to_rtl_hook ();
5986 crtl->stack_alignment_needed = STACK_BOUNDARY;
5987 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5988 crtl->stack_alignment_estimated = 0;
5989 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5990 fun->cfg->max_jumptable_ents = 0;
5991
5992 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
5993 of the function section at expansion time to predict the distance of calls. */
5994 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5995
5996 /* Expand the variables recorded during gimple lowering. */
5997 timevar_push (TV_VAR_EXPAND);
5998 start_sequence ();
5999
6000 var_ret_seq = expand_used_vars ();
6001
6002 var_seq = get_insns ();
6003 end_sequence ();
6004 timevar_pop (TV_VAR_EXPAND);
6005
6006 /* Honor stack protection warnings. */
6007 if (warn_stack_protect)
6008 {
6009 if (fun->calls_alloca)
6010 warning (OPT_Wstack_protector,
6011 "stack protector not protecting local variables: "
6012 "variable length buffer");
6013 if (has_short_buffer && !crtl->stack_protect_guard)
6014 warning (OPT_Wstack_protector,
6015 "stack protector not protecting function: "
6016 "all local arrays are less than %d bytes long",
6017 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6018 }
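
/* For instance (hypothetical), with -fstack-protector -Wstack-protector
   the first warning triggers for

     void h (int n) { char *p = __builtin_alloca (n); use (p); }

   since a variable-length buffer cannot be covered by the guard. */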
6019
6020 /* Set up parameters and prepare for return, for the function. */
6021 expand_function_start (current_function_decl);
6022
6023 /* If we emitted any instructions for setting up the variables,
6024 emit them before the FUNCTION_START note. */
6025 if (var_seq)
6026 {
6027 emit_insn_before (var_seq, parm_birth_insn);
6028
6029 /* In expand_function_end we'll insert the alloca save/restore
6030 before parm_birth_insn. We've just inserted an alloca call.
6031 Adjust the pointer to match. */
6032 parm_birth_insn = var_seq;
6033 }
6034
6035 /* Now that we also have the parameter RTXs, copy them over to our
6036 partitions. */
6037 for (i = 0; i < SA.map->num_partitions; i++)
6038 {
6039 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
6040
6041 if (TREE_CODE (var) != VAR_DECL
6042 && !SA.partition_to_pseudo[i])
6043 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
6044 gcc_assert (SA.partition_to_pseudo[i]);
6045
6046 /* If this decl was marked as living in multiple places, reset
6047 this now to NULL. */
6048 if (DECL_RTL_IF_SET (var) == pc_rtx)
6049 SET_DECL_RTL (var, NULL);
6050
6051 /* Some RTL parts really want to look at DECL_RTL(x) when x
6052 was a decl marked in REG_ATTR or MEM_ATTR. We could use
6053 SET_DECL_RTL here to make this available, but that would mean
6054 selecting one of the potentially many RTLs for one DECL. Instead
6055 of doing that we simply reset the MEM_EXPR of the RTL in question;
6056 then nobody can get at it and hence nobody can call DECL_RTL on it. */
6057 if (!DECL_RTL_SET_P (var))
6058 {
6059 if (MEM_P (SA.partition_to_pseudo[i]))
6060 set_mem_expr (SA.partition_to_pseudo[i], NULL);
6061 }
6062 }
6063
6064 /* If we have a class containing differently aligned pointers
6065 we need to merge those into the corresponding RTL pointer
6066 alignment. */
6067 for (i = 1; i < num_ssa_names; i++)
6068 {
6069 tree name = ssa_name (i);
6070 int part;
6071 rtx r;
6072
6073 if (!name
6074 /* We might have generated new SSA names in
6075 update_alias_info_with_stack_vars. They will have a NULL
6076 defining statement, and won't be part of the partitioning,
6077 so ignore those. */
6078 || !SSA_NAME_DEF_STMT (name))
6079 continue;
6080 part = var_to_partition (SA.map, name);
6081 if (part == NO_PARTITION)
6082 continue;
6083
6084 /* Adjust all partition members to get the underlying decl of
6085 the representative which we might have created in expand_one_var. */
6086 if (SSA_NAME_VAR (name) == NULL_TREE)
6087 {
6088 tree leader = partition_to_var (SA.map, part);
6089 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
6090 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
6091 }
6092 if (!POINTER_TYPE_P (TREE_TYPE (name)))
6093 continue;
6094
6095 r = SA.partition_to_pseudo[part];
6096 if (REG_P (r))
6097 mark_reg_pointer (r, get_pointer_alignment (name));
6098 }
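
/* E.g. (hypothetical): if pointers p_1 (known 256-bit alignment) and
   p_2 (only 8-bit alignment) were coalesced into one partition, the
   loop above calls mark_reg_pointer once per name and the shared
   pseudo ends up with the smallest alignment seen. */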
6099
6100 /* If this function is `main', emit a call to `__main'
6101 to run global initializers, etc. */
6102 if (DECL_NAME (current_function_decl)
6103 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6104 && DECL_FILE_SCOPE_P (current_function_decl))
6105 expand_main_function ();
6106
6107 /* Initialize the stack_protect_guard field. This must happen after the
6108 call to __main (if any) so that the external decl is initialized. */
6109 if (crtl->stack_protect_guard)
6110 stack_protect_prologue ();
6111
6112 expand_phi_nodes (&SA);
6113
6114 /* Register rtl specific functions for cfg. */
6115 rtl_register_cfg_hooks ();
6116
6117 init_block = construct_init_block ();
6118
6119 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
6120 remaining edges later. */
6121 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6122 e->flags &= ~EDGE_EXECUTABLE;
6123
6124 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6125 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6126 next_bb)
6127 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6128
6129 if (MAY_HAVE_DEBUG_INSNS)
6130 expand_debug_locations ();
6131
6132 if (deep_ter_debug_map)
6133 {
6134 delete deep_ter_debug_map;
6135 deep_ter_debug_map = NULL;
6136 }
6137
6138 /* Free stuff we no longer need after GIMPLE optimizations. */
6139 free_dominance_info (CDI_DOMINATORS);
6140 free_dominance_info (CDI_POST_DOMINATORS);
6141 delete_tree_cfg_annotations ();
6142
6143 timevar_push (TV_OUT_OF_SSA);
6144 finish_out_of_ssa (&SA);
6145 timevar_pop (TV_OUT_OF_SSA);
6146
6147 timevar_push (TV_POST_EXPAND);
6148 /* We are no longer in SSA form. */
6149 fun->gimple_df->in_ssa_p = false;
6150 loops_state_clear (LOOP_CLOSED_SSA);
6151
6152 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6153 conservatively to true until they are all profile aware. */
6154 delete lab_rtx_for_bb;
6155 free_histograms ();
6156
6157 construct_exit_block ();
6158 insn_locations_finalize ();
6159
6160 if (var_ret_seq)
6161 {
6162 rtx_insn *after = return_label;
6163 rtx_insn *next = NEXT_INSN (after);
6164 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6165 after = next;
6166 emit_insn_after (var_ret_seq, after);
6167 }
6168
6169 /* Zap the tree EH table. */
6170 set_eh_throw_stmt_table (fun, NULL);
6171
6172 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6173 to split edges, which edge insertion might do. */
6174 rebuild_jump_labels (get_insns ());
6175
6176 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6177 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6178 {
6179 edge e;
6180 edge_iterator ei;
6181 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6182 {
6183 if (e->insns.r)
6184 {
6185 rebuild_jump_labels_chain (e->insns.r);
6186 /* Put insns after parm birth, but before
6187 NOTE_INSN_FUNCTION_BEG. */
6188 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6189 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6190 {
6191 rtx_insn *insns = e->insns.r;
6192 e->insns.r = NULL;
6193 if (NOTE_P (parm_birth_insn)
6194 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6195 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6196 else
6197 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6198 }
6199 else
6200 commit_one_edge_insertion (e);
6201 }
6202 else
6203 ei_next (&ei);
6204 }
6205 }
6206
6207 /* We're done expanding trees to RTL. */
6208 currently_expanding_to_rtl = 0;
6209
6210 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6211 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6212 {
6213 edge e;
6214 edge_iterator ei;
6215 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6216 {
6217 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6218 e->flags &= ~EDGE_EXECUTABLE;
6219
6220 /* At the moment not all abnormal edges match the RTL
6221 representation. It is safe to remove them here as
6222 find_many_sub_basic_blocks will rediscover them.
6223 In the future we should get this fixed properly. */
6224 if ((e->flags & EDGE_ABNORMAL)
6225 && !(e->flags & EDGE_SIBCALL))
6226 remove_edge (e);
6227 else
6228 ei_next (&ei);
6229 }
6230 }
6231
6232 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6233 bitmap_ones (blocks);
6234 find_many_sub_basic_blocks (blocks);
6235 sbitmap_free (blocks);
6236 purge_all_dead_edges ();
6237
6238 expand_stack_alignment ();
6239
6240 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6241 function. */
6242 if (crtl->tail_call_emit)
6243 fixup_tail_calls ();
6244
6245 /* After initial rtl generation, call back to finish generating
6246 exception support code. We need to do this before cleaning up
6247 the CFG as the code does not expect dead landing pads. */
6248 if (fun->eh->region_tree != NULL)
6249 finish_eh_generation ();
6250
6251 /* Remove unreachable blocks, otherwise we cannot compute dominators
6252 which are needed for loop state verification. As a side-effect
6253 this also compacts blocks.
6254 ??? We cannot remove trivially dead insns here as for example
6255 the DRAP reg on i?86 is not magically live at this point.
6256 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6257 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6258
6259 #ifdef ENABLE_CHECKING
6260 verify_flow_info ();
6261 #endif
6262
6263 /* Initialize pseudos allocated for hard registers. */
6264 emit_initial_value_sets ();
6265
6266 /* And finally unshare all RTL. */
6267 unshare_all_rtl ();
6268
6269 /* There's no need to defer outputting this function any more; we
6270 know we want to output it. */
6271 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6272
6273 /* Now that we're done expanding trees to RTL, we shouldn't have any
6274 more CONCATs anywhere. */
6275 generating_concat_p = 0;
6276
6277 if (dump_file)
6278 {
6279 fprintf (dump_file,
6280 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6281 /* And the pass manager will dump RTL for us. */
6282 }
6283
6284 /* If we're emitting a nested function, make sure its parent gets
6285 emitted as well. Doing otherwise confuses debug info. */
6286 {
6287 tree parent;
6288 for (parent = DECL_CONTEXT (current_function_decl);
6289 parent != NULL_TREE;
6290 parent = get_containing_scope (parent))
6291 if (TREE_CODE (parent) == FUNCTION_DECL)
6292 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6293 }
6294
6295 /* We are now committed to emitting code for this function. Do any
6296 preparation, such as emitting abstract debug info for the inline
6297 function before it gets mangled by optimization. */
6298 if (cgraph_function_possibly_inlined_p (current_function_decl))
6299 (*debug_hooks->outlining_inline_function) (current_function_decl);
6300
6301 TREE_ASM_WRITTEN (current_function_decl) = 1;
6302
6303 /* After expanding, the return labels are no longer needed. */
6304 return_label = NULL;
6305 naked_return_label = NULL;
6306
6307 /* After expanding, the tm_restart map is no longer needed. */
6308 if (fun->gimple_df->tm_restart)
6309 fun->gimple_df->tm_restart = NULL;
6310
6311 /* Tag the blocks with a depth number so that change_scope can find
6312 the common parent easily. */
6313 set_block_levels (DECL_INITIAL (fun->decl), 0);
6314 default_rtl_profile ();
6315
6316 timevar_pop (TV_POST_EXPAND);
6317
6318 return 0;
6319 }
6320
6321 } // anon namespace
6322
6323 rtl_opt_pass *
6324 make_pass_expand (gcc::context *ctxt)
6325 {
6326 return new pass_expand (ctxt);
6327 }