/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs.h"
#include "regs.h" /* For reg_renumber.  */
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "cfgloop.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "tree-ssa-address.h"
#include "output.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

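/* Get at the decl underlying an SSA name: for an SSA_NAME return its
   SSA_NAME_VAR (which may be NULL for anonymous names), otherwise the
   tree itself.  */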
#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce ambiguity
   out of the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}

/* Associate declaration T with storage space X.  If T is not
   an SSA name this is exactly SET_DECL_RTL; otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

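      /* Walk XM down to the base REG or MEM so we can look at the decl
	 recorded in its REG_EXPR or MEM_EXPR attributes.  */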
    retry:
      if (MEM_P (xm))
	cur = MEM_EXPR (xm);
      else if (REG_P (xm))
	cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
	{
	  gcc_assert (subreg_lowpart_p (xm));
	  xm = SUBREG_REG (xm);
	  goto retry;
	}
      else if (GET_CODE (xm) == CONCAT)
	{
	  xm = XEXP (xm, 0);
	  goto retry;
	}
      else if (GET_CODE (xm) == PARALLEL)
	{
	  xm = XVECEXP (xm, 0, 0);
	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
	  xm = XEXP (xm, 0);
	  goto retry;
	}
      else if (xm == pc_rtx)
	skip = true;
      else
	gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
	{
	  if (MEM_P (x))
	    set_mem_attributes (x,
				next && TREE_CODE (next) == SSA_NAME
				? TREE_TYPE (next)
				: next, true);
	  else
	    set_reg_attrs_for_decl_rtl (next, x);
	}
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
	{
	  if (SA.partition_to_pseudo[part])
	    gcc_assert (SA.partition_to_pseudo[part] == x);
	  else if (x != pc_rtx)
	    SA.partition_to_pseudo[part] = x;
	}
      /* For the benefit of debug information at -O0 (where
	 vartracking doesn't run) record the place also in the base
	 DECL.  For PARMs and RESULTs, do so only when setting the
	 default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
	  && (VAR_P (SSA_NAME_VAR (t))
	      || SSA_NAME_IS_DEFAULT_DEF (t)))
	{
	  tree var = SSA_NAME_VAR (t);
	  /* If we don't yet have something recorded, just record it now.  */
	  if (!DECL_RTL_SET_P (var))
	    SET_DECL_RTL (var, x);
	  /* If we have it set already to "multiple places" don't
	     change this.  */
	  else if (DECL_RTL (var) == pc_rtx)
	    ;
	  /* If we have something recorded and it's not the same place
	     as we want to record now, we have multiple partitions for the
	     same base variable, with different places.  We can't just
	     randomly choose one, hence we have to say that we don't know.
	     This only happens with optimization, and there var-tracking
	     will figure out the right thing.  */
	  else if (DECL_RTL (var) != x)
	    SET_DECL_RTL (var, pc_rtx);
	}
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  poly_uint64 size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

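/* End-of-chain marker for the singly-linked partition lists threaded
   through stack_var::next.  */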
#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-increasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}

/* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
   round down otherwise.  Return the aligned BASE value.  */
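/* For example, align_base (13, 8, true) == 16, while
   align_base (13, 8, false) == 8.  */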

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static poly_int64
alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
{
  poly_int64 offset, new_frame_offset;

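  /* On a downward-growing frame the new slot lies below the current
     frame_offset, so align downwards; otherwise align upwards and then
     advance past the slot.  frame_phase keeps the alignment relative to
     the true stack boundary rather than to offset zero.  */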
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
	= aligned_lower_bound (frame_offset - frame_phase - size,
			       align) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
	= aligned_upper_bound (frame_offset - frame_phase,
			       align) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_poly_uint64 (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (known_eq (v->size, 0U))
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
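  /* DECL_RTL set to pc_rtx marks a decl that add_stack_var has queued
     but not yet assigned a location; see the pc_rtx hack there.  */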
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
	bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
	{
	  size_t num = *v;
	  bitmap_iterator bi;
	  unsigned i;
	  gcc_assert (num < stack_vars_num);
	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
	    add_stack_var_conflict (num, i);
	}
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  size_t *v;
	  /* Nested function lowering might introduce LHSs
	     that are COMPONENT_REFs.  */
	  if (!VAR_P (lhs))
	    continue;
	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
	      && (v = decl_to_stack_part->get (lhs)))
	    bitmap_clear_bit (work, *v);
	}
      else if (!is_gimple_debug (stmt))
	{
	  if (for_conflict
	      && visit == visit_op)
	    {
	      /* If this is the first real instruction in this BB we need
		 to add conflicts for everything live at this point now.
		 Unlike classical liveness for named objects we can't
		 rely on seeing a def/use of the names we're interested in.
		 There might merely be indirect loads/stores.  We'd not add any
		 conflicts for such partitions.  */
	      bitmap_iterator bi;
	      unsigned i;
	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
		{
		  struct stack_var *a = &stack_vars[i];
		  if (!a->conflicts)
		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
		  bitmap_ior_into (a->conflicts, work);
		}
	      visit = visit_conflict;
	    }
	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
	}
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;
  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward without also moving a dereference to
     it upward.  But it is conservatively correct as a variable can never
     hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
	{
	  bitmap active;
	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	  active = (bitmap)bb->aux;
	  add_scope_conflicts_1 (bb, work, false);
	  if (bitmap_ior_into (active, work))
	    changed = true;
	}
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  poly_int64 sizea = stack_vars[ia].size;
  poly_int64 sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  int diff = compare_sizes_for_sort (sizeb, sizea);
  if (diff != 0)
    return diff;

  /* Tertiary compare on true alignment, increasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, decreasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
	return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

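/* Map from the DECL_PT_UID of a partitioned variable to the bitmap of
   DECL_PT_UIDs of all members of its partition.  */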
struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
			       part_hashmap *decls_to_partitions,
			       hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
	 visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add, we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
	 || !bitmap_bit_p (temp, i))
	&& (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
	  || stack_vars[i].next == EOC)
	continue;

      if (!decls_to_partitions)
	{
	  decls_to_partitions = new part_hashmap;
	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
	}

      /* Create an SSA_NAME that points to the partition for use
	 as base during alias-oracle queries on RTL for bases that
	 have been partitioned.  */
      if (var == NULL_TREE)
	var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
	 points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  tree decl = stack_vars[j].decl;
	  unsigned int uid = DECL_PT_UID (decl);
	  bitmap_set_bit (part, uid);
	  decls_to_partitions->put (uid, part);
	  cfun->gimple_df->decls_to_pointers->put (decl, name);
	  if (TREE_ADDRESSABLE (decl))
	    TREE_ADDRESSABLE (name) = 1;
	}

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  struct ptr_info_def *pi;

	  if (POINTER_TYPE_P (TREE_TYPE (name))
	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
					   &visited, temp);
	}

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
				     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
	add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size in descending order.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	  }
	}
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      poly_int64 isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
	 see a var that is not a partition representative, it must
	 have been merged earlier.  */
      if (stack_vars[i].representative != i)
	continue;

      for (sj = si + 1; sj < n; ++sj)
	{
	  size_t j = stack_vars_sorted[sj];
	  unsigned int jalign = stack_vars[j].alignb;
	  poly_int64 jsize = stack_vars[j].size;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Do not mix objects of "small" (supported) alignment
	     and "large" (unsupported) alignment.  */
	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
	    break;

	  /* For Address Sanitizer do not mix objects with different
	     sizes, as the shorter vars wouldn't be adequately protected.
	     Don't do that for "large" (unsupported) alignment objects,
	     those aren't protected anyway.  */
	  if (asan_sanitize_stack_p ()
	      && maybe_ne (isize, jsize)
	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* UNION the objects.  */
	  union_stack_vars (i, j);
	}
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
      print_dec (stack_vars[i].size, dump_file);
      fprintf (dump_file, " align %u\n", stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	}
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
			 poly_int64 offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
		   ? TYPE_MODE (TREE_TYPE (decl))
		   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set the alignment we actually gave this decl if it isn't an
	 SSA name.  If it is we generate stack slots only accidentally
	 so it isn't as important; we'll simply use the alignment that
	 is already set.  */
      if (base == virtual_stack_vars_rtx)
	offset -= frame_phase;
      align = known_alignment (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
	align = base_align;

      /* One would think that we could assert that we're not decreasing
	 alignment here, but (at least) the i386 port does exactly this
	 via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order: highest-offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  poly_uint64 large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
	{
	  unsigned alignb;

	  i = stack_vars_sorted[si];
	  alignb = stack_vars[i].alignb;

	  /* All "large" alignment decls come before all "small" alignment
	     decls, but "large" alignment decls are not sorted based on
	     their alignment.  Increase large_align to track the largest
	     required alignment.  */
	  if ((alignb * BITS_PER_UNIT) > large_align)
	    large_align = alignb * BITS_PER_UNIT;

	  /* Stop when we get to the first decl with "small" alignment.  */
	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Skip variables that aren't partition representatives.  */
	  if (stack_vars[i].representative != i)
	    continue;

	  /* Skip variables that have already had rtl assigned.  See also
	     add_stack_var where we perpetrate this pc_rtx hack.  */
	  decl = stack_vars[i].decl;
	  if (TREE_CODE (decl) == SSA_NAME
	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
	      : DECL_RTL (decl) != pc_rtx)
	    continue;

	  large_size = aligned_upper_bound (large_size, alignb);
	  large_size += stack_vars[i].size;
	}
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      poly_int64 offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
	  : DECL_RTL (decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (i))
	continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  base = virtual_stack_vars_rtx;
	  /* ASAN description strings don't yet have a syntax for expressing
	     polynomial offsets.  */
	  HOST_WIDE_INT prev_offset;
	  if (asan_sanitize_stack_p ()
	      && pred
	      && frame_offset.is_constant (&prev_offset)
	      && stack_vars[i].size.is_constant ())
	    {
	      prev_offset = align_base (prev_offset,
					MAX (alignb, ASAN_RED_ZONE_SIZE),
					!FRAME_GROWS_DOWNWARD);
	      tree repr_decl = NULL_TREE;
	      offset
		= alloc_stack_frame_space (stack_vars[i].size
					   + ASAN_RED_ZONE_SIZE,
					   MAX (alignb, ASAN_RED_ZONE_SIZE));

	      data->asan_vec.safe_push (prev_offset);
	      /* Allocating a constant amount of space from a constant
		 starting offset must give a constant result.  */
	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
					.to_constant ());
	      /* Find best representative of the partition.
		 Prefer those with DECL_NAME, even better
		 satisfying asan_protect_stack_decl predicate.  */
	      for (j = i; j != EOC; j = stack_vars[j].next)
		if (asan_protect_stack_decl (stack_vars[j].decl)
		    && DECL_NAME (stack_vars[j].decl))
		  {
		    repr_decl = stack_vars[j].decl;
		    break;
		  }
		else if (repr_decl == NULL_TREE
			 && DECL_P (stack_vars[j].decl)
			 && DECL_NAME (stack_vars[j].decl))
		  repr_decl = stack_vars[j].decl;
	      if (repr_decl == NULL_TREE)
		repr_decl = stack_vars[i].decl;
	      data->asan_decl_vec.safe_push (repr_decl);
	      data->asan_alignb = MAX (data->asan_alignb, alignb);
	      if (data->asan_base == NULL)
		data->asan_base = gen_reg_rtx (Pmode);
	      base = data->asan_base;

	      if (!STRICT_ALIGNMENT)
		base_align = crtl->max_used_stack_slot_alignment;
	      else
		base_align = MAX (crtl->max_used_stack_slot_alignment,
				  GET_MODE_ALIGNMENT (SImode)
				  << ASAN_SHADOW_SHIFT);
	    }
	  else
	    {
	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
	      base_align = crtl->max_used_stack_slot_alignment;
	    }
	}
      else
	{
	  /* Large alignment is only processed in the last pass.  */
	  if (pred)
	    continue;

	  /* If there were any variables requiring "large" alignment, allocate
	     space.  */
	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
	    {
	      poly_int64 loffset;
	      rtx large_allocsize;

	      large_allocsize = gen_int_mode (large_size, Pmode);
	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
	      loffset = alloc_stack_frame_space
		(rtx_to_poly_int64 (large_allocsize),
		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
	      large_base = get_dynamic_stack_base (loffset, large_align);
	      large_allocation_done = true;
	    }
	  gcc_assert (large_base != NULL);

	  large_alloc = aligned_upper_bound (large_alloc, alignb);
	  offset = large_alloc;
	  large_alloc += stack_vars[i].size;

	  base = large_base;
	  base_align = large_align;
	}

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  expand_one_stack_var_at (stack_vars[j].decl,
				   base, base_align,
				   offset);
	}
    }

  gcc_assert (known_eq (large_alloc, large_size));
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static poly_uint64
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  poly_uint64 size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* Record the RTL assignment X for the default def of PARM.  */

extern void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
	      || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
					      TYPE_MODE (TREE_TYPE (parm)),
					      TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
	 allocate it, which means that the in-frame portion is just a
	 pointer.  ??? We've got a pseudo for sure here, do we
	 actually dynamically allocate its spilling area if needed?
	 ??? Isn't it a problem when POINTER_SIZE also exceeds
	 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  poly_uint64 size;
  poly_int64 offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
			   crtl->max_used_stack_slot_alignment, offset);
}

/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
	{
	  rtx x = SA.partition_to_pseudo[part];
	  gcc_assert (x);
	  gcc_assert (MEM_P (x));
	  return;
	}
    }

  return expand_one_stack_var_1 (var);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* Record the alignment requirements of some variable assigned to a
   pseudo.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
	 realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}

/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
					  TYPE_MODE (TREE_TYPE (var)),
					  TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that the in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = POINTER_SIZE;

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
	add_stack_var (var);
      else
	expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* For a promoted variable, X will not be used directly but wrapped in a
     SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
     will assume that its upper bits can be inferred from its lower bits.
     Therefore, if X isn't initialized on every path from the entry, then
     we must do it manually in order to fulfill the above assumption.  */
  if (reg_mode != TYPE_MODE (TREE_TYPE (var))
      && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    emit_move_insn (x, CONST0_RTX (reg_mode));
}

/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
	{
	  rtx x = SA.partition_to_pseudo[part];
	  gcc_assert (x);
	  gcc_assert (REG_P (x));
	  return;
	}
      gcc_unreachable ();
    }

  tree decl = var;
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);
  poly_uint64 size;

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = (poly_int_tree_p (size_unit, &size)
       && (estimated_poly_value (size)
	   < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static poly_uint64
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
	return 0;

      /* Because we don't know if VAR will be in register or on stack,
	 we conservatively assume it will be on stack even if VAR is
	 eventually put into register after RA pass.  For non-automatic
	 variables, which won't be on stack, we collect alignment of
	 type and ignore user specified alignment.  Similarly for
	 SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
	  || DECL_EXTERNAL (var)
	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
				   TYPE_MODE (TREE_TYPE (var)),
				   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which were assigned a stack slot already by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
	   changed from the offset chosen to it.  */
	align = crtl->stack_alignment_estimated;
      else
	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
	 it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;
    }

  record_alignment_for_reg_var (align);

  poly_uint64 size;
  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (!VAR_P (var)
		  || (!DECL_EXTERNAL (var)
		      && !DECL_HAS_VALUE_EXPR_P (var)
		      && !TREE_STATIC (var)
		      && TREE_TYPE (var) != error_mark_node
		      && !DECL_HARD_REGISTER (var)
		      && really_expand));
    }
  if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	{
	  expand_one_hard_reg_var (var);
	  if (!DECL_HARD_REGISTER (var))
	    /* Invalid register specification.  */
	    expand_one_error_var (var);
	}
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (origvar);
    }
  else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
	{
	  error ("size of variable %q+D is too large", var);
	  expand_one_error_var (var);
	}
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
	{
	  if (lookup_attribute ("naked",
				DECL_ATTRIBUTES (current_function_decl)))
	    error ("cannot allocate stack for variable %q+D, naked function.",
		   var);

	  expand_one_stack_var (origvar);
	}
      return size;
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
	&& ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
	    || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
	|| !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY 1
#define SPCT_HAS_SMALL_CHAR_ARRAY 2
#define SPCT_HAS_ARRAY 4
#define SPCT_HAS_AGGREGATE 8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
	    len = max;
	  else
	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
	  && lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

1840 /* A helper function that checks for the asan phase (with stack protector
1841 it is phase 3). This is used as a callback for expand_stack_vars.
1842 Returns true if any of the vars in the partition need to be protected. */
1843
1844 static bool
1845 asan_decl_phase_3 (size_t i)
1846 {
1847 while (i != EOC)
1848 {
1849 if (asan_protect_stack_decl (stack_vars[i].decl))
1850 return true;
1851 i = stack_vars[i].next;
1852 }
1853 return false;
1854 }
1855
1856 /* Ensure that variables in different stack protection phases conflict
1857 so that they are not merged and share the same stack slot. */
1858
1859 static void
1860 add_stack_protection_conflicts (void)
1861 {
1862 size_t i, j, n = stack_vars_num;
1863 unsigned char *phase;
1864
1865 phase = XNEWVEC (unsigned char, n);
1866 for (i = 0; i < n; ++i)
1867 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1868
1869 for (i = 0; i < n; ++i)
1870 {
1871 unsigned char ph_i = phase[i];
1872 for (j = i + 1; j < n; ++j)
1873 if (ph_i != phase[j])
1874 add_stack_var_conflict (i, j);
1875 }
1876
1877 XDELETEVEC (phase);
1878 }
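
/* For example, if the phases computed above were {1, 2, 1}, conflicts
   would be recorded for the pairs (0,1) and (1,2) but not (0,2), so
   phase-1 and phase-2 variables never share a partition while
   same-phase variables still may.  */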
1879
1880 /* Create a decl for the guard at the top of the stack frame. */
1881
1882 static void
1883 create_stack_guard (void)
1884 {
1885 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1886 VAR_DECL, NULL, ptr_type_node);
1887 TREE_THIS_VOLATILE (guard) = 1;
1888 TREE_USED (guard) = 1;
1889 expand_one_stack_var (guard);
1890 crtl->stack_protect_guard = guard;
1891 }
1892
1893 /* Prepare for expanding variables. */
1894 static void
1895 init_vars_expansion (void)
1896 {
1897 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1898 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1899
1900 /* A map from decl to stack partition. */
1901 decl_to_stack_part = new hash_map<tree, size_t>;
1902
1903 /* Initialize local stack smashing state. */
1904 has_protected_decls = false;
1905 has_short_buffer = false;
1906 }
1907
1908 /* Free up stack variable graph data. */
1909 static void
1910 fini_vars_expansion (void)
1911 {
1912 bitmap_obstack_release (&stack_var_bitmap_obstack);
1913 if (stack_vars)
1914 XDELETEVEC (stack_vars);
1915 if (stack_vars_sorted)
1916 XDELETEVEC (stack_vars_sorted);
1917 stack_vars = NULL;
1918 stack_vars_sorted = NULL;
1919 stack_vars_alloc = stack_vars_num = 0;
1920 delete decl_to_stack_part;
1921 decl_to_stack_part = NULL;
1922 }
1923
1924 /* Make a fair guess for the size of the stack frame of the function
1925 in NODE. This doesn't have to be exact; the result is only used in
1926 the inline heuristics. So we don't want to run the full stack var
1927 packing algorithm (which is quadratic in the number of stack vars).
1928 Instead, we calculate the total size of all stack vars. This turns
1929 out to be a pretty fair estimate -- packing of stack vars doesn't
1930 happen very often. */
1931
1932 HOST_WIDE_INT
1933 estimated_stack_frame_size (struct cgraph_node *node)
1934 {
1935 poly_int64 size = 0;
1936 size_t i;
1937 tree var;
1938 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1939
1940 push_cfun (fn);
1941
1942 init_vars_expansion ();
1943
1944 FOR_EACH_LOCAL_DECL (fn, i, var)
1945 if (auto_var_in_fn_p (var, fn->decl))
1946 size += expand_one_var (var, true, false);
1947
1948 if (stack_vars_num > 0)
1949 {
1950 /* Fake sorting the stack vars for account_stack_vars (). */
1951 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1952 for (i = 0; i < stack_vars_num; ++i)
1953 stack_vars_sorted[i] = i;
1954 size += account_stack_vars ();
1955 }
1956
1957 fini_vars_expansion ();
1958 pop_cfun ();
1959 return estimated_poly_value (size);
1960 }
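
/* E.g. two local char[100] arrays with disjoint lifetimes are counted
   here as roughly 200 bytes even though the real expansion might later
   pack them into a single 100-byte slot; for the inline heuristics
   that overestimate is acceptable.  */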
1961
1962 /* Helper routine to check if a record or union contains an array field. */
1963
1964 static int
1965 record_or_union_type_has_array_p (const_tree tree_type)
1966 {
1967 tree fields = TYPE_FIELDS (tree_type);
1968 tree f;
1969
1970 for (f = fields; f; f = DECL_CHAIN (f))
1971 if (TREE_CODE (f) == FIELD_DECL)
1972 {
1973 tree field_type = TREE_TYPE (f);
1974 if (RECORD_OR_UNION_TYPE_P (field_type)
1975 && record_or_union_type_has_array_p (field_type))
1976 return 1;
1977 if (TREE_CODE (field_type) == ARRAY_TYPE)
1978 return 1;
1979 }
1980 return 0;
1981 }
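
/* Both of the following (hypothetical) types make this predicate
   return 1, the second via the recursive walk over FIELD_DECLs:

     struct a { int buf[4]; };
     struct b { struct a inner; };  */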
1982
1983 /* Check if the current function has local referenced variables that
1984 have their addresses taken, contain an array, or are arrays. */
1985
1986 static bool
1987 stack_protect_decl_p ()
1988 {
1989 unsigned i;
1990 tree var;
1991
1992 FOR_EACH_LOCAL_DECL (cfun, i, var)
1993 if (!is_global_var (var))
1994 {
1995 tree var_type = TREE_TYPE (var);
1996 if (VAR_P (var)
1997 && (TREE_CODE (var_type) == ARRAY_TYPE
1998 || TREE_ADDRESSABLE (var)
1999 || (RECORD_OR_UNION_TYPE_P (var_type)
2000 && record_or_union_type_has_array_p (var_type))))
2001 return true;
2002 }
2003 return false;
2004 }
2005
2006 /* Check if the current function has calls that use a return slot. */
2007
2008 static bool
2009 stack_protect_return_slot_p ()
2010 {
2011 basic_block bb;
2012
2013 FOR_ALL_BB_FN (bb, cfun)
2014 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2015 !gsi_end_p (gsi); gsi_next (&gsi))
2016 {
2017 gimple *stmt = gsi_stmt (gsi);
2018 /* This assumes that calls to internal-only functions never
2019 use a return slot. */
2020 if (is_gimple_call (stmt)
2021 && !gimple_call_internal_p (stmt)
2022 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2023 gimple_call_fndecl (stmt)))
2024 return true;
2025 }
2026 return false;
2027 }
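
/* Taken together these two predicates implement the
   -fstack-protector-strong heuristic; e.g. any one of

     char buf[4];                  // local array, of any element type
     int x; ... use (&x);          // address-taken local
     struct { int a[2]; } v;       // aggregate containing an array
     big = returns_big_struct ();  // call returning via a return slot

   in the current function is enough to request a stack guard.  */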
2028
2029 /* Expand all variables used in the function. */
2030
2031 static rtx_insn *
2032 expand_used_vars (void)
2033 {
2034 tree var, outer_block = DECL_INITIAL (current_function_decl);
2035 auto_vec<tree> maybe_local_decls;
2036 rtx_insn *var_end_seq = NULL;
2037 unsigned i;
2038 unsigned len;
2039 bool gen_stack_protect_signal = false;
2040
2041 /* Compute the phase of the stack frame for this function. */
2042 {
2043 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2044 int off = targetm.starting_frame_offset () % align;
2045 frame_phase = off ? align - off : 0;
2046 }
2047
2048 /* Set TREE_USED on all variables in the local_decls. */
2049 FOR_EACH_LOCAL_DECL (cfun, i, var)
2050 TREE_USED (var) = 1;
2051 /* Clear TREE_USED on all variables associated with a block scope. */
2052 clear_tree_used (DECL_INITIAL (current_function_decl));
2053
2054 init_vars_expansion ();
2055
2056 if (targetm.use_pseudo_pic_reg ())
2057 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2058
2059 for (i = 0; i < SA.map->num_partitions; i++)
2060 {
2061 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2062 continue;
2063
2064 tree var = partition_to_var (SA.map, i);
2065
2066 gcc_assert (!virtual_operand_p (var));
2067
2068 expand_one_ssa_partition (var);
2069 }
2070
2071 if (flag_stack_protect == SPCT_FLAG_STRONG)
2072 gen_stack_protect_signal
2073 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2074
2075 /* At this point all variables on the local_decls with TREE_USED
2076 set are not associated with any block scope. Lay them out. */
2077
2078 len = vec_safe_length (cfun->local_decls);
2079 FOR_EACH_LOCAL_DECL (cfun, i, var)
2080 {
2081 bool expand_now = false;
2082
2083 /* Expanded above already. */
2084 if (is_gimple_reg (var))
2085 {
2086 TREE_USED (var) = 0;
2087 goto next;
2088 }
2089 /* We didn't set a block for static or extern because it's hard
2090 to tell the difference between a global variable (re)declared
2091 in a local scope, and one that's really declared there to
2092 begin with. And it doesn't really matter much, since we're
2093 not giving them stack space. Expand them now. */
2094 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2095 expand_now = true;
2096
2097 /* Expand variables not associated with any block now. Those created by
2098 the optimizers could be live anywhere in the function. Those that
2099 could possibly have been scoped originally and detached from their
2100 block will have their allocation deferred so we coalesce them with
2101 others when optimization is enabled. */
2102 else if (TREE_USED (var))
2103 expand_now = true;
2104
2105 /* Finally, mark all variables on the list as used. We'll use
2106 this in a moment when we expand those associated with scopes. */
2107 TREE_USED (var) = 1;
2108
2109 if (expand_now)
2110 expand_one_var (var, true, true);
2111
2112 next:
2113 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2114 {
2115 rtx rtl = DECL_RTL_IF_SET (var);
2116
2117 /* Keep artificial non-ignored vars in cfun->local_decls
2118 chain until instantiate_decls. */
2119 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2120 add_local_decl (cfun, var);
2121 else if (rtl == NULL_RTX)
2122 /* If rtl isn't set yet, which can happen e.g. with
2123 -fstack-protector, retry before returning from this
2124 function. */
2125 maybe_local_decls.safe_push (var);
2126 }
2127 }
2128
2129 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2130
2131 +-----------------+-----------------+
2132 | ...processed... | ...duplicates...|
2133 +-----------------+-----------------+
2134 ^
2135 +-- LEN points here.
2136
2137 We just want the duplicates, as those are the artificial
2138 non-ignored vars that we want to keep until instantiate_decls.
2139 Move them down and truncate the array. */
2140 if (!vec_safe_is_empty (cfun->local_decls))
2141 cfun->local_decls->block_remove (0, len);
2142
2143 /* At this point, all variables within the block tree with TREE_USED
2144 set are actually used by the optimized function. Lay them out. */
2145 expand_used_vars_for_block (outer_block, true);
2146
2147 if (stack_vars_num > 0)
2148 {
2149 add_scope_conflicts ();
2150
2151 /* If stack protection is enabled, we don't share space between
2152 vulnerable data and non-vulnerable data. */
2153 if (flag_stack_protect != 0
2154 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2155 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2156 && lookup_attribute ("stack_protect",
2157 DECL_ATTRIBUTES (current_function_decl)))))
2158 add_stack_protection_conflicts ();
2159
2160 /* Now that we have collected all stack variables, and have computed a
2161 minimal interference graph, attempt to save some stack space. */
2162 partition_stack_vars ();
2163 if (dump_file)
2164 dump_stack_var_partition ();
2165 }
2166
2167 switch (flag_stack_protect)
2168 {
2169 case SPCT_FLAG_ALL:
2170 create_stack_guard ();
2171 break;
2172
2173 case SPCT_FLAG_STRONG:
2174 if (gen_stack_protect_signal
2175 || cfun->calls_alloca || has_protected_decls
2176 || lookup_attribute ("stack_protect",
2177 DECL_ATTRIBUTES (current_function_decl)))
2178 create_stack_guard ();
2179 break;
2180
2181 case SPCT_FLAG_DEFAULT:
2182 if (cfun->calls_alloca || has_protected_decls
2183 || lookup_attribute ("stack_protect",
2184 DECL_ATTRIBUTES (current_function_decl)))
2185 create_stack_guard ();
2186 break;
2187
2188 case SPCT_FLAG_EXPLICIT:
2189 if (lookup_attribute ("stack_protect",
2190 DECL_ATTRIBUTES (current_function_decl)))
2191 create_stack_guard ();
2192 break;
2193 default:
2194 ;
2195 }
2196
2197 /* Assign rtl to each variable based on these partitions. */
2198 if (stack_vars_num > 0)
2199 {
2200 struct stack_vars_data data;
2201
2202 data.asan_base = NULL_RTX;
2203 data.asan_alignb = 0;
2204
2205 /* Reorder decls to be protected by iterating over the variables
2206 array multiple times, and allocating out of each phase in turn. */
2207 /* ??? We could probably integrate this into the qsort we did
2208 earlier, such that we naturally see these variables first,
2209 and thus naturally allocate things in the right order. */
2210 if (has_protected_decls)
2211 {
2212 /* Phase 1 contains only character arrays. */
2213 expand_stack_vars (stack_protect_decl_phase_1, &data);
2214
2215 /* Phase 2 contains other kinds of arrays. */
2216 if (flag_stack_protect == SPCT_FLAG_ALL
2217 || flag_stack_protect == SPCT_FLAG_STRONG
2218 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2219 && lookup_attribute ("stack_protect",
2220 DECL_ATTRIBUTES (current_function_decl))))
2221 expand_stack_vars (stack_protect_decl_phase_2, &data);
2222 }
2223
2224 if (asan_sanitize_stack_p ())
2225 /* Phase 3, any partitions that need asan protection
2226 in addition to phase 1 and 2. */
2227 expand_stack_vars (asan_decl_phase_3, &data);
2228
2229 /* ASAN description strings don't yet have a syntax for expressing
2230 polynomial offsets. */
2231 HOST_WIDE_INT prev_offset;
2232 if (!data.asan_vec.is_empty ()
2233 && frame_offset.is_constant (&prev_offset))
2234 {
2235 HOST_WIDE_INT offset, sz, redzonesz;
2236 redzonesz = ASAN_RED_ZONE_SIZE;
2237 sz = data.asan_vec[0] - prev_offset;
2238 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2239 && data.asan_alignb <= 4096
2240 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2241 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2242 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
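/* Worked example, assuming ASAN_RED_ZONE_SIZE is 32: for sz == 40
   and data.asan_alignb == 64 the expression above yields
   ((40 + 32 + 63) & ~63) - 40 == 128 - 40 == 88 bytes of red zone,
   rounding the protected region up to the 64-byte alignment.  */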
2243 /* Allocating a constant amount of space from a constant
2244 starting offset must give a constant result. */
2245 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2246 .to_constant ());
2247 data.asan_vec.safe_push (prev_offset);
2248 data.asan_vec.safe_push (offset);
2249 /* Leave space for alignment if STRICT_ALIGNMENT. */
2250 if (STRICT_ALIGNMENT)
2251 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2252 << ASAN_SHADOW_SHIFT)
2253 / BITS_PER_UNIT, 1);
2254
2255 var_end_seq
2256 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2257 data.asan_base,
2258 data.asan_alignb,
2259 data.asan_vec.address (),
2260 data.asan_decl_vec.address (),
2261 data.asan_vec.length ());
2262 }
2263
2264 expand_stack_vars (NULL, &data);
2265 }
2266
2267 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2268 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2269 virtual_stack_vars_rtx,
2270 var_end_seq);
2271
2272 fini_vars_expansion ();
2273
2274 /* If there were any artificial non-ignored vars without rtl
2275 found earlier, see if deferred stack allocation hasn't assigned
2276 rtl to them. */
2277 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2278 {
2279 rtx rtl = DECL_RTL_IF_SET (var);
2280
2281 /* Keep artificial non-ignored vars in cfun->local_decls
2282 chain until instantiate_decls. */
2283 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2284 add_local_decl (cfun, var);
2285 }
2286
2287 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2288 if (STACK_ALIGNMENT_NEEDED)
2289 {
2290 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2291 if (FRAME_GROWS_DOWNWARD)
2292 frame_offset = aligned_lower_bound (frame_offset, align);
2293 else
2294 frame_offset = aligned_upper_bound (frame_offset, align);
2295 }
2296
2297 return var_end_seq;
2298 }
2299
2300
2301 /* If we need to produce a detailed dump, print the tree representation
2302 for STMT to the dump file. SINCE is the last RTX after which the RTL
2303 generated for STMT should have been appended. */
2304
2305 static void
2306 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2307 {
2308 if (dump_file && (dump_flags & TDF_DETAILS))
2309 {
2310 fprintf (dump_file, "\n;; ");
2311 print_gimple_stmt (dump_file, stmt, 0,
2312 TDF_SLIM | (dump_flags & TDF_LINENO));
2313 fprintf (dump_file, "\n");
2314
2315 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2316 }
2317 }
2318
2319 /* Maps the blocks that do not contain tree labels to rtx labels. */
2320
2321 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2322
2323 /* Returns the label_rtx expression for a label starting basic block BB. */
2324
2325 static rtx_code_label *
2326 label_rtx_for_bb (basic_block bb)
2327 {
2328 gimple_stmt_iterator gsi;
2329 tree lab;
2330
2331 if (bb->flags & BB_RTL)
2332 return block_label (bb);
2333
2334 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2335 if (elt)
2336 return *elt;
2337
2338 /* Find the tree label if it is present. */
2339
2340 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2341 {
2342 glabel *lab_stmt;
2343
2344 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2345 if (!lab_stmt)
2346 break;
2347
2348 lab = gimple_label_label (lab_stmt);
2349 if (DECL_NONLOCAL (lab))
2350 break;
2351
2352 return jump_target_rtx (lab);
2353 }
2354
2355 rtx_code_label *l = gen_label_rtx ();
2356 lab_rtx_for_bb->put (bb, l);
2357 return l;
2358 }
2359
2360
2361 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2362 of a basic block where we just expanded the conditional at the end,
2363 possibly clean up the CFG and instruction sequence. LAST is the
2364 last instruction before the just emitted jump sequence. */
2365
2366 static void
2367 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2368 {
2369 /* Special case: when jumpif decides that the condition is
2370 trivial it emits an unconditional jump (and the necessary
2371 barrier). But we still have two edges, the fallthru one is
2372 wrong. purge_dead_edges would clean this up later. Unfortunately
2373 we have to insert insns (and split edges) before
2374 find_many_sub_basic_blocks and hence before purge_dead_edges.
2375 But splitting edges might create new blocks which depend on the
2376 fact that if there are two edges there's no barrier. So the
2377 barrier would get lost and verify_flow_info would ICE. Instead
2378 of auditing all edge splitters to care for the barrier (which
2379 normally isn't there in a cleaned CFG), fix it here. */
2380 if (BARRIER_P (get_last_insn ()))
2381 {
2382 rtx_insn *insn;
2383 remove_edge (e);
2384 /* Now, we have a single successor block, if we have insns to
2385 insert on the remaining edge we potentially will insert
2386 it at the end of this block (if the dest block isn't feasible)
2387 in order to avoid splitting the edge. This insertion will take
2388 place in front of the last jump. But we might have emitted
2389 multiple jumps (conditional and one unconditional) to the
2390 same destination. Inserting in front of the last one then
2391 is a problem. See PR 40021. We fix this by deleting all
2392 jumps except the last unconditional one. */
2393 insn = PREV_INSN (get_last_insn ());
2394 /* Make sure we have an unconditional jump. Otherwise we're
2395 confused. */
2396 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2397 for (insn = PREV_INSN (insn); insn != last;)
2398 {
2399 insn = PREV_INSN (insn);
2400 if (JUMP_P (NEXT_INSN (insn)))
2401 {
2402 if (!any_condjump_p (NEXT_INSN (insn)))
2403 {
2404 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2405 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2406 }
2407 delete_insn (NEXT_INSN (insn));
2408 }
2409 }
2410 }
2411 }
2412
2413 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2414 Returns a new basic block if we've terminated the current basic
2415 block and created a new one. */
2416
2417 static basic_block
2418 expand_gimple_cond (basic_block bb, gcond *stmt)
2419 {
2420 basic_block new_bb, dest;
2421 edge true_edge;
2422 edge false_edge;
2423 rtx_insn *last2, *last;
2424 enum tree_code code;
2425 tree op0, op1;
2426
2427 code = gimple_cond_code (stmt);
2428 op0 = gimple_cond_lhs (stmt);
2429 op1 = gimple_cond_rhs (stmt);
2430 /* We're sometimes presented with such code:
2431 D.123_1 = x < y;
2432 if (D.123_1 != 0)
2433 ...
2434 This would expand to two comparisons which then later might
2435 be cleaned up by combine. But some pattern matchers like if-conversion
2436 work better when there's only one compare, so make up for this
2437 here as a special exception if TER would have made the same change. */
2438 if (SA.values
2439 && TREE_CODE (op0) == SSA_NAME
2440 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2441 && TREE_CODE (op1) == INTEGER_CST
2442 && ((gimple_cond_code (stmt) == NE_EXPR
2443 && integer_zerop (op1))
2444 || (gimple_cond_code (stmt) == EQ_EXPR
2445 && integer_onep (op1)))
2446 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2447 {
2448 gimple *second = SSA_NAME_DEF_STMT (op0);
2449 if (gimple_code (second) == GIMPLE_ASSIGN)
2450 {
2451 enum tree_code code2 = gimple_assign_rhs_code (second);
2452 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2453 {
2454 code = code2;
2455 op0 = gimple_assign_rhs1 (second);
2456 op1 = gimple_assign_rhs2 (second);
2457 }
2458 /* If jumps are cheap and the target does not support conditional
2459 compare, turn some more codes into jumpy sequences. */
2460 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2461 && targetm.gen_ccmp_first == NULL)
2462 {
2463 if ((code2 == BIT_AND_EXPR
2464 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2465 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2466 || code2 == TRUTH_AND_EXPR)
2467 {
2468 code = TRUTH_ANDIF_EXPR;
2469 op0 = gimple_assign_rhs1 (second);
2470 op1 = gimple_assign_rhs2 (second);
2471 }
2472 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2473 {
2474 code = TRUTH_ORIF_EXPR;
2475 op0 = gimple_assign_rhs1 (second);
2476 op1 = gimple_assign_rhs2 (second);
2477 }
2478 }
2479 }
2480 }
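
/* For instance, with cheap branches and no conditional-compare
   support, a combination like

     _1 = a < b;
     _2 = c < d;
     _3 = _1 & _2;
     if (_3 != 0) ...

   is expanded via TRUTH_ANDIF_EXPR as two conditional jumps instead
   of materializing both comparison results.  */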
2481
2482 last2 = last = get_last_insn ();
2483
2484 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2485 set_curr_insn_location (gimple_location (stmt));
2486
2487 /* These flags have no purpose in RTL land. */
2488 true_edge->flags &= ~EDGE_TRUE_VALUE;
2489 false_edge->flags &= ~EDGE_FALSE_VALUE;
2490
2491 /* We can either have a pure conditional jump with one fallthru edge or
2492 a two-way jump that needs to be decomposed into two basic blocks. */
2493 if (false_edge->dest == bb->next_bb)
2494 {
2495 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2496 true_edge->probability);
2497 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2498 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2499 set_curr_insn_location (true_edge->goto_locus);
2500 false_edge->flags |= EDGE_FALLTHRU;
2501 maybe_cleanup_end_of_block (false_edge, last);
2502 return NULL;
2503 }
2504 if (true_edge->dest == bb->next_bb)
2505 {
2506 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2507 false_edge->probability);
2508 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2509 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2510 set_curr_insn_location (false_edge->goto_locus);
2511 true_edge->flags |= EDGE_FALLTHRU;
2512 maybe_cleanup_end_of_block (true_edge, last);
2513 return NULL;
2514 }
2515
2516 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2517 true_edge->probability);
2518 last = get_last_insn ();
2519 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2520 set_curr_insn_location (false_edge->goto_locus);
2521 emit_jump (label_rtx_for_bb (false_edge->dest));
2522
2523 BB_END (bb) = last;
2524 if (BARRIER_P (BB_END (bb)))
2525 BB_END (bb) = PREV_INSN (BB_END (bb));
2526 update_bb_for_insn (bb);
2527
2528 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2529 dest = false_edge->dest;
2530 redirect_edge_succ (false_edge, new_bb);
2531 false_edge->flags |= EDGE_FALLTHRU;
2532 new_bb->count = false_edge->count ();
2533 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2534 add_bb_to_loop (new_bb, loop);
2535 if (loop->latch == bb
2536 && loop->header == dest)
2537 loop->latch = new_bb;
2538 make_single_succ_edge (new_bb, dest, 0);
2539 if (BARRIER_P (BB_END (new_bb)))
2540 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2541 update_bb_for_insn (new_bb);
2542
2543 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2544
2545 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2546 {
2547 set_curr_insn_location (true_edge->goto_locus);
2548 true_edge->goto_locus = curr_insn_location ();
2549 }
2550
2551 return new_bb;
2552 }
2553
2554 /* Mark all calls that can have a transaction restart. */
2555
2556 static void
2557 mark_transaction_restart_calls (gimple *stmt)
2558 {
2559 struct tm_restart_node dummy;
2560 tm_restart_node **slot;
2561
2562 if (!cfun->gimple_df->tm_restart)
2563 return;
2564
2565 dummy.stmt = stmt;
2566 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2567 if (slot)
2568 {
2569 struct tm_restart_node *n = *slot;
2570 tree list = n->label_or_list;
2571 rtx_insn *insn;
2572
2573 for (insn = next_real_insn (get_last_insn ());
2574 !CALL_P (insn);
2575 insn = next_real_insn (insn))
2576 continue;
2577
2578 if (TREE_CODE (list) == LABEL_DECL)
2579 add_reg_note (insn, REG_TM, label_rtx (list));
2580 else
2581 for (; list ; list = TREE_CHAIN (list))
2582 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2583 }
2584 }
2585
2586 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2587 statement STMT. */
2588
2589 static void
2590 expand_call_stmt (gcall *stmt)
2591 {
2592 tree exp, decl, lhs;
2593 bool builtin_p;
2594 size_t i;
2595
2596 if (gimple_call_internal_p (stmt))
2597 {
2598 expand_internal_call (stmt);
2599 return;
2600 }
2601
2602 /* If this is a call to a built-in function and it has no effect other
2603 than setting the lhs, try to implement it using an internal function
2604 instead. */
2605 decl = gimple_call_fndecl (stmt);
2606 if (gimple_call_lhs (stmt)
2607 && !gimple_has_side_effects (stmt)
2608 && (optimize || (decl && called_as_built_in (decl))))
2609 {
2610 internal_fn ifn = replacement_internal_fn (stmt);
2611 if (ifn != IFN_LAST)
2612 {
2613 expand_internal_call (ifn, stmt);
2614 return;
2615 }
2616 }
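
/* As a hypothetical example, a call such as

     x = __builtin_sqrt (y);

   with no side effects beyond setting X can be routed through
   IFN_SQRT here when the target supports it directly, bypassing
   the normal call expansion below.  */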
2617
2618 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2619
2620 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2621 builtin_p = decl && DECL_BUILT_IN (decl);
2622
2623 /* If this is not a builtin function, the function type through which the
2624 call is made may be different from the type of the function. */
2625 if (!builtin_p)
2626 CALL_EXPR_FN (exp)
2627 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2628 CALL_EXPR_FN (exp));
2629
2630 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2631 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2632
2633 for (i = 0; i < gimple_call_num_args (stmt); i++)
2634 {
2635 tree arg = gimple_call_arg (stmt, i);
2636 gimple *def;
2637 /* TER substitutes addresses into the arguments of builtin functions,
2638 giving us a chance to infer more correct alignment information. See PR39954. */
2639 if (builtin_p
2640 && TREE_CODE (arg) == SSA_NAME
2641 && (def = get_gimple_for_ssa_name (arg))
2642 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2643 arg = gimple_assign_rhs1 (def);
2644 CALL_EXPR_ARG (exp, i) = arg;
2645 }
2646
2647 if (gimple_has_side_effects (stmt))
2648 TREE_SIDE_EFFECTS (exp) = 1;
2649
2650 if (gimple_call_nothrow_p (stmt))
2651 TREE_NOTHROW (exp) = 1;
2652
2653 if (gimple_no_warning_p (stmt))
2654 TREE_NO_WARNING (exp) = 1;
2655
2656 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2657 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2658 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2659 if (decl
2660 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2661 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2662 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2663 else
2664 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2665 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2666 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2667 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2668 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2669
2670 /* Ensure RTL is created for debug args. */
2671 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2672 {
2673 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2674 unsigned int ix;
2675 tree dtemp;
2676
2677 if (debug_args)
2678 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2679 {
2680 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2681 expand_debug_expr (dtemp);
2682 }
2683 }
2684
2685 rtx_insn *before_call = get_last_insn ();
2686 lhs = gimple_call_lhs (stmt);
2687 if (lhs)
2688 expand_assignment (lhs, exp, false);
2689 else
2690 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2691
2692 /* If the gimple call is an indirect call that has the 'nocf_check'
2693 attribute, find the generated CALL insn and mark it to indicate
2694 that no control-flow verification is needed. */
2695 if (gimple_call_nocf_check_p (stmt)
2696 && !gimple_call_fndecl (stmt))
2697 {
2698 rtx_insn *last = get_last_insn ();
2699 while (!CALL_P (last)
2700 && last != before_call)
2701 last = PREV_INSN (last);
2702
2703 if (last != before_call)
2704 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2705 }
2706
2707 mark_transaction_restart_calls (stmt);
2708 }
2709
2710
2711 /* Generate RTL for an asm statement (explicit assembler code).
2712 STRING is a STRING_CST node containing the assembler code text,
2713 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2714 insn is volatile; don't optimize it. */
2715
2716 static void
2717 expand_asm_loc (tree string, int vol, location_t locus)
2718 {
2719 rtx body;
2720
2721 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2722 ggc_strdup (TREE_STRING_POINTER (string)),
2723 locus);
2724
2725 MEM_VOLATILE_P (body) = vol;
2726
2727 /* Non-empty basic ASM implicitly clobbers memory. */
2728 if (TREE_STRING_LENGTH (string) != 0)
2729 {
2730 rtx asm_op, clob;
2731 unsigned i, nclobbers;
2732 auto_vec<rtx> input_rvec, output_rvec;
2733 auto_vec<const char *> constraints;
2734 auto_vec<rtx> clobber_rvec;
2735 HARD_REG_SET clobbered_regs;
2736 CLEAR_HARD_REG_SET (clobbered_regs);
2737
2738 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2739 clobber_rvec.safe_push (clob);
2740
2741 if (targetm.md_asm_adjust)
2742 targetm.md_asm_adjust (output_rvec, input_rvec,
2743 constraints, clobber_rvec,
2744 clobbered_regs);
2745
2746 asm_op = body;
2747 nclobbers = clobber_rvec.length ();
2748 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2749
2750 XVECEXP (body, 0, 0) = asm_op;
2751 for (i = 0; i < nclobbers; i++)
2752 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2753 }
2754
2755 emit_insn (body);
2756 }
2757
2758 /* Return the number of times character C occurs in string S. */
2759 static int
2760 n_occurrences (int c, const char *s)
2761 {
2762 int n = 0;
2763 while (*s)
2764 n += (*s++ == c);
2765 return n;
2766 }
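
/* E.g. n_occurrences (',', "=r,m") is 1, i.e. that constraint string
   describes two alternatives.  */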
2767
2768 /* A subroutine of expand_asm_operands. Check that all operands have
2769 the same number of alternatives. Return true if so. */
2770
2771 static bool
2772 check_operand_nalternatives (const vec<const char *> &constraints)
2773 {
2774 unsigned len = constraints.length();
2775 if (len > 0)
2776 {
2777 int nalternatives = n_occurrences (',', constraints[0]);
2778
2779 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2780 {
2781 error ("too many alternatives in %<asm%>");
2782 return false;
2783 }
2784
2785 for (unsigned i = 1; i < len; ++i)
2786 if (n_occurrences (',', constraints[i]) != nalternatives)
2787 {
2788 error ("operand constraints for %<asm%> differ "
2789 "in number of alternatives");
2790 return false;
2791 }
2792 }
2793 return true;
2794 }
2795
2796 /* Check for overlap between registers marked in CLOBBERED_REGS and
2797 anything inappropriate in T. Emit an error and return true for
2798 error, false for ok. */
2799
2800 static bool
2801 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2802 {
2803 /* Conflicts between asm-declared register variables and the clobber
2804 list are not allowed. */
2805 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2806
2807 if (overlap)
2808 {
2809 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2810 DECL_NAME (overlap));
2811
2812 /* Reset registerness to stop multiple errors emitted for a single
2813 variable. */
2814 DECL_REGISTER (overlap) = 0;
2815 return true;
2816 }
2817
2818 return false;
2819 }
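
/* A typical trigger, sketched with a made-up register name:

     register int t asm ("r9");
     asm ("..." : : "r" (t) : "r9");

   The asm-declared register variable overlaps the clobber list, so
   the operand cannot be honored and we error out above.  */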
2820
2821 /* Generate RTL for an asm statement with arguments.
2822 STRING is the instruction template.
2823 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2824 Each output or input has an expression in the TREE_VALUE and
2825 a tree list in TREE_PURPOSE which in turn contains a constraint
2826 name in TREE_VALUE (or NULL_TREE) and a constraint string
2827 in TREE_PURPOSE.
2828 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2829 that is clobbered by this insn.
2830
2831 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2832 should be the fallthru basic block of the asm goto.
2833
2834 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2835 Some elements of OUTPUTS may be replaced with trees representing temporary
2836 values. The caller should copy those temporary values to the originally
2837 specified lvalues.
2838
2839 VOL nonzero means the insn is volatile; don't optimize it. */
2840
2841 static void
2842 expand_asm_stmt (gasm *stmt)
2843 {
2844 class save_input_location
2845 {
2846 location_t old;
2847
2848 public:
2849 explicit save_input_location(location_t where)
2850 {
2851 old = input_location;
2852 input_location = where;
2853 }
2854
2855 ~save_input_location()
2856 {
2857 input_location = old;
2858 }
2859 };
2860
2861 location_t locus = gimple_location (stmt);
2862
2863 if (gimple_asm_input_p (stmt))
2864 {
2865 const char *s = gimple_asm_string (stmt);
2866 tree string = build_string (strlen (s), s);
2867 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2868 return;
2869 }
2870
2871 /* There are some legacy diagnostics in here; saving and restoring
2872 input_location this way also avoids a sixth parameter to targetm.md_asm_adjust. */
2873 save_input_location s_i_l(locus);
2874
2875 unsigned noutputs = gimple_asm_noutputs (stmt);
2876 unsigned ninputs = gimple_asm_ninputs (stmt);
2877 unsigned nlabels = gimple_asm_nlabels (stmt);
2878 unsigned i;
2879
2880 /* ??? Diagnose during gimplification? */
2881 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2882 {
2883 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2884 return;
2885 }
2886
2887 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2888 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2889 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2890
2891 /* Copy the gimple vectors into new vectors that we can manipulate. */
2892
2893 output_tvec.safe_grow (noutputs);
2894 input_tvec.safe_grow (ninputs);
2895 constraints.safe_grow (noutputs + ninputs);
2896
2897 for (i = 0; i < noutputs; ++i)
2898 {
2899 tree t = gimple_asm_output_op (stmt, i);
2900 output_tvec[i] = TREE_VALUE (t);
2901 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2902 }
2903 for (i = 0; i < ninputs; i++)
2904 {
2905 tree t = gimple_asm_input_op (stmt, i);
2906 input_tvec[i] = TREE_VALUE (t);
2907 constraints[i + noutputs]
2908 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2909 }
2910
2911 /* ??? Diagnose during gimplification? */
2912 if (! check_operand_nalternatives (constraints))
2913 return;
2914
2915 /* Count the number of meaningful clobbered registers, ignoring what
2916 we would ignore later. */
2917 auto_vec<rtx> clobber_rvec;
2918 HARD_REG_SET clobbered_regs;
2919 CLEAR_HARD_REG_SET (clobbered_regs);
2920
2921 if (unsigned n = gimple_asm_nclobbers (stmt))
2922 {
2923 clobber_rvec.reserve (n);
2924 for (i = 0; i < n; i++)
2925 {
2926 tree t = gimple_asm_clobber_op (stmt, i);
2927 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2928 int nregs, j;
2929
2930 j = decode_reg_name_and_count (regname, &nregs);
2931 if (j < 0)
2932 {
2933 if (j == -2)
2934 {
2935 /* ??? Diagnose during gimplification? */
2936 error ("unknown register name %qs in %<asm%>", regname);
2937 }
2938 else if (j == -4)
2939 {
2940 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2941 clobber_rvec.safe_push (x);
2942 }
2943 else
2944 {
2945 /* Otherwise we should have -1 == empty string
2946 or -3 == cc, which is not a register. */
2947 gcc_assert (j == -1 || j == -3);
2948 }
2949 }
2950 else
2951 for (int reg = j; reg < j + nregs; reg++)
2952 {
2953 /* Clobbering the PIC register is an error. */
2954 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2955 {
2956 /* ??? Diagnose during gimplification? */
2957 error ("PIC register clobbered by %qs in %<asm%>",
2958 regname);
2959 return;
2960 }
2961
2962 SET_HARD_REG_BIT (clobbered_regs, reg);
2963 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2964 clobber_rvec.safe_push (x);
2965 }
2966 }
2967 }
2968 unsigned nclobbers = clobber_rvec.length();
2969
2970 /* First pass over inputs and outputs checks validity and sets
2971 mark_addressable if needed. */
2972 /* ??? Diagnose during gimplification? */
2973
2974 for (i = 0; i < noutputs; ++i)
2975 {
2976 tree val = output_tvec[i];
2977 tree type = TREE_TYPE (val);
2978 const char *constraint;
2979 bool is_inout;
2980 bool allows_reg;
2981 bool allows_mem;
2982
2983 /* Try to parse the output constraint. If that fails, there's
2984 no point in going further. */
2985 constraint = constraints[i];
2986 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2987 &allows_mem, &allows_reg, &is_inout))
2988 return;
2989
2990 if (! allows_reg
2991 && (allows_mem
2992 || is_inout
2993 || (DECL_P (val)
2994 && REG_P (DECL_RTL (val))
2995 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2996 mark_addressable (val);
2997 }
2998
2999 for (i = 0; i < ninputs; ++i)
3000 {
3001 bool allows_reg, allows_mem;
3002 const char *constraint;
3003
3004 constraint = constraints[i + noutputs];
3005 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3006 constraints.address (),
3007 &allows_mem, &allows_reg))
3008 return;
3009
3010 if (! allows_reg && allows_mem)
3011 mark_addressable (input_tvec[i]);
3012 }
3013
3014 /* Second pass evaluates arguments. */
3015
3016 /* Make sure stack is consistent for asm goto. */
3017 if (nlabels > 0)
3018 do_pending_stack_adjust ();
3019 int old_generating_concat_p = generating_concat_p;
3020
3021 /* Vector of RTX's of evaluated output operands. */
3022 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3023 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3024 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3025
3026 output_rvec.safe_grow (noutputs);
3027
3028 for (i = 0; i < noutputs; ++i)
3029 {
3030 tree val = output_tvec[i];
3031 tree type = TREE_TYPE (val);
3032 bool is_inout, allows_reg, allows_mem, ok;
3033 rtx op;
3034
3035 ok = parse_output_constraint (&constraints[i], i, ninputs,
3036 noutputs, &allows_mem, &allows_reg,
3037 &is_inout);
3038 gcc_assert (ok);
3039
3040 /* If an output operand is not a decl or indirect ref and our constraint
3041 allows a register, make a temporary to act as an intermediate.
3042 Make the asm insn write into that, then we will copy it to
3043 the real output operand. Likewise for promoted variables. */
3044
3045 generating_concat_p = 0;
3046
3047 if ((TREE_CODE (val) == INDIRECT_REF
3048 && allows_mem)
3049 || (DECL_P (val)
3050 && (allows_mem || REG_P (DECL_RTL (val)))
3051 && ! (REG_P (DECL_RTL (val))
3052 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3053 || ! allows_reg
3054 || is_inout)
3055 {
3056 op = expand_expr (val, NULL_RTX, VOIDmode,
3057 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3058 if (MEM_P (op))
3059 op = validize_mem (op);
3060
3061 if (! allows_reg && !MEM_P (op))
3062 error ("output number %d not directly addressable", i);
3063 if ((! allows_mem && MEM_P (op))
3064 || GET_CODE (op) == CONCAT)
3065 {
3066 rtx old_op = op;
3067 op = gen_reg_rtx (GET_MODE (op));
3068
3069 generating_concat_p = old_generating_concat_p;
3070
3071 if (is_inout)
3072 emit_move_insn (op, old_op);
3073
3074 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3075 emit_move_insn (old_op, op);
3076 after_rtl_seq = get_insns ();
3077 after_rtl_end = get_last_insn ();
3078 end_sequence ();
3079 }
3080 }
3081 else
3082 {
3083 op = assign_temp (type, 0, 1);
3084 op = validize_mem (op);
3085 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3086 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3087
3088 generating_concat_p = old_generating_concat_p;
3089
3090 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3091 expand_assignment (val, make_tree (type, op), false);
3092 after_rtl_seq = get_insns ();
3093 after_rtl_end = get_last_insn ();
3094 end_sequence ();
3095 }
3096 output_rvec[i] = op;
3097
3098 if (is_inout)
3099 inout_opnum.safe_push (i);
3100 }
3101
3102 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3103 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3104
3105 input_rvec.safe_grow (ninputs);
3106 input_mode.safe_grow (ninputs);
3107
3108 generating_concat_p = 0;
3109
3110 for (i = 0; i < ninputs; ++i)
3111 {
3112 tree val = input_tvec[i];
3113 tree type = TREE_TYPE (val);
3114 bool allows_reg, allows_mem, ok;
3115 const char *constraint;
3116 rtx op;
3117
3118 constraint = constraints[i + noutputs];
3119 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3120 constraints.address (),
3121 &allows_mem, &allows_reg);
3122 gcc_assert (ok);
3123
3124 /* EXPAND_INITIALIZER will not generate code for valid initializer
3125 constants, but will still generate code for other types of operand.
3126 This is the behavior we want for constant constraints. */
3127 op = expand_expr (val, NULL_RTX, VOIDmode,
3128 allows_reg ? EXPAND_NORMAL
3129 : allows_mem ? EXPAND_MEMORY
3130 : EXPAND_INITIALIZER);
3131
3132 /* Never pass a CONCAT to an ASM. */
3133 if (GET_CODE (op) == CONCAT)
3134 op = force_reg (GET_MODE (op), op);
3135 else if (MEM_P (op))
3136 op = validize_mem (op);
3137
3138 if (asm_operand_ok (op, constraint, NULL) <= 0)
3139 {
3140 if (allows_reg && TYPE_MODE (type) != BLKmode)
3141 op = force_reg (TYPE_MODE (type), op);
3142 else if (!allows_mem)
3143 warning (0, "asm operand %d probably doesn%'t match constraints",
3144 i + noutputs);
3145 else if (MEM_P (op))
3146 {
3147 /* We won't recognize either volatile memory or memory
3148 with a queued address as a valid memory_operand
3149 at this point. Ignore it: clearly this *is* a memory. */
3150 }
3151 else
3152 gcc_unreachable ();
3153 }
3154 input_rvec[i] = op;
3155 input_mode[i] = TYPE_MODE (type);
3156 }
3157
3158 /* For in-out operands, copy output rtx to input rtx. */
3159 unsigned ninout = inout_opnum.length();
3160 for (i = 0; i < ninout; i++)
3161 {
3162 int j = inout_opnum[i];
3163 rtx o = output_rvec[j];
3164
3165 input_rvec.safe_push (o);
3166 input_mode.safe_push (GET_MODE (o));
3167
3168 char buffer[16];
3169 sprintf (buffer, "%d", j);
3170 constraints.safe_push (ggc_strdup (buffer));
3171 }
3172 ninputs += ninout;
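
/* For instance, an in-out operand written "+r" (x) at the source
   level ends up as output 0 with constraint "=r" plus a matching
   input constrained by the digit string "0"; the loop above
   synthesizes such matching inputs for any in-out operands still
   present at this point.  */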
3173
3174 /* Sometimes we wish to automatically clobber registers across an asm.
3175 Case in point is when the i386 backend moved from cc0 to a hard reg --
3176 maintaining source-level compatibility means automatically clobbering
3177 the flags register. */
3178 rtx_insn *after_md_seq = NULL;
3179 if (targetm.md_asm_adjust)
3180 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3181 constraints, clobber_rvec,
3182 clobbered_regs);
3183
3184 /* Do not allow the hook to change the output and input count,
3185 lest it mess up the operand numbering. */
3186 gcc_assert (output_rvec.length() == noutputs);
3187 gcc_assert (input_rvec.length() == ninputs);
3188 gcc_assert (constraints.length() == noutputs + ninputs);
3189
3190 /* But it certainly can adjust the clobbers. */
3191 nclobbers = clobber_rvec.length();
3192
3193 /* Third pass checks for easy conflicts. */
3194 /* ??? Why are we doing this on trees instead of rtx? */
3195
3196 bool clobber_conflict_found = false;
3197 for (i = 0; i < noutputs; ++i)
3198 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3199 clobber_conflict_found = true;
3200 for (i = 0; i < ninputs - ninout; ++i)
3201 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3202 clobber_conflict_found = true;
3203
3204 /* Make vectors for the expression-rtx, constraint strings,
3205 and named operands. */
3206
3207 rtvec argvec = rtvec_alloc (ninputs);
3208 rtvec constraintvec = rtvec_alloc (ninputs);
3209 rtvec labelvec = rtvec_alloc (nlabels);
3210
3211 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3212 : GET_MODE (output_rvec[0])),
3213 ggc_strdup (gimple_asm_string (stmt)),
3214 "", 0, argvec, constraintvec,
3215 labelvec, locus);
3216 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3217
3218 for (i = 0; i < ninputs; ++i)
3219 {
3220 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3221 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3222 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3223 constraints[i + noutputs],
3224 locus);
3225 }
3226
3227 /* Copy labels to the vector. */
3228 rtx_code_label *fallthru_label = NULL;
3229 if (nlabels > 0)
3230 {
3231 basic_block fallthru_bb = NULL;
3232 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3233 if (fallthru)
3234 fallthru_bb = fallthru->dest;
3235
3236 for (i = 0; i < nlabels; ++i)
3237 {
3238 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3239 rtx_insn *r;
3240 /* If asm goto has any labels in the fallthru basic block, use
3241 a label that we emit immediately after the asm goto. Expansion
3242 may insert further instructions into the same basic block after
3243 asm goto and if we don't do this, insertion of instructions on
3244 the fallthru edge might misbehave. See PR58670. */
3245 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3246 {
3247 if (fallthru_label == NULL_RTX)
3248 fallthru_label = gen_label_rtx ();
3249 r = fallthru_label;
3250 }
3251 else
3252 r = label_rtx (label);
3253 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3254 }
3255 }
3256
3257 /* Now, for each output, construct an rtx
3258 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3259 ARGVEC CONSTRAINTS OPNAMES))
3260 If there is more than one, put them inside a PARALLEL. */
3261
3262 if (nlabels > 0 && nclobbers == 0)
3263 {
3264 gcc_assert (noutputs == 0);
3265 emit_jump_insn (body);
3266 }
3267 else if (noutputs == 0 && nclobbers == 0)
3268 {
3269 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3270 emit_insn (body);
3271 }
3272 else if (noutputs == 1 && nclobbers == 0)
3273 {
3274 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3275 emit_insn (gen_rtx_SET (output_rvec[0], body));
3276 }
3277 else
3278 {
3279 rtx obody = body;
3280 int num = noutputs;
3281
3282 if (num == 0)
3283 num = 1;
3284
3285 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3286
3287 /* For each output operand, store a SET. */
3288 for (i = 0; i < noutputs; ++i)
3289 {
3290 rtx src, o = output_rvec[i];
3291 if (i == 0)
3292 {
3293 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3294 src = obody;
3295 }
3296 else
3297 {
3298 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3299 ASM_OPERANDS_TEMPLATE (obody),
3300 constraints[i], i, argvec,
3301 constraintvec, labelvec, locus);
3302 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3303 }
3304 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3305 }
3306
3307 /* If there are no outputs (but there are some clobbers)
3308 store the bare ASM_OPERANDS into the PARALLEL. */
3309 if (i == 0)
3310 XVECEXP (body, 0, i++) = obody;
3311
3312 /* Store (clobber REG) for each clobbered register specified. */
3313 for (unsigned j = 0; j < nclobbers; ++j)
3314 {
3315 rtx clobbered_reg = clobber_rvec[j];
3316
3317 /* Sanity check for any overlap between clobbers and, respectively,
3318 the inputs and outputs that hasn't been handled. Such overlap
3319 should have been detected and reported above. */
3320 if (!clobber_conflict_found && REG_P (clobbered_reg))
3321 {
3322 /* We test the old body (obody) contents to avoid
3323 tripping over the under-construction body. */
3324 for (unsigned k = 0; k < noutputs; ++k)
3325 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3326 internal_error ("asm clobber conflict with output operand");
3327
3328 for (unsigned k = 0; k < ninputs - ninout; ++k)
3329 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3330 internal_error ("asm clobber conflict with input operand");
3331 }
3332
3333 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3334 }
3335
3336 if (nlabels > 0)
3337 emit_jump_insn (body);
3338 else
3339 emit_insn (body);
3340 }
3341
3342 generating_concat_p = old_generating_concat_p;
3343
3344 if (fallthru_label)
3345 emit_label (fallthru_label);
3346
3347 if (after_md_seq)
3348 emit_insn (after_md_seq);
3349 if (after_rtl_seq)
3350 emit_insn (after_rtl_seq);
3351
3352 free_temp_slots ();
3353 crtl->has_asm_statement = 1;
3354 }
3355
3356 /* Emit code to jump to the address
3357 specified by the pointer expression EXP. */
3358
3359 static void
3360 expand_computed_goto (tree exp)
3361 {
3362 rtx x = expand_normal (exp);
3363
3364 do_pending_stack_adjust ();
3365 emit_indirect_jump (x);
3366 }
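
/* For example, the GNU computed-goto extension

     static void *targets[] = { &&l0, &&l1 };
     goto *targets[i];

   reaches this point with EXP being the pointer expression
   targets[i].  */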
3367
3368 /* Generate RTL code for a `goto' statement with target label LABEL.
3369 LABEL should be a LABEL_DECL tree node that was or will later be
3370 defined with `expand_label'. */
3371
3372 static void
3373 expand_goto (tree label)
3374 {
3375 if (flag_checking)
3376 {
3377 /* Check for a nonlocal goto to a containing function. Should have
3378 gotten translated to __builtin_nonlocal_goto. */
3379 tree context = decl_function_context (label);
3380 gcc_assert (!context || context == current_function_decl);
3381 }
3382
3383 emit_jump (jump_target_rtx (label));
3384 }
3385
3386 /* Output a return with no value. */
3387
3388 static void
3389 expand_null_return_1 (void)
3390 {
3391 clear_pending_stack_adjust ();
3392 do_pending_stack_adjust ();
3393 emit_jump (return_label);
3394 }
3395
3396 /* Generate RTL to return from the current function, with no value.
3397 (That is, we do not do anything about returning any value.) */
3398
3399 void
3400 expand_null_return (void)
3401 {
3402 /* If this function was declared to return a value, but we
3403 didn't, clobber the return registers so that they are not
3404 propagated live to the rest of the function. */
3405 clobber_return_register ();
3406
3407 expand_null_return_1 ();
3408 }
3409
3410 /* Generate RTL to return from the current function, with value VAL. */
3411
3412 static void
3413 expand_value_return (rtx val)
3414 {
3415 /* Copy the value to the return location unless it's already there. */
3416
3417 tree decl = DECL_RESULT (current_function_decl);
3418 rtx return_reg = DECL_RTL (decl);
3419 if (return_reg != val)
3420 {
3421 tree funtype = TREE_TYPE (current_function_decl);
3422 tree type = TREE_TYPE (decl);
3423 int unsignedp = TYPE_UNSIGNED (type);
3424 machine_mode old_mode = DECL_MODE (decl);
3425 machine_mode mode;
3426 if (DECL_BY_REFERENCE (decl))
3427 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3428 else
3429 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3430
3431 if (mode != old_mode)
3432 val = convert_modes (mode, old_mode, val, unsignedp);
3433
3434 if (GET_CODE (return_reg) == PARALLEL)
3435 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3436 else
3437 emit_move_insn (return_reg, val);
3438 }
3439
3440 expand_null_return_1 ();
3441 }
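
/* For instance, on a target whose promote_function_mode widens a
   QImode return value to SImode, returning a 'char' is converted via
   convert_modes above so the value lands in the return register in
   its promoted mode.  */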
3442
3443 /* Generate RTL to evaluate the expression RETVAL and return it
3444 from the current function. */
3445
3446 static void
3447 expand_return (tree retval, tree bounds)
3448 {
3449 rtx result_rtl;
3450 rtx val = 0;
3451 tree retval_rhs;
3452 rtx bounds_rtl;
3453
3454 /* If function wants no value, give it none. */
3455 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3456 {
3457 expand_normal (retval);
3458 expand_null_return ();
3459 return;
3460 }
3461
3462 if (retval == error_mark_node)
3463 {
3464 /* Treat this like a return of no value from a function that
3465 returns a value. */
3466 expand_null_return ();
3467 return;
3468 }
3469 else if ((TREE_CODE (retval) == MODIFY_EXPR
3470 || TREE_CODE (retval) == INIT_EXPR)
3471 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3472 retval_rhs = TREE_OPERAND (retval, 1);
3473 else
3474 retval_rhs = retval;
3475
3476 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3477
3478 /* Put returned bounds to the right place. */
3479 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3480 if (bounds_rtl)
3481 {
3482 rtx addr = NULL;
3483 rtx bnd = NULL;
3484
3485 if (bounds && bounds != error_mark_node)
3486 {
3487 bnd = expand_normal (bounds);
3488 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3489 }
3490 else if (REG_P (bounds_rtl))
3491 {
3492 if (bounds)
3493 bnd = chkp_expand_zero_bounds ();
3494 else
3495 {
3496 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3497 addr = gen_rtx_MEM (Pmode, addr);
3498 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3499 }
3500
3501 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3502 }
3503 else
3504 {
3505 int n;
3506
3507 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3508
3509 if (bounds)
3510 bnd = chkp_expand_zero_bounds ();
3511 else
3512 {
3513 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3514 addr = gen_rtx_MEM (Pmode, addr);
3515 }
3516
3517 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3518 {
3519 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3520 if (!bounds)
3521 {
3522 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3523 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3524 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3525 }
3526 targetm.calls.store_returned_bounds (slot, bnd);
3527 }
3528 }
3529 }
3530 else if (chkp_function_instrumented_p (current_function_decl)
3531 && !BOUNDED_P (retval_rhs)
3532 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3533 && TREE_CODE (retval_rhs) != RESULT_DECL)
3534 {
3535 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3536 addr = gen_rtx_MEM (Pmode, addr);
3537
3538 gcc_assert (MEM_P (result_rtl));
3539
3540 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3541 }
3542
3543 /* If we are returning the RESULT_DECL, then the value has already
3544 been stored into it, so we don't have to do anything special. */
3545 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3546 expand_value_return (result_rtl);
3547
3548 /* If the result is an aggregate that is being returned in one (or more)
3549 registers, load the registers here. */
3550
3551 else if (retval_rhs != 0
3552 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3553 && REG_P (result_rtl))
3554 {
3555 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3556 if (val)
3557 {
3558 /* Use the mode of the result value on the return register. */
3559 PUT_MODE (result_rtl, GET_MODE (val));
3560 expand_value_return (val);
3561 }
3562 else
3563 expand_null_return ();
3564 }
3565 else if (retval_rhs != 0
3566 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3567 && (REG_P (result_rtl)
3568 || (GET_CODE (result_rtl) == PARALLEL)))
3569 {
3570 /* Compute the return value into a temporary (usually a pseudo reg). */
3571 val
3572 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3573 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3574 val = force_not_mem (val);
3575 expand_value_return (val);
3576 }
3577 else
3578 {
3579 /* No temporary used; calculate the value directly into the return location. */
3580 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3581 expand_value_return (result_rtl);
3582 }
3583 }
3584
3585 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3586 STMT that doesn't require special handling for outgoing edges. That
3587 is no tailcalls and no GIMPLE_COND. */
3588
3589 static void
3590 expand_gimple_stmt_1 (gimple *stmt)
3591 {
3592 tree op0;
3593
3594 set_curr_insn_location (gimple_location (stmt));
3595
3596 switch (gimple_code (stmt))
3597 {
3598 case GIMPLE_GOTO:
3599 op0 = gimple_goto_dest (stmt);
3600 if (TREE_CODE (op0) == LABEL_DECL)
3601 expand_goto (op0);
3602 else
3603 expand_computed_goto (op0);
3604 break;
3605 case GIMPLE_LABEL:
3606 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3607 break;
3608 case GIMPLE_NOP:
3609 case GIMPLE_PREDICT:
3610 break;
3611 case GIMPLE_SWITCH:
3612 {
3613 gswitch *swtch = as_a <gswitch *> (stmt);
3614 if (gimple_switch_num_labels (swtch) == 1)
3615 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3616 else
3617 expand_case (swtch);
3618 }
3619 break;
3620 case GIMPLE_ASM:
3621 expand_asm_stmt (as_a <gasm *> (stmt));
3622 break;
3623 case GIMPLE_CALL:
3624 expand_call_stmt (as_a <gcall *> (stmt));
3625 break;
3626
3627 case GIMPLE_RETURN:
3628 {
3629 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3630 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3631
3632 if (op0 && op0 != error_mark_node)
3633 {
3634 tree result = DECL_RESULT (current_function_decl);
3635
3636 /* Mark that we have a return statement with missing bounds. */
3637 if (!bnd
3638 && chkp_function_instrumented_p (cfun->decl)
3639 && !DECL_P (op0))
3640 bnd = error_mark_node;
3641
3642 /* If we are not returning the current function's RESULT_DECL,
3643 build an assignment to it. */
3644 if (op0 != result)
3645 {
3646 /* I believe that a function's RESULT_DECL is unique. */
3647 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3648
3649 /* ??? We'd like to use simply expand_assignment here,
3650 but this fails if the value is of BLKmode but the return
3651 decl is a register. expand_return has special handling
3652 for this combination, which eventually should move
3653 to common code. See comments there. Until then, let's
3654 build a modify expression :-/ */
3655 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3656 result, op0);
3657 }
3658 }
3659
3660 if (!op0)
3661 expand_null_return ();
3662 else
3663 expand_return (op0, bnd);
3664 }
3665 break;
3666
3667 case GIMPLE_ASSIGN:
3668 {
3669 gassign *assign_stmt = as_a <gassign *> (stmt);
3670 tree lhs = gimple_assign_lhs (assign_stmt);
3671
3672 /* Tree expand used to fiddle with |= and &= of two bitfield
3673 COMPONENT_REFs here. This can't happen with gimple; the LHS
3674 of binary assigns must be a gimple reg. */
3675
3676 if (TREE_CODE (lhs) != SSA_NAME
3677 || get_gimple_rhs_class (gimple_expr_code (stmt))
3678 == GIMPLE_SINGLE_RHS)
3679 {
3680 tree rhs = gimple_assign_rhs1 (assign_stmt);
3681 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3682 == GIMPLE_SINGLE_RHS);
3683 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3684 /* Do not put locations on possibly shared trees. */
3685 && !is_gimple_min_invariant (rhs))
3686 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3687 if (TREE_CLOBBER_P (rhs))
3688 /* This is a clobber to mark the going out of scope for
3689 this LHS. */
3690 ;
3691 else
3692 expand_assignment (lhs, rhs,
3693 gimple_assign_nontemporal_move_p (
3694 assign_stmt));
3695 }
3696 else
3697 {
3698 rtx target, temp;
3699 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3700 struct separate_ops ops;
3701 bool promoted = false;
3702
3703 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3704 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3705 promoted = true;
3706
3707 ops.code = gimple_assign_rhs_code (assign_stmt);
3708 ops.type = TREE_TYPE (lhs);
3709 switch (get_gimple_rhs_class (ops.code))
3710 {
3711 case GIMPLE_TERNARY_RHS:
3712 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3713 /* Fallthru */
3714 case GIMPLE_BINARY_RHS:
3715 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3716 /* Fallthru */
3717 case GIMPLE_UNARY_RHS:
3718 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3719 break;
3720 default:
3721 gcc_unreachable ();
3722 }
3723 ops.location = gimple_location (stmt);
3724
3725 /* If we want to use a nontemporal store, force the value into
3726 a register first. If we store into a promoted register,
3727 don't directly expand to target. */
3728 temp = nontemporal || promoted ? NULL_RTX : target;
3729 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3730 EXPAND_NORMAL);
3731
3732 if (temp == target)
3733 ;
3734 else if (promoted)
3735 {
3736 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3737 /* If TEMP is a VOIDmode constant, use convert_modes to make
3738 sure that we properly convert it. */
3739 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3740 {
3741 temp = convert_modes (GET_MODE (target),
3742 TYPE_MODE (ops.type),
3743 temp, unsignedp);
3744 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3745 GET_MODE (target), temp, unsignedp);
3746 }
3747
3748 convert_move (SUBREG_REG (target), temp, unsignedp);
3749 }
3750 else if (nontemporal && emit_storent_insn (target, temp))
3751 ;
3752 else
3753 {
3754 temp = force_operand (temp, target);
3755 if (temp != target)
3756 emit_move_insn (target, temp);
3757 }
3758 }
3759 }
3760 break;
3761
3762 default:
3763 gcc_unreachable ();
3764 }
3765 }
3766
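/* A sketch of the promoted-subreg case above, assuming a target such as
   MIPS64 where PROMOTE_MODE keeps SImode values sign-extended in DImode
   registers: for

     int f (int a, int b) { return a + b; }

   the LHS expands to (subreg:SI (reg:DI ...) 0) with
   SUBREG_PROMOTED_VAR_P set, so the PLUS result is not stored directly;
   convert_move re-extends it into the underlying DImode register,
   preserving the promotion invariant that later readers rely on.  */
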
3767 /* Expand one gimple statement STMT and return the last RTL instruction
3768 before any of the newly generated ones.
3769
3770 In addition to generating the necessary RTL instructions this also
3771 sets REG_EH_REGION notes if necessary and sets the current source
3772 location for diagnostics. */
3773
3774 static rtx_insn *
3775 expand_gimple_stmt (gimple *stmt)
3776 {
3777 location_t saved_location = input_location;
3778 rtx_insn *last = get_last_insn ();
3779 int lp_nr;
3780
3781 gcc_assert (cfun);
3782
3783 /* We need to save and restore the current source location so that errors
3784 discovered during expansion are emitted with the right location. But
3785 it would be better if the diagnostic routines used the source location
3786 embedded in the tree nodes rather than globals. */
3787 if (gimple_has_location (stmt))
3788 input_location = gimple_location (stmt);
3789
3790 expand_gimple_stmt_1 (stmt);
3791
3792 /* Free any temporaries used to evaluate this statement. */
3793 free_temp_slots ();
3794
3795 input_location = saved_location;
3796
3797 /* Mark all insns that may trap. */
3798 lp_nr = lookup_stmt_eh_lp (stmt);
3799 if (lp_nr)
3800 {
3801 rtx_insn *insn;
3802 for (insn = next_real_insn (last); insn;
3803 insn = next_real_insn (insn))
3804 {
3805 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3806 /* If we want exceptions for non-call insns, any
3807 may_trap_p instruction may throw. */
3808 && GET_CODE (PATTERN (insn)) != CLOBBER
3809 && GET_CODE (PATTERN (insn)) != USE
3810 && insn_could_throw_p (insn))
3811 make_reg_eh_region_note (insn, 0, lp_nr);
3812 }
3813 }
3814
3815 return last;
3816 }
3817
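/* For example (a sketch): when a statement inside a C++ try block is
   expanded, say a call to a function that can throw, the loop above
   walks the freshly emitted insns and attaches a REG_EH_REGION note
   carrying the landing-pad number LP_NR to every insn that could throw
   (with -fnon-call-exceptions that includes trapping arithmetic, not
   just calls); the RTL EH machinery later uses these notes to rebuild
   the edges to the landing pad.  */
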
3818 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3819 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3820 generated a tail call (something that might be denied by the ABI
3821 rules governing the call; see calls.c).
3822
3823 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3824 can still reach the rest of BB. The case here is __builtin_sqrt,
3825 where the NaN result goes through the external function (with a
3826 tailcall) and the normal result happens via a sqrt instruction. */
3827
3828 static basic_block
3829 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3830 {
3831 rtx_insn *last2, *last;
3832 edge e;
3833 edge_iterator ei;
3834 profile_probability probability;
3835
3836 last2 = last = expand_gimple_stmt (stmt);
3837
3838 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3839 if (CALL_P (last) && SIBLING_CALL_P (last))
3840 goto found;
3841
3842 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3843
3844 *can_fallthru = true;
3845 return NULL;
3846
3847 found:
3848 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3849 Any instructions emitted here are about to be deleted. */
3850 do_pending_stack_adjust ();
3851
3852 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3853 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3854 EH or abnormal edges, we shouldn't have created a tail call in
3855 the first place. So it seems to me we should just be removing
3856 all edges here, or redirecting the existing fallthru edge to
3857 the exit block. */
3858
3859 probability = profile_probability::never ();
3860
3861 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3862 {
3863 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3864 {
3865 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3866 e->dest->count -= e->count ();
3867 probability += e->probability;
3868 remove_edge (e);
3869 }
3870 else
3871 ei_next (&ei);
3872 }
3873
3874 /* This is somewhat ugly: the call_expr expander often emits instructions
3875 after the sibcall (to perform the function return). These confuse the
3876 find_many_sub_basic_blocks code, so we need to get rid of them. */
3877 last = NEXT_INSN (last);
3878 gcc_assert (BARRIER_P (last));
3879
3880 *can_fallthru = false;
3881 while (NEXT_INSN (last))
3882 {
3883 /* For instance, the sqrt builtin expander expands an if with a
3884 sibcall in the then arm and a label for the else arm. */
3885 if (LABEL_P (NEXT_INSN (last)))
3886 {
3887 *can_fallthru = true;
3888 break;
3889 }
3890 delete_insn (NEXT_INSN (last));
3891 }
3892
3893 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3894 | EDGE_SIBCALL);
3895 e->probability = probability;
3896 BB_END (bb) = last;
3897 update_bb_for_insn (bb);
3898
3899 if (NEXT_INSN (last))
3900 {
3901 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3902
3903 last = BB_END (bb);
3904 if (BARRIER_P (last))
3905 BB_END (bb) = PREV_INSN (last);
3906 }
3907
3908 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3909
3910 return bb;
3911 }
3912
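/* A concrete instance of the CAN_FALLTHRU case described above, assuming
   a target with a hardware sqrt instruction and errno-setting math:

     double f (double x) { return sqrt (x); }

   The sqrt builtin expander emits a comparison of X, a sibcall to the
   library sqrt on the path that must set errno, and an inline sqrt
   instruction on the other path, so the block ends in a tail call and
   still falls through to the label for the inline code.  */
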
3913 /* Return the difference between the floor and the truncated result of
3914 a signed division by OP1 with remainder MOD. */
3915 static rtx
3916 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3917 {
3918 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3919 return gen_rtx_IF_THEN_ELSE
3920 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3921 gen_rtx_IF_THEN_ELSE
3922 (mode, gen_rtx_LT (BImode,
3923 gen_rtx_DIV (mode, op1, mod),
3924 const0_rtx),
3925 constm1_rtx, const0_rtx),
3926 const0_rtx);
3927 }
3928
3929 /* Return the difference between the ceil and the truncated result of
3930 a signed division by OP1 with remainder MOD. */
3931 static rtx
3932 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3933 {
3934 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3935 return gen_rtx_IF_THEN_ELSE
3936 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3937 gen_rtx_IF_THEN_ELSE
3938 (mode, gen_rtx_GT (BImode,
3939 gen_rtx_DIV (mode, op1, mod),
3940 const0_rtx),
3941 const1_rtx, const0_rtx),
3942 const0_rtx);
3943 }
3944
3945 /* Return the difference between the ceil and the truncated result of
3946 an unsigned division by OP1 with remainder MOD. */
3947 static rtx
3948 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3949 {
3950 /* (mod != 0 ? 1 : 0) */
3951 return gen_rtx_IF_THEN_ELSE
3952 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3953 const1_rtx, const0_rtx);
3954 }
3955
3956 /* Return the difference between the rounded and the truncated result
3957 of a signed division by OP1 with remainder MOD. Halfway cases are
3958 rounded away from zero, rather than to the nearest even number. */
3959 static rtx
3960 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3961 {
3962 /* (abs (mod) >= abs (op1) - abs (mod)
3963 ? (op1 / mod > 0 ? 1 : -1)
3964 : 0) */
3965 return gen_rtx_IF_THEN_ELSE
3966 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3967 gen_rtx_MINUS (mode,
3968 gen_rtx_ABS (mode, op1),
3969 gen_rtx_ABS (mode, mod))),
3970 gen_rtx_IF_THEN_ELSE
3971 (mode, gen_rtx_GT (BImode,
3972 gen_rtx_DIV (mode, op1, mod),
3973 const0_rtx),
3974 const1_rtx, constm1_rtx),
3975 const0_rtx);
3976 }
3977
3978 /* Return the difference between the rounded and the truncated result
3979 of an unsigned division by OP1 with remainder MOD. Halfway cases
3980 are rounded away from zero, rather than to the nearest even
3981 number. */
3982 static rtx
3983 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3984 {
3985 /* (mod >= op1 - mod ? 1 : 0) */
3986 return gen_rtx_IF_THEN_ELSE
3987 (mode, gen_rtx_GE (BImode, mod,
3988 gen_rtx_MINUS (mode, op1, mod)),
3989 const1_rtx, const0_rtx);
3990 }
3991
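/* Worked example for the four helpers above: for FLOOR_DIV_EXPR on
   -7 / 2, truncating division gives div = -3 and mod = -1.  Since
   mod != 0 and op1 / mod = 2 / -1 = -2 < 0, floor_sdiv_adjust yields
   -1, and div + adjust = -4 = floor(-3.5).  The same identity as a
   plain C model (a sketch using ordinary ints, not part of GCC):

     int floor_div (int a, int b)
     {
       int q = a / b, r = a % b;   // truncating DIV and MOD
       return q + ((r != 0 && (a < 0) != (b < 0)) ? -1 : 0);
     }
*/
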
3992 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3993 any rtl. */
3994
3995 static rtx
3996 convert_debug_memory_address (scalar_int_mode mode, rtx x,
3997 addr_space_t as)
3998 {
3999 #ifndef POINTERS_EXTEND_UNSIGNED
4000 gcc_assert (mode == Pmode
4001 || mode == targetm.addr_space.address_mode (as));
4002 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4003 #else
4004 rtx temp;
4005
4006 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4007
4008 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4009 return x;
4010
4011 /* X must have some form of address mode already. */
4012 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4013 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4014 x = lowpart_subreg (mode, x, xmode);
4015 else if (POINTERS_EXTEND_UNSIGNED > 0)
4016 x = gen_rtx_ZERO_EXTEND (mode, x);
4017 else if (!POINTERS_EXTEND_UNSIGNED)
4018 x = gen_rtx_SIGN_EXTEND (mode, x);
4019 else
4020 {
4021 switch (GET_CODE (x))
4022 {
4023 case SUBREG:
4024 if ((SUBREG_PROMOTED_VAR_P (x)
4025 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4026 || (GET_CODE (SUBREG_REG (x)) == PLUS
4027 && REG_P (XEXP (SUBREG_REG (x), 0))
4028 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4029 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4030 && GET_MODE (SUBREG_REG (x)) == mode)
4031 return SUBREG_REG (x);
4032 break;
4033 case LABEL_REF:
4034 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4035 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4036 return temp;
4037 case SYMBOL_REF:
4038 temp = shallow_copy_rtx (x);
4039 PUT_MODE (temp, mode);
4040 return temp;
4041 case CONST:
4042 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4043 if (temp)
4044 temp = gen_rtx_CONST (mode, temp);
4045 return temp;
4046 case PLUS:
4047 case MINUS:
4048 if (CONST_INT_P (XEXP (x, 1)))
4049 {
4050 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4051 if (temp)
4052 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4053 }
4054 break;
4055 default:
4056 break;
4057 }
4058 /* Don't know how to express ptr_extend as an operation in debug info. */
4059 return NULL;
4060 }
4061 #endif /* POINTERS_EXTEND_UNSIGNED */
4062
4063 return x;
4064 }
4065
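/* To summarize the cases above: with POINTERS_EXTEND_UNSIGNED defined,
   a narrower address is widened with ZERO_EXTEND when the macro is
   positive and with SIGN_EXTEND when it is zero.  A negative value
   means the target has a real ptr_extend instruction (as on ia64), for
   which only a few forms - promoted or pointer SUBREGs, label and
   symbol refs, and constant offsets from them - can be rewritten;
   anything else yields NULL because ptr_extend has no representation
   in debug info.  */
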
4066 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4067 by avoid_deep_ter_for_debug. */
4068
4069 static hash_map<tree, tree> *deep_ter_debug_map;
4070
4071 /* Split overly deep TER chains for debug stmts by introducing debug temporaries. */
4072
4073 static void
4074 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4075 {
4076 use_operand_p use_p;
4077 ssa_op_iter iter;
4078 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4079 {
4080 tree use = USE_FROM_PTR (use_p);
4081 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4082 continue;
4083 gimple *g = get_gimple_for_ssa_name (use);
4084 if (g == NULL)
4085 continue;
4086 if (depth > 6 && !stmt_ends_bb_p (g))
4087 {
4088 if (deep_ter_debug_map == NULL)
4089 deep_ter_debug_map = new hash_map<tree, tree>;
4090
4091 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4092 if (vexpr != NULL)
4093 continue;
4094 vexpr = make_node (DEBUG_EXPR_DECL);
4095 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4096 DECL_ARTIFICIAL (vexpr) = 1;
4097 TREE_TYPE (vexpr) = TREE_TYPE (use);
4098 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4099 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4100 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4101 avoid_deep_ter_for_debug (def_temp, 0);
4102 }
4103 else
4104 avoid_deep_ter_for_debug (g, depth + 1);
4105 }
4106 }
4107
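/* For illustration, assuming a long chain of single-use names that were
   all TERed into their uses:

     t1_1 = a_2 + 1;  t2_3 = t1_1 + 1;  ...  t9_17 = t8_16 + 1;

   expanding a debug bind of t9_17 by substituting each RHS would build
   a tree nested deeper than the depth-6 limit above.  The walk instead
   materializes a DEBUG_EXPR_DECL (a debug temporary) right after one of
   the inner definitions and rewires later debug uses to it, bounding
   the recursion.  */
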
4108 /* Return an RTX equivalent to the value of the parameter DECL. */
4109
4110 static rtx
4111 expand_debug_parm_decl (tree decl)
4112 {
4113 rtx incoming = DECL_INCOMING_RTL (decl);
4114
4115 if (incoming
4116 && GET_MODE (incoming) != BLKmode
4117 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4118 || (MEM_P (incoming)
4119 && REG_P (XEXP (incoming, 0))
4120 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4121 {
4122 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4123
4124 #ifdef HAVE_window_save
4125 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4126 If the target machine has an explicit window save instruction, the
4127 actual entry value is the corresponding OUTGOING_REGNO instead. */
4128 if (REG_P (incoming)
4129 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4130 incoming
4131 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4132 OUTGOING_REGNO (REGNO (incoming)), 0);
4133 else if (MEM_P (incoming))
4134 {
4135 rtx reg = XEXP (incoming, 0);
4136 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4137 {
4138 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4139 incoming = replace_equiv_address_nv (incoming, reg);
4140 }
4141 else
4142 incoming = copy_rtx (incoming);
4143 }
4144 #endif
4145
4146 ENTRY_VALUE_EXP (rtl) = incoming;
4147 return rtl;
4148 }
4149
4150 if (incoming
4151 && GET_MODE (incoming) != BLKmode
4152 && !TREE_ADDRESSABLE (decl)
4153 && MEM_P (incoming)
4154 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4155 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4156 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4157 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4158 return copy_rtx (incoming);
4159
4160 return NULL_RTX;
4161 }
4162
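/* For example (a sketch): in

     void f (int x) { x = 0; g (); }

   the incoming value of X in its argument register is dead on entry to
   the body, so the code above wraps DECL_INCOMING_RTL in an ENTRY_VALUE;
   var-tracking can then describe X with a DWARF entry-value location,
   letting the debugger recover the value the caller passed.  */
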
4163 /* Return an RTX equivalent to the value of the tree expression EXP. */
4164
4165 static rtx
4166 expand_debug_expr (tree exp)
4167 {
4168 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4169 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4170 machine_mode inner_mode = VOIDmode;
4171 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4172 addr_space_t as;
4173 scalar_int_mode op0_mode, op1_mode, addr_mode;
4174
4175 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4176 {
4177 case tcc_expression:
4178 switch (TREE_CODE (exp))
4179 {
4180 case COND_EXPR:
4181 case DOT_PROD_EXPR:
4182 case SAD_EXPR:
4183 case WIDEN_MULT_PLUS_EXPR:
4184 case WIDEN_MULT_MINUS_EXPR:
4185 case FMA_EXPR:
4186 goto ternary;
4187
4188 case TRUTH_ANDIF_EXPR:
4189 case TRUTH_ORIF_EXPR:
4190 case TRUTH_AND_EXPR:
4191 case TRUTH_OR_EXPR:
4192 case TRUTH_XOR_EXPR:
4193 goto binary;
4194
4195 case TRUTH_NOT_EXPR:
4196 goto unary;
4197
4198 default:
4199 break;
4200 }
4201 break;
4202
4203 ternary:
4204 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4205 if (!op2)
4206 return NULL_RTX;
4207 /* Fall through. */
4208
4209 binary:
4210 case tcc_binary:
4211 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4212 if (!op1)
4213 return NULL_RTX;
4214 switch (TREE_CODE (exp))
4215 {
4216 case LSHIFT_EXPR:
4217 case RSHIFT_EXPR:
4218 case LROTATE_EXPR:
4219 case RROTATE_EXPR:
4220 case WIDEN_LSHIFT_EXPR:
4221 /* Ensure second operand isn't wider than the first one. */
4222 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4223 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4224 && (GET_MODE_UNIT_PRECISION (mode)
4225 < GET_MODE_PRECISION (op1_mode)))
4226 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4227 break;
4228 default:
4229 break;
4230 }
4231 /* Fall through. */
4232
4233 unary:
4234 case tcc_unary:
4235 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4236 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4237 if (!op0)
4238 return NULL_RTX;
4239 break;
4240
4241 case tcc_comparison:
4242 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4243 goto binary;
4244
4245 case tcc_type:
4246 case tcc_statement:
4247 gcc_unreachable ();
4248
4249 case tcc_constant:
4250 case tcc_exceptional:
4251 case tcc_declaration:
4252 case tcc_reference:
4253 case tcc_vl_exp:
4254 break;
4255 }
4256
4257 switch (TREE_CODE (exp))
4258 {
4259 case STRING_CST:
4260 if (!lookup_constant_def (exp))
4261 {
4262 if (strlen (TREE_STRING_POINTER (exp)) + 1
4263 != (size_t) TREE_STRING_LENGTH (exp))
4264 return NULL_RTX;
4265 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4266 op0 = gen_rtx_MEM (BLKmode, op0);
4267 set_mem_attributes (op0, exp, 0);
4268 return op0;
4269 }
4270 /* Fall through. */
4271
4272 case INTEGER_CST:
4273 case REAL_CST:
4274 case FIXED_CST:
4275 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4276 return op0;
4277
4278 case POLY_INT_CST:
4279 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4280
4281 case COMPLEX_CST:
4282 gcc_assert (COMPLEX_MODE_P (mode));
4283 op0 = expand_debug_expr (TREE_REALPART (exp));
4284 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4285 return gen_rtx_CONCAT (mode, op0, op1);
4286
4287 case DEBUG_EXPR_DECL:
4288 op0 = DECL_RTL_IF_SET (exp);
4289
4290 if (op0)
4291 return op0;
4292
4293 op0 = gen_rtx_DEBUG_EXPR (mode);
4294 DEBUG_EXPR_TREE_DECL (op0) = exp;
4295 SET_DECL_RTL (exp, op0);
4296
4297 return op0;
4298
4299 case VAR_DECL:
4300 case PARM_DECL:
4301 case FUNCTION_DECL:
4302 case LABEL_DECL:
4303 case CONST_DECL:
4304 case RESULT_DECL:
4305 op0 = DECL_RTL_IF_SET (exp);
4306
4307 /* This decl was probably optimized away. */
4308 if (!op0)
4309 {
4310 if (!VAR_P (exp)
4311 || DECL_EXTERNAL (exp)
4312 || !TREE_STATIC (exp)
4313 || !DECL_NAME (exp)
4314 || DECL_HARD_REGISTER (exp)
4315 || DECL_IN_CONSTANT_POOL (exp)
4316 || mode == VOIDmode)
4317 return NULL;
4318
4319 op0 = make_decl_rtl_for_debug (exp);
4320 if (!MEM_P (op0)
4321 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4322 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4323 return NULL;
4324 }
4325 else
4326 op0 = copy_rtx (op0);
4327
4328 if (GET_MODE (op0) == BLKmode
4329 /* If op0 is not BLKmode, but mode is, adjust_mode
4330 below would ICE. While it is likely a FE bug,
4331 try to be robust here. See PR43166. */
4332 || mode == BLKmode
4333 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4334 {
4335 gcc_assert (MEM_P (op0));
4336 op0 = adjust_address_nv (op0, mode, 0);
4337 return op0;
4338 }
4339
4340 /* Fall through. */
4341
4342 adjust_mode:
4343 case PAREN_EXPR:
4344 CASE_CONVERT:
4345 {
4346 inner_mode = GET_MODE (op0);
4347
4348 if (mode == inner_mode)
4349 return op0;
4350
4351 if (inner_mode == VOIDmode)
4352 {
4353 if (TREE_CODE (exp) == SSA_NAME)
4354 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4355 else
4356 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4357 if (mode == inner_mode)
4358 return op0;
4359 }
4360
4361 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4362 {
4363 if (GET_MODE_UNIT_BITSIZE (mode)
4364 == GET_MODE_UNIT_BITSIZE (inner_mode))
4365 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4366 else if (GET_MODE_UNIT_BITSIZE (mode)
4367 < GET_MODE_UNIT_BITSIZE (inner_mode))
4368 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4369 else
4370 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4371 }
4372 else if (FLOAT_MODE_P (mode))
4373 {
4374 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4375 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4376 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4377 else
4378 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4379 }
4380 else if (FLOAT_MODE_P (inner_mode))
4381 {
4382 if (unsignedp)
4383 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4384 else
4385 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4386 }
4387 else if (GET_MODE_UNIT_PRECISION (mode)
4388 == GET_MODE_UNIT_PRECISION (inner_mode))
4389 op0 = lowpart_subreg (mode, op0, inner_mode);
4390 else if (GET_MODE_UNIT_PRECISION (mode)
4391 < GET_MODE_UNIT_PRECISION (inner_mode))
4392 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4393 else if (UNARY_CLASS_P (exp)
4394 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4395 : unsignedp)
4396 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4397 else
4398 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4399
4400 return op0;
4401 }
4402
4403 case MEM_REF:
4404 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4405 {
4406 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4407 TREE_OPERAND (exp, 0),
4408 TREE_OPERAND (exp, 1));
4409 if (newexp)
4410 return expand_debug_expr (newexp);
4411 }
4412 /* FALLTHROUGH */
4413 case INDIRECT_REF:
4414 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4415 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4416 if (!op0)
4417 return NULL;
4418
4419 if (TREE_CODE (exp) == MEM_REF)
4420 {
4421 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4422 || (GET_CODE (op0) == PLUS
4423 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4424 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4425 Instead just use get_inner_reference. */
4426 goto component_ref;
4427
4428 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4429 if (!op1 || !CONST_INT_P (op1))
4430 return NULL;
4431
4432 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4433 }
4434
4435 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4436
4437 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4438 op0, as);
4439 if (op0 == NULL_RTX)
4440 return NULL;
4441
4442 op0 = gen_rtx_MEM (mode, op0);
4443 set_mem_attributes (op0, exp, 0);
4444 if (TREE_CODE (exp) == MEM_REF
4445 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4446 set_mem_expr (op0, NULL_TREE);
4447 set_mem_addr_space (op0, as);
4448
4449 return op0;
4450
4451 case TARGET_MEM_REF:
4452 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4453 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4454 return NULL;
4455
4456 op0 = expand_debug_expr
4457 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4458 if (!op0)
4459 return NULL;
4460
4461 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4462 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4463 op0, as);
4464 if (op0 == NULL_RTX)
4465 return NULL;
4466
4467 op0 = gen_rtx_MEM (mode, op0);
4468
4469 set_mem_attributes (op0, exp, 0);
4470 set_mem_addr_space (op0, as);
4471
4472 return op0;
4473
4474 component_ref:
4475 case ARRAY_REF:
4476 case ARRAY_RANGE_REF:
4477 case COMPONENT_REF:
4478 case BIT_FIELD_REF:
4479 case REALPART_EXPR:
4480 case IMAGPART_EXPR:
4481 case VIEW_CONVERT_EXPR:
4482 {
4483 machine_mode mode1;
4484 poly_int64 bitsize, bitpos;
4485 tree offset;
4486 int reversep, volatilep = 0;
4487 tree tem
4488 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4489 &unsignedp, &reversep, &volatilep);
4490 rtx orig_op0;
4491
4492 if (known_eq (bitsize, 0))
4493 return NULL;
4494
4495 orig_op0 = op0 = expand_debug_expr (tem);
4496
4497 if (!op0)
4498 return NULL;
4499
4500 if (offset)
4501 {
4502 machine_mode addrmode, offmode;
4503
4504 if (!MEM_P (op0))
4505 return NULL;
4506
4507 op0 = XEXP (op0, 0);
4508 addrmode = GET_MODE (op0);
4509 if (addrmode == VOIDmode)
4510 addrmode = Pmode;
4511
4512 op1 = expand_debug_expr (offset);
4513 if (!op1)
4514 return NULL;
4515
4516 offmode = GET_MODE (op1);
4517 if (offmode == VOIDmode)
4518 offmode = TYPE_MODE (TREE_TYPE (offset));
4519
4520 if (addrmode != offmode)
4521 op1 = lowpart_subreg (addrmode, op1, offmode);
4522
4523 /* Don't use offset_address here; we don't need a
4524 recognizable address, and we don't want to generate
4525 code. */
4526 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4527 op0, op1));
4528 }
4529
4530 if (MEM_P (op0))
4531 {
4532 if (mode1 == VOIDmode)
4533 /* Bitfield. */
4534 mode1 = smallest_int_mode_for_size (bitsize);
4535 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4536 if (maybe_ne (bytepos, 0))
4537 {
4538 op0 = adjust_address_nv (op0, mode1, bytepos);
4539 bitpos = num_trailing_bits (bitpos);
4540 }
4541 else if (known_eq (bitpos, 0)
4542 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4543 op0 = adjust_address_nv (op0, mode, 0);
4544 else if (GET_MODE (op0) != mode1)
4545 op0 = adjust_address_nv (op0, mode1, 0);
4546 else
4547 op0 = copy_rtx (op0);
4548 if (op0 == orig_op0)
4549 op0 = shallow_copy_rtx (op0);
4550 set_mem_attributes (op0, exp, 0);
4551 }
4552
4553 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4554 return op0;
4555
4556 if (maybe_lt (bitpos, 0))
4557 return NULL;
4558
4559 if (GET_MODE (op0) == BLKmode)
4560 return NULL;
4561
4562 poly_int64 bytepos;
4563 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4564 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4565 {
4566 machine_mode opmode = GET_MODE (op0);
4567
4568 if (opmode == VOIDmode)
4569 opmode = TYPE_MODE (TREE_TYPE (tem));
4570
4571 /* This condition may hold if we're expanding the address
4572 right past the end of an array that turned out not to
4573 be addressable (i.e., the address was only computed in
4574 debug stmts). The gen_subreg below would rightfully
4575 crash, and the address doesn't really exist, so just
4576 drop it. */
4577 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4578 return NULL;
4579
4580 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4581 return simplify_gen_subreg (mode, op0, opmode, bytepos);
4582 }
4583
4584 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4585 && TYPE_UNSIGNED (TREE_TYPE (exp))
4586 ? SIGN_EXTRACT
4587 : ZERO_EXTRACT, mode,
4588 GET_MODE (op0) != VOIDmode
4589 ? GET_MODE (op0)
4590 : TYPE_MODE (TREE_TYPE (tem)),
4591 op0, gen_int_mode (bitsize, word_mode),
4592 gen_int_mode (bitpos, word_mode));
4593 }
4594
4595 case ABS_EXPR:
4596 return simplify_gen_unary (ABS, mode, op0, mode);
4597
4598 case NEGATE_EXPR:
4599 return simplify_gen_unary (NEG, mode, op0, mode);
4600
4601 case BIT_NOT_EXPR:
4602 return simplify_gen_unary (NOT, mode, op0, mode);
4603
4604 case FLOAT_EXPR:
4605 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4606 0)))
4607 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4608 inner_mode);
4609
4610 case FIX_TRUNC_EXPR:
4611 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4612 inner_mode);
4613
4614 case POINTER_PLUS_EXPR:
4615 /* For the rare target where pointers are not the same size as
4616 size_t, we need to check for mismatched modes and correct
4617 the addend. */
4618 if (op0 && op1
4619 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4620 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4621 && op0_mode != op1_mode)
4622 {
4623 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4624 /* If OP0 is a partial mode, then we must truncate, even
4625 if it has the same bitsize as OP1, since GCC's
4626 representation of partial modes is opaque. */
4627 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4628 && (GET_MODE_BITSIZE (op0_mode)
4629 == GET_MODE_BITSIZE (op1_mode))))
4630 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4631 else
4632 /* We always sign-extend, regardless of the signedness of
4633 the operand, because the operand is always unsigned
4634 here even if the original C expression is signed. */
4635 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4636 }
4637 /* Fall through. */
4638 case PLUS_EXPR:
4639 return simplify_gen_binary (PLUS, mode, op0, op1);
4640
4641 case MINUS_EXPR:
4642 case POINTER_DIFF_EXPR:
4643 return simplify_gen_binary (MINUS, mode, op0, op1);
4644
4645 case MULT_EXPR:
4646 return simplify_gen_binary (MULT, mode, op0, op1);
4647
4648 case RDIV_EXPR:
4649 case TRUNC_DIV_EXPR:
4650 case EXACT_DIV_EXPR:
4651 if (unsignedp)
4652 return simplify_gen_binary (UDIV, mode, op0, op1);
4653 else
4654 return simplify_gen_binary (DIV, mode, op0, op1);
4655
4656 case TRUNC_MOD_EXPR:
4657 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4658
4659 case FLOOR_DIV_EXPR:
4660 if (unsignedp)
4661 return simplify_gen_binary (UDIV, mode, op0, op1);
4662 else
4663 {
4664 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4665 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4666 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4667 return simplify_gen_binary (PLUS, mode, div, adj);
4668 }
4669
4670 case FLOOR_MOD_EXPR:
4671 if (unsignedp)
4672 return simplify_gen_binary (UMOD, mode, op0, op1);
4673 else
4674 {
4675 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4676 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4677 adj = simplify_gen_unary (NEG, mode,
4678 simplify_gen_binary (MULT, mode, adj, op1),
4679 mode);
4680 return simplify_gen_binary (PLUS, mode, mod, adj);
4681 }
4682
4683 case CEIL_DIV_EXPR:
4684 if (unsignedp)
4685 {
4686 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4687 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4688 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4689 return simplify_gen_binary (PLUS, mode, div, adj);
4690 }
4691 else
4692 {
4693 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4694 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4695 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4696 return simplify_gen_binary (PLUS, mode, div, adj);
4697 }
4698
4699 case CEIL_MOD_EXPR:
4700 if (unsignedp)
4701 {
4702 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4703 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4704 adj = simplify_gen_unary (NEG, mode,
4705 simplify_gen_binary (MULT, mode, adj, op1),
4706 mode);
4707 return simplify_gen_binary (PLUS, mode, mod, adj);
4708 }
4709 else
4710 {
4711 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4712 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4713 adj = simplify_gen_unary (NEG, mode,
4714 simplify_gen_binary (MULT, mode, adj, op1),
4715 mode);
4716 return simplify_gen_binary (PLUS, mode, mod, adj);
4717 }
4718
4719 case ROUND_DIV_EXPR:
4720 if (unsignedp)
4721 {
4722 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4723 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4724 rtx adj = round_udiv_adjust (mode, mod, op1);
4725 return simplify_gen_binary (PLUS, mode, div, adj);
4726 }
4727 else
4728 {
4729 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4730 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4731 rtx adj = round_sdiv_adjust (mode, mod, op1);
4732 return simplify_gen_binary (PLUS, mode, div, adj);
4733 }
4734
4735 case ROUND_MOD_EXPR:
4736 if (unsignedp)
4737 {
4738 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4739 rtx adj = round_udiv_adjust (mode, mod, op1);
4740 adj = simplify_gen_unary (NEG, mode,
4741 simplify_gen_binary (MULT, mode, adj, op1),
4742 mode);
4743 return simplify_gen_binary (PLUS, mode, mod, adj);
4744 }
4745 else
4746 {
4747 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4748 rtx adj = round_sdiv_adjust (mode, mod, op1);
4749 adj = simplify_gen_unary (NEG, mode,
4750 simplify_gen_binary (MULT, mode, adj, op1),
4751 mode);
4752 return simplify_gen_binary (PLUS, mode, mod, adj);
4753 }
4754
4755 case LSHIFT_EXPR:
4756 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4757
4758 case RSHIFT_EXPR:
4759 if (unsignedp)
4760 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4761 else
4762 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4763
4764 case LROTATE_EXPR:
4765 return simplify_gen_binary (ROTATE, mode, op0, op1);
4766
4767 case RROTATE_EXPR:
4768 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4769
4770 case MIN_EXPR:
4771 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4772
4773 case MAX_EXPR:
4774 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4775
4776 case BIT_AND_EXPR:
4777 case TRUTH_AND_EXPR:
4778 return simplify_gen_binary (AND, mode, op0, op1);
4779
4780 case BIT_IOR_EXPR:
4781 case TRUTH_OR_EXPR:
4782 return simplify_gen_binary (IOR, mode, op0, op1);
4783
4784 case BIT_XOR_EXPR:
4785 case TRUTH_XOR_EXPR:
4786 return simplify_gen_binary (XOR, mode, op0, op1);
4787
4788 case TRUTH_ANDIF_EXPR:
4789 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4790
4791 case TRUTH_ORIF_EXPR:
4792 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4793
4794 case TRUTH_NOT_EXPR:
4795 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4796
4797 case LT_EXPR:
4798 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4799 op0, op1);
4800
4801 case LE_EXPR:
4802 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4803 op0, op1);
4804
4805 case GT_EXPR:
4806 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4807 op0, op1);
4808
4809 case GE_EXPR:
4810 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4811 op0, op1);
4812
4813 case EQ_EXPR:
4814 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4815
4816 case NE_EXPR:
4817 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4818
4819 case UNORDERED_EXPR:
4820 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4821
4822 case ORDERED_EXPR:
4823 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4824
4825 case UNLT_EXPR:
4826 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4827
4828 case UNLE_EXPR:
4829 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4830
4831 case UNGT_EXPR:
4832 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4833
4834 case UNGE_EXPR:
4835 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4836
4837 case UNEQ_EXPR:
4838 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4839
4840 case LTGT_EXPR:
4841 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4842
4843 case COND_EXPR:
4844 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4845
4846 case COMPLEX_EXPR:
4847 gcc_assert (COMPLEX_MODE_P (mode));
4848 if (GET_MODE (op0) == VOIDmode)
4849 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4850 if (GET_MODE (op1) == VOIDmode)
4851 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4852 return gen_rtx_CONCAT (mode, op0, op1);
4853
4854 case CONJ_EXPR:
4855 if (GET_CODE (op0) == CONCAT)
4856 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4857 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4858 XEXP (op0, 1),
4859 GET_MODE_INNER (mode)));
4860 else
4861 {
4862 scalar_mode imode = GET_MODE_INNER (mode);
4863 rtx re, im;
4864
4865 if (MEM_P (op0))
4866 {
4867 re = adjust_address_nv (op0, imode, 0);
4868 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4869 }
4870 else
4871 {
4872 scalar_int_mode ifmode;
4873 scalar_int_mode ihmode;
4874 rtx halfsize;
4875 if (!int_mode_for_mode (mode).exists (&ifmode)
4876 || !int_mode_for_mode (imode).exists (&ihmode))
4877 return NULL;
4878 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4879 re = op0;
4880 if (mode != ifmode)
4881 re = gen_rtx_SUBREG (ifmode, re, 0);
4882 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4883 if (imode != ihmode)
4884 re = gen_rtx_SUBREG (imode, re, 0);
4885 im = copy_rtx (op0);
4886 if (mode != ifmode)
4887 im = gen_rtx_SUBREG (ifmode, im, 0);
4888 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4889 if (imode != ihmode)
4890 im = gen_rtx_SUBREG (imode, im, 0);
4891 }
4892 im = gen_rtx_NEG (imode, im);
4893 return gen_rtx_CONCAT (mode, re, im);
4894 }
4895
4896 case ADDR_EXPR:
4897 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4898 if (!op0 || !MEM_P (op0))
4899 {
4900 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4901 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4902 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4903 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4904 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4905 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4906
4907 if (handled_component_p (TREE_OPERAND (exp, 0)))
4908 {
4909 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4910 bool reverse;
4911 tree decl
4912 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4913 &bitsize, &maxsize, &reverse);
4914 if ((VAR_P (decl)
4915 || TREE_CODE (decl) == PARM_DECL
4916 || TREE_CODE (decl) == RESULT_DECL)
4917 && (!TREE_ADDRESSABLE (decl)
4918 || target_for_debug_bind (decl))
4919 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4920 && known_gt (bitsize, 0)
4921 && known_eq (bitsize, maxsize))
4922 {
4923 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4924 return plus_constant (mode, base, byteoffset);
4925 }
4926 }
4927
4928 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4929 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4930 == ADDR_EXPR)
4931 {
4932 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4933 0));
4934 if (op0 != NULL
4935 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4936 || (GET_CODE (op0) == PLUS
4937 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4938 && CONST_INT_P (XEXP (op0, 1)))))
4939 {
4940 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4941 1));
4942 if (!op1 || !CONST_INT_P (op1))
4943 return NULL;
4944
4945 return plus_constant (mode, op0, INTVAL (op1));
4946 }
4947 }
4948
4949 return NULL;
4950 }
4951
4952 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4953 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4954 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
4955
4956 return op0;
4957
4958 case VECTOR_CST:
4959 {
4960 unsigned i, nelts;
4961
4962 nelts = VECTOR_CST_NELTS (exp);
4963 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4964
4965 for (i = 0; i < nelts; ++i)
4966 {
4967 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4968 if (!op1)
4969 return NULL;
4970 XVECEXP (op0, 0, i) = op1;
4971 }
4972
4973 return op0;
4974 }
4975
4976 case CONSTRUCTOR:
4977 if (TREE_CLOBBER_P (exp))
4978 return NULL;
4979 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4980 {
4981 unsigned i;
4982 tree val;
4983
4984 op0 = gen_rtx_CONCATN
4985 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4986
4987 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4988 {
4989 op1 = expand_debug_expr (val);
4990 if (!op1)
4991 return NULL;
4992 XVECEXP (op0, 0, i) = op1;
4993 }
4994
4995 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4996 {
4997 op1 = expand_debug_expr
4998 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4999
5000 if (!op1)
5001 return NULL;
5002
5003 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
5004 XVECEXP (op0, 0, i) = op1;
5005 }
5006
5007 return op0;
5008 }
5009 else
5010 goto flag_unsupported;
5011
5012 case CALL_EXPR:
5013 /* ??? Maybe handle some builtins? */
5014 return NULL;
5015
5016 case SSA_NAME:
5017 {
5018 gimple *g = get_gimple_for_ssa_name (exp);
5019 if (g)
5020 {
5021 tree t = NULL_TREE;
5022 if (deep_ter_debug_map)
5023 {
5024 tree *slot = deep_ter_debug_map->get (exp);
5025 if (slot)
5026 t = *slot;
5027 }
5028 if (t == NULL_TREE)
5029 t = gimple_assign_rhs_to_tree (g);
5030 op0 = expand_debug_expr (t);
5031 if (!op0)
5032 return NULL;
5033 }
5034 else
5035 {
5036 /* If this is a reference to the incoming value of a
5037 parameter that is never used in the code, or whose
5038 incoming value is never used in the code, use the
5039 PARM_DECL's DECL_RTL if set. */
5040 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5041 && SSA_NAME_VAR (exp)
5042 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5043 && has_zero_uses (exp))
5044 {
5045 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5046 if (op0)
5047 goto adjust_mode;
5048 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5049 if (op0)
5050 goto adjust_mode;
5051 }
5052
5053 int part = var_to_partition (SA.map, exp);
5054
5055 if (part == NO_PARTITION)
5056 return NULL;
5057
5058 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5059
5060 op0 = copy_rtx (SA.partition_to_pseudo[part]);
5061 }
5062 goto adjust_mode;
5063 }
5064
5065 case ERROR_MARK:
5066 return NULL;
5067
5068 /* Vector stuff. For most of these tree codes there are no rtl codes. */
5069 case REALIGN_LOAD_EXPR:
5070 case VEC_COND_EXPR:
5071 case VEC_PACK_FIX_TRUNC_EXPR:
5072 case VEC_PACK_SAT_EXPR:
5073 case VEC_PACK_TRUNC_EXPR:
5074 case VEC_UNPACK_FLOAT_HI_EXPR:
5075 case VEC_UNPACK_FLOAT_LO_EXPR:
5076 case VEC_UNPACK_HI_EXPR:
5077 case VEC_UNPACK_LO_EXPR:
5078 case VEC_WIDEN_MULT_HI_EXPR:
5079 case VEC_WIDEN_MULT_LO_EXPR:
5080 case VEC_WIDEN_MULT_EVEN_EXPR:
5081 case VEC_WIDEN_MULT_ODD_EXPR:
5082 case VEC_WIDEN_LSHIFT_HI_EXPR:
5083 case VEC_WIDEN_LSHIFT_LO_EXPR:
5084 case VEC_PERM_EXPR:
5085 case VEC_DUPLICATE_EXPR:
5086 case VEC_SERIES_EXPR:
5087 return NULL;
5088
5089 /* Misc codes. */
5090 case ADDR_SPACE_CONVERT_EXPR:
5091 case FIXED_CONVERT_EXPR:
5092 case OBJ_TYPE_REF:
5093 case WITH_SIZE_EXPR:
5094 case BIT_INSERT_EXPR:
5095 return NULL;
5096
5097 case DOT_PROD_EXPR:
5098 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5099 && SCALAR_INT_MODE_P (mode))
5100 {
5101 op0
5102 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5103 0)))
5104 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5105 inner_mode);
5106 op1
5107 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5108 1)))
5109 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5110 inner_mode);
5111 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5112 return simplify_gen_binary (PLUS, mode, op0, op2);
5113 }
5114 return NULL;
5115
5116 case WIDEN_MULT_EXPR:
5117 case WIDEN_MULT_PLUS_EXPR:
5118 case WIDEN_MULT_MINUS_EXPR:
5119 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5120 && SCALAR_INT_MODE_P (mode))
5121 {
5122 inner_mode = GET_MODE (op0);
5123 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5124 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5125 else
5126 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5127 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5128 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5129 else
5130 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5131 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5132 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5133 return op0;
5134 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5135 return simplify_gen_binary (PLUS, mode, op0, op2);
5136 else
5137 return simplify_gen_binary (MINUS, mode, op2, op0);
5138 }
5139 return NULL;
5140
5141 case MULT_HIGHPART_EXPR:
5142 /* ??? Similar to the above. */
5143 return NULL;
5144
5145 case WIDEN_SUM_EXPR:
5146 case WIDEN_LSHIFT_EXPR:
5147 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5148 && SCALAR_INT_MODE_P (mode))
5149 {
5150 op0
5151 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5152 0)))
5153 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5154 inner_mode);
5155 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5156 ? ASHIFT : PLUS, mode, op0, op1);
5157 }
5158 return NULL;
5159
5160 case FMA_EXPR:
5161 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5162
5163 default:
5164 flag_unsupported:
5165 if (flag_checking)
5166 {
5167 debug_tree (exp);
5168 gcc_unreachable ();
5169 }
5170 return NULL;
5171 }
5172 }
5173
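/* A small end-to-end illustration (a sketch, not actual dump output):
   for a debug bind whose value is the C expression a + b * 3 with int
   operands, the recursion above yields something like

     (plus:SI (mult:SI (reg:SI <b>) (const_int 3)) (reg:SI <a>))

   built entirely with simplify_gen_* helpers so that no insns are
   emitted.  If any subexpression cannot be represented, the whole
   expansion returns NULL and the variable's location becomes unknown
   at this point.  */
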
5174 /* Return an RTX equivalent to the source bind value of the tree expression
5175 EXP. */
5176
5177 static rtx
5178 expand_debug_source_expr (tree exp)
5179 {
5180 rtx op0 = NULL_RTX;
5181 machine_mode mode = VOIDmode, inner_mode;
5182
5183 switch (TREE_CODE (exp))
5184 {
5185 case PARM_DECL:
5186 {
5187 mode = DECL_MODE (exp);
5188 op0 = expand_debug_parm_decl (exp);
5189 if (op0)
5190 break;
5191 /* Check whether this is an argument that has been completely
5192 optimized out. */
5193 if (!DECL_RTL_SET_P (exp)
5194 && !DECL_INCOMING_RTL (exp)
5195 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5196 {
5197 tree aexp = DECL_ORIGIN (exp);
5198 if (DECL_CONTEXT (aexp)
5199 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5200 {
5201 vec<tree, va_gc> **debug_args;
5202 unsigned int ix;
5203 tree ddecl;
5204 debug_args = decl_debug_args_lookup (current_function_decl);
5205 if (debug_args != NULL)
5206 {
5207 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5208 ix += 2)
5209 if (ddecl == aexp)
5210 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5211 }
5212 }
5213 }
5214 break;
5215 }
5216 default:
5217 break;
5218 }
5219
5220 if (op0 == NULL_RTX)
5221 return NULL_RTX;
5222
5223 inner_mode = GET_MODE (op0);
5224 if (mode == inner_mode)
5225 return op0;
5226
5227 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5228 {
5229 if (GET_MODE_UNIT_BITSIZE (mode)
5230 == GET_MODE_UNIT_BITSIZE (inner_mode))
5231 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5232 else if (GET_MODE_UNIT_BITSIZE (mode)
5233 < GET_MODE_UNIT_BITSIZE (inner_mode))
5234 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5235 else
5236 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5237 }
5238 else if (FLOAT_MODE_P (mode))
5239 gcc_unreachable ();
5240 else if (FLOAT_MODE_P (inner_mode))
5241 {
5242 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5243 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5244 else
5245 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5246 }
5247 else if (GET_MODE_UNIT_PRECISION (mode)
5248 == GET_MODE_UNIT_PRECISION (inner_mode))
5249 op0 = lowpart_subreg (mode, op0, inner_mode);
5250 else if (GET_MODE_UNIT_PRECISION (mode)
5251 < GET_MODE_UNIT_PRECISION (inner_mode))
5252 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5253 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5254 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5255 else
5256 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5257
5258 return op0;
5259 }
5260
5261 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5262 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5263 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5264
5265 static void
5266 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5267 {
5268 rtx exp = *exp_p;
5269
5270 if (exp == NULL_RTX)
5271 return;
5272
5273 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5274 return;
5275
5276 if (depth == 4)
5277 {
5278 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5279 rtx dval = make_debug_expr_from_rtl (exp);
5280
5281 /* Emit a debug bind insn before INSN. */
5282 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5283 DEBUG_EXPR_TREE_DECL (dval), exp,
5284 VAR_INIT_STATUS_INITIALIZED);
5285
5286 emit_debug_insn_before (bind, insn);
5287 *exp_p = dval;
5288 return;
5289 }
5290
5291 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5292 int i, j;
5293 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5294 switch (*format_ptr++)
5295 {
5296 case 'e':
5297 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5298 break;
5299
5300 case 'E':
5301 case 'V':
5302 for (j = 0; j < XVECLEN (exp, i); j++)
5303 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5304 break;
5305
5306 default:
5307 break;
5308 }
5309 }
5310
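/* For instance (a sketch): a location with composite operations nested
   five levels deep, such as

     (plus (mult (plus (mult (plus (reg) (reg))
                             (reg))
                       (reg))
                 (reg))
           (reg))

   has its innermost PLUS reached at depth 4; that subexpression is
   replaced by a DEBUG_EXPR whose value is defined by a separate debug
   bind insn emitted before INSN, so no single VAR_LOCATION ends up
   with unbounded nesting.  */
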
5311 /* Expand the _LOCs in debug insns. We run this after expanding all
5312 regular insns, so that any variables referenced in the function
5313 will have their DECL_RTLs set. */
5314
5315 static void
5316 expand_debug_locations (void)
5317 {
5318 rtx_insn *insn;
5319 rtx_insn *last = get_last_insn ();
5320 int save_strict_alias = flag_strict_aliasing;
5321
5322 /* New alias sets while setting up memory attributes cause
5323 -fcompare-debug failures, even though they don't bring about any
5324 codegen changes. */
5325 flag_strict_aliasing = 0;
5326
5327 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5328 if (DEBUG_BIND_INSN_P (insn))
5329 {
5330 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5331 rtx val;
5332 rtx_insn *prev_insn, *insn2;
5333 machine_mode mode;
5334
5335 if (value == NULL_TREE)
5336 val = NULL_RTX;
5337 else
5338 {
5339 if (INSN_VAR_LOCATION_STATUS (insn)
5340 == VAR_INIT_STATUS_UNINITIALIZED)
5341 val = expand_debug_source_expr (value);
5342 /* The avoid_deep_ter_for_debug function inserts
5343 debug bind stmts after SSA_NAME definition, with the
5344 SSA_NAME as the whole bind location. Temporarily disable
5345 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5346 being defined in this DEBUG_INSN. */
5347 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5348 {
5349 tree *slot = deep_ter_debug_map->get (value);
5350 if (slot)
5351 {
5352 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5353 *slot = NULL_TREE;
5354 else
5355 slot = NULL;
5356 }
5357 val = expand_debug_expr (value);
5358 if (slot)
5359 *slot = INSN_VAR_LOCATION_DECL (insn);
5360 }
5361 else
5362 val = expand_debug_expr (value);
5363 gcc_assert (last == get_last_insn ());
5364 }
5365
5366 if (!val)
5367 val = gen_rtx_UNKNOWN_VAR_LOC ();
5368 else
5369 {
5370 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5371
5372 gcc_assert (mode == GET_MODE (val)
5373 || (GET_MODE (val) == VOIDmode
5374 && (CONST_SCALAR_INT_P (val)
5375 || GET_CODE (val) == CONST_FIXED
5376 || GET_CODE (val) == LABEL_REF)));
5377 }
5378
5379 INSN_VAR_LOCATION_LOC (insn) = val;
5380 prev_insn = PREV_INSN (insn);
5381 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5382 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5383 }
5384
5385 flag_strict_aliasing = save_strict_alias;
5386 }
5387
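/* For example, a gimple bind  # DEBUG x => a_1 + 1  reaches here as a
   DEBUG_INSN whose VAR_LOCATION still holds the tree a_1 + 1; the loop
   above replaces it with the RTL from expand_debug_expr, or, when the
   value cannot be represented, with gen_rtx_UNKNOWN_VAR_LOC (), i.e. a
   (clobber (const_int 0)) placeholder meaning "location unknown".  */
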
5388 /* Swap the operands of commutative operations so that the more
5389 expensive one is expanded first. */
5390
5391 static void
5392 reorder_operands (basic_block bb)
5393 {
5394 unsigned int *lattice; /* Holds the cost of each statement. */
5395 unsigned int i = 0, n = 0;
5396 gimple_stmt_iterator gsi;
5397 gimple_seq stmts;
5398 gimple *stmt;
5399 bool swap;
5400 tree op0, op1;
5401 ssa_op_iter iter;
5402 use_operand_p use_p;
5403 gimple *def0, *def1;
5404
5405 /* Compute cost of each statement using estimate_num_insns. */
5406 stmts = bb_seq (bb);
5407 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5408 {
5409 stmt = gsi_stmt (gsi);
5410 if (!is_gimple_debug (stmt))
5411 gimple_set_uid (stmt, n++);
5412 }
5413 lattice = XNEWVEC (unsigned int, n);
5414 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5415 {
5416 unsigned cost;
5417 stmt = gsi_stmt (gsi);
5418 if (is_gimple_debug (stmt))
5419 continue;
5420 cost = estimate_num_insns (stmt, &eni_size_weights);
5421 lattice[i] = cost;
5422 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5423 {
5424 tree use = USE_FROM_PTR (use_p);
5425 gimple *def_stmt;
5426 if (TREE_CODE (use) != SSA_NAME)
5427 continue;
5428 def_stmt = get_gimple_for_ssa_name (use);
5429 if (!def_stmt)
5430 continue;
5431 lattice[i] += lattice[gimple_uid (def_stmt)];
5432 }
5433 i++;
5434 if (!is_gimple_assign (stmt)
5435 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5436 continue;
5437 op0 = gimple_op (stmt, 1);
5438 op1 = gimple_op (stmt, 2);
5439 if (TREE_CODE (op0) != SSA_NAME
5440 || TREE_CODE (op1) != SSA_NAME)
5441 continue;
5442 /* Swap operands if the second one is more expensive. */
5443 def0 = get_gimple_for_ssa_name (op0);
5444 def1 = get_gimple_for_ssa_name (op1);
5445 if (!def1)
5446 continue;
5447 swap = false;
5448 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5449 swap = true;
5450 if (swap)
5451 {
5452 if (dump_file && (dump_flags & TDF_DETAILS))
5453 {
5454 fprintf (dump_file, "Swap operands in stmt:\n");
5455 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5456 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5457 def0 ? lattice[gimple_uid (def0)] : 0,
5458 lattice[gimple_uid (def1)]);
5459 }
5460 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5461 gimple_assign_rhs2_ptr (stmt));
5462 }
5463 }
5464 XDELETE (lattice);
5465 }
5466
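/* A sketch of the effect: given the TERed statements

     t1_1 = a_2 * b_3;
     t2_4 = c_5 * d_6;
     t3_7 = t2_4 * e_8;
     r_9 = t1_1 + t3_7;

   the lattice cost of t3_7 includes that of t2_4, so it exceeds the
   cost of t1_1 and the addition is rewritten as r_9 = t3_7 + t1_1,
   making the deeper (more expensive) subtree the first operand to be
   expanded.  */
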
5467 /* Expand basic block BB from GIMPLE trees to RTL. */
5468
5469 static basic_block
5470 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5471 {
5472 gimple_stmt_iterator gsi;
5473 gimple_seq stmts;
5474 gimple *stmt = NULL;
5475 rtx_note *note = NULL;
5476 rtx_insn *last;
5477 edge e;
5478 edge_iterator ei;
5479
5480 if (dump_file)
5481 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5482 bb->index);
5483
5484 /* Note that since we are now transitioning from GIMPLE to RTL, we
5485 cannot use the gsi_*_bb() routines because they expect the basic
5486 block to be in GIMPLE, instead of RTL. Therefore, we need to
5487 access the BB sequence directly. */
5488 if (optimize)
5489 reorder_operands (bb);
5490 stmts = bb_seq (bb);
5491 bb->il.gimple.seq = NULL;
5492 bb->il.gimple.phi_nodes = NULL;
5493 rtl_profile_for_bb (bb);
5494 init_rtl_bb_info (bb);
5495 bb->flags |= BB_RTL;
5496
5497 /* Remove the RETURN_EXPR if we may fall through to the exit
5498 instead. */
5499 gsi = gsi_last (stmts);
5500 if (!gsi_end_p (gsi)
5501 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5502 {
5503 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5504
5505 gcc_assert (single_succ_p (bb));
5506 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5507
5508 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5509 && !gimple_return_retval (ret_stmt))
5510 {
5511 gsi_remove (&gsi, false);
5512 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5513 }
5514 }
5515
5516 gsi = gsi_start (stmts);
5517 if (!gsi_end_p (gsi))
5518 {
5519 stmt = gsi_stmt (gsi);
5520 if (gimple_code (stmt) != GIMPLE_LABEL)
5521 stmt = NULL;
5522 }
5523
5524 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5525
5526 if (stmt || elt)
5527 {
5528 gcc_checking_assert (!note);
5529 last = get_last_insn ();
5530
5531 if (stmt)
5532 {
5533 expand_gimple_stmt (stmt);
5534 gsi_next (&gsi);
5535 }
5536
5537 if (elt)
5538 emit_label (*elt);
5539
5540 BB_HEAD (bb) = NEXT_INSN (last);
5541 if (NOTE_P (BB_HEAD (bb)))
5542 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5543 gcc_assert (LABEL_P (BB_HEAD (bb)));
5544 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5545
5546 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5547 }
5548 else
5549 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5550
5551 if (note)
5552 NOTE_BASIC_BLOCK (note) = bb;
5553
5554 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5555 {
5556 basic_block new_bb;
5557
5558 stmt = gsi_stmt (gsi);
5559
5560 /* If this statement is a non-debug one, and we generate debug
5561 insns, then this one might be the last real use of a TERed
5562 SSA_NAME, but where there are still some debug uses further
5563 down. Expanding the current SSA name in such further debug
5564 uses by its RHS might lead to wrong debug info, as coalescing
5565 might make the operands of such RHS be placed into the same
5566 pseudo as something else. Like so:
5567 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5568 use(a_1);
5569 a_2 = ...
5570 #DEBUG ... => a_1
5571 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5572 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5573 the write to a_2 would actually have clobbered the place which
5574 formerly held a_0.
5575
5576 So, instead of that, we recognize the situation, and generate
5577 debug temporaries at the last real use of TERed SSA names:
5578 a_1 = a_0 + 1;
5579 #DEBUG #D1 => a_1
5580 use(a_1);
5581 a_2 = ...
5582 #DEBUG ... => #D1
5583 */
5584 if (MAY_HAVE_DEBUG_BIND_INSNS
5585 && SA.values
5586 && !is_gimple_debug (stmt))
5587 {
5588 ssa_op_iter iter;
5589 tree op;
5590 gimple *def;
5591
5592 location_t sloc = curr_insn_location ();
5593
5594 /* Look for SSA names that have their last use here (TERed
5595 names always have only one real use). */
5596 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5597 if ((def = get_gimple_for_ssa_name (op)))
5598 {
5599 imm_use_iterator imm_iter;
5600 use_operand_p use_p;
5601 bool have_debug_uses = false;
5602
5603 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5604 {
5605 if (gimple_debug_bind_p (USE_STMT (use_p)))
5606 {
5607 have_debug_uses = true;
5608 break;
5609 }
5610 }
5611
5612 if (have_debug_uses)
5613 {
5614 /* OP is a TERed SSA name, with DEF its defining
5615 statement, and where OP is used in further debug
5616 instructions. Generate a debug temporary, and
5617 replace all uses of OP in debug insns with that
5618 temporary. */
5619 gimple *debugstmt;
5620 tree value = gimple_assign_rhs_to_tree (def);
5621 tree vexpr = make_node (DEBUG_EXPR_DECL);
5622 rtx val;
5623 machine_mode mode;
5624
5625 set_curr_insn_location (gimple_location (def));
5626
5627 DECL_ARTIFICIAL (vexpr) = 1;
5628 TREE_TYPE (vexpr) = TREE_TYPE (value);
5629 if (DECL_P (value))
5630 mode = DECL_MODE (value);
5631 else
5632 mode = TYPE_MODE (TREE_TYPE (value));
5633 SET_DECL_MODE (vexpr, mode);
5634
5635 val = gen_rtx_VAR_LOCATION
5636 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5637
5638 emit_debug_insn (val);
5639
5640 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5641 {
5642 if (!gimple_debug_bind_p (debugstmt))
5643 continue;
5644
5645 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5646 SET_USE (use_p, vexpr);
5647
5648 update_stmt (debugstmt);
5649 }
5650 }
5651 }
5652 set_curr_insn_location (sloc);
5653 }
5654
5655 currently_expanding_gimple_stmt = stmt;
5656
5657 /* Expand this statement, then evaluate the resulting RTL and
5658 fixup the CFG accordingly. */
5659 if (gimple_code (stmt) == GIMPLE_COND)
5660 {
5661 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5662 if (new_bb)
5663 return new_bb;
5664 }
5665 else if (is_gimple_debug (stmt))
5666 {
5667 location_t sloc = curr_insn_location ();
5668 gimple_stmt_iterator nsi = gsi;
5669
5670 for (;;)
5671 {
5672 tree var;
5673 tree value = NULL_TREE;
5674 rtx val = NULL_RTX;
5675 machine_mode mode;
5676
5677 if (!gimple_debug_nonbind_marker_p (stmt))
5678 {
5679 if (gimple_debug_bind_p (stmt))
5680 {
5681 var = gimple_debug_bind_get_var (stmt);
5682
5683 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5684 && TREE_CODE (var) != LABEL_DECL
5685 && !target_for_debug_bind (var))
5686 goto delink_debug_stmt;
5687
5688 if (DECL_P (var))
5689 mode = DECL_MODE (var);
5690 else
5691 mode = TYPE_MODE (TREE_TYPE (var));
5692
5693 if (gimple_debug_bind_has_value_p (stmt))
5694 value = gimple_debug_bind_get_value (stmt);
5695
5696 val = gen_rtx_VAR_LOCATION
5697 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5698 }
5699 else if (gimple_debug_source_bind_p (stmt))
5700 {
5701 var = gimple_debug_source_bind_get_var (stmt);
5702
5703 value = gimple_debug_source_bind_get_value (stmt);
5704
5705 mode = DECL_MODE (var);
5706
5707 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5708 VAR_INIT_STATUS_UNINITIALIZED);
5709 }
5710 else
5711 gcc_unreachable ();
5712 }
5713 /* If this function was first compiled with markers
5714 enabled, but they're now disabled (e.g. LTO), drop
5715 them on the floor. */
5716 else if (gimple_debug_nonbind_marker_p (stmt)
5717 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5718 goto delink_debug_stmt;
5719 else if (gimple_debug_begin_stmt_p (stmt))
5720 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5721 else
5722 gcc_unreachable ();
5723
5724 last = get_last_insn ();
5725
5726 set_curr_insn_location (gimple_location (stmt));
5727
5728 emit_debug_insn (val);
5729
5730 if (dump_file && (dump_flags & TDF_DETAILS))
5731 {
5732 /* We can't dump the insn with a TREE where an RTX
5733 is expected. */
5734 if (GET_CODE (val) == VAR_LOCATION)
5735 {
5736 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5737 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5738 }
5739 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5740 if (GET_CODE (val) == VAR_LOCATION)
5741 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5742 }
5743
5744 delink_debug_stmt:
5745 /* In order not to generate too many debug temporaries,
5746 we delink all uses of debug statements we already expanded.
5747 Therefore debug statements between the definition and the real
5748 use of TERed SSA names will continue to use the SSA name,
5749 rather than being replaced with debug temps. */
5750 delink_stmt_imm_use (stmt);
5751
5752 gsi = nsi;
5753 gsi_next (&nsi);
5754 if (gsi_end_p (nsi))
5755 break;
5756 stmt = gsi_stmt (nsi);
5757 if (!is_gimple_debug (stmt))
5758 break;
5759 }
5760
5761 set_curr_insn_location (sloc);
5762 }
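/* Note that VALUE above may still be a GIMPLE tree at this point: it
   is smuggled into the VAR_LOCATION through the (rtx) cast and only
   turned into real RTL later, by expand_debug_locations, once all
   statements have been expanded (which is also why the dumping code
   above temporarily replaces it with const0_rtx).  */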
5763 else
5764 {
5765 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5766 if (call_stmt
5767 && gimple_call_tail_p (call_stmt)
5768 && disable_tail_calls)
5769 gimple_call_set_tail (call_stmt, false);
5770
5771 if (call_stmt && gimple_call_tail_p (call_stmt))
5772 {
5773 bool can_fallthru;
5774 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5775 if (new_bb)
5776 {
5777 if (can_fallthru)
5778 bb = new_bb;
5779 else
5780 return new_bb;
5781 }
5782 }
5783 else
5784 {
5785 def_operand_p def_p;
5786 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5787
5788 if (def_p != NULL)
5789 {
5790 /* Ignore this stmt if it is in the list of
5791 replaceable expressions. */
5792 if (SA.values
5793 && bitmap_bit_p (SA.values,
5794 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5795 continue;
5796 }
5797 last = expand_gimple_stmt (stmt);
5798 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5799 }
5800 }
5801 }
5802
5803 currently_expanding_gimple_stmt = NULL;
5804
5805 /* Expand implicit goto and convert goto_locus. */
5806 FOR_EACH_EDGE (e, ei, bb->succs)
5807 {
5808 if (e->goto_locus != UNKNOWN_LOCATION)
5809 set_curr_insn_location (e->goto_locus);
5810 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5811 {
5812 emit_jump (label_rtx_for_bb (e->dest));
5813 e->flags &= ~EDGE_FALLTHRU;
5814 }
5815 }
5816
5817 /* Expanded RTL can create a jump as the last instruction of a block.
5818 That jump might later be assumed to be a jump to the successor and
5819 break edge insertion. Insert a dummy move to prevent this. PR41440. */
5820 if (single_succ_p (bb)
5821 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5822 && (last = get_last_insn ())
5823 && (JUMP_P (last)
5824 || (DEBUG_INSN_P (last)
5825 && JUMP_P (prev_nondebug_insn (last)))))
5826 {
5827 rtx dummy = gen_reg_rtx (SImode);
5828 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5829 }
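/* The dummy move is deliberately a no-op: something like

     (set (reg:SI N) (reg:SI N))

   appended after the trailing jump, so the block no longer ends in a
   JUMP_INSN and later edge-insertion code does not mistake that jump
   for the block's control transfer.  (Hypothetical RTL shape; the
   pseudo register number is arbitrary.)  */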
5830
5831 do_pending_stack_adjust ();
5832
5833 /* Find the block tail. The last insn in the block is the insn
5834 before a barrier and/or table jump insn. */
5835 last = get_last_insn ();
5836 if (BARRIER_P (last))
5837 last = PREV_INSN (last);
5838 if (JUMP_TABLE_DATA_P (last))
5839 last = PREV_INSN (PREV_INSN (last));
5840 BB_END (bb) = last;
5841
5842 update_bb_for_insn (bb);
5843
5844 return bb;
5845 }
5846
5847
5848 /* Create a basic block for initialization code. */
5849
5850 static basic_block
5851 construct_init_block (void)
5852 {
5853 basic_block init_block, first_block;
5854 edge e = NULL;
5855 int flags;
5856
5857 /* Multiple entry points not supported yet. */
5858 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5859 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5860 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5861 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5862 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5863
5864 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5865
5866 /* When the entry edge points to the first basic block, we don't need
5867 a jump; otherwise we have to jump to the proper target. */
5868 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5869 {
5870 tree label = gimple_block_label (e->dest);
5871
5872 emit_jump (jump_target_rtx (label));
5873 flags = 0;
5874 }
5875 else
5876 flags = EDGE_FALLTHRU;
5877
5878 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5879 get_last_insn (),
5880 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5881 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5882 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5883 if (e)
5884 {
5885 first_block = e->dest;
5886 redirect_edge_succ (e, init_block);
5887 e = make_single_succ_edge (init_block, first_block, flags);
5888 }
5889 else
5890 e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5891 EDGE_FALLTHRU);
5892
5893 update_bb_for_insn (init_block);
5894 return init_block;
5895 }
5896
5897 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5898 found in the block tree. */
5899
5900 static void
5901 set_block_levels (tree block, int level)
5902 {
5903 while (block)
5904 {
5905 BLOCK_NUMBER (block) = level;
5906 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5907 block = BLOCK_CHAIN (block);
5908 }
5909 }
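/* For instance (hypothetical nesting):

     f's DECL_INITIAL block          -> level 0
       { int a; ... }                -> level 1
         { int b; ... }              -> level 2
       { int c; ... }                -> level 1

   Each BLOCK_SUBBLOCKS step adds one, while BLOCK_CHAIN siblings share
   a level, so change_scope can compare levels to locate the common
   parent of two scopes quickly.  */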
5910
5911 /* Create a block containing landing pads and similar stuff. */
5912
5913 static void
5914 construct_exit_block (void)
5915 {
5916 rtx_insn *head = get_last_insn ();
5917 rtx_insn *end;
5918 basic_block exit_block;
5919 edge e, e2;
5920 unsigned ix;
5921 edge_iterator ei;
5922 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5923 rtx_insn *orig_end = BB_END (prev_bb);
5924
5925 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5926
5927 /* Make sure the locus is set to the end of the function, so that
5928 epilogue line numbers and warnings are set properly. */
5929 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5930 input_location = cfun->function_end_locus;
5931
5932 /* Generate rtl for function exit. */
5933 expand_function_end ();
5934
5935 end = get_last_insn ();
5936 if (head == end)
5937 return;
5938 /* While emitting the function end we could have moved the end of the
5939 last basic block. */
5940 BB_END (prev_bb) = orig_end;
5941 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5942 head = NEXT_INSN (head);
5943 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5944 bb insn counting will be confused. Any instructions before that
5945 label are emitted for the case where PREV_BB falls through into the
5946 exit block, so append those instructions to prev_bb in that case. */
5947 if (NEXT_INSN (head) != return_label)
5948 {
5949 while (NEXT_INSN (head) != return_label)
5950 {
5951 if (!NOTE_P (NEXT_INSN (head)))
5952 BB_END (prev_bb) = NEXT_INSN (head);
5953 head = NEXT_INSN (head);
5954 }
5955 }
5956 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5957 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5958 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5959
5960 ix = 0;
5961 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5962 {
5963 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5964 if (!(e->flags & EDGE_ABNORMAL))
5965 redirect_edge_succ (e, exit_block);
5966 else
5967 ix++;
5968 }
5969
5970 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5971 EDGE_FALLTHRU);
5972 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5973 if (e2 != e)
5974 {
5975 exit_block->count -= e2->count ();
5976 }
5977 update_bb_for_insn (exit_block);
5978 }
5979
5980 /* Helper function for discover_nonconstant_array_refs.
5981 Look for ARRAY_REF nodes with non-constant indexes and mark the
5982 underlying base decls addressable. */
5983
5984 static tree
5985 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5986 void *data ATTRIBUTE_UNUSED)
5987 {
5988 tree t = *tp;
5989
5990 if (IS_TYPE_OR_DECL_P (t))
5991 *walk_subtrees = 0;
5992 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5993 {
5994 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5995 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5996 && (!TREE_OPERAND (t, 2)
5997 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5998 || (TREE_CODE (t) == COMPONENT_REF
5999 && (!TREE_OPERAND (t,2)
6000 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6001 || TREE_CODE (t) == BIT_FIELD_REF
6002 || TREE_CODE (t) == REALPART_EXPR
6003 || TREE_CODE (t) == IMAGPART_EXPR
6004 || TREE_CODE (t) == VIEW_CONVERT_EXPR
6005 || CONVERT_EXPR_P (t))
6006 t = TREE_OPERAND (t, 0);
6007
6008 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6009 {
6010 t = get_base_address (t);
6011 if (t && DECL_P (t)
6012 && DECL_MODE (t) != BLKmode)
6013 TREE_ADDRESSABLE (t) = 1;
6014 }
6015
6016 *walk_subtrees = 0;
6017 }
6018
6019 return NULL_TREE;
6020 }
6021
6022 /* RTL expansion is not able to compile array references with variable
6023 offsets for arrays stored in a single register. Discover such
6024 expressions and mark the variables as addressable to avoid this
6025 scenario. */
6026
6027 static void
6028 discover_nonconstant_array_refs (void)
6029 {
6030 basic_block bb;
6031 gimple_stmt_iterator gsi;
6032
6033 FOR_EACH_BB_FN (bb, cfun)
6034 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6035 {
6036 gimple *stmt = gsi_stmt (gsi);
6037 if (!is_gimple_debug (stmt))
6038 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6039 }
6040 }
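/* A minimal (hypothetical) C example that needs this marking:

     typedef int v4si __attribute__ ((vector_size (16)));

     int
     f (v4si v, int i)
     {
       return v[i];   // ARRAY_REF with a non-constant index
     }

   V has the non-BLKmode vector mode V4SImode and could otherwise live
   in a single register, which cannot be indexed at runtime; marking it
   TREE_ADDRESSABLE forces it into memory so the variable offset can be
   applied to an address instead.  */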
6041
6042 /* This function sets crtl->args.internal_arg_pointer to a virtual
6043 register if DRAP is needed. The local register allocator will replace
6044 virtual_incoming_args_rtx with the virtual register. */
6045
6046 static void
6047 expand_stack_alignment (void)
6048 {
6049 rtx drap_rtx;
6050 unsigned int preferred_stack_boundary;
6051
6052 if (! SUPPORTS_STACK_ALIGNMENT)
6053 return;
6054
6055 if (cfun->calls_alloca
6056 || cfun->has_nonlocal_label
6057 || crtl->has_nonlocal_goto)
6058 crtl->need_drap = true;
6059
6060 /* Call update_stack_boundary here again to update incoming stack
6061 boundary. It may set incoming stack alignment to a different
6062 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6063 use the minimum incoming stack alignment to check if it is OK
6064 to perform sibcall optimization since sibcall optimization will
6065 only align the outgoing stack to incoming stack boundary. */
6066 if (targetm.calls.update_stack_boundary)
6067 targetm.calls.update_stack_boundary ();
6068
6069 /* The incoming stack frame has to be aligned at least at
6070 parm_stack_boundary. */
6071 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6072
6073 /* Update crtl->stack_alignment_estimated and use it later to align
6074 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6075 exceptions since callgraph doesn't collect incoming stack alignment
6076 in this case. */
6077 if (cfun->can_throw_non_call_exceptions
6078 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6079 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6080 else
6081 preferred_stack_boundary = crtl->preferred_stack_boundary;
6082 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6083 crtl->stack_alignment_estimated = preferred_stack_boundary;
6084 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6085 crtl->stack_alignment_needed = preferred_stack_boundary;
6086
6087 gcc_assert (crtl->stack_alignment_needed
6088 <= crtl->stack_alignment_estimated);
6089
6090 crtl->stack_realign_needed
6091 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6092 crtl->stack_realign_tried = crtl->stack_realign_needed;
6093
6094 crtl->stack_realign_processed = true;
6095
6096 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6097 alignment. */
6098 gcc_assert (targetm.calls.get_drap_rtx != NULL);
6099 drap_rtx = targetm.calls.get_drap_rtx ();
6100
6101 /* stack_realign_drap and drap_rtx must match. */
6102 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6103
6104 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6105 if (drap_rtx != NULL)
6106 {
6107 crtl->args.internal_arg_pointer = drap_rtx;
6108
6109 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6110 needed. */
6111 fixup_tail_calls ();
6112 }
6113 }
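/* As a hedged example of when a DRAP register materializes: on x86, a
   function that both calls alloca and contains a local requiring more
   alignment than INCOMING_STACK_BOUNDARY, e.g.

     int g (int n)
     {
       _Alignas (32) int buf[8];
       char *p = alloca (n);
       ...
     }

   must realign its stack, and because the frame size is dynamic the
   incoming arguments can no longer be addressed from the realigned
   stack pointer; the target then returns a non-NULL drap_rtx above and
   argument accesses are rebased on it.  */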
6114 \f
6115
6116 static void
6117 expand_main_function (void)
6118 {
6119 #if (defined(INVOKE__main) \
6120 || (!defined(HAS_INIT_SECTION) \
6121 && !defined(INIT_SECTION_ASM_OP) \
6122 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6123 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6124 #endif
6125 }
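/* On targets taking the branch above, the effect is as if main began
   with the (hypothetical) C equivalent

     extern void __main (void);

     int
     main (void)
     {
       __main ();   // run static constructors first
       ...
     }

   with __main supplied by libgcc; targets with .init/.init_array
   sections run constructors there instead and emit no call.  */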
6126 \f
6127
6128 /* Expand code to initialize the stack_protect_guard. This is invoked at
6129 the beginning of a function to be protected. */
6130
6131 static void
6132 stack_protect_prologue (void)
6133 {
6134 tree guard_decl = targetm.stack_protect_guard ();
6135 rtx x, y;
6136
6137 x = expand_normal (crtl->stack_protect_guard);
6138 if (guard_decl)
6139 y = expand_normal (guard_decl);
6140 else
6141 y = const0_rtx;
6142
6143 /* Allow the target to copy from Y to X without leaking Y into a
6144 register. */
6145 if (targetm.have_stack_protect_set ())
6146 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6147 {
6148 emit_insn (insn);
6149 return;
6150 }
6151
6152 /* Otherwise do a straight move. */
6153 emit_move_insn (x, y);
6154 }
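/* The emitted prologue code behaves like the (hypothetical) C fragment

     frame_slot_X = __stack_chk_guard;   // Y copied into the frame

   where X is the per-frame canary slot and Y the global (or TLS)
   guard; a target stack_protect_set pattern lets the copy happen
   without the guard value surviving in a scratch register where it
   could later be spilled or leaked.  */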
6155
6156 /* Translate the intermediate representation contained in the CFG
6157 from GIMPLE trees to RTL.
6158
6159 We do the conversion per basic block and preserve/update the tree CFG.
6160 This implies we have to do some magic as the CFG can simultaneously
6161 consist of basic blocks containing RTL and GIMPLE trees. This can
6162 confuse the CFG hooks, so be careful not to manipulate the CFG during
6163 the expansion. */
6164
6165 namespace {
6166
6167 const pass_data pass_data_expand =
6168 {
6169 RTL_PASS, /* type */
6170 "expand", /* name */
6171 OPTGROUP_NONE, /* optinfo_flags */
6172 TV_EXPAND, /* tv_id */
6173 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6174 | PROP_gimple_lcx
6175 | PROP_gimple_lvec
6176 | PROP_gimple_lva), /* properties_required */
6177 PROP_rtl, /* properties_provided */
6178 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6179 0, /* todo_flags_start */
6180 0, /* todo_flags_finish */
6181 };
6182
6183 class pass_expand : public rtl_opt_pass
6184 {
6185 public:
6186 pass_expand (gcc::context *ctxt)
6187 : rtl_opt_pass (pass_data_expand, ctxt)
6188 {}
6189
6190 /* opt_pass methods: */
6191 virtual unsigned int execute (function *);
6192
6193 }; // class pass_expand
6194
6195 unsigned int
6196 pass_expand::execute (function *fun)
6197 {
6198 basic_block bb, init_block;
6199 edge_iterator ei;
6200 edge e;
6201 rtx_insn *var_seq, *var_ret_seq;
6202 unsigned i;
6203
6204 timevar_push (TV_OUT_OF_SSA);
6205 rewrite_out_of_ssa (&SA);
6206 timevar_pop (TV_OUT_OF_SSA);
6207 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6208
6209 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6210 {
6211 gimple_stmt_iterator gsi;
6212 FOR_EACH_BB_FN (bb, cfun)
6213 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6214 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6215 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6216 }
6217
6218 /* Make sure all values used by the optimization passes have sane
6219 defaults. */
6220 reg_renumber = 0;
6221
6222 /* Some backends want to know that we are expanding to RTL. */
6223 currently_expanding_to_rtl = 1;
6224 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6225 free_dominance_info (CDI_DOMINATORS);
6226
6227 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6228
6229 if (chkp_function_instrumented_p (current_function_decl))
6230 chkp_reset_rtl_bounds ();
6231
6232 insn_locations_init ();
6233 if (!DECL_IS_BUILTIN (current_function_decl))
6234 {
6235 /* Eventually, all FEs should explicitly set function_start_locus. */
6236 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6237 set_curr_insn_location
6238 (DECL_SOURCE_LOCATION (current_function_decl));
6239 else
6240 set_curr_insn_location (fun->function_start_locus);
6241 }
6242 else
6243 set_curr_insn_location (UNKNOWN_LOCATION);
6244 prologue_location = curr_insn_location ();
6245
6246 #ifdef INSN_SCHEDULING
6247 init_sched_attrs ();
6248 #endif
6249
6250 /* Make sure the first insn is a note, even if we don't want line
6251 numbers. This makes sure the first insn will never be deleted.
6252 Also, final expects a note to appear there. */
6253 emit_note (NOTE_INSN_DELETED);
6254
6255 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6256 discover_nonconstant_array_refs ();
6257
6258 targetm.expand_to_rtl_hook ();
6259 crtl->init_stack_alignment ();
6260 fun->cfg->max_jumptable_ents = 0;
6261
6262 /* Resolve the function section. Some targets, like ARM EABI, rely on
6263 knowledge of the function section at expansion time to predict the distance of calls. */
6264 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6265
6266 /* Expand the variables recorded during gimple lowering. */
6267 timevar_push (TV_VAR_EXPAND);
6268 start_sequence ();
6269
6270 var_ret_seq = expand_used_vars ();
6271
6272 var_seq = get_insns ();
6273 end_sequence ();
6274 timevar_pop (TV_VAR_EXPAND);
6275
6276 /* Honor stack protection warnings. */
6277 if (warn_stack_protect)
6278 {
6279 if (fun->calls_alloca)
6280 warning (OPT_Wstack_protector,
6281 "stack protector not protecting local variables: "
6282 "variable length buffer");
6283 if (has_short_buffer && !crtl->stack_protect_guard)
6284 warning (OPT_Wstack_protector,
6285 "stack protector not protecting function: "
6286 "all local arrays are less than %d bytes long",
6287 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6288 }
6289
6290 /* Set up parameters and prepare for return, for the function. */
6291 expand_function_start (current_function_decl);
6292
6293 /* If we emitted any instructions for setting up the variables,
6294 emit them before the NOTE_INSN_FUNCTION_BEG note. */
6295 if (var_seq)
6296 {
6297 emit_insn_before (var_seq, parm_birth_insn);
6298
6299 /* In expand_function_end we'll insert the alloca save/restore
6300 before parm_birth_insn. We've just inserted an alloca call.
6301 Adjust the pointer to match. */
6302 parm_birth_insn = var_seq;
6303 }
6304
6305 /* Now propagate the RTL assignment of each partition to the
6306 underlying var of each SSA_NAME. */
6307 tree name;
6308
6309 FOR_EACH_SSA_NAME (i, name, cfun)
6310 {
6311 /* We might have generated new SSA names in
6312 update_alias_info_with_stack_vars. They will have a NULL
6313 defining statement, and won't be part of the partitioning,
6314 so ignore those. */
6315 if (!SSA_NAME_DEF_STMT (name))
6316 continue;
6317
6318 adjust_one_expanded_partition_var (name);
6319 }
6320
6321 /* Clean up RTL of variables that straddle across multiple
6322 partitions, and check that the rtl of any PARM_DECLs that are not
6323 cleaned up is that of their default defs. */
6324 FOR_EACH_SSA_NAME (i, name, cfun)
6325 {
6326 int part;
6327
6328 /* We might have generated new SSA names in
6329 update_alias_info_with_stack_vars. They will have a NULL
6330 defining statement, and won't be part of the partitioning,
6331 so ignore those. */
6332 if (!SSA_NAME_DEF_STMT (name))
6333 continue;
6334 part = var_to_partition (SA.map, name);
6335 if (part == NO_PARTITION)
6336 continue;
6337
6338 /* If this decl was marked as living in multiple places, reset
6339 this now to NULL. */
6340 tree var = SSA_NAME_VAR (name);
6341 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6342 SET_DECL_RTL (var, NULL);
6343 /* Check that the pseudos chosen by assign_parms are those of
6344 the corresponding default defs. */
6345 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6346 && (TREE_CODE (var) == PARM_DECL
6347 || TREE_CODE (var) == RESULT_DECL))
6348 {
6349 rtx in = DECL_RTL_IF_SET (var);
6350 gcc_assert (in);
6351 rtx out = SA.partition_to_pseudo[part];
6352 gcc_assert (in == out);
6353
6354 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6355 those expected by debug backends for each parm and for
6356 the result. This is particularly important for stabs,
6357 whose register elimination from parm's DECL_RTL may cause
6358 -fcompare-debug differences as SET_DECL_RTL changes reg's
6359 attrs. So, make sure the RTL already has the parm as the
6360 EXPR, so that it won't change. */
6361 SET_DECL_RTL (var, NULL_RTX);
6362 if (MEM_P (in))
6363 set_mem_attributes (in, var, true);
6364 SET_DECL_RTL (var, in);
6365 }
6366 }
6367
6368 /* If this function is `main', emit a call to `__main'
6369 to run global initializers, etc. */
6370 if (DECL_NAME (current_function_decl)
6371 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6372 && DECL_FILE_SCOPE_P (current_function_decl))
6373 expand_main_function ();
6374
6375 /* Initialize the stack_protect_guard field. This must happen after the
6376 call to __main (if any) so that the external decl is initialized. */
6377 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6378 stack_protect_prologue ();
6379
6380 expand_phi_nodes (&SA);
6381
6382 /* Release any stale SSA redirection data. */
6383 redirect_edge_var_map_empty ();
6384
6385 /* Register rtl specific functions for cfg. */
6386 rtl_register_cfg_hooks ();
6387
6388 init_block = construct_init_block ();
6389
6390 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
6391 remaining edges later. */
6392 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6393 e->flags &= ~EDGE_EXECUTABLE;
6394
6395 /* If the function has too many markers, drop them while expanding. */
6396 if (cfun->debug_marker_count
6397 >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6398 cfun->debug_nonbind_markers = false;
6399
6400 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6401 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6402 next_bb)
6403 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6404
6405 if (MAY_HAVE_DEBUG_BIND_INSNS)
6406 expand_debug_locations ();
6407
6408 if (deep_ter_debug_map)
6409 {
6410 delete deep_ter_debug_map;
6411 deep_ter_debug_map = NULL;
6412 }
6413
6414 /* Free stuff we no longer need after GIMPLE optimizations. */
6415 free_dominance_info (CDI_DOMINATORS);
6416 free_dominance_info (CDI_POST_DOMINATORS);
6417 delete_tree_cfg_annotations (fun);
6418
6419 timevar_push (TV_OUT_OF_SSA);
6420 finish_out_of_ssa (&SA);
6421 timevar_pop (TV_OUT_OF_SSA);
6422
6423 timevar_push (TV_POST_EXPAND);
6424 /* We are no longer in SSA form. */
6425 fun->gimple_df->in_ssa_p = false;
6426 loops_state_clear (LOOP_CLOSED_SSA);
6427
6428 /* Expansion is also used by optimization passes; set maybe_hot_insn_p
6429 conservatively to true until they are all profile-aware. */
6430 delete lab_rtx_for_bb;
6431 free_histograms (fun);
6432
6433 construct_exit_block ();
6434 insn_locations_finalize ();
6435
6436 if (var_ret_seq)
6437 {
6438 rtx_insn *after = return_label;
6439 rtx_insn *next = NEXT_INSN (after);
6440 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6441 after = next;
6442 emit_insn_after (var_ret_seq, after);
6443 }
6444
6445 /* Zap the tree EH table. */
6446 set_eh_throw_stmt_table (fun, NULL);
6447
6448 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6449 to split edges, which edge insertion might do. */
6450 rebuild_jump_labels (get_insns ());
6451
6452 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6453 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6454 {
6455 edge e;
6456 edge_iterator ei;
6457 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6458 {
6459 if (e->insns.r)
6460 {
6461 rebuild_jump_labels_chain (e->insns.r);
6462 /* Put insns after parm birth, but before
6463 NOTE_INSNS_FUNCTION_BEG. */
6464 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6465 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6466 {
6467 rtx_insn *insns = e->insns.r;
6468 e->insns.r = NULL;
6469 if (NOTE_P (parm_birth_insn)
6470 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6471 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6472 else
6473 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6474 }
6475 else
6476 commit_one_edge_insertion (e);
6477 }
6478 else
6479 ei_next (&ei);
6480 }
6481 }
6482
6483 /* We're done expanding trees to RTL. */
6484 currently_expanding_to_rtl = 0;
6485
6486 flush_mark_addressable_queue ();
6487
6488 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6489 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6490 {
6491 edge e;
6492 edge_iterator ei;
6493 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6494 {
6495 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6496 e->flags &= ~EDGE_EXECUTABLE;
6497
6498 /* At the moment not all abnormal edges match the RTL
6499 representation. It is safe to remove them here as
6500 find_many_sub_basic_blocks will rediscover them.
6501 In the future we should get this fixed properly. */
6502 if ((e->flags & EDGE_ABNORMAL)
6503 && !(e->flags & EDGE_SIBCALL))
6504 remove_edge (e);
6505 else
6506 ei_next (&ei);
6507 }
6508 }
6509
6510 auto_sbitmap blocks (last_basic_block_for_fn (fun));
6511 bitmap_ones (blocks);
6512 find_many_sub_basic_blocks (blocks);
6513 purge_all_dead_edges ();
6514
6515 expand_stack_alignment ();
6516
6517 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6518 function. */
6519 if (crtl->tail_call_emit)
6520 fixup_tail_calls ();
6521
6522 /* After initial rtl generation, call back to finish generating
6523 exception support code. We need to do this before cleaning up
6524 the CFG as the code does not expect dead landing pads. */
6525 if (fun->eh->region_tree != NULL)
6526 finish_eh_generation ();
6527
6528 /* BB subdivision may have created basic blocks that are only reachable
6529 from unlikely bbs but not marked as such in the profile. */
6530 if (optimize)
6531 propagate_unlikely_bbs_forward ();
6532
6533 /* Remove unreachable blocks, otherwise we cannot compute dominators
6534 which are needed for loop state verification. As a side-effect
6535 this also compacts blocks.
6536 ??? We cannot remove trivially dead insns here as for example
6537 the DRAP reg on i?86 is not magically live at this point.
6538 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6539 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6540
6541 checking_verify_flow_info ();
6542
6543 /* Initialize pseudos allocated for hard registers. */
6544 emit_initial_value_sets ();
6545
6546 /* And finally unshare all RTL. */
6547 unshare_all_rtl ();
6548
6549 /* There's no need to defer outputting this function any more; we
6550 know we want to output it. */
6551 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6552
6553 /* Now that we're done expanding trees to RTL, we shouldn't have any
6554 more CONCATs anywhere. */
6555 generating_concat_p = 0;
6556
6557 if (dump_file)
6558 {
6559 fprintf (dump_file,
6560 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6561 /* And the pass manager will dump RTL for us. */
6562 }
6563
6564 /* If we're emitting a nested function, make sure its parent gets
6565 emitted as well. Doing otherwise confuses debug info. */
6566 {
6567 tree parent;
6568 for (parent = DECL_CONTEXT (current_function_decl);
6569 parent != NULL_TREE;
6570 parent = get_containing_scope (parent))
6571 if (TREE_CODE (parent) == FUNCTION_DECL)
6572 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6573 }
6574
6575 TREE_ASM_WRITTEN (current_function_decl) = 1;
6576
6577 /* After expanding, the return labels are no longer needed. */
6578 return_label = NULL;
6579 naked_return_label = NULL;
6580
6581 /* After expanding, the tm_restart map is no longer needed. */
6582 if (fun->gimple_df->tm_restart)
6583 fun->gimple_df->tm_restart = NULL;
6584
6585 /* Tag the blocks with a depth number so that change_scope can find
6586 the common parent easily. */
6587 set_block_levels (DECL_INITIAL (fun->decl), 0);
6588 default_rtl_profile ();
6589
6590 /* For -dx discard loops now, otherwise IL verify in clean_state will
6591 ICE. */
6592 if (rtl_dump_and_exit)
6593 {
6594 cfun->curr_properties &= ~PROP_loops;
6595 loop_optimizer_finalize ();
6596 }
6597
6598 timevar_pop (TV_POST_EXPAND);
6599
6600 return 0;
6601 }
6602
6603 } // anon namespace
6604
6605 rtl_opt_pass *
6606 make_pass_expand (gcc::context *ctxt)
6607 {
6608 return new pass_expand (ctxt);
6609 }
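/* For reference: the pass is instantiated from gcc/passes.def, roughly
   as in this (abridged) fragment,

     NEXT_PASS (pass_expand);

   which places it at the boundary where the GIMPLE pipeline ends and
   the RTL pipeline begins.  */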