1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber. */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING. */
71 #include "stringpool.h"
72 #include "attribs.h"
73 #include "asan.h"
74 #include "tree-ssa-address.h"
75 #include "output.h"
76 #include "builtins.h"
77 #include "tree-chkp.h"
78 #include "rtl-chkp.h"
79
80 /* Some systems use __main in a way incompatible with its use in gcc; in these
81 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82 give the same symbol without quotes for an alternative entry point. You
83 must define both, or neither. */
84 #ifndef NAME__MAIN
85 #define NAME__MAIN "__main"
86 #endif
87
88 /* This variable holds information helping the rewriting of SSA trees
89 into RTL. */
90 struct ssaexpand SA;
91
92 /* This variable holds the currently expanded gimple statement for purposes
93 of communicating the profile info to the builtin expanders. */
94 gimple *currently_expanding_gimple_stmt;
95
96 static rtx expand_debug_expr (tree);
97
98 static bool defer_stack_allocation (tree, bool);
99
100 static void record_alignment_for_reg_var (unsigned int);
101
102 /* Return an expression tree corresponding to the RHS of GIMPLE
103 statement STMT. */
104
105 tree
106 gimple_assign_rhs_to_tree (gimple *stmt)
107 {
108 tree t;
109 enum gimple_rhs_class grhs_class;
110
111 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
112
113 if (grhs_class == GIMPLE_TERNARY_RHS)
114 t = build3 (gimple_assign_rhs_code (stmt),
115 TREE_TYPE (gimple_assign_lhs (stmt)),
116 gimple_assign_rhs1 (stmt),
117 gimple_assign_rhs2 (stmt),
118 gimple_assign_rhs3 (stmt));
119 else if (grhs_class == GIMPLE_BINARY_RHS)
120 t = build2 (gimple_assign_rhs_code (stmt),
121 TREE_TYPE (gimple_assign_lhs (stmt)),
122 gimple_assign_rhs1 (stmt),
123 gimple_assign_rhs2 (stmt));
124 else if (grhs_class == GIMPLE_UNARY_RHS)
125 t = build1 (gimple_assign_rhs_code (stmt),
126 TREE_TYPE (gimple_assign_lhs (stmt)),
127 gimple_assign_rhs1 (stmt));
128 else if (grhs_class == GIMPLE_SINGLE_RHS)
129 {
130 t = gimple_assign_rhs1 (stmt);
131 /* Avoid modifying this tree in place below. */
132 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
133 && gimple_location (stmt) != EXPR_LOCATION (t))
134 || (gimple_block (stmt)
135 && currently_expanding_to_rtl
136 && EXPR_P (t)))
137 t = copy_node (t);
138 }
139 else
140 gcc_unreachable ();
141
142 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
143 SET_EXPR_LOCATION (t, gimple_location (stmt));
144
145 return t;
146 }
147
148
149 #ifndef STACK_ALIGNMENT_NEEDED
150 #define STACK_ALIGNMENT_NEEDED 1
151 #endif
152
153 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
154
155 /* Choose either CUR or NEXT as the leader DECL for a partition.
156 Prefer ignored decls, to simplify debug dumps and reduce the ambiguity
157 arising from the same user variable being in multiple partitions (this is
158 less likely for compiler-introduced temps). */
159
160 static tree
161 leader_merge (tree cur, tree next)
162 {
163 if (cur == NULL || cur == next)
164 return next;
165
166 if (DECL_P (cur) && DECL_IGNORED_P (cur))
167 return cur;
168
169 if (DECL_P (next) && DECL_IGNORED_P (next))
170 return next;
171
172 return cur;
173 }
174
175 /* Associate declaration T with storage space X. If T is not an
176 SSA name this is exactly SET_DECL_RTL; otherwise associate the
177 partition of T with X. */
178 static inline void
179 set_rtl (tree t, rtx x)
180 {
181 gcc_checking_assert (!x
182 || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
183 || (use_register_for_decl (t)
184 ? (REG_P (x)
185 || (GET_CODE (x) == CONCAT
186 && (REG_P (XEXP (x, 0))
187 || SUBREG_P (XEXP (x, 0)))
188 && (REG_P (XEXP (x, 1))
189 || SUBREG_P (XEXP (x, 1))))
190 /* We need to accept PARALLELs for RESULT_DECLs
191 because of vector types with BLKmode returned
192 in multiple registers, but they are supposed
193 to be uncoalesced. */
194 || (GET_CODE (x) == PARALLEL
195 && SSAVAR (t)
196 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
197 && (GET_MODE (x) == BLKmode
198 || !flag_tree_coalesce_vars)))
199 : (MEM_P (x) || x == pc_rtx
200 || (GET_CODE (x) == CONCAT
201 && MEM_P (XEXP (x, 0))
202 && MEM_P (XEXP (x, 1))))));
203 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
204 RESULT_DECLs has the expected mode. For memory, we accept
205 unpromoted modes, since that's what we're likely to get. For
206 PARM_DECLs and RESULT_DECLs, we'll have been called by
207 set_parm_rtl, which will give us the default def, so we don't
208 have to compute it ourselves. For RESULT_DECLs, we accept mode
209 mismatches too, as long as we have BLKmode or are not coalescing
210 across variables, so that we don't reject BLKmode PARALLELs or
211 unpromoted REGs. */
212 gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
213 || (SSAVAR (t)
214 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
215 && (promote_ssa_mode (t, NULL) == BLKmode
216 || !flag_tree_coalesce_vars))
217 || !use_register_for_decl (t)
218 || GET_MODE (x) == promote_ssa_mode (t, NULL));
219
220 if (x)
221 {
222 bool skip = false;
223 tree cur = NULL_TREE;
224 rtx xm = x;
225
226 retry:
227 if (MEM_P (xm))
228 cur = MEM_EXPR (xm);
229 else if (REG_P (xm))
230 cur = REG_EXPR (xm);
231 else if (SUBREG_P (xm))
232 {
233 gcc_assert (subreg_lowpart_p (xm));
234 xm = SUBREG_REG (xm);
235 goto retry;
236 }
237 else if (GET_CODE (xm) == CONCAT)
238 {
239 xm = XEXP (xm, 0);
240 goto retry;
241 }
242 else if (GET_CODE (xm) == PARALLEL)
243 {
244 xm = XVECEXP (xm, 0, 0);
245 gcc_assert (GET_CODE (xm) == EXPR_LIST);
246 xm = XEXP (xm, 0);
247 goto retry;
248 }
249 else if (xm == pc_rtx)
250 skip = true;
251 else
252 gcc_unreachable ();
253
254 tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
255
256 if (cur != next)
257 {
258 if (MEM_P (x))
259 set_mem_attributes (x,
260 next && TREE_CODE (next) == SSA_NAME
261 ? TREE_TYPE (next)
262 : next, true);
263 else
264 set_reg_attrs_for_decl_rtl (next, x);
265 }
266 }
267
268 if (TREE_CODE (t) == SSA_NAME)
269 {
270 int part = var_to_partition (SA.map, t);
271 if (part != NO_PARTITION)
272 {
273 if (SA.partition_to_pseudo[part])
274 gcc_assert (SA.partition_to_pseudo[part] == x);
275 else if (x != pc_rtx)
276 SA.partition_to_pseudo[part] = x;
277 }
278 /* For the benefit of debug information at -O0 (where
279 vartracking doesn't run) record the place also in the base
280 DECL. For PARMs and RESULTs, do so only when setting the
281 default def. */
282 if (x && x != pc_rtx && SSA_NAME_VAR (t)
283 && (VAR_P (SSA_NAME_VAR (t))
284 || SSA_NAME_IS_DEFAULT_DEF (t)))
285 {
286 tree var = SSA_NAME_VAR (t);
287 /* If we don't yet have something recorded, just record it now. */
288 if (!DECL_RTL_SET_P (var))
289 SET_DECL_RTL (var, x);
290 /* If we have it set already to "multiple places" don't
291 change this. */
292 else if (DECL_RTL (var) == pc_rtx)
293 ;
294 /* If we have something recorded and it's not the same place
295 as we want to record now, we have multiple partitions for the
296 same base variable, with different places. We can't just
297 randomly choose one, hence we have to say that we don't know.
298 This only happens with optimization, and there var-tracking
299 will figure out the right thing. */
300 else if (DECL_RTL (var) != x)
301 SET_DECL_RTL (var, pc_rtx);
302 }
303 }
304 else
305 SET_DECL_RTL (t, x);
306 }
307
308 /* This structure holds data relevant to one variable that will be
309 placed in a stack slot. */
310 struct stack_var
311 {
312 /* The variable. */
313 tree decl;
314
315 /* Initially, the size of the variable. Later, the size of the partition,
316 if this variable becomes its partition's representative. */
317 HOST_WIDE_INT size;
318
319 /* The *byte* alignment required for this variable. Or, as with the
320 size, the alignment for this partition. */
321 unsigned int alignb;
322
323 /* The partition representative. */
324 size_t representative;
325
326 /* The next stack variable in the partition, or EOC. */
327 size_t next;
328
329 /* Bitmap of the indices of conflicting stack variables. */
330 bitmap conflicts;
331 };
332
333 #define EOC ((size_t)-1)
334
335 /* We have an array of such objects while deciding allocation. */
336 static struct stack_var *stack_vars;
337 static size_t stack_vars_alloc;
338 static size_t stack_vars_num;
339 static hash_map<tree, size_t> *decl_to_stack_part;
340
341 /* Conflict bitmaps go on this obstack. This allows us to destroy
342 all of them in one big sweep. */
343 static bitmap_obstack stack_var_bitmap_obstack;
344
345 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
346 is non-increasing. */
347 static size_t *stack_vars_sorted;
348
349 /* The phase of the stack frame. This is the known misalignment of
350 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
351 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
352 static int frame_phase;
353
354 /* Used during expand_used_vars to remember if we saw any decls for
355 which we'd like to enable stack smashing protection. */
356 static bool has_protected_decls;
357
358 /* Used during expand_used_vars. Remember if we saw a character buffer
359 smaller than our cutoff threshold. Used for -Wstack-protector. */
360 static bool has_short_buffer;
361
362 /* Compute the byte alignment to use for DECL. Ignore alignment
363 we can't honor given the expected alignment of the stack boundary. */
364
365 static unsigned int
366 align_local_variable (tree decl)
367 {
368 unsigned int align;
369
370 if (TREE_CODE (decl) == SSA_NAME)
371 align = TYPE_ALIGN (TREE_TYPE (decl));
372 else
373 {
374 align = LOCAL_DECL_ALIGNMENT (decl);
375 SET_DECL_ALIGN (decl, align);
376 }
377 return align / BITS_PER_UNIT;
378 }
379
380 /* Align given offset BASE to ALIGN. Round up if ALIGN_UP is true,
381 truncate down otherwise. Return the aligned BASE value. */
382
383 static inline unsigned HOST_WIDE_INT
384 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
385 {
386 return align_up ? (base + align - 1) & -align : base & -align;
387 }
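
/* A minimal worked sketch of the rounding above (illustrative only, not
   part of the original source), assuming ALIGN is a power of two so that
   -align acts as a mask clearing the low bits:

     align_base (13, 8, true)    => (13 + 7) & -8  ==  16
     align_base (13, 8, false)   =>  13 & -8       ==   8
     align_base (-28, 8, false)  => -28 & -8       == -32

   The negative case is the one alloc_stack_frame_space below relies on
   when FRAME_GROWS_DOWNWARD.  */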
388
389 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
390 Return the frame offset. */
391
392 static HOST_WIDE_INT
393 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
394 {
395 HOST_WIDE_INT offset, new_frame_offset;
396
397 if (FRAME_GROWS_DOWNWARD)
398 {
399 new_frame_offset
400 = align_base (frame_offset - frame_phase - size,
401 align, false) + frame_phase;
402 offset = new_frame_offset;
403 }
404 else
405 {
406 new_frame_offset
407 = align_base (frame_offset - frame_phase, align, true) + frame_phase;
408 offset = new_frame_offset;
409 new_frame_offset += size;
410 }
411 frame_offset = new_frame_offset;
412
413 if (frame_offset_overflow (frame_offset, cfun->decl))
414 frame_offset = offset = 0;
415
416 return offset;
417 }
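
/* Worked example (illustrative only, not part of the original source),
   assuming FRAME_GROWS_DOWNWARD, frame_phase == 0 and frame_offset == -16:
   allocating 12 bytes at byte alignment 8 computes
   align_base (-16 - 12, 8, false) == -32, so the variable occupies
   offsets [-32, -20) and frame_offset becomes -32.  With an upward
   growing frame, the old frame_offset is instead rounded up and SIZE is
   added afterwards.  */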
418
419 /* Accumulate DECL into STACK_VARS. */
420
421 static void
422 add_stack_var (tree decl)
423 {
424 struct stack_var *v;
425
426 if (stack_vars_num >= stack_vars_alloc)
427 {
428 if (stack_vars_alloc)
429 stack_vars_alloc = stack_vars_alloc * 3 / 2;
430 else
431 stack_vars_alloc = 32;
432 stack_vars
433 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
434 }
435 if (!decl_to_stack_part)
436 decl_to_stack_part = new hash_map<tree, size_t>;
437
438 v = &stack_vars[stack_vars_num];
439 decl_to_stack_part->put (decl, stack_vars_num);
440
441 v->decl = decl;
442 tree size = TREE_CODE (decl) == SSA_NAME
443 ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
444 : DECL_SIZE_UNIT (decl);
445 v->size = tree_to_uhwi (size);
446 /* Ensure that all variables have size, so that &a != &b for any two
447 variables that are simultaneously live. */
448 if (v->size == 0)
449 v->size = 1;
450 v->alignb = align_local_variable (decl);
451 /* An alignment of zero can mightily confuse us later. */
452 gcc_assert (v->alignb != 0);
453
454 /* All variables are initially in their own partition. */
455 v->representative = stack_vars_num;
456 v->next = EOC;
457
458 /* All variables initially conflict with no other. */
459 v->conflicts = NULL;
460
461 /* Ensure that this decl doesn't get put onto the list twice. */
462 set_rtl (decl, pc_rtx);
463
464 stack_vars_num++;
465 }
466
467 /* Make the decls associated with luids X and Y conflict. */
468
469 static void
470 add_stack_var_conflict (size_t x, size_t y)
471 {
472 struct stack_var *a = &stack_vars[x];
473 struct stack_var *b = &stack_vars[y];
474 if (!a->conflicts)
475 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
476 if (!b->conflicts)
477 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
478 bitmap_set_bit (a->conflicts, y);
479 bitmap_set_bit (b->conflicts, x);
480 }
481
482 /* Check whether the decls associated with luids X and Y conflict. */
483
484 static bool
485 stack_var_conflict_p (size_t x, size_t y)
486 {
487 struct stack_var *a = &stack_vars[x];
488 struct stack_var *b = &stack_vars[y];
489 if (x == y)
490 return false;
491 /* Partitions containing an SSA name result from gimple registers
492 with things like unsupported modes. They are top-level and
493 hence conflict with everything else. */
494 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
495 return true;
496
497 if (!a->conflicts || !b->conflicts)
498 return false;
499 return bitmap_bit_p (a->conflicts, y);
500 }
501
502 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
503 enter its partition number into bitmap DATA. */
504
505 static bool
506 visit_op (gimple *, tree op, tree, void *data)
507 {
508 bitmap active = (bitmap)data;
509 op = get_base_address (op);
510 if (op
511 && DECL_P (op)
512 && DECL_RTL_IF_SET (op) == pc_rtx)
513 {
514 size_t *v = decl_to_stack_part->get (op);
515 if (v)
516 bitmap_set_bit (active, *v);
517 }
518 return false;
519 }
520
521 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
522 record conflicts between it and all currently active other partitions
523 from bitmap DATA. */
524
525 static bool
526 visit_conflict (gimple *, tree op, tree, void *data)
527 {
528 bitmap active = (bitmap)data;
529 op = get_base_address (op);
530 if (op
531 && DECL_P (op)
532 && DECL_RTL_IF_SET (op) == pc_rtx)
533 {
534 size_t *v = decl_to_stack_part->get (op);
535 if (v && bitmap_set_bit (active, *v))
536 {
537 size_t num = *v;
538 bitmap_iterator bi;
539 unsigned i;
540 gcc_assert (num < stack_vars_num);
541 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
542 add_stack_var_conflict (num, i);
543 }
544 }
545 return false;
546 }
547
548 /* Helper routine for add_scope_conflicts, calculating the active partitions
549 at the end of BB, leaving the result in WORK. We're called to generate
550 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
551 liveness. */
552
553 static void
554 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
555 {
556 edge e;
557 edge_iterator ei;
558 gimple_stmt_iterator gsi;
559 walk_stmt_load_store_addr_fn visit;
560
561 bitmap_clear (work);
562 FOR_EACH_EDGE (e, ei, bb->preds)
563 bitmap_ior_into (work, (bitmap)e->src->aux);
564
565 visit = visit_op;
566
567 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
568 {
569 gimple *stmt = gsi_stmt (gsi);
570 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
571 }
572 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
573 {
574 gimple *stmt = gsi_stmt (gsi);
575
576 if (gimple_clobber_p (stmt))
577 {
578 tree lhs = gimple_assign_lhs (stmt);
579 size_t *v;
580 /* Nested function lowering might introduce LHSs
581 that are COMPONENT_REFs. */
582 if (!VAR_P (lhs))
583 continue;
584 if (DECL_RTL_IF_SET (lhs) == pc_rtx
585 && (v = decl_to_stack_part->get (lhs)))
586 bitmap_clear_bit (work, *v);
587 }
588 else if (!is_gimple_debug (stmt))
589 {
590 if (for_conflict
591 && visit == visit_op)
592 {
593 /* If this is the first real instruction in this BB we need
594 to add conflicts for everything live at this point now.
595 Unlike classical liveness for named objects we can't
596 rely on seeing a def/use of the names we're interested in.
597 There might merely be indirect loads/stores. We'd not add any
598 conflicts for such partitions. */
599 bitmap_iterator bi;
600 unsigned i;
601 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
602 {
603 struct stack_var *a = &stack_vars[i];
604 if (!a->conflicts)
605 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
606 bitmap_ior_into (a->conflicts, work);
607 }
608 visit = visit_conflict;
609 }
610 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
611 }
612 }
613 }
614
615 /* Generate stack partition conflicts between all partitions that are
616 simultaneously live. */
617
618 static void
619 add_scope_conflicts (void)
620 {
621 basic_block bb;
622 bool changed;
623 bitmap work = BITMAP_ALLOC (NULL);
624 int *rpo;
625 int n_bbs;
626
627 /* We approximate the live range of a stack variable by taking the first
628 mention of its name as starting point(s), and by the end-of-scope
629 death clobber added by gimplify as ending point(s) of the range.
630 This overapproximates if, for instance, we moved an address-taken
631 operation upward without also moving a dereference to it upward.
632 But it's conservatively correct, as a variable can never hold values
633 before its name is mentioned at least once.
634
635 We then do a mostly classical bitmap liveness algorithm. */
636
637 FOR_ALL_BB_FN (bb, cfun)
638 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
639
640 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
641 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
642
643 changed = true;
644 while (changed)
645 {
646 int i;
647 changed = false;
648 for (i = 0; i < n_bbs; i++)
649 {
650 bitmap active;
651 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
652 active = (bitmap)bb->aux;
653 add_scope_conflicts_1 (bb, work, false);
654 if (bitmap_ior_into (active, work))
655 changed = true;
656 }
657 }
658
659 FOR_EACH_BB_FN (bb, cfun)
660 add_scope_conflicts_1 (bb, work, true);
661
662 free (rpo);
663 BITMAP_FREE (work);
664 FOR_ALL_BB_FN (bb, cfun)
665 BITMAP_FREE (bb->aux);
666 }
667
668 /* A subroutine of partition_stack_vars. A comparison function for qsort,
669 sorting an array of indices by the properties of the object. */
670
671 static int
672 stack_var_cmp (const void *a, const void *b)
673 {
674 size_t ia = *(const size_t *)a;
675 size_t ib = *(const size_t *)b;
676 unsigned int aligna = stack_vars[ia].alignb;
677 unsigned int alignb = stack_vars[ib].alignb;
678 HOST_WIDE_INT sizea = stack_vars[ia].size;
679 HOST_WIDE_INT sizeb = stack_vars[ib].size;
680 tree decla = stack_vars[ia].decl;
681 tree declb = stack_vars[ib].decl;
682 bool largea, largeb;
683 unsigned int uida, uidb;
684
685 /* Primary compare on "large" alignment. Large comes first. */
686 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
687 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
688 if (largea != largeb)
689 return (int)largeb - (int)largea;
690
691 /* Secondary compare on size, decreasing */
692 if (sizea > sizeb)
693 return -1;
694 if (sizea < sizeb)
695 return 1;
696
697 /* Tertiary compare on true alignment, decreasing. */
698 if (aligna < alignb)
699 return -1;
700 if (aligna > alignb)
701 return 1;
702
703 /* Final compare on ID for sort stability, increasing.
704 Two SSA names are compared by their version, SSA names come before
705 non-SSA names, and two normal decls are compared by their DECL_UID. */
706 if (TREE_CODE (decla) == SSA_NAME)
707 {
708 if (TREE_CODE (declb) == SSA_NAME)
709 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
710 else
711 return -1;
712 }
713 else if (TREE_CODE (declb) == SSA_NAME)
714 return 1;
715 else
716 uida = DECL_UID (decla), uidb = DECL_UID (declb);
717 if (uida < uidb)
718 return 1;
719 if (uida > uidb)
720 return -1;
721 return 0;
722 }
723
724 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
725 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
726
727 /* If the points-to solution *PT points to variables that are in a partition
728 together with other variables, add all partition members to the pointed-to
729 variables bitmap. */
730
731 static void
732 add_partitioned_vars_to_ptset (struct pt_solution *pt,
733 part_hashmap *decls_to_partitions,
734 hash_set<bitmap> *visited, bitmap temp)
735 {
736 bitmap_iterator bi;
737 unsigned i;
738 bitmap *part;
739
740 if (pt->anything
741 || pt->vars == NULL
742 /* The pointed-to vars bitmap is shared, it is enough to
743 visit it once. */
744 || visited->add (pt->vars))
745 return;
746
747 bitmap_clear (temp);
748
749 /* By using a temporary bitmap to store all members of the partitions
750 we have to add, we make sure to visit each of the partitions only
751 once. */
752 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
753 if ((!temp
754 || !bitmap_bit_p (temp, i))
755 && (part = decls_to_partitions->get (i)))
756 bitmap_ior_into (temp, *part);
757 if (!bitmap_empty_p (temp))
758 bitmap_ior_into (pt->vars, temp);
759 }
760
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762 The bitmaps representing stack partitions will be saved until expand,
763 where partitioned decls used as bases in memory expressions will be
764 rewritten. */
765
766 static void
767 update_alias_info_with_stack_vars (void)
768 {
769 part_hashmap *decls_to_partitions = NULL;
770 size_t i, j;
771 tree var = NULL_TREE;
772
773 for (i = 0; i < stack_vars_num; i++)
774 {
775 bitmap part = NULL;
776 tree name;
777 struct ptr_info_def *pi;
778
779 /* Not interested in partitions with a single variable. */
780 if (stack_vars[i].representative != i
781 || stack_vars[i].next == EOC)
782 continue;
783
784 if (!decls_to_partitions)
785 {
786 decls_to_partitions = new part_hashmap;
787 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
788 }
789
790 /* Create an SSA_NAME that points to the partition for use
791 as base during alias-oracle queries on RTL for bases that
792 have been partitioned. */
793 if (var == NULL_TREE)
794 var = create_tmp_var (ptr_type_node);
795 name = make_ssa_name (var);
796
797 /* Create bitmaps representing partitions. They will be used for
798 points-to sets later, so use GGC alloc. */
799 part = BITMAP_GGC_ALLOC ();
800 for (j = i; j != EOC; j = stack_vars[j].next)
801 {
802 tree decl = stack_vars[j].decl;
803 unsigned int uid = DECL_PT_UID (decl);
804 bitmap_set_bit (part, uid);
805 decls_to_partitions->put (uid, part);
806 cfun->gimple_df->decls_to_pointers->put (decl, name);
807 if (TREE_ADDRESSABLE (decl))
808 TREE_ADDRESSABLE (name) = 1;
809 }
810
811 /* Make the SSA name point to all partition members. */
812 pi = get_ptr_info (name);
813 pt_solution_set (&pi->pt, part, false);
814 }
815
816 /* Make all points-to sets that contain one member of a partition
817 contain all members of the partition. */
818 if (decls_to_partitions)
819 {
820 unsigned i;
821 tree name;
822 hash_set<bitmap> visited;
823 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
824
825 FOR_EACH_SSA_NAME (i, name, cfun)
826 {
827 struct ptr_info_def *pi;
828
829 if (POINTER_TYPE_P (TREE_TYPE (name))
830 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
831 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
832 &visited, temp);
833 }
834
835 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
836 decls_to_partitions, &visited, temp);
837
838 delete decls_to_partitions;
839 BITMAP_FREE (temp);
840 }
841 }
842
843 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
844 partitioning algorithm. Partitions A and B are known to be non-conflicting.
845 Merge them into a single partition A. */
846
847 static void
848 union_stack_vars (size_t a, size_t b)
849 {
850 struct stack_var *vb = &stack_vars[b];
851 bitmap_iterator bi;
852 unsigned u;
853
854 gcc_assert (stack_vars[b].next == EOC);
855 /* Add B to A's partition. */
856 stack_vars[b].next = stack_vars[a].next;
857 stack_vars[b].representative = a;
858 stack_vars[a].next = b;
859
860 /* Update the required alignment of partition A to account for B. */
861 if (stack_vars[a].alignb < stack_vars[b].alignb)
862 stack_vars[a].alignb = stack_vars[b].alignb;
863
864 /* Update the interference graph and merge the conflicts. */
865 if (vb->conflicts)
866 {
867 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
868 add_stack_var_conflict (a, stack_vars[u].representative);
869 BITMAP_FREE (vb->conflicts);
870 }
871 }
872
873 /* A subroutine of expand_used_vars. Binpack the variables into
874 partitions constrained by the interference graph. The overall
875 algorithm used is as follows:
876
877 Sort the objects by size in descending order.
878 For each object A {
879 S = size(A)
880 O = 0
881 loop {
882 Look for the largest non-conflicting object B with size <= S.
883 If no such object exists, stop; otherwise UNION (A, B).
884 }
885 }
886 */
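
/* Illustrative run (not from the original source): with sorted sizes of
   64, 32 and 16 bytes, where only the 64- and 32-byte objects conflict,
   the first pass over the 64-byte representative skips the 32-byte
   object (conflict) and unions the 16-byte object into it; the second
   pass finds the 16-byte object already merged, leaving the partitions
   {64, 16} and {32}.  */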
887
888 static void
889 partition_stack_vars (void)
890 {
891 size_t si, sj, n = stack_vars_num;
892
893 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
894 for (si = 0; si < n; ++si)
895 stack_vars_sorted[si] = si;
896
897 if (n == 1)
898 return;
899
900 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
901
902 for (si = 0; si < n; ++si)
903 {
904 size_t i = stack_vars_sorted[si];
905 unsigned int ialign = stack_vars[i].alignb;
906 HOST_WIDE_INT isize = stack_vars[i].size;
907
908 /* Ignore objects that aren't partition representatives. If we
909 see a var that is not a partition representative, it must
910 have been merged earlier. */
911 if (stack_vars[i].representative != i)
912 continue;
913
914 for (sj = si + 1; sj < n; ++sj)
915 {
916 size_t j = stack_vars_sorted[sj];
917 unsigned int jalign = stack_vars[j].alignb;
918 HOST_WIDE_INT jsize = stack_vars[j].size;
919
920 /* Ignore objects that aren't partition representatives. */
921 if (stack_vars[j].representative != j)
922 continue;
923
924 /* Do not mix objects of "small" (supported) alignment
925 and "large" (unsupported) alignment. */
926 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
927 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
928 break;
929
930 /* For Address Sanitizer do not mix objects with different
931 sizes, as the shorter vars wouldn't be adequately protected.
932 Don't do that for "large" (unsupported) alignment objects,
933 those aren't protected anyway. */
934 if ((asan_sanitize_stack_p ())
935 && isize != jsize
936 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 break;
938
939 /* Ignore conflicting objects. */
940 if (stack_var_conflict_p (i, j))
941 continue;
942
943 /* UNION the objects, placing J at OFFSET. */
944 union_stack_vars (i, j);
945 }
946 }
947
948 update_alias_info_with_stack_vars ();
949 }
950
951 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
952
953 static void
954 dump_stack_var_partition (void)
955 {
956 size_t si, i, j, n = stack_vars_num;
957
958 for (si = 0; si < n; ++si)
959 {
960 i = stack_vars_sorted[si];
961
962 /* Skip variables that aren't partition representatives, for now. */
963 if (stack_vars[i].representative != i)
964 continue;
965
966 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
967 " align %u\n", (unsigned long) i, stack_vars[i].size,
968 stack_vars[i].alignb);
969
970 for (j = i; j != EOC; j = stack_vars[j].next)
971 {
972 fputc ('\t', dump_file);
973 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
974 }
975 fputc ('\n', dump_file);
976 }
977 }
978
979 /* Assign rtl to DECL at BASE + OFFSET. */
980
981 static void
982 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
983 HOST_WIDE_INT offset)
984 {
985 unsigned align;
986 rtx x;
987
988 /* If this fails, we've overflowed the stack frame. Error nicely? */
989 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
990
991 x = plus_constant (Pmode, base, offset);
992 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
993 ? TYPE_MODE (TREE_TYPE (decl))
994 : DECL_MODE (SSAVAR (decl)), x);
995
996 if (TREE_CODE (decl) != SSA_NAME)
997 {
998 /* Set the alignment we actually gave this decl if it isn't an SSA name.
999 If it is, we generate stack slots only accidentally, so it isn't as
1000 important; we'll simply use the alignment that is already set. */
1001 if (base == virtual_stack_vars_rtx)
1002 offset -= frame_phase;
1003 align = least_bit_hwi (offset);
1004 align *= BITS_PER_UNIT;
1005 if (align == 0 || align > base_align)
1006 align = base_align;
1007
1008 /* One would think that we could assert that we're not decreasing
1009 alignment here, but (at least) the i386 port does exactly this
1010 via the MINIMUM_ALIGNMENT hook. */
1011
1012 SET_DECL_ALIGN (decl, align);
1013 DECL_USER_ALIGN (decl) = 0;
1014 }
1015
1016 set_rtl (decl, x);
1017 }
1018
1019 struct stack_vars_data
1020 {
1021 /* Vector of offset pairs, always end of some padding followed
1022 by start of the padding that needs Address Sanitizer protection.
1023 The vector is in reverse order, highest offset pairs come first. */
1024 auto_vec<HOST_WIDE_INT> asan_vec;
1025
1026 /* Vector of partition representative decls in between the paddings. */
1027 auto_vec<tree> asan_decl_vec;
1028
1029 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1030 rtx asan_base;
1031
1032 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1033 unsigned int asan_alignb;
1034 };
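
/* Illustrative example (not from the original source), assuming a
   downward-growing frame, ASAN_RED_ZONE_SIZE of 32 and an initial
   frame_offset of 0: protecting a 40-byte variable pushes the pair
   { 0, -56 } onto asan_vec -- the variable itself lands at [-96, -56)
   and the bytes between -56 and 0 are the padding that asan will
   protect; asan_decl_vec records the partition representative for that
   slot.  */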
1035
1036 /* A subroutine of expand_used_vars. Give each partition representative
1037 a unique location within the stack frame. Update each partition member
1038 with that location. */
1039
1040 static void
1041 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1042 {
1043 size_t si, i, j, n = stack_vars_num;
1044 HOST_WIDE_INT large_size = 0, large_alloc = 0;
1045 rtx large_base = NULL;
1046 unsigned large_align = 0;
1047 bool large_allocation_done = false;
1048 tree decl;
1049
1050 /* Determine if there are any variables requiring "large" alignment.
1051 Since these are dynamically allocated, we only process these if
1052 no predicate is involved. */
1053 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1054 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1055 {
1056 /* Find the total size of these variables. */
1057 for (si = 0; si < n; ++si)
1058 {
1059 unsigned alignb;
1060
1061 i = stack_vars_sorted[si];
1062 alignb = stack_vars[i].alignb;
1063
1064 /* All "large" alignment decls come before all "small" alignment
1065 decls, but "large" alignment decls are not sorted based on
1066 their alignment. Increase large_align to track the largest
1067 required alignment. */
1068 if ((alignb * BITS_PER_UNIT) > large_align)
1069 large_align = alignb * BITS_PER_UNIT;
1070
1071 /* Stop when we get to the first decl with "small" alignment. */
1072 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1073 break;
1074
1075 /* Skip variables that aren't partition representatives. */
1076 if (stack_vars[i].representative != i)
1077 continue;
1078
1079 /* Skip variables that have already had rtl assigned. See also
1080 add_stack_var where we perpetrate this pc_rtx hack. */
1081 decl = stack_vars[i].decl;
1082 if (TREE_CODE (decl) == SSA_NAME
1083 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1084 : DECL_RTL (decl) != pc_rtx)
1085 continue;
1086
1087 large_size += alignb - 1;
1088 large_size &= -(HOST_WIDE_INT)alignb;
1089 large_size += stack_vars[i].size;
1090 }
1091 }
1092
1093 for (si = 0; si < n; ++si)
1094 {
1095 rtx base;
1096 unsigned base_align, alignb;
1097 HOST_WIDE_INT offset;
1098
1099 i = stack_vars_sorted[si];
1100
1101 /* Skip variables that aren't partition representatives, for now. */
1102 if (stack_vars[i].representative != i)
1103 continue;
1104
1105 /* Skip variables that have already had rtl assigned. See also
1106 add_stack_var where we perpetrate this pc_rtx hack. */
1107 decl = stack_vars[i].decl;
1108 if (TREE_CODE (decl) == SSA_NAME
1109 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1110 : DECL_RTL (decl) != pc_rtx)
1111 continue;
1112
1113 /* Check the predicate to see whether this variable should be
1114 allocated in this pass. */
1115 if (pred && !pred (i))
1116 continue;
1117
1118 alignb = stack_vars[i].alignb;
1119 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1120 {
1121 base = virtual_stack_vars_rtx;
1122 if ((asan_sanitize_stack_p ())
1123 && pred)
1124 {
1125 HOST_WIDE_INT prev_offset
1126 = align_base (frame_offset,
1127 MAX (alignb, ASAN_RED_ZONE_SIZE),
1128 !FRAME_GROWS_DOWNWARD);
1129 tree repr_decl = NULL_TREE;
1130 offset
1131 = alloc_stack_frame_space (stack_vars[i].size
1132 + ASAN_RED_ZONE_SIZE,
1133 MAX (alignb, ASAN_RED_ZONE_SIZE));
1134
1135 data->asan_vec.safe_push (prev_offset);
1136 data->asan_vec.safe_push (offset + stack_vars[i].size);
1137 /* Find the best representative of the partition.
1138 Prefer those with DECL_NAME, better yet those that also
1139 satisfy the asan_protect_stack_decl predicate. */
1140 for (j = i; j != EOC; j = stack_vars[j].next)
1141 if (asan_protect_stack_decl (stack_vars[j].decl)
1142 && DECL_NAME (stack_vars[j].decl))
1143 {
1144 repr_decl = stack_vars[j].decl;
1145 break;
1146 }
1147 else if (repr_decl == NULL_TREE
1148 && DECL_P (stack_vars[j].decl)
1149 && DECL_NAME (stack_vars[j].decl))
1150 repr_decl = stack_vars[j].decl;
1151 if (repr_decl == NULL_TREE)
1152 repr_decl = stack_vars[i].decl;
1153 data->asan_decl_vec.safe_push (repr_decl);
1154 data->asan_alignb = MAX (data->asan_alignb, alignb);
1155 if (data->asan_base == NULL)
1156 data->asan_base = gen_reg_rtx (Pmode);
1157 base = data->asan_base;
1158
1159 if (!STRICT_ALIGNMENT)
1160 base_align = crtl->max_used_stack_slot_alignment;
1161 else
1162 base_align = MAX (crtl->max_used_stack_slot_alignment,
1163 GET_MODE_ALIGNMENT (SImode)
1164 << ASAN_SHADOW_SHIFT);
1165 }
1166 else
1167 {
1168 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1169 base_align = crtl->max_used_stack_slot_alignment;
1170 }
1171 }
1172 else
1173 {
1174 /* Large alignment is only processed in the last pass. */
1175 if (pred)
1176 continue;
1177
1178 /* If there were any variables requiring "large" alignment, allocate
1179 space. */
1180 if (large_size > 0 && ! large_allocation_done)
1181 {
1182 HOST_WIDE_INT loffset;
1183 rtx large_allocsize;
1184
1185 large_allocsize = GEN_INT (large_size);
1186 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1187 loffset = alloc_stack_frame_space
1188 (INTVAL (large_allocsize),
1189 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1190 large_base = get_dynamic_stack_base (loffset, large_align);
1191 large_allocation_done = true;
1192 }
1193 gcc_assert (large_base != NULL);
1194
1195 large_alloc += alignb - 1;
1196 large_alloc &= -(HOST_WIDE_INT)alignb;
1197 offset = large_alloc;
1198 large_alloc += stack_vars[i].size;
1199
1200 base = large_base;
1201 base_align = large_align;
1202 }
1203
1204 /* Create rtl for each variable based on their location within the
1205 partition. */
1206 for (j = i; j != EOC; j = stack_vars[j].next)
1207 {
1208 expand_one_stack_var_at (stack_vars[j].decl,
1209 base, base_align,
1210 offset);
1211 }
1212 }
1213
1214 gcc_assert (large_alloc == large_size);
1215 }
1216
1217 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1218 static HOST_WIDE_INT
1219 account_stack_vars (void)
1220 {
1221 size_t si, j, i, n = stack_vars_num;
1222 HOST_WIDE_INT size = 0;
1223
1224 for (si = 0; si < n; ++si)
1225 {
1226 i = stack_vars_sorted[si];
1227
1228 /* Skip variables that aren't partition representatives, for now. */
1229 if (stack_vars[i].representative != i)
1230 continue;
1231
1232 size += stack_vars[i].size;
1233 for (j = i; j != EOC; j = stack_vars[j].next)
1234 set_rtl (stack_vars[j].decl, NULL);
1235 }
1236 return size;
1237 }
1238
1239 /* Record the RTL assignment X for the default def of PARM. */
1240
1241 extern void
1242 set_parm_rtl (tree parm, rtx x)
1243 {
1244 gcc_assert (TREE_CODE (parm) == PARM_DECL
1245 || TREE_CODE (parm) == RESULT_DECL);
1246
1247 if (x && !MEM_P (x))
1248 {
1249 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1250 TYPE_MODE (TREE_TYPE (parm)),
1251 TYPE_ALIGN (TREE_TYPE (parm)));
1252
1253 /* If the variable alignment is very large we'll dynamically
1254 allocate it, which means that the in-frame portion is just a
1255 pointer. ??? We've got a pseudo for sure here, do we
1256 actually dynamically allocate its spilling area if needed?
1257 ??? Isn't it a problem when POINTER_SIZE also exceeds
1258 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32? */
1259 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1260 align = POINTER_SIZE;
1261
1262 record_alignment_for_reg_var (align);
1263 }
1264
1265 tree ssa = ssa_default_def (cfun, parm);
1266 if (!ssa)
1267 return set_rtl (parm, x);
1268
1269 int part = var_to_partition (SA.map, ssa);
1270 gcc_assert (part != NO_PARTITION);
1271
1272 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1273 gcc_assert (changed);
1274
1275 set_rtl (ssa, x);
1276 gcc_assert (DECL_RTL (parm) == x);
1277 }
1278
1279 /* A subroutine of expand_one_var. Called to immediately assign rtl
1280 to a variable to be allocated in the stack frame. */
1281
1282 static void
1283 expand_one_stack_var_1 (tree var)
1284 {
1285 HOST_WIDE_INT size, offset;
1286 unsigned byte_align;
1287
1288 if (TREE_CODE (var) == SSA_NAME)
1289 {
1290 tree type = TREE_TYPE (var);
1291 size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1292 byte_align = TYPE_ALIGN_UNIT (type);
1293 }
1294 else
1295 {
1296 size = tree_to_uhwi (DECL_SIZE_UNIT (var));
1297 byte_align = align_local_variable (var);
1298 }
1299
1300 /* We handle highly aligned variables in expand_stack_vars. */
1301 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1302
1303 offset = alloc_stack_frame_space (size, byte_align);
1304
1305 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1306 crtl->max_used_stack_slot_alignment, offset);
1307 }
1308
1309 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1310 already assigned some MEM. */
1311
1312 static void
1313 expand_one_stack_var (tree var)
1314 {
1315 if (TREE_CODE (var) == SSA_NAME)
1316 {
1317 int part = var_to_partition (SA.map, var);
1318 if (part != NO_PARTITION)
1319 {
1320 rtx x = SA.partition_to_pseudo[part];
1321 gcc_assert (x);
1322 gcc_assert (MEM_P (x));
1323 return;
1324 }
1325 }
1326
1327 return expand_one_stack_var_1 (var);
1328 }
1329
1330 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1331 that will reside in a hard register. */
1332
1333 static void
1334 expand_one_hard_reg_var (tree var)
1335 {
1336 rest_of_decl_compilation (var, 0, 0);
1337 }
1338
1339 /* Record the alignment requirements of some variable assigned to a
1340 pseudo. */
1341
1342 static void
1343 record_alignment_for_reg_var (unsigned int align)
1344 {
1345 if (SUPPORTS_STACK_ALIGNMENT
1346 && crtl->stack_alignment_estimated < align)
1347 {
1348 /* stack_alignment_estimated shouldn't change after the stack
1349 realign decision is made. */
1350 gcc_assert (!crtl->stack_realign_processed);
1351 crtl->stack_alignment_estimated = align;
1352 }
1353
1354 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1355 So here we only make sure stack_alignment_needed >= align. */
1356 if (crtl->stack_alignment_needed < align)
1357 crtl->stack_alignment_needed = align;
1358 if (crtl->max_used_stack_slot_alignment < align)
1359 crtl->max_used_stack_slot_alignment = align;
1360 }
1361
1362 /* Create RTL for an SSA partition. */
1363
1364 static void
1365 expand_one_ssa_partition (tree var)
1366 {
1367 int part = var_to_partition (SA.map, var);
1368 gcc_assert (part != NO_PARTITION);
1369
1370 if (SA.partition_to_pseudo[part])
1371 return;
1372
1373 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1374 TYPE_MODE (TREE_TYPE (var)),
1375 TYPE_ALIGN (TREE_TYPE (var)));
1376
1377 /* If the variable alignment is very large we'll dynamically allocate
1378 it, which means that the in-frame portion is just a pointer. */
1379 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1380 align = POINTER_SIZE;
1381
1382 record_alignment_for_reg_var (align);
1383
1384 if (!use_register_for_decl (var))
1385 {
1386 if (defer_stack_allocation (var, true))
1387 add_stack_var (var);
1388 else
1389 expand_one_stack_var_1 (var);
1390 return;
1391 }
1392
1393 machine_mode reg_mode = promote_ssa_mode (var, NULL);
1394 rtx x = gen_reg_rtx (reg_mode);
1395
1396 set_rtl (var, x);
1397
1398 /* For a promoted variable, X will not be used directly but wrapped in a
1399 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1400 will assume that its upper bits can be inferred from its lower bits.
1401 Therefore, if X isn't initialized on every path from the entry, then
1402 we must do it manually in order to fulfill the above assumption. */
1403 if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1404 && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1405 emit_move_insn (x, CONST0_RTX (reg_mode));
1406 }
1407
1408 /* Record the association between the RTL generated for partition PART
1409 and the underlying variable of the SSA_NAME VAR. */
1410
1411 static void
1412 adjust_one_expanded_partition_var (tree var)
1413 {
1414 if (!var)
1415 return;
1416
1417 tree decl = SSA_NAME_VAR (var);
1418
1419 int part = var_to_partition (SA.map, var);
1420 if (part == NO_PARTITION)
1421 return;
1422
1423 rtx x = SA.partition_to_pseudo[part];
1424
1425 gcc_assert (x);
1426
1427 set_rtl (var, x);
1428
1429 if (!REG_P (x))
1430 return;
1431
1432 /* Note if the object is a user variable. */
1433 if (decl && !DECL_ARTIFICIAL (decl))
1434 mark_user_reg (x);
1435
1436 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1437 mark_reg_pointer (x, get_pointer_alignment (var));
1438 }
1439
1440 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1441 that will reside in a pseudo register. */
1442
1443 static void
1444 expand_one_register_var (tree var)
1445 {
1446 if (TREE_CODE (var) == SSA_NAME)
1447 {
1448 int part = var_to_partition (SA.map, var);
1449 if (part != NO_PARTITION)
1450 {
1451 rtx x = SA.partition_to_pseudo[part];
1452 gcc_assert (x);
1453 gcc_assert (REG_P (x));
1454 return;
1455 }
1456 gcc_unreachable ();
1457 }
1458
1459 tree decl = var;
1460 tree type = TREE_TYPE (decl);
1461 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1462 rtx x = gen_reg_rtx (reg_mode);
1463
1464 set_rtl (var, x);
1465
1466 /* Note if the object is a user variable. */
1467 if (!DECL_ARTIFICIAL (decl))
1468 mark_user_reg (x);
1469
1470 if (POINTER_TYPE_P (type))
1471 mark_reg_pointer (x, get_pointer_alignment (var));
1472 }
1473
1474 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1475 has some associated error, e.g. its type is error-mark. We just need
1476 to pick something that won't crash the rest of the compiler. */
1477
1478 static void
1479 expand_one_error_var (tree var)
1480 {
1481 machine_mode mode = DECL_MODE (var);
1482 rtx x;
1483
1484 if (mode == BLKmode)
1485 x = gen_rtx_MEM (BLKmode, const0_rtx);
1486 else if (mode == VOIDmode)
1487 x = const0_rtx;
1488 else
1489 x = gen_reg_rtx (mode);
1490
1491 SET_DECL_RTL (var, x);
1492 }
1493
1494 /* A subroutine of expand_one_var. VAR is a variable that will be
1495 allocated to the local stack frame. Return true if we wish to
1496 add VAR to STACK_VARS so that it will be coalesced with other
1497 variables. Return false to allocate VAR immediately.
1498
1499 This function is used to reduce the number of variables considered
1500 for coalescing, which reduces the size of the quadratic problem. */
1501
1502 static bool
1503 defer_stack_allocation (tree var, bool toplevel)
1504 {
1505 tree size_unit = TREE_CODE (var) == SSA_NAME
1506 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1507 : DECL_SIZE_UNIT (var);
1508
1509 /* Whether the variable is small enough for immediate allocation not to be
1510 a problem with regard to the frame size. */
1511 bool smallish
1512 = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
1513 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1514
1515 /* If stack protection is enabled, *all* stack variables must be deferred,
1516 so that we can re-order the strings to the top of the frame.
1517 Similarly for Address Sanitizer. */
1518 if (flag_stack_protect || asan_sanitize_stack_p ())
1519 return true;
1520
1521 unsigned int align = TREE_CODE (var) == SSA_NAME
1522 ? TYPE_ALIGN (TREE_TYPE (var))
1523 : DECL_ALIGN (var);
1524
1525 /* We handle "large" alignment via dynamic allocation. We want to handle
1526 this extra complication in only one place, so defer them. */
1527 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1528 return true;
1529
1530 bool ignored = TREE_CODE (var) == SSA_NAME
1531 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1532 : DECL_IGNORED_P (var);
1533
1534 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1535 might be detached from their block and appear at toplevel when we reach
1536 here. We want to coalesce them with variables from other blocks when
1537 the immediate contribution to the frame size would be noticeable. */
1538 if (toplevel && optimize > 0 && ignored && !smallish)
1539 return true;
1540
1541 /* Variables declared in the outermost scope automatically conflict
1542 with every other variable. The only reason to want to defer them
1543 at all is that, after sorting, we can more efficiently pack
1544 small variables in the stack frame. Continue to defer at -O2. */
1545 if (toplevel && optimize < 2)
1546 return false;
1547
1548 /* Without optimization, *most* variables are allocated from the
1549 stack, which makes the quadratic problem large exactly when we
1550 want compilation to proceed as quickly as possible. On the
1551 other hand, we don't want the function's stack frame size to
1552 get completely out of hand. So we avoid adding scalars and
1553 "small" aggregates to the list at all. */
1554 if (optimize == 0 && smallish)
1555 return false;
1556
1557 return true;
1558 }
1559
1560 /* A subroutine of expand_used_vars. Expand one variable according to
1561 its flavor. Variables to be placed on the stack are not actually
1562 expanded yet, merely recorded.
1563 When REALLY_EXPAND is false, only add stack values to be allocated.
1564 Return the stack usage this variable is supposed to take.
1565 */
1566
1567 static HOST_WIDE_INT
1568 expand_one_var (tree var, bool toplevel, bool really_expand)
1569 {
1570 unsigned int align = BITS_PER_UNIT;
1571 tree origvar = var;
1572
1573 var = SSAVAR (var);
1574
1575 if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1576 {
1577 if (is_global_var (var))
1578 return 0;
1579
1580 /* Because we don't know if VAR will be in register or on stack,
1581 we conservatively assume it will be on stack even if VAR is
1582 eventually put into register after RA pass. For non-automatic
1583 variables, which won't be on stack, we collect alignment of
1584 type and ignore user specified alignment. Similarly for
1585 SSA_NAMEs for which use_register_for_decl returns true. */
1586 if (TREE_STATIC (var)
1587 || DECL_EXTERNAL (var)
1588 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1589 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1590 TYPE_MODE (TREE_TYPE (var)),
1591 TYPE_ALIGN (TREE_TYPE (var)));
1592 else if (DECL_HAS_VALUE_EXPR_P (var)
1593 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1594 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1595 or variables which were assigned a stack slot already by
1596 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1597 changed to match the offset chosen for it. */
1598 align = crtl->stack_alignment_estimated;
1599 else
1600 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1601
1602 /* If the variable alignment is very large we'll dynamically allocate
1603 it, which means that the in-frame portion is just a pointer. */
1604 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1605 align = POINTER_SIZE;
1606 }
1607
1608 record_alignment_for_reg_var (align);
1609
1610 if (TREE_CODE (origvar) == SSA_NAME)
1611 {
1612 gcc_assert (!VAR_P (var)
1613 || (!DECL_EXTERNAL (var)
1614 && !DECL_HAS_VALUE_EXPR_P (var)
1615 && !TREE_STATIC (var)
1616 && TREE_TYPE (var) != error_mark_node
1617 && !DECL_HARD_REGISTER (var)
1618 && really_expand));
1619 }
1620 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1621 ;
1622 else if (DECL_EXTERNAL (var))
1623 ;
1624 else if (DECL_HAS_VALUE_EXPR_P (var))
1625 ;
1626 else if (TREE_STATIC (var))
1627 ;
1628 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1629 ;
1630 else if (TREE_TYPE (var) == error_mark_node)
1631 {
1632 if (really_expand)
1633 expand_one_error_var (var);
1634 }
1635 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1636 {
1637 if (really_expand)
1638 {
1639 expand_one_hard_reg_var (var);
1640 if (!DECL_HARD_REGISTER (var))
1641 /* Invalid register specification. */
1642 expand_one_error_var (var);
1643 }
1644 }
1645 else if (use_register_for_decl (var))
1646 {
1647 if (really_expand)
1648 expand_one_register_var (origvar);
1649 }
1650 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1651 {
1652 /* Reject variables which cover more than half of the address-space. */
1653 if (really_expand)
1654 {
1655 error ("size of variable %q+D is too large", var);
1656 expand_one_error_var (var);
1657 }
1658 }
1659 else if (defer_stack_allocation (var, toplevel))
1660 add_stack_var (origvar);
1661 else
1662 {
1663 if (really_expand)
1664 {
1665 if (lookup_attribute ("naked",
1666 DECL_ATTRIBUTES (current_function_decl)))
1667 error ("cannot allocate stack for variable %q+D, naked function.",
1668 var);
1669
1670 expand_one_stack_var (origvar);
1671 }
1672
1673
1674 return tree_to_uhwi (DECL_SIZE_UNIT (var));
1675 }
1676 return 0;
1677 }
1678
1679 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1680 expanding variables. Those variables that can be put into registers
1681 are allocated pseudos; those that can't are put on the stack.
1682
1683 TOPLEVEL is true if this is the outermost BLOCK. */
1684
1685 static void
1686 expand_used_vars_for_block (tree block, bool toplevel)
1687 {
1688 tree t;
1689
1690 /* Expand all variables at this level. */
1691 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1692 if (TREE_USED (t)
1693 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1694 || !DECL_NONSHAREABLE (t)))
1695 expand_one_var (t, toplevel, true);
1696
1697 /* Expand all variables at containing levels. */
1698 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1699 expand_used_vars_for_block (t, false);
1700 }
1701
1702 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1703 and clear TREE_USED on all local variables. */
1704
1705 static void
1706 clear_tree_used (tree block)
1707 {
1708 tree t;
1709
1710 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1711 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1712 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1713 || !DECL_NONSHAREABLE (t))
1714 TREE_USED (t) = 0;
1715
1716 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1717 clear_tree_used (t);
1718 }
1719
1720 enum {
1721 SPCT_FLAG_DEFAULT = 1,
1722 SPCT_FLAG_ALL = 2,
1723 SPCT_FLAG_STRONG = 3,
1724 SPCT_FLAG_EXPLICIT = 4
1725 };
1726
1727 /* Examine TYPE and determine a bit mask of the following features. */
1728
1729 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1730 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1731 #define SPCT_HAS_ARRAY 4
1732 #define SPCT_HAS_AGGREGATE 8
1733
1734 static unsigned int
1735 stack_protect_classify_type (tree type)
1736 {
1737 unsigned int ret = 0;
1738 tree t;
1739
1740 switch (TREE_CODE (type))
1741 {
1742 case ARRAY_TYPE:
1743 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1744 if (t == char_type_node
1745 || t == signed_char_type_node
1746 || t == unsigned_char_type_node)
1747 {
1748 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1749 unsigned HOST_WIDE_INT len;
1750
1751 if (!TYPE_SIZE_UNIT (type)
1752 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1753 len = max;
1754 else
1755 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1756
1757 if (len < max)
1758 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1759 else
1760 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1761 }
1762 else
1763 ret = SPCT_HAS_ARRAY;
1764 break;
1765
1766 case UNION_TYPE:
1767 case QUAL_UNION_TYPE:
1768 case RECORD_TYPE:
1769 ret = SPCT_HAS_AGGREGATE;
1770 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1771 if (TREE_CODE (t) == FIELD_DECL)
1772 ret |= stack_protect_classify_type (TREE_TYPE (t));
1773 break;
1774
1775 default:
1776 break;
1777 }
1778
1779 return ret;
1780 }
1781
1782 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1783 part of the local stack frame. Remember if we ever return nonzero for
1784 any variable in this function. The return value is the phase number in
1785 which the variable should be allocated. */
1786
1787 static int
1788 stack_protect_decl_phase (tree decl)
1789 {
1790 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1791 int ret = 0;
1792
1793 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1794 has_short_buffer = true;
1795
1796 if (flag_stack_protect == SPCT_FLAG_ALL
1797 || flag_stack_protect == SPCT_FLAG_STRONG
1798 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1799 && lookup_attribute ("stack_protect",
1800 DECL_ATTRIBUTES (current_function_decl))))
1801 {
1802 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1803 && !(bits & SPCT_HAS_AGGREGATE))
1804 ret = 1;
1805 else if (bits & SPCT_HAS_ARRAY)
1806 ret = 2;
1807 }
1808 else
1809 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1810
1811 if (ret)
1812 has_protected_decls = true;
1813
1814 return ret;
1815 }
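
/* For example (illustrative only, not part of the original source): with
   the default --param ssp-buffer-size of 8 and plain -fstack-protector,
   a local "char buf[16]" classifies as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY and is allocated in phase 1,
   while "char buf[4]" only sets the small-buffer bits and stays in
   phase 0 (it is not segregated, but has_short_buffer is noted for
   -Wstack-protector).  */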
1816
1817 /* Two helper routines that check for phase 1 and phase 2. These are used
1818 as callbacks for expand_stack_vars. */
1819
1820 static bool
1821 stack_protect_decl_phase_1 (size_t i)
1822 {
1823 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1824 }
1825
1826 static bool
1827 stack_protect_decl_phase_2 (size_t i)
1828 {
1829 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1830 }
1831
1832 /* A helper function that checks for the asan phase (with stack protector
1833 it is phase 3). This is used as a callback for expand_stack_vars.
1834 Returns true if any of the vars in the partition need to be protected. */
1835
1836 static bool
1837 asan_decl_phase_3 (size_t i)
1838 {
1839 while (i != EOC)
1840 {
1841 if (asan_protect_stack_decl (stack_vars[i].decl))
1842 return true;
1843 i = stack_vars[i].next;
1844 }
1845 return false;
1846 }
1847
1848 /* Ensure that variables in different stack protection phases conflict
1849 so that they are never coalesced into the same stack slot. */
1850
1851 static void
1852 add_stack_protection_conflicts (void)
1853 {
1854 size_t i, j, n = stack_vars_num;
1855 unsigned char *phase;
1856
1857 phase = XNEWVEC (unsigned char, n);
1858 for (i = 0; i < n; ++i)
1859 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1860
1861 for (i = 0; i < n; ++i)
1862 {
1863 unsigned char ph_i = phase[i];
1864 for (j = i + 1; j < n; ++j)
1865 if (ph_i != phase[j])
1866 add_stack_var_conflict (i, j);
1867 }
1868
1869 XDELETEVEC (phase);
1870 }
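/* The pairwise marking above is quadratic in the number of stack vars,
   but it only runs when stack protection is in effect; the conflicts keep
   partition_stack_vars from ever coalescing variables of different phases
   into one partition (and hence one stack slot).  */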
1871
1872 /* Create a decl for the guard at the top of the stack frame. */
1873
1874 static void
1875 create_stack_guard (void)
1876 {
1877 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1878 VAR_DECL, NULL, ptr_type_node);
1879 TREE_THIS_VOLATILE (guard) = 1;
1880 TREE_USED (guard) = 1;
1881 expand_one_stack_var (guard);
1882 crtl->stack_protect_guard = guard;
1883 }
1884
1885 /* Prepare for expanding variables. */
1886 static void
1887 init_vars_expansion (void)
1888 {
1889 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1890 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1891
1892 /* A map from decl to stack partition. */
1893 decl_to_stack_part = new hash_map<tree, size_t>;
1894
1895 /* Initialize local stack smashing state. */
1896 has_protected_decls = false;
1897 has_short_buffer = false;
1898 }
1899
1900 /* Free up stack variable graph data. */
1901 static void
1902 fini_vars_expansion (void)
1903 {
1904 bitmap_obstack_release (&stack_var_bitmap_obstack);
1905 if (stack_vars)
1906 XDELETEVEC (stack_vars);
1907 if (stack_vars_sorted)
1908 XDELETEVEC (stack_vars_sorted);
1909 stack_vars = NULL;
1910 stack_vars_sorted = NULL;
1911 stack_vars_alloc = stack_vars_num = 0;
1912 delete decl_to_stack_part;
1913 decl_to_stack_part = NULL;
1914 }
1915
1916 /* Make a fair guess for the size of the stack frame of the function
1917 in NODE. This doesn't have to be exact; the result is only used in
1918 the inline heuristics. So we don't want to run the full stack var
1919 packing algorithm (which is quadratic in the number of stack vars).
1920 Instead, we calculate the total size of all stack vars. This turns
1921 out to be a pretty fair estimate -- packing of stack vars doesn't
1922 happen very often. */
1923
1924 HOST_WIDE_INT
1925 estimated_stack_frame_size (struct cgraph_node *node)
1926 {
1927 HOST_WIDE_INT size = 0;
1928 size_t i;
1929 tree var;
1930 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1931
1932 push_cfun (fn);
1933
1934 init_vars_expansion ();
1935
1936 FOR_EACH_LOCAL_DECL (fn, i, var)
1937 if (auto_var_in_fn_p (var, fn->decl))
1938 size += expand_one_var (var, true, false);
1939
1940 if (stack_vars_num > 0)
1941 {
1942 /* Fake sorting the stack vars for account_stack_vars (). */
1943 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1944 for (i = 0; i < stack_vars_num; ++i)
1945 stack_vars_sorted[i] = i;
1946 size += account_stack_vars ();
1947 }
1948
1949 fini_vars_expansion ();
1950 pop_cfun ();
1951 return size;
1952 }
1953
1954 /* Helper routine to check if a record or union contains an array field. */
1955
1956 static int
1957 record_or_union_type_has_array_p (const_tree tree_type)
1958 {
1959 tree fields = TYPE_FIELDS (tree_type);
1960 tree f;
1961
1962 for (f = fields; f; f = DECL_CHAIN (f))
1963 if (TREE_CODE (f) == FIELD_DECL)
1964 {
1965 tree field_type = TREE_TYPE (f);
1966 if (RECORD_OR_UNION_TYPE_P (field_type)
1967 && record_or_union_type_has_array_p (field_type))
1968 return 1;
1969 if (TREE_CODE (field_type) == ARRAY_TYPE)
1970 return 1;
1971 }
1972 return 0;
1973 }
1974
1975 /* Check if the current function has local referenced variables that
1976 have their addresses taken, contain an array, or are arrays. */
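/* For example, under -fstack-protector-strong a local `char buf[16]', an
   `int x' whose address escapes, or a struct that contains an array all
   cause this predicate to return true.  */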
1977
1978 static bool
1979 stack_protect_decl_p ()
1980 {
1981 unsigned i;
1982 tree var;
1983
1984 FOR_EACH_LOCAL_DECL (cfun, i, var)
1985 if (!is_global_var (var))
1986 {
1987 tree var_type = TREE_TYPE (var);
1988 if (VAR_P (var)
1989 && (TREE_CODE (var_type) == ARRAY_TYPE
1990 || TREE_ADDRESSABLE (var)
1991 || (RECORD_OR_UNION_TYPE_P (var_type)
1992 && record_or_union_type_has_array_p (var_type))))
1993 return true;
1994 }
1995 return false;
1996 }
1997
1998 /* Check if the current function has calls that use a return slot. */
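/* For example, a call whose value is returned in memory (aggregate_value_p,
   e.g. a function returning a large struct) counts: the callee writes
   through a pointer into our frame, which -fstack-protector-strong treats
   much like an address-taken local.  */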
1999
2000 static bool
2001 stack_protect_return_slot_p ()
2002 {
2003 basic_block bb;
2004
2005 FOR_ALL_BB_FN (bb, cfun)
2006 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2007 !gsi_end_p (gsi); gsi_next (&gsi))
2008 {
2009 gimple *stmt = gsi_stmt (gsi);
2010 /* This assumes that calls to internal-only functions never
2011 use a return slot. */
2012 if (is_gimple_call (stmt)
2013 && !gimple_call_internal_p (stmt)
2014 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2015 gimple_call_fndecl (stmt)))
2016 return true;
2017 }
2018 return false;
2019 }
2020
2021 /* Expand all variables used in the function. */
2022
2023 static rtx_insn *
2024 expand_used_vars (void)
2025 {
2026 tree var, outer_block = DECL_INITIAL (current_function_decl);
2027 auto_vec<tree> maybe_local_decls;
2028 rtx_insn *var_end_seq = NULL;
2029 unsigned i;
2030 unsigned len;
2031 bool gen_stack_protect_signal = false;
2032
2033 /* Compute the phase of the stack frame for this function. */
2034 {
2035 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2036 int off = targetm.starting_frame_offset () % align;
2037 frame_phase = off ? align - off : 0;
2038 }
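/* frame_phase (a static defined earlier in this file) is the known
   misalignment of the frame base from PREFERRED_STACK_BOUNDARY;
   alloc_stack_frame_space takes it into account so that slot offsets are
   aligned relative to real stack addresses rather than to a nominal
   frame_offset of zero.  */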
2039
2040 /* Set TREE_USED on all variables in the local_decls. */
2041 FOR_EACH_LOCAL_DECL (cfun, i, var)
2042 TREE_USED (var) = 1;
2043 /* Clear TREE_USED on all variables associated with a block scope. */
2044 clear_tree_used (DECL_INITIAL (current_function_decl));
2045
2046 init_vars_expansion ();
2047
2048 if (targetm.use_pseudo_pic_reg ())
2049 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2050
2051 for (i = 0; i < SA.map->num_partitions; i++)
2052 {
2053 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2054 continue;
2055
2056 tree var = partition_to_var (SA.map, i);
2057
2058 gcc_assert (!virtual_operand_p (var));
2059
2060 expand_one_ssa_partition (var);
2061 }
2062
2063 if (flag_stack_protect == SPCT_FLAG_STRONG)
2064 gen_stack_protect_signal
2065 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2066
2067 /* At this point all variables on the local_decls with TREE_USED
2068 set are not associated with any block scope. Lay them out. */
2069
2070 len = vec_safe_length (cfun->local_decls);
2071 FOR_EACH_LOCAL_DECL (cfun, i, var)
2072 {
2073 bool expand_now = false;
2074
2075 /* Expanded above already. */
2076 if (is_gimple_reg (var))
2077 {
2078 TREE_USED (var) = 0;
2079 goto next;
2080 }
2081 /* We didn't set a block for static or extern because it's hard
2082 to tell the difference between a global variable (re)declared
2083 in a local scope, and one that's really declared there to
2084 begin with. And it doesn't really matter much, since we're
2085 not giving them stack space. Expand them now. */
2086 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2087 expand_now = true;
2088
2089 /* Expand variables not associated with any block now. Those created by
2090 the optimizers could be live anywhere in the function. Those that
2091 could possibly have been scoped originally and detached from their
2092 block will have their allocation deferred so we coalesce them with
2093 others when optimization is enabled. */
2094 else if (TREE_USED (var))
2095 expand_now = true;
2096
2097 /* Finally, mark all variables on the list as used. We'll use
2098 this in a moment when we expand those associated with scopes. */
2099 TREE_USED (var) = 1;
2100
2101 if (expand_now)
2102 expand_one_var (var, true, true);
2103
2104 next:
2105 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2106 {
2107 rtx rtl = DECL_RTL_IF_SET (var);
2108
2109 /* Keep artificial non-ignored vars in cfun->local_decls
2110 chain until instantiate_decls. */
2111 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2112 add_local_decl (cfun, var);
2113 else if (rtl == NULL_RTX)
2114 /* If rtl isn't set yet, which can happen e.g. with
2115 -fstack-protector, retry before returning from this
2116 function. */
2117 maybe_local_decls.safe_push (var);
2118 }
2119 }
2120
2121 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2122
2123 +-----------------+-----------------+
2124 | ...processed... | ...duplicates...|
2125 +-----------------+-----------------+
2126 ^
2127 +-- LEN points here.
2128
2129 We just want the duplicates, as those are the artificial
2130 non-ignored vars that we want to keep until instantiate_decls.
2131 Move them down and truncate the array. */
2132 if (!vec_safe_is_empty (cfun->local_decls))
2133 cfun->local_decls->block_remove (0, len);
2134
2135 /* At this point, all variables within the block tree with TREE_USED
2136 set are actually used by the optimized function. Lay them out. */
2137 expand_used_vars_for_block (outer_block, true);
2138
2139 if (stack_vars_num > 0)
2140 {
2141 add_scope_conflicts ();
2142
2143 /* If stack protection is enabled, we don't share space between
2144 vulnerable data and non-vulnerable data. */
2145 if (flag_stack_protect != 0
2146 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2147 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2148 && lookup_attribute ("stack_protect",
2149 DECL_ATTRIBUTES (current_function_decl)))))
2150 add_stack_protection_conflicts ();
2151
2152 /* Now that we have collected all stack variables, and have computed a
2153 minimal interference graph, attempt to save some stack space. */
2154 partition_stack_vars ();
2155 if (dump_file)
2156 dump_stack_var_partition ();
2157 }
2158
2159 switch (flag_stack_protect)
2160 {
2161 case SPCT_FLAG_ALL:
2162 create_stack_guard ();
2163 break;
2164
2165 case SPCT_FLAG_STRONG:
2166 if (gen_stack_protect_signal
2167 || cfun->calls_alloca || has_protected_decls
2168 || lookup_attribute ("stack_protect",
2169 DECL_ATTRIBUTES (current_function_decl)))
2170 create_stack_guard ();
2171 break;
2172
2173 case SPCT_FLAG_DEFAULT:
2174 if (cfun->calls_alloca || has_protected_decls
2175 || lookup_attribute ("stack_protect",
2176 DECL_ATTRIBUTES (current_function_decl)))
2177 create_stack_guard ();
2178 break;
2179
2180 case SPCT_FLAG_EXPLICIT:
2181 if (lookup_attribute ("stack_protect",
2182 DECL_ATTRIBUTES (current_function_decl)))
2183 create_stack_guard ();
2184 break;
2185 default:
2186 ;
2187 }
2188
2189 /* Assign rtl to each variable based on these partitions. */
2190 if (stack_vars_num > 0)
2191 {
2192 struct stack_vars_data data;
2193
2194 data.asan_base = NULL_RTX;
2195 data.asan_alignb = 0;
2196
2197 /* Reorder decls to be protected by iterating over the variables
2198 array multiple times, and allocating out of each phase in turn. */
2199 /* ??? We could probably integrate this into the qsort we did
2200 earlier, such that we naturally see these variables first,
2201 and thus naturally allocate things in the right order. */
2202 if (has_protected_decls)
2203 {
2204 /* Phase 1 contains only character arrays. */
2205 expand_stack_vars (stack_protect_decl_phase_1, &data);
2206
2207 /* Phase 2 contains other kinds of arrays. */
2208 if (flag_stack_protect == SPCT_FLAG_ALL
2209 || flag_stack_protect == SPCT_FLAG_STRONG
2210 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2211 && lookup_attribute ("stack_protect",
2212 DECL_ATTRIBUTES (current_function_decl))))
2213 expand_stack_vars (stack_protect_decl_phase_2, &data);
2214 }
2215
2216 if (asan_sanitize_stack_p ())
2217 /* Phase 3, any partitions that need asan protection
2218 in addition to phase 1 and 2. */
2219 expand_stack_vars (asan_decl_phase_3, &data);
2220
2221 if (!data.asan_vec.is_empty ())
2222 {
2223 HOST_WIDE_INT prev_offset = frame_offset;
2224 HOST_WIDE_INT offset, sz, redzonesz;
2225 redzonesz = ASAN_RED_ZONE_SIZE;
2226 sz = data.asan_vec[0] - prev_offset;
2227 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2228 && data.asan_alignb <= 4096
2229 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2230 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2231 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
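/* In other words, round sz + ASAN_RED_ZONE_SIZE up to a multiple of
   data.asan_alignb and take everything beyond sz as the red zone, so that
   sz + redzonesz stays a multiple of the (large) alignment.  */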
2232 offset
2233 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
2234 data.asan_vec.safe_push (prev_offset);
2235 data.asan_vec.safe_push (offset);
2236 /* Leave space for alignment if STRICT_ALIGNMENT. */
2237 if (STRICT_ALIGNMENT)
2238 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2239 << ASAN_SHADOW_SHIFT)
2240 / BITS_PER_UNIT, 1);
2241
2242 var_end_seq
2243 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2244 data.asan_base,
2245 data.asan_alignb,
2246 data.asan_vec.address (),
2247 data.asan_decl_vec.address (),
2248 data.asan_vec.length ());
2249 }
2250
2251 expand_stack_vars (NULL, &data);
2252 }
2253
2254 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2255 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2256 virtual_stack_vars_rtx,
2257 var_end_seq);
2258
2259 fini_vars_expansion ();
2260
2261 /* If there were any artificial non-ignored vars without rtl
2262 found earlier, see if deferred stack allocation hasn't assigned
2263 rtl to them. */
2264 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2265 {
2266 rtx rtl = DECL_RTL_IF_SET (var);
2267
2268 /* Keep artificial non-ignored vars in cfun->local_decls
2269 chain until instantiate_decls. */
2270 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2271 add_local_decl (cfun, var);
2272 }
2273
2274 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2275 if (STACK_ALIGNMENT_NEEDED)
2276 {
2277 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2278 if (!FRAME_GROWS_DOWNWARD)
2279 frame_offset += align - 1;
2280 frame_offset &= -align;
2281 }
2282
2283 return var_end_seq;
2284 }
2285
2286
2287 /* If we need to produce a detailed dump, print the tree representation
2288 for STMT to the dump file. SINCE is the last RTX after which the RTL
2289 generated for STMT should have been appended. */
2290
2291 static void
2292 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2293 {
2294 if (dump_file && (dump_flags & TDF_DETAILS))
2295 {
2296 fprintf (dump_file, "\n;; ");
2297 print_gimple_stmt (dump_file, stmt, 0,
2298 TDF_SLIM | (dump_flags & TDF_LINENO));
2299 fprintf (dump_file, "\n");
2300
2301 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2302 }
2303 }
2304
2305 /* Maps the blocks that do not contain tree labels to rtx labels. */
2306
2307 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2308
2309 /* Returns the label_rtx expression for a label starting basic block BB. */
2310
2311 static rtx_code_label *
2312 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2313 {
2314 gimple_stmt_iterator gsi;
2315 tree lab;
2316
2317 if (bb->flags & BB_RTL)
2318 return block_label (bb);
2319
2320 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2321 if (elt)
2322 return *elt;
2323
2324 /* Find the tree label if it is present. */
2325
2326 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2327 {
2328 glabel *lab_stmt;
2329
2330 if (is_gimple_debug (gsi_stmt (gsi)))
2331 continue;
2332
2333 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2334 if (!lab_stmt)
2335 break;
2336
2337 lab = gimple_label_label (lab_stmt);
2338 if (DECL_NONLOCAL (lab))
2339 break;
2340
2341 return jump_target_rtx (lab);
2342 }
2343
2344 rtx_code_label *l = gen_label_rtx ();
2345 lab_rtx_for_bb->put (bb, l);
2346 return l;
2347 }
2348
2349
2350 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2351 of a basic block where we just expanded the conditional at the end,
2352 possibly clean up the CFG and instruction sequence. LAST is the
2353 last instruction before the just emitted jump sequence. */
2354
2355 static void
2356 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2357 {
2358 /* Special case: when jumpif decides that the condition is
2359 trivial it emits an unconditional jump (and the necessary
2360 barrier). But we still have two edges, the fallthru one is
2361 wrong. purge_dead_edges would clean this up later. Unfortunately
2362 we have to insert insns (and split edges) before
2363 find_many_sub_basic_blocks and hence before purge_dead_edges.
2364 But splitting edges might create new blocks which depend on the
2365 fact that if there are two edges there's no barrier. So the
2366 barrier would get lost and verify_flow_info would ICE. Instead
2367 of auditing all edge splitters to care for the barrier (which
2368 normally isn't there in a cleaned CFG), fix it here. */
2369 if (BARRIER_P (get_last_insn ()))
2370 {
2371 rtx_insn *insn;
2372 remove_edge (e);
2373 /* Now we have a single successor block; if we have insns to
2374 insert on the remaining edge, we will potentially insert
2375 them at the end of this block (if the dest block isn't feasible)
2376 in order to avoid splitting the edge. This insertion will take
2377 place in front of the last jump. But we might have emitted
2378 multiple jumps (conditional and one unconditional) to the
2379 same destination. Inserting in front of the last one then
2380 is a problem. See PR 40021. We fix this by deleting all
2381 jumps except the last unconditional one. */
2382 insn = PREV_INSN (get_last_insn ());
2383 /* Make sure we have an unconditional jump. Otherwise we're
2384 confused. */
2385 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2386 for (insn = PREV_INSN (insn); insn != last;)
2387 {
2388 insn = PREV_INSN (insn);
2389 if (JUMP_P (NEXT_INSN (insn)))
2390 {
2391 if (!any_condjump_p (NEXT_INSN (insn)))
2392 {
2393 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2394 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2395 }
2396 delete_insn (NEXT_INSN (insn));
2397 }
2398 }
2399 }
2400 }
2401
2402 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2403 Returns a new basic block if we've terminated the current basic
2404 block and created a new one. */
2405
2406 static basic_block
2407 expand_gimple_cond (basic_block bb, gcond *stmt)
2408 {
2409 basic_block new_bb, dest;
2410 edge true_edge;
2411 edge false_edge;
2412 rtx_insn *last2, *last;
2413 enum tree_code code;
2414 tree op0, op1;
2415
2416 code = gimple_cond_code (stmt);
2417 op0 = gimple_cond_lhs (stmt);
2418 op1 = gimple_cond_rhs (stmt);
2419 /* We're sometimes presented with such code:
2420 D.123_1 = x < y;
2421 if (D.123_1 != 0)
2422 ...
2423 This would expand to two comparisons which then later might
2424 be cleaned up by combine. But some pattern matchers like if-conversion
2425 work better when there's only one compare, so make up for this
2426 here as a special exception if TER would have made the same change. */
2427 if (SA.values
2428 && TREE_CODE (op0) == SSA_NAME
2429 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2430 && TREE_CODE (op1) == INTEGER_CST
2431 && ((gimple_cond_code (stmt) == NE_EXPR
2432 && integer_zerop (op1))
2433 || (gimple_cond_code (stmt) == EQ_EXPR
2434 && integer_onep (op1)))
2435 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2436 {
2437 gimple *second = SSA_NAME_DEF_STMT (op0);
2438 if (gimple_code (second) == GIMPLE_ASSIGN)
2439 {
2440 enum tree_code code2 = gimple_assign_rhs_code (second);
2441 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2442 {
2443 code = code2;
2444 op0 = gimple_assign_rhs1 (second);
2445 op1 = gimple_assign_rhs2 (second);
2446 }
2447 /* If jumps are cheap and the target does not support conditional
2448 compare, turn some more codes into jumpy sequences. */
2449 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2450 && targetm.gen_ccmp_first == NULL)
2451 {
2452 if ((code2 == BIT_AND_EXPR
2453 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2454 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2455 || code2 == TRUTH_AND_EXPR)
2456 {
2457 code = TRUTH_ANDIF_EXPR;
2458 op0 = gimple_assign_rhs1 (second);
2459 op1 = gimple_assign_rhs2 (second);
2460 }
2461 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2462 {
2463 code = TRUTH_ORIF_EXPR;
2464 op0 = gimple_assign_rhs1 (second);
2465 op1 = gimple_assign_rhs2 (second);
2466 }
2467 }
2468 }
2469 }
2470
2471 last2 = last = get_last_insn ();
2472
2473 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2474 set_curr_insn_location (gimple_location (stmt));
2475
2476 /* These flags have no purpose in RTL land. */
2477 true_edge->flags &= ~EDGE_TRUE_VALUE;
2478 false_edge->flags &= ~EDGE_FALSE_VALUE;
2479
2480 /* We can either have a pure conditional jump with one fallthru edge or
2481 a two-way jump that needs to be decomposed into two basic blocks. */
2482 if (false_edge->dest == bb->next_bb)
2483 {
2484 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2485 true_edge->probability);
2486 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2487 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2488 set_curr_insn_location (true_edge->goto_locus);
2489 false_edge->flags |= EDGE_FALLTHRU;
2490 maybe_cleanup_end_of_block (false_edge, last);
2491 return NULL;
2492 }
2493 if (true_edge->dest == bb->next_bb)
2494 {
2495 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2496 false_edge->probability);
2497 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2498 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2499 set_curr_insn_location (false_edge->goto_locus);
2500 true_edge->flags |= EDGE_FALLTHRU;
2501 maybe_cleanup_end_of_block (true_edge, last);
2502 return NULL;
2503 }
2504
2505 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2506 true_edge->probability);
2507 last = get_last_insn ();
2508 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2509 set_curr_insn_location (false_edge->goto_locus);
2510 emit_jump (label_rtx_for_bb (false_edge->dest));
2511
2512 BB_END (bb) = last;
2513 if (BARRIER_P (BB_END (bb)))
2514 BB_END (bb) = PREV_INSN (BB_END (bb));
2515 update_bb_for_insn (bb);
2516
2517 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2518 dest = false_edge->dest;
2519 redirect_edge_succ (false_edge, new_bb);
2520 false_edge->flags |= EDGE_FALLTHRU;
2521 new_bb->count = false_edge->count ();
2522 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2523 add_bb_to_loop (new_bb, loop);
2524 if (loop->latch == bb
2525 && loop->header == dest)
2526 loop->latch = new_bb;
2527 make_single_succ_edge (new_bb, dest, 0);
2528 if (BARRIER_P (BB_END (new_bb)))
2529 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2530 update_bb_for_insn (new_bb);
2531
2532 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2533
2534 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2535 {
2536 set_curr_insn_location (true_edge->goto_locus);
2537 true_edge->goto_locus = curr_insn_location ();
2538 }
2539
2540 return new_bb;
2541 }
2542
2543 /* Mark all calls that can have a transaction restart. */
2544
2545 static void
2546 mark_transaction_restart_calls (gimple *stmt)
2547 {
2548 struct tm_restart_node dummy;
2549 tm_restart_node **slot;
2550
2551 if (!cfun->gimple_df->tm_restart)
2552 return;
2553
2554 dummy.stmt = stmt;
2555 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2556 if (slot)
2557 {
2558 struct tm_restart_node *n = *slot;
2559 tree list = n->label_or_list;
2560 rtx_insn *insn;
2561
2562 for (insn = next_real_insn (get_last_insn ());
2563 !CALL_P (insn);
2564 insn = next_real_insn (insn))
2565 continue;
2566
2567 if (TREE_CODE (list) == LABEL_DECL)
2568 add_reg_note (insn, REG_TM, label_rtx (list));
2569 else
2570 for (; list ; list = TREE_CHAIN (list))
2571 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2572 }
2573 }
2574
2575 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2576 statement STMT. */
2577
2578 static void
2579 expand_call_stmt (gcall *stmt)
2580 {
2581 tree exp, decl, lhs;
2582 bool builtin_p;
2583 size_t i;
2584
2585 if (gimple_call_internal_p (stmt))
2586 {
2587 expand_internal_call (stmt);
2588 return;
2589 }
2590
2591 /* If this is a call to a built-in function and it has no effect other
2592 than setting the lhs, try to implement it using an internal function
2593 instead. */
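/* For example (illustrative, target-dependent): a call like
   x = __builtin_sqrt (y) may be reported by replacement_internal_fn as
   IFN_SQRT when the target has a suitable optab, in which case it is
   expanded as an internal function rather than as a library call.  */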
2594 decl = gimple_call_fndecl (stmt);
2595 if (gimple_call_lhs (stmt)
2596 && !gimple_has_side_effects (stmt)
2597 && (optimize || (decl && called_as_built_in (decl))))
2598 {
2599 internal_fn ifn = replacement_internal_fn (stmt);
2600 if (ifn != IFN_LAST)
2601 {
2602 expand_internal_call (ifn, stmt);
2603 return;
2604 }
2605 }
2606
2607 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2608
2609 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2610 builtin_p = decl && DECL_BUILT_IN (decl);
2611
2612 /* If this is not a builtin function, the function type through which the
2613 call is made may be different from the type of the function. */
2614 if (!builtin_p)
2615 CALL_EXPR_FN (exp)
2616 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2617 CALL_EXPR_FN (exp));
2618
2619 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2620 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2621
2622 for (i = 0; i < gimple_call_num_args (stmt); i++)
2623 {
2624 tree arg = gimple_call_arg (stmt, i);
2625 gimple *def;
2626 /* TER addresses into arguments of builtin functions so we have a
2627 chance to infer more correct alignment information. See PR39954. */
2628 if (builtin_p
2629 && TREE_CODE (arg) == SSA_NAME
2630 && (def = get_gimple_for_ssa_name (arg))
2631 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2632 arg = gimple_assign_rhs1 (def);
2633 CALL_EXPR_ARG (exp, i) = arg;
2634 }
2635
2636 if (gimple_has_side_effects (stmt))
2637 TREE_SIDE_EFFECTS (exp) = 1;
2638
2639 if (gimple_call_nothrow_p (stmt))
2640 TREE_NOTHROW (exp) = 1;
2641
2642 if (gimple_no_warning_p (stmt))
2643 TREE_NO_WARNING (exp) = 1;
2644
2645 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2646 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2647 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2648 if (decl
2649 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2650 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2651 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2652 else
2653 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2654 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2655 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2656 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2657 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2658
2659 /* Ensure RTL is created for debug args. */
2660 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2661 {
2662 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2663 unsigned int ix;
2664 tree dtemp;
2665
2666 if (debug_args)
2667 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2668 {
2669 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2670 expand_debug_expr (dtemp);
2671 }
2672 }
2673
2674 rtx_insn *before_call = get_last_insn ();
2675 lhs = gimple_call_lhs (stmt);
2676 if (lhs)
2677 expand_assignment (lhs, exp, false);
2678 else
2679 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2680
2681 /* If the gimple call is an indirect call and has the 'nocf_check'
2682 attribute, find the generated CALL insn and mark it as not needing
2683 control-flow verification. */
2684 if (gimple_call_nocf_check_p (stmt)
2685 && !gimple_call_fndecl (stmt))
2686 {
2687 rtx_insn *last = get_last_insn ();
2688 while (!CALL_P (last)
2689 && last != before_call)
2690 last = PREV_INSN (last);
2691
2692 if (last != before_call)
2693 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2694 }
2695
2696 mark_transaction_restart_calls (stmt);
2697 }
2698
2699
2700 /* Generate RTL for an asm statement (explicit assembler code).
2701 STRING is a STRING_CST node containing the assembler code text,
2702 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2703 insn is volatile; don't optimize it. */
2704
2705 static void
2706 expand_asm_loc (tree string, int vol, location_t locus)
2707 {
2708 rtx body;
2709
2710 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2711 ggc_strdup (TREE_STRING_POINTER (string)),
2712 locus);
2713
2714 MEM_VOLATILE_P (body) = vol;
2715
2716 /* Non-empty basic ASM implicitly clobbers memory. */
2717 if (TREE_STRING_LENGTH (string) != 0)
2718 {
2719 rtx asm_op, clob;
2720 unsigned i, nclobbers;
2721 auto_vec<rtx> input_rvec, output_rvec;
2722 auto_vec<const char *> constraints;
2723 auto_vec<rtx> clobber_rvec;
2724 HARD_REG_SET clobbered_regs;
2725 CLEAR_HARD_REG_SET (clobbered_regs);
2726
2727 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2728 clobber_rvec.safe_push (clob);
2729
2730 if (targetm.md_asm_adjust)
2731 targetm.md_asm_adjust (output_rvec, input_rvec,
2732 constraints, clobber_rvec,
2733 clobbered_regs);
2734
2735 asm_op = body;
2736 nclobbers = clobber_rvec.length ();
2737 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2738
2739 XVECEXP (body, 0, 0) = asm_op;
2740 for (i = 0; i < nclobbers; i++)
2741 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2742 }
2743
2744 emit_insn (body);
2745 }
2746
2747 /* Return the number of times character C occurs in string S. */
2748 static int
2749 n_occurrences (int c, const char *s)
2750 {
2751 int n = 0;
2752 while (*s)
2753 n += (*s++ == c);
2754 return n;
2755 }
2756
2757 /* A subroutine of expand_asm_operands. Check that all operands have
2758 the same number of alternatives. Return true if so. */
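/* For example, the constraint strings "=r,m" and "r,r" each contain one
   comma and so describe two alternatives; mixing either of them with a
   single-alternative "r" constraint is diagnosed below.  */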
2759
2760 static bool
2761 check_operand_nalternatives (const vec<const char *> &constraints)
2762 {
2763 unsigned len = constraints.length();
2764 if (len > 0)
2765 {
2766 int nalternatives = n_occurrences (',', constraints[0]);
2767
2768 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2769 {
2770 error ("too many alternatives in %<asm%>");
2771 return false;
2772 }
2773
2774 for (unsigned i = 1; i < len; ++i)
2775 if (n_occurrences (',', constraints[i]) != nalternatives)
2776 {
2777 error ("operand constraints for %<asm%> differ "
2778 "in number of alternatives");
2779 return false;
2780 }
2781 }
2782 return true;
2783 }
2784
2785 /* Check for overlap between registers marked in CLOBBERED_REGS and
2786 anything inappropriate in T. Emit an error and return true if a
2787 conflict is found, false if everything is OK. */
2788
2789 static bool
2790 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2791 {
2792 /* Conflicts between asm-declared register variables and the clobber
2793 list are not allowed. */
2794 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2795
2796 if (overlap)
2797 {
2798 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2799 DECL_NAME (overlap));
2800
2801 /* Reset registerness to stop multiple errors being emitted for a single
2802 variable. */
2803 DECL_REGISTER (overlap) = 0;
2804 return true;
2805 }
2806
2807 return false;
2808 }
2809
2810 /* Generate RTL for an asm statement with arguments.
2811 STRING is the instruction template.
2812 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2813 Each output or input has an expression in the TREE_VALUE and
2814 a tree list in TREE_PURPOSE which in turn contains a constraint
2815 name in TREE_VALUE (or NULL_TREE) and a constraint string
2816 in TREE_PURPOSE.
2817 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2818 that is clobbered by this insn.
2819
2820 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2821 should be the fallthru basic block of the asm goto.
2822
2823 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2824 Some elements of OUTPUTS may be replaced with trees representing temporary
2825 values. The caller should copy those temporary values to the originally
2826 specified lvalues.
2827
2828 VOL nonzero means the insn is volatile; don't optimize it. */
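/* An illustrative (target-agnostic) sketch of how an extended asm maps
   onto these lists:

     asm volatile ("add %0, %1, %2"
                   : "=r" (res)
                   : "r" (a), "r" (b)
                   : "cc");

   gives one output (res, constraint "=r"), two inputs (a and b, both "r")
   and one clobber ("cc"); an asm goto would additionally populate LABELS.  */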
2829
2830 static void
2831 expand_asm_stmt (gasm *stmt)
2832 {
2833 class save_input_location
2834 {
2835 location_t old;
2836
2837 public:
2838 explicit save_input_location(location_t where)
2839 {
2840 old = input_location;
2841 input_location = where;
2842 }
2843
2844 ~save_input_location()
2845 {
2846 input_location = old;
2847 }
2848 };
2849
2850 location_t locus = gimple_location (stmt);
2851
2852 if (gimple_asm_input_p (stmt))
2853 {
2854 const char *s = gimple_asm_string (stmt);
2855 tree string = build_string (strlen (s), s);
2856 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2857 return;
2858 }
2859
2860 /* There are some legacy diagnostics in here, and this also avoids a
2861 sixth parameter to targetm.md_asm_adjust. */
2862 save_input_location s_i_l(locus);
2863
2864 unsigned noutputs = gimple_asm_noutputs (stmt);
2865 unsigned ninputs = gimple_asm_ninputs (stmt);
2866 unsigned nlabels = gimple_asm_nlabels (stmt);
2867 unsigned i;
2868
2869 /* ??? Diagnose during gimplification? */
2870 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2871 {
2872 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2873 return;
2874 }
2875
2876 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2877 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2878 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2879
2880 /* Copy the gimple vectors into new vectors that we can manipulate. */
2881
2882 output_tvec.safe_grow (noutputs);
2883 input_tvec.safe_grow (ninputs);
2884 constraints.safe_grow (noutputs + ninputs);
2885
2886 for (i = 0; i < noutputs; ++i)
2887 {
2888 tree t = gimple_asm_output_op (stmt, i);
2889 output_tvec[i] = TREE_VALUE (t);
2890 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2891 }
2892 for (i = 0; i < ninputs; i++)
2893 {
2894 tree t = gimple_asm_input_op (stmt, i);
2895 input_tvec[i] = TREE_VALUE (t);
2896 constraints[i + noutputs]
2897 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2898 }
2899
2900 /* ??? Diagnose during gimplification? */
2901 if (! check_operand_nalternatives (constraints))
2902 return;
2903
2904 /* Count the number of meaningful clobbered registers, ignoring what
2905 we would ignore later. */
2906 auto_vec<rtx> clobber_rvec;
2907 HARD_REG_SET clobbered_regs;
2908 CLEAR_HARD_REG_SET (clobbered_regs);
2909
2910 if (unsigned n = gimple_asm_nclobbers (stmt))
2911 {
2912 clobber_rvec.reserve (n);
2913 for (i = 0; i < n; i++)
2914 {
2915 tree t = gimple_asm_clobber_op (stmt, i);
2916 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2917 int nregs, j;
2918
2919 j = decode_reg_name_and_count (regname, &nregs);
2920 if (j < 0)
2921 {
2922 if (j == -2)
2923 {
2924 /* ??? Diagnose during gimplification? */
2925 error ("unknown register name %qs in %<asm%>", regname);
2926 }
2927 else if (j == -4)
2928 {
2929 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2930 clobber_rvec.safe_push (x);
2931 }
2932 else
2933 {
2934 /* Otherwise we should have -1 == empty string
2935 or -3 == cc, which is not a register. */
2936 gcc_assert (j == -1 || j == -3);
2937 }
2938 }
2939 else
2940 for (int reg = j; reg < j + nregs; reg++)
2941 {
2942 /* Clobbering the PIC register is an error. */
2943 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2944 {
2945 /* ??? Diagnose during gimplification? */
2946 error ("PIC register clobbered by %qs in %<asm%>",
2947 regname);
2948 return;
2949 }
2950
2951 SET_HARD_REG_BIT (clobbered_regs, reg);
2952 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2953 clobber_rvec.safe_push (x);
2954 }
2955 }
2956 }
2957 unsigned nclobbers = clobber_rvec.length();
2958
2959 /* First pass over inputs and outputs checks validity and calls
2960 mark_addressable if needed. */
2961 /* ??? Diagnose during gimplification? */
2962
2963 for (i = 0; i < noutputs; ++i)
2964 {
2965 tree val = output_tvec[i];
2966 tree type = TREE_TYPE (val);
2967 const char *constraint;
2968 bool is_inout;
2969 bool allows_reg;
2970 bool allows_mem;
2971
2972 /* Try to parse the output constraint. If that fails, there's
2973 no point in going further. */
2974 constraint = constraints[i];
2975 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2976 &allows_mem, &allows_reg, &is_inout))
2977 return;
2978
2979 if (! allows_reg
2980 && (allows_mem
2981 || is_inout
2982 || (DECL_P (val)
2983 && REG_P (DECL_RTL (val))
2984 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2985 mark_addressable (val);
2986 }
2987
2988 for (i = 0; i < ninputs; ++i)
2989 {
2990 bool allows_reg, allows_mem;
2991 const char *constraint;
2992
2993 constraint = constraints[i + noutputs];
2994 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2995 constraints.address (),
2996 &allows_mem, &allows_reg))
2997 return;
2998
2999 if (! allows_reg && allows_mem)
3000 mark_addressable (input_tvec[i]);
3001 }
3002
3003 /* Second pass evaluates arguments. */
3004
3005 /* Make sure stack is consistent for asm goto. */
3006 if (nlabels > 0)
3007 do_pending_stack_adjust ();
3008 int old_generating_concat_p = generating_concat_p;
3009
3010 /* Vector of RTX's of evaluated output operands. */
3011 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3012 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3013 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3014
3015 output_rvec.safe_grow (noutputs);
3016
3017 for (i = 0; i < noutputs; ++i)
3018 {
3019 tree val = output_tvec[i];
3020 tree type = TREE_TYPE (val);
3021 bool is_inout, allows_reg, allows_mem, ok;
3022 rtx op;
3023
3024 ok = parse_output_constraint (&constraints[i], i, ninputs,
3025 noutputs, &allows_mem, &allows_reg,
3026 &is_inout);
3027 gcc_assert (ok);
3028
3029 /* If an output operand is not a decl or indirect ref and our constraint
3030 allows a register, make a temporary to act as an intermediate.
3031 Make the asm insn write into that, then we will copy it to
3032 the real output operand. Likewise for promoted variables. */
3033
3034 generating_concat_p = 0;
3035
3036 if ((TREE_CODE (val) == INDIRECT_REF
3037 && allows_mem)
3038 || (DECL_P (val)
3039 && (allows_mem || REG_P (DECL_RTL (val)))
3040 && ! (REG_P (DECL_RTL (val))
3041 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3042 || ! allows_reg
3043 || is_inout)
3044 {
3045 op = expand_expr (val, NULL_RTX, VOIDmode,
3046 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3047 if (MEM_P (op))
3048 op = validize_mem (op);
3049
3050 if (! allows_reg && !MEM_P (op))
3051 error ("output number %d not directly addressable", i);
3052 if ((! allows_mem && MEM_P (op))
3053 || GET_CODE (op) == CONCAT)
3054 {
3055 rtx old_op = op;
3056 op = gen_reg_rtx (GET_MODE (op));
3057
3058 generating_concat_p = old_generating_concat_p;
3059
3060 if (is_inout)
3061 emit_move_insn (op, old_op);
3062
3063 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3064 emit_move_insn (old_op, op);
3065 after_rtl_seq = get_insns ();
3066 after_rtl_end = get_last_insn ();
3067 end_sequence ();
3068 }
3069 }
3070 else
3071 {
3072 op = assign_temp (type, 0, 1);
3073 op = validize_mem (op);
3074 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3075 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3076
3077 generating_concat_p = old_generating_concat_p;
3078
3079 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3080 expand_assignment (val, make_tree (type, op), false);
3081 after_rtl_seq = get_insns ();
3082 after_rtl_end = get_last_insn ();
3083 end_sequence ();
3084 }
3085 output_rvec[i] = op;
3086
3087 if (is_inout)
3088 inout_opnum.safe_push (i);
3089 }
3090
3091 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3092 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3093
3094 input_rvec.safe_grow (ninputs);
3095 input_mode.safe_grow (ninputs);
3096
3097 generating_concat_p = 0;
3098
3099 for (i = 0; i < ninputs; ++i)
3100 {
3101 tree val = input_tvec[i];
3102 tree type = TREE_TYPE (val);
3103 bool allows_reg, allows_mem, ok;
3104 const char *constraint;
3105 rtx op;
3106
3107 constraint = constraints[i + noutputs];
3108 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3109 constraints.address (),
3110 &allows_mem, &allows_reg);
3111 gcc_assert (ok);
3112
3113 /* EXPAND_INITIALIZER will not generate code for valid initializer
3114 constants, but will still generate code for other types of operand.
3115 This is the behavior we want for constant constraints. */
3116 op = expand_expr (val, NULL_RTX, VOIDmode,
3117 allows_reg ? EXPAND_NORMAL
3118 : allows_mem ? EXPAND_MEMORY
3119 : EXPAND_INITIALIZER);
3120
3121 /* Never pass a CONCAT to an ASM. */
3122 if (GET_CODE (op) == CONCAT)
3123 op = force_reg (GET_MODE (op), op);
3124 else if (MEM_P (op))
3125 op = validize_mem (op);
3126
3127 if (asm_operand_ok (op, constraint, NULL) <= 0)
3128 {
3129 if (allows_reg && TYPE_MODE (type) != BLKmode)
3130 op = force_reg (TYPE_MODE (type), op);
3131 else if (!allows_mem)
3132 warning (0, "asm operand %d probably doesn%'t match constraints",
3133 i + noutputs);
3134 else if (MEM_P (op))
3135 {
3136 /* We won't recognize either volatile memory or memory
3137 with a queued address as a valid memory_operand
3138 at this point. Ignore it: clearly this *is* a memory. */
3139 }
3140 else
3141 gcc_unreachable ();
3142 }
3143 input_rvec[i] = op;
3144 input_mode[i] = TYPE_MODE (type);
3145 }
3146
3147 /* For in-out operands, copy output rtx to input rtx. */
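/* For each output that was marked in-out (e.g. a "+r" operand), the loop
   below also pushes the output's rtx as an extra input whose constraint is
   the matching digit ("0", "1", ...) referring back to that output.  */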
3148 unsigned ninout = inout_opnum.length();
3149 for (i = 0; i < ninout; i++)
3150 {
3151 int j = inout_opnum[i];
3152 rtx o = output_rvec[j];
3153
3154 input_rvec.safe_push (o);
3155 input_mode.safe_push (GET_MODE (o));
3156
3157 char buffer[16];
3158 sprintf (buffer, "%d", j);
3159 constraints.safe_push (ggc_strdup (buffer));
3160 }
3161 ninputs += ninout;
3162
3163 /* Sometimes we wish to automatically clobber registers across an asm.
3164 Case in point is when the i386 backend moved from cc0 to a hard reg --
3165 maintaining source-level compatibility means automatically clobbering
3166 the flags register. */
3167 rtx_insn *after_md_seq = NULL;
3168 if (targetm.md_asm_adjust)
3169 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3170 constraints, clobber_rvec,
3171 clobbered_regs);
3172
3173 /* Do not allow the hook to change the output and input count,
3174 lest it mess up the operand numbering. */
3175 gcc_assert (output_rvec.length() == noutputs);
3176 gcc_assert (input_rvec.length() == ninputs);
3177 gcc_assert (constraints.length() == noutputs + ninputs);
3178
3179 /* But it certainly can adjust the clobbers. */
3180 nclobbers = clobber_rvec.length();
3181
3182 /* Third pass checks for easy conflicts. */
3183 /* ??? Why are we doing this on trees instead of rtx? */
3184
3185 bool clobber_conflict_found = 0;
3186 for (i = 0; i < noutputs; ++i)
3187 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3188 clobber_conflict_found = 1;
3189 for (i = 0; i < ninputs - ninout; ++i)
3190 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3191 clobber_conflict_found = 1;
3192
3193 /* Make vectors for the expression-rtx, constraint strings,
3194 and named operands. */
3195
3196 rtvec argvec = rtvec_alloc (ninputs);
3197 rtvec constraintvec = rtvec_alloc (ninputs);
3198 rtvec labelvec = rtvec_alloc (nlabels);
3199
3200 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3201 : GET_MODE (output_rvec[0])),
3202 ggc_strdup (gimple_asm_string (stmt)),
3203 "", 0, argvec, constraintvec,
3204 labelvec, locus);
3205 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3206
3207 for (i = 0; i < ninputs; ++i)
3208 {
3209 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3210 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3211 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3212 constraints[i + noutputs],
3213 locus);
3214 }
3215
3216 /* Copy labels to the vector. */
3217 rtx_code_label *fallthru_label = NULL;
3218 if (nlabels > 0)
3219 {
3220 basic_block fallthru_bb = NULL;
3221 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3222 if (fallthru)
3223 fallthru_bb = fallthru->dest;
3224
3225 for (i = 0; i < nlabels; ++i)
3226 {
3227 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3228 rtx_insn *r;
3229 /* If asm goto has any labels in the fallthru basic block, use
3230 a label that we emit immediately after the asm goto. Expansion
3231 may insert further instructions into the same basic block after
3232 asm goto and if we don't do this, insertion of instructions on
3233 the fallthru edge might misbehave. See PR58670. */
3234 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3235 {
3236 if (fallthru_label == NULL_RTX)
3237 fallthru_label = gen_label_rtx ();
3238 r = fallthru_label;
3239 }
3240 else
3241 r = label_rtx (label);
3242 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3243 }
3244 }
3245
3246 /* Now, for each output, construct an rtx
3247 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3248 ARGVEC CONSTRAINTS OPNAMES))
3249 If there is more than one, put them inside a PARALLEL. */
3250
3251 if (nlabels > 0 && nclobbers == 0)
3252 {
3253 gcc_assert (noutputs == 0);
3254 emit_jump_insn (body);
3255 }
3256 else if (noutputs == 0 && nclobbers == 0)
3257 {
3258 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3259 emit_insn (body);
3260 }
3261 else if (noutputs == 1 && nclobbers == 0)
3262 {
3263 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3264 emit_insn (gen_rtx_SET (output_rvec[0], body));
3265 }
3266 else
3267 {
3268 rtx obody = body;
3269 int num = noutputs;
3270
3271 if (num == 0)
3272 num = 1;
3273
3274 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3275
3276 /* For each output operand, store a SET. */
3277 for (i = 0; i < noutputs; ++i)
3278 {
3279 rtx src, o = output_rvec[i];
3280 if (i == 0)
3281 {
3282 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3283 src = obody;
3284 }
3285 else
3286 {
3287 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3288 ASM_OPERANDS_TEMPLATE (obody),
3289 constraints[i], i, argvec,
3290 constraintvec, labelvec, locus);
3291 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3292 }
3293 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3294 }
3295
3296 /* If there are no outputs (but there are some clobbers)
3297 store the bare ASM_OPERANDS into the PARALLEL. */
3298 if (i == 0)
3299 XVECEXP (body, 0, i++) = obody;
3300
3301 /* Store (clobber REG) for each clobbered register specified. */
3302 for (unsigned j = 0; j < nclobbers; ++j)
3303 {
3304 rtx clobbered_reg = clobber_rvec[j];
3305
3306 /* Do a sanity check for any overlap between the clobbers and the
3307 inputs or outputs that hasn't been handled. Such overlap
3308 should have been detected and reported above. */
3309 if (!clobber_conflict_found && REG_P (clobbered_reg))
3310 {
3311 /* We test the old body (obody) contents to avoid
3312 tripping over the under-construction body. */
3313 for (unsigned k = 0; k < noutputs; ++k)
3314 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3315 internal_error ("asm clobber conflict with output operand");
3316
3317 for (unsigned k = 0; k < ninputs - ninout; ++k)
3318 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3319 internal_error ("asm clobber conflict with input operand");
3320 }
3321
3322 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3323 }
3324
3325 if (nlabels > 0)
3326 emit_jump_insn (body);
3327 else
3328 emit_insn (body);
3329 }
3330
3331 generating_concat_p = old_generating_concat_p;
3332
3333 if (fallthru_label)
3334 emit_label (fallthru_label);
3335
3336 if (after_md_seq)
3337 emit_insn (after_md_seq);
3338 if (after_rtl_seq)
3339 emit_insn (after_rtl_seq);
3340
3341 free_temp_slots ();
3342 crtl->has_asm_statement = 1;
3343 }
3344
3345 /* Emit code to jump to the address
3346 specified by the pointer expression EXP. */
3347
3348 static void
3349 expand_computed_goto (tree exp)
3350 {
3351 rtx x = expand_normal (exp);
3352
3353 do_pending_stack_adjust ();
3354 emit_indirect_jump (x);
3355 }
3356
3357 /* Generate RTL code for a `goto' statement with target label LABEL.
3358 LABEL should be a LABEL_DECL tree node that was or will later be
3359 defined with `expand_label'. */
3360
3361 static void
3362 expand_goto (tree label)
3363 {
3364 if (flag_checking)
3365 {
3366 /* Check for a nonlocal goto to a containing function. Should have
3367 gotten translated to __builtin_nonlocal_goto. */
3368 tree context = decl_function_context (label);
3369 gcc_assert (!context || context == current_function_decl);
3370 }
3371
3372 emit_jump (jump_target_rtx (label));
3373 }
3374
3375 /* Output a return with no value. */
3376
3377 static void
3378 expand_null_return_1 (void)
3379 {
3380 clear_pending_stack_adjust ();
3381 do_pending_stack_adjust ();
3382 emit_jump (return_label);
3383 }
3384
3385 /* Generate RTL to return from the current function, with no value.
3386 (That is, we do not do anything about returning any value.) */
3387
3388 void
3389 expand_null_return (void)
3390 {
3391 /* If this function was declared to return a value, but we
3392 didn't, clobber the return registers so that they are not
3393 propagated live to the rest of the function. */
3394 clobber_return_register ();
3395
3396 expand_null_return_1 ();
3397 }
3398
3399 /* Generate RTL to return from the current function, with value VAL. */
3400
3401 static void
3402 expand_value_return (rtx val)
3403 {
3404 /* Copy the value to the return location unless it's already there. */
3405
3406 tree decl = DECL_RESULT (current_function_decl);
3407 rtx return_reg = DECL_RTL (decl);
3408 if (return_reg != val)
3409 {
3410 tree funtype = TREE_TYPE (current_function_decl);
3411 tree type = TREE_TYPE (decl);
3412 int unsignedp = TYPE_UNSIGNED (type);
3413 machine_mode old_mode = DECL_MODE (decl);
3414 machine_mode mode;
3415 if (DECL_BY_REFERENCE (decl))
3416 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3417 else
3418 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3419
3420 if (mode != old_mode)
3421 val = convert_modes (mode, old_mode, val, unsignedp);
3422
3423 if (GET_CODE (return_reg) == PARALLEL)
3424 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3425 else
3426 emit_move_insn (return_reg, val);
3427 }
3428
3429 expand_null_return_1 ();
3430 }
3431
3432 /* Generate RTL to evaluate the expression RETVAL and return it
3433 from the current function. */
3434
3435 static void
3436 expand_return (tree retval, tree bounds)
3437 {
3438 rtx result_rtl;
3439 rtx val = 0;
3440 tree retval_rhs;
3441 rtx bounds_rtl;
3442
3443 /* If function wants no value, give it none. */
3444 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3445 {
3446 expand_normal (retval);
3447 expand_null_return ();
3448 return;
3449 }
3450
3451 if (retval == error_mark_node)
3452 {
3453 /* Treat this like a return of no value from a function that
3454 returns a value. */
3455 expand_null_return ();
3456 return;
3457 }
3458 else if ((TREE_CODE (retval) == MODIFY_EXPR
3459 || TREE_CODE (retval) == INIT_EXPR)
3460 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3461 retval_rhs = TREE_OPERAND (retval, 1);
3462 else
3463 retval_rhs = retval;
3464
3465 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3466
3467 /* Put the returned bounds in the right place. */
3468 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3469 if (bounds_rtl)
3470 {
3471 rtx addr = NULL;
3472 rtx bnd = NULL;
3473
3474 if (bounds && bounds != error_mark_node)
3475 {
3476 bnd = expand_normal (bounds);
3477 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3478 }
3479 else if (REG_P (bounds_rtl))
3480 {
3481 if (bounds)
3482 bnd = chkp_expand_zero_bounds ();
3483 else
3484 {
3485 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3486 addr = gen_rtx_MEM (Pmode, addr);
3487 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3488 }
3489
3490 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3491 }
3492 else
3493 {
3494 int n;
3495
3496 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3497
3498 if (bounds)
3499 bnd = chkp_expand_zero_bounds ();
3500 else
3501 {
3502 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3503 addr = gen_rtx_MEM (Pmode, addr);
3504 }
3505
3506 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3507 {
3508 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3509 if (!bounds)
3510 {
3511 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3512 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3513 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3514 }
3515 targetm.calls.store_returned_bounds (slot, bnd);
3516 }
3517 }
3518 }
3519 else if (chkp_function_instrumented_p (current_function_decl)
3520 && !BOUNDED_P (retval_rhs)
3521 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3522 && TREE_CODE (retval_rhs) != RESULT_DECL)
3523 {
3524 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3525 addr = gen_rtx_MEM (Pmode, addr);
3526
3527 gcc_assert (MEM_P (result_rtl));
3528
3529 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3530 }
3531
3532 /* If we are returning the RESULT_DECL, then the value has already
3533 been stored into it, so we don't have to do anything special. */
3534 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3535 expand_value_return (result_rtl);
3536
3537 /* If the result is an aggregate that is being returned in one (or more)
3538 registers, load the registers here. */
3539
3540 else if (retval_rhs != 0
3541 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3542 && REG_P (result_rtl))
3543 {
3544 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3545 if (val)
3546 {
3547 /* Use the mode of the result value on the return register. */
3548 PUT_MODE (result_rtl, GET_MODE (val));
3549 expand_value_return (val);
3550 }
3551 else
3552 expand_null_return ();
3553 }
3554 else if (retval_rhs != 0
3555 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3556 && (REG_P (result_rtl)
3557 || (GET_CODE (result_rtl) == PARALLEL)))
3558 {
3559 /* Compute the return value into a temporary (usually a pseudo reg). */
3560 val
3561 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3562 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3563 val = force_not_mem (val);
3564 expand_value_return (val);
3565 }
3566 else
3567 {
3568 /* No hard reg used; calculate value into hard return reg. */
3569 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3570 expand_value_return (result_rtl);
3571 }
3572 }
3573
3574 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3575 STMT that doesn't require special handling for outgoing edges. That
3576 is, no tailcalls and no GIMPLE_COND. */
3577
3578 static void
3579 expand_gimple_stmt_1 (gimple *stmt)
3580 {
3581 tree op0;
3582
3583 set_curr_insn_location (gimple_location (stmt));
3584
3585 switch (gimple_code (stmt))
3586 {
3587 case GIMPLE_GOTO:
3588 op0 = gimple_goto_dest (stmt);
3589 if (TREE_CODE (op0) == LABEL_DECL)
3590 expand_goto (op0);
3591 else
3592 expand_computed_goto (op0);
3593 break;
3594 case GIMPLE_LABEL:
3595 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3596 break;
3597 case GIMPLE_NOP:
3598 case GIMPLE_PREDICT:
3599 break;
3600 case GIMPLE_SWITCH:
3601 {
3602 gswitch *swtch = as_a <gswitch *> (stmt);
3603 if (gimple_switch_num_labels (swtch) == 1)
3604 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3605 else
3606 expand_case (swtch);
3607 }
3608 break;
3609 case GIMPLE_ASM:
3610 expand_asm_stmt (as_a <gasm *> (stmt));
3611 break;
3612 case GIMPLE_CALL:
3613 expand_call_stmt (as_a <gcall *> (stmt));
3614 break;
3615
3616 case GIMPLE_RETURN:
3617 {
3618 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3619 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3620
3621 if (op0 && op0 != error_mark_node)
3622 {
3623 tree result = DECL_RESULT (current_function_decl);
3624
3625 /* Mark that we have a return statement with missing bounds. */
3626 if (!bnd
3627 && chkp_function_instrumented_p (cfun->decl)
3628 && !DECL_P (op0))
3629 bnd = error_mark_node;
3630
3631 /* If we are not returning the current function's RESULT_DECL,
3632 build an assignment to it. */
3633 if (op0 != result)
3634 {
3635 /* I believe that a function's RESULT_DECL is unique. */
3636 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3637
3638 /* ??? We'd like to use simply expand_assignment here,
3639 but this fails if the value is of BLKmode but the return
3640 decl is a register. expand_return has special handling
3641 for this combination, which eventually should move
3642 to common code. See comments there. Until then, let's
3643 build a modify expression :-/ */
3644 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3645 result, op0);
3646 }
3647 }
3648
3649 if (!op0)
3650 expand_null_return ();
3651 else
3652 expand_return (op0, bnd);
3653 }
3654 break;
3655
3656 case GIMPLE_ASSIGN:
3657 {
3658 gassign *assign_stmt = as_a <gassign *> (stmt);
3659 tree lhs = gimple_assign_lhs (assign_stmt);
3660
3661 /* Tree expand used to fiddle with |= and &= of two bitfield
3662 COMPONENT_REFs here. This can't happen with gimple, since the LHS
3663 of binary assigns must be a gimple reg. */
3664
3665 if (TREE_CODE (lhs) != SSA_NAME
3666 || get_gimple_rhs_class (gimple_expr_code (stmt))
3667 == GIMPLE_SINGLE_RHS)
3668 {
3669 tree rhs = gimple_assign_rhs1 (assign_stmt);
3670 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3671 == GIMPLE_SINGLE_RHS);
3672 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3673 /* Do not put locations on possibly shared trees. */
3674 && !is_gimple_min_invariant (rhs))
3675 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3676 if (TREE_CLOBBER_P (rhs))
3677 /* This is a clobber marking that this LHS is going
3678 out of scope. */
3679 ;
3680 else
3681 expand_assignment (lhs, rhs,
3682 gimple_assign_nontemporal_move_p (
3683 assign_stmt));
3684 }
3685 else
3686 {
3687 rtx target, temp;
3688 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3689 struct separate_ops ops;
3690 bool promoted = false;
3691
3692 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3693 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3694 promoted = true;
3695
3696 ops.code = gimple_assign_rhs_code (assign_stmt);
3697 ops.type = TREE_TYPE (lhs);
3698 switch (get_gimple_rhs_class (ops.code))
3699 {
3700 case GIMPLE_TERNARY_RHS:
3701 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3702 /* Fallthru */
3703 case GIMPLE_BINARY_RHS:
3704 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3705 /* Fallthru */
3706 case GIMPLE_UNARY_RHS:
3707 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3708 break;
3709 default:
3710 gcc_unreachable ();
3711 }
3712 ops.location = gimple_location (stmt);
3713
3714 /* If we want to use a nontemporal store, force the value into a
3715 register first. If we store into a promoted register,
3716 don't expand directly to the target. */
3717 temp = nontemporal || promoted ? NULL_RTX : target;
3718 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3719 EXPAND_NORMAL);
3720
3721 if (temp == target)
3722 ;
3723 else if (promoted)
3724 {
3725 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3726 /* If TEMP is a VOIDmode constant, use convert_modes to make
3727 sure that we properly convert it. */
3728 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3729 {
3730 temp = convert_modes (GET_MODE (target),
3731 TYPE_MODE (ops.type),
3732 temp, unsignedp);
3733 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3734 GET_MODE (target), temp, unsignedp);
3735 }
3736
3737 convert_move (SUBREG_REG (target), temp, unsignedp);
3738 }
3739 else if (nontemporal && emit_storent_insn (target, temp))
3740 ;
3741 else
3742 {
3743 temp = force_operand (temp, target);
3744 if (temp != target)
3745 emit_move_insn (target, temp);
3746 }
3747 }
3748 }
3749 break;
3750
3751 default:
3752 gcc_unreachable ();
3753 }
3754 }
3755
3756 /* Expand one gimple statement STMT and return the last RTL instruction
3757 before any of the newly generated ones.
3758
3759 In addition to generating the necessary RTL instructions this also
3760 sets REG_EH_REGION notes if necessary and sets the current source
3761 location for diagnostics. */
3762
3763 static rtx_insn *
3764 expand_gimple_stmt (gimple *stmt)
3765 {
3766 location_t saved_location = input_location;
3767 rtx_insn *last = get_last_insn ();
3768 int lp_nr;
3769
3770 gcc_assert (cfun);
3771
3772 /* We need to save and restore the current source location so that errors
3773 discovered during expansion are emitted with the right location. But
3774 it would be better if the diagnostic routines used the source location
3775 embedded in the tree nodes rather than globals. */
3776 if (gimple_has_location (stmt))
3777 input_location = gimple_location (stmt);
3778
3779 expand_gimple_stmt_1 (stmt);
3780
3781 /* Free any temporaries used to evaluate this statement. */
3782 free_temp_slots ();
3783
3784 input_location = saved_location;
3785
3786 /* Mark all insns that may trap. */
3787 lp_nr = lookup_stmt_eh_lp (stmt);
3788 if (lp_nr)
3789 {
3790 rtx_insn *insn;
3791 for (insn = next_real_insn (last); insn;
3792 insn = next_real_insn (insn))
3793 {
3794 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3795 /* If we want exceptions for non-call insns, any
3796 may_trap_p instruction may throw. */
3797 && GET_CODE (PATTERN (insn)) != CLOBBER
3798 && GET_CODE (PATTERN (insn)) != USE
3799 && insn_could_throw_p (insn))
3800 make_reg_eh_region_note (insn, 0, lp_nr);
3801 }
3802 }
3803
3804 return last;
3805 }
3806
3807 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3808 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3809 generated a tail call (something that might be denied by the ABI
3810 rules governing the call; see calls.c).
3811
3812 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3813 can still reach the rest of BB. The case here is __builtin_sqrt,
3814 where the NaN result goes through the external function (with a
3815 tailcall) and the normal result happens via a sqrt instruction. */
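
/* An illustrative, hypothetical source-level example of the situation
   described above (not from this file), assuming a target with a
   hardware sqrt instruction and -fmath-errno in effect:

	#include <math.h>

	double
	f (double x)
	{
	  return sqrt (x);
	}

   The builtin expander may emit the sqrt instruction for the common
   path and a call to the library sqrt for the NaN case (so that errno
   gets set).  When that call ends the function it can become a
   conditional sibcall, and this routine then returns with *CAN_FALLTHRU
   set because the inline-result path in the block remains reachable.  */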
3816
3817 static basic_block
3818 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3819 {
3820 rtx_insn *last2, *last;
3821 edge e;
3822 edge_iterator ei;
3823 profile_probability probability;
3824
3825 last2 = last = expand_gimple_stmt (stmt);
3826
3827 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3828 if (CALL_P (last) && SIBLING_CALL_P (last))
3829 goto found;
3830
3831 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3832
3833 *can_fallthru = true;
3834 return NULL;
3835
3836 found:
3837 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3838 Any instructions emitted here are about to be deleted. */
3839 do_pending_stack_adjust ();
3840
3841 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3842 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3843 EH or abnormal edges, we shouldn't have created a tail call in
3844 the first place. So it seems to me we should just be removing
3845 all edges here, or redirecting the existing fallthru edge to
3846 the exit block. */
3847
3848 probability = profile_probability::never ();
3849
3850 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3851 {
3852 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3853 {
3854 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3855 e->dest->count -= e->count ();
3856 probability += e->probability;
3857 remove_edge (e);
3858 }
3859 else
3860 ei_next (&ei);
3861 }
3862
3863 /* This is somewhat ugly: the call_expr expander often emits instructions
3864 after the sibcall (to perform the function return). These confuse the
3865 find_many_sub_basic_blocks code, so we need to get rid of them. */
3866 last = NEXT_INSN (last);
3867 gcc_assert (BARRIER_P (last));
3868
3869 *can_fallthru = false;
3870 while (NEXT_INSN (last))
3871 {
3872 /* For instance, an sqrt builtin expander expands an if with a
3873 sibcall in the then-arm and a label for the else-arm. */
3874 if (LABEL_P (NEXT_INSN (last)))
3875 {
3876 *can_fallthru = true;
3877 break;
3878 }
3879 delete_insn (NEXT_INSN (last));
3880 }
3881
3882 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3883 | EDGE_SIBCALL);
3884 e->probability = probability;
3885 BB_END (bb) = last;
3886 update_bb_for_insn (bb);
3887
3888 if (NEXT_INSN (last))
3889 {
3890 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3891
3892 last = BB_END (bb);
3893 if (BARRIER_P (last))
3894 BB_END (bb) = PREV_INSN (last);
3895 }
3896
3897 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3898
3899 return bb;
3900 }
3901
3902 /* Return the difference between the floor and the truncated result of
3903 a signed division by OP1 with remainder MOD. */
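
/* For illustration (hypothetical operand values): dividing -7 by 2
   gives a truncated quotient of -3 with remainder MOD = -1, while the
   floor result is -4.  Here OP1 / MOD = 2 / -1 = -2 < 0, so the
   expression built below yields -1, the required adjustment.  For
   7 / 2 the remainder is 1, OP1 / MOD is positive, and the adjustment
   is 0.  */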
3904 static rtx
3905 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3906 {
3907 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3908 return gen_rtx_IF_THEN_ELSE
3909 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3910 gen_rtx_IF_THEN_ELSE
3911 (mode, gen_rtx_LT (BImode,
3912 gen_rtx_DIV (mode, op1, mod),
3913 const0_rtx),
3914 constm1_rtx, const0_rtx),
3915 const0_rtx);
3916 }
3917
3918 /* Return the difference between the ceil and the truncated result of
3919 a signed division by OP1 with remainder MOD. */
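
/* For illustration (hypothetical operand values): dividing 7 by 2
   gives a truncated quotient of 3 with remainder MOD = 1, while the
   ceiling result is 4.  Here OP1 / MOD = 2 / 1 = 2 > 0, so the
   expression built below yields 1.  For -7 / 2 the remainder is -1,
   OP1 / MOD is negative, and the adjustment is 0 (the ceiling equals
   the truncated quotient).  */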
3920 static rtx
3921 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3922 {
3923 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3924 return gen_rtx_IF_THEN_ELSE
3925 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3926 gen_rtx_IF_THEN_ELSE
3927 (mode, gen_rtx_GT (BImode,
3928 gen_rtx_DIV (mode, op1, mod),
3929 const0_rtx),
3930 const1_rtx, const0_rtx),
3931 const0_rtx);
3932 }
3933
3934 /* Return the difference between the ceil and the truncated result of
3935 an unsigned division by OP1 with remainder MOD. */
3936 static rtx
3937 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3938 {
3939 /* (mod != 0 ? 1 : 0) */
3940 return gen_rtx_IF_THEN_ELSE
3941 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3942 const1_rtx, const0_rtx);
3943 }
3944
3945 /* Return the difference between the rounded and the truncated result
3946 of a signed division by OP1 with remainder MOD. Halfway cases are
3947 rounded away from zero, rather than to the nearest even number. */
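
/* For illustration (hypothetical operand values): dividing 7 by 4
   gives a truncated quotient of 1 with remainder MOD = 3.  Since
   abs (MOD) = 3 >= abs (OP1) - abs (MOD) = 1 and OP1 / MOD > 0, the
   expression built below yields 1, rounding 1.75 up to 2.  For -7 / 4
   the remainder is -3, OP1 / MOD < 0, and the adjustment is -1,
   rounding -1.75 to -2.  The halfway case 6 / 4 also satisfies the
   comparison (2 >= 2) and rounds away from zero to 2.  */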
3948 static rtx
3949 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3950 {
3951 /* (abs (mod) >= abs (op1) - abs (mod)
3952 ? (op1 / mod > 0 ? 1 : -1)
3953 : 0) */
3954 return gen_rtx_IF_THEN_ELSE
3955 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3956 gen_rtx_MINUS (mode,
3957 gen_rtx_ABS (mode, op1),
3958 gen_rtx_ABS (mode, mod))),
3959 gen_rtx_IF_THEN_ELSE
3960 (mode, gen_rtx_GT (BImode,
3961 gen_rtx_DIV (mode, op1, mod),
3962 const0_rtx),
3963 const1_rtx, constm1_rtx),
3964 const0_rtx);
3965 }
3966
3967 /* Return the difference between the rounded and the truncated result
3968 of an unsigned division by OP1 with remainder MOD. Halfway cases
3969 are rounded away from zero, rather than to the nearest even
3970 number. */
3971 static rtx
3972 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3973 {
3974 /* (mod >= op1 - mod ? 1 : 0) */
3975 return gen_rtx_IF_THEN_ELSE
3976 (mode, gen_rtx_GE (BImode, mod,
3977 gen_rtx_MINUS (mode, op1, mod)),
3978 const1_rtx, const0_rtx);
3979 }
3980
3981 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3982 any rtl. */
3983
3984 static rtx
3985 convert_debug_memory_address (scalar_int_mode mode, rtx x,
3986 addr_space_t as)
3987 {
3988 #ifndef POINTERS_EXTEND_UNSIGNED
3989 gcc_assert (mode == Pmode
3990 || mode == targetm.addr_space.address_mode (as));
3991 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
3992 #else
3993 rtx temp;
3994
3995 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3996
3997 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3998 return x;
3999
4000 /* X must have some form of address mode already. */
4001 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4002 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4003 x = lowpart_subreg (mode, x, xmode);
4004 else if (POINTERS_EXTEND_UNSIGNED > 0)
4005 x = gen_rtx_ZERO_EXTEND (mode, x);
4006 else if (!POINTERS_EXTEND_UNSIGNED)
4007 x = gen_rtx_SIGN_EXTEND (mode, x);
4008 else
4009 {
4010 switch (GET_CODE (x))
4011 {
4012 case SUBREG:
4013 if ((SUBREG_PROMOTED_VAR_P (x)
4014 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4015 || (GET_CODE (SUBREG_REG (x)) == PLUS
4016 && REG_P (XEXP (SUBREG_REG (x), 0))
4017 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4018 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4019 && GET_MODE (SUBREG_REG (x)) == mode)
4020 return SUBREG_REG (x);
4021 break;
4022 case LABEL_REF:
4023 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4024 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4025 return temp;
4026 case SYMBOL_REF:
4027 temp = shallow_copy_rtx (x);
4028 PUT_MODE (temp, mode);
4029 return temp;
4030 case CONST:
4031 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4032 if (temp)
4033 temp = gen_rtx_CONST (mode, temp);
4034 return temp;
4035 case PLUS:
4036 case MINUS:
4037 if (CONST_INT_P (XEXP (x, 1)))
4038 {
4039 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4040 if (temp)
4041 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4042 }
4043 break;
4044 default:
4045 break;
4046 }
4047 /* Don't know how to express ptr_extend as an operation in debug info. */
4048 return NULL;
4049 }
4050 #endif /* POINTERS_EXTEND_UNSIGNED */
4051
4052 return x;
4053 }
4054
4055 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4056 by avoid_deep_ter_for_debug. */
4057
4058 static hash_map<tree, tree> *deep_ter_debug_map;
4059
4060 /* Split overly deep TER chains for debug stmts using debug temporaries. */
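
/* For illustration (hypothetical GIMPLE, assuming every _N is TERed
   into its single real use):

	_1 = a_2 + b_3;
	_4 = _1 * c_5;
	...
	_9 = _8 - d_6;
	# DEBUG x => _9

   Substituting each definition into the next while expanding the debug
   bind would build one very large expression.  Once the chain of TERed
   definitions behind a use is deeper than 6, a debug temporary

	# DEBUG D#1 => _8

   is inserted right after the definition of _8 and recorded in
   deep_ter_debug_map, so that later debug expansion of _8 refers to
   D#1 instead of re-expanding the whole chain.  */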
4061
4062 static void
4063 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4064 {
4065 use_operand_p use_p;
4066 ssa_op_iter iter;
4067 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4068 {
4069 tree use = USE_FROM_PTR (use_p);
4070 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4071 continue;
4072 gimple *g = get_gimple_for_ssa_name (use);
4073 if (g == NULL)
4074 continue;
4075 if (depth > 6 && !stmt_ends_bb_p (g))
4076 {
4077 if (deep_ter_debug_map == NULL)
4078 deep_ter_debug_map = new hash_map<tree, tree>;
4079
4080 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4081 if (vexpr != NULL)
4082 continue;
4083 vexpr = make_node (DEBUG_EXPR_DECL);
4084 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4085 DECL_ARTIFICIAL (vexpr) = 1;
4086 TREE_TYPE (vexpr) = TREE_TYPE (use);
4087 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4088 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4089 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4090 avoid_deep_ter_for_debug (def_temp, 0);
4091 }
4092 else
4093 avoid_deep_ter_for_debug (g, depth + 1);
4094 }
4095 }
4096
4097 /* Return an RTX equivalent to the value of the parameter DECL. */
4098
4099 static rtx
4100 expand_debug_parm_decl (tree decl)
4101 {
4102 rtx incoming = DECL_INCOMING_RTL (decl);
4103
4104 if (incoming
4105 && GET_MODE (incoming) != BLKmode
4106 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4107 || (MEM_P (incoming)
4108 && REG_P (XEXP (incoming, 0))
4109 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4110 {
4111 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4112
4113 #ifdef HAVE_window_save
4114 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4115 If the target machine has an explicit window save instruction, the
4116 actual entry value is the corresponding OUTGOING_REGNO instead. */
4117 if (REG_P (incoming)
4118 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4119 incoming
4120 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4121 OUTGOING_REGNO (REGNO (incoming)), 0);
4122 else if (MEM_P (incoming))
4123 {
4124 rtx reg = XEXP (incoming, 0);
4125 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4126 {
4127 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4128 incoming = replace_equiv_address_nv (incoming, reg);
4129 }
4130 else
4131 incoming = copy_rtx (incoming);
4132 }
4133 #endif
4134
4135 ENTRY_VALUE_EXP (rtl) = incoming;
4136 return rtl;
4137 }
4138
4139 if (incoming
4140 && GET_MODE (incoming) != BLKmode
4141 && !TREE_ADDRESSABLE (decl)
4142 && MEM_P (incoming)
4143 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4144 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4145 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4146 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4147 return copy_rtx (incoming);
4148
4149 return NULL_RTX;
4150 }
4151
4152 /* Return an RTX equivalent to the value of the tree expression EXP. */
4153
4154 static rtx
4155 expand_debug_expr (tree exp)
4156 {
4157 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4158 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4159 machine_mode inner_mode = VOIDmode;
4160 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4161 addr_space_t as;
4162 scalar_int_mode op0_mode, op1_mode, addr_mode;
4163
4164 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4165 {
4166 case tcc_expression:
4167 switch (TREE_CODE (exp))
4168 {
4169 case COND_EXPR:
4170 case DOT_PROD_EXPR:
4171 case SAD_EXPR:
4172 case WIDEN_MULT_PLUS_EXPR:
4173 case WIDEN_MULT_MINUS_EXPR:
4174 case FMA_EXPR:
4175 goto ternary;
4176
4177 case TRUTH_ANDIF_EXPR:
4178 case TRUTH_ORIF_EXPR:
4179 case TRUTH_AND_EXPR:
4180 case TRUTH_OR_EXPR:
4181 case TRUTH_XOR_EXPR:
4182 goto binary;
4183
4184 case TRUTH_NOT_EXPR:
4185 goto unary;
4186
4187 default:
4188 break;
4189 }
4190 break;
4191
4192 ternary:
4193 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4194 if (!op2)
4195 return NULL_RTX;
4196 /* Fall through. */
4197
4198 binary:
4199 case tcc_binary:
4200 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4201 if (!op1)
4202 return NULL_RTX;
4203 switch (TREE_CODE (exp))
4204 {
4205 case LSHIFT_EXPR:
4206 case RSHIFT_EXPR:
4207 case LROTATE_EXPR:
4208 case RROTATE_EXPR:
4209 case WIDEN_LSHIFT_EXPR:
4210 /* Ensure second operand isn't wider than the first one. */
4211 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4212 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4213 && (GET_MODE_UNIT_PRECISION (mode)
4214 < GET_MODE_PRECISION (op1_mode)))
4215 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4216 break;
4217 default:
4218 break;
4219 }
4220 /* Fall through. */
4221
4222 unary:
4223 case tcc_unary:
4224 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4225 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4226 if (!op0)
4227 return NULL_RTX;
4228 break;
4229
4230 case tcc_comparison:
4231 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4232 goto binary;
4233
4234 case tcc_type:
4235 case tcc_statement:
4236 gcc_unreachable ();
4237
4238 case tcc_constant:
4239 case tcc_exceptional:
4240 case tcc_declaration:
4241 case tcc_reference:
4242 case tcc_vl_exp:
4243 break;
4244 }
4245
4246 switch (TREE_CODE (exp))
4247 {
4248 case STRING_CST:
4249 if (!lookup_constant_def (exp))
4250 {
4251 if (strlen (TREE_STRING_POINTER (exp)) + 1
4252 != (size_t) TREE_STRING_LENGTH (exp))
4253 return NULL_RTX;
4254 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4255 op0 = gen_rtx_MEM (BLKmode, op0);
4256 set_mem_attributes (op0, exp, 0);
4257 return op0;
4258 }
4259 /* Fall through. */
4260
4261 case INTEGER_CST:
4262 case REAL_CST:
4263 case FIXED_CST:
4264 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4265 return op0;
4266
4267 case POLY_INT_CST:
4268 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4269
4270 case COMPLEX_CST:
4271 gcc_assert (COMPLEX_MODE_P (mode));
4272 op0 = expand_debug_expr (TREE_REALPART (exp));
4273 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4274 return gen_rtx_CONCAT (mode, op0, op1);
4275
4276 case DEBUG_EXPR_DECL:
4277 op0 = DECL_RTL_IF_SET (exp);
4278
4279 if (op0)
4280 return op0;
4281
4282 op0 = gen_rtx_DEBUG_EXPR (mode);
4283 DEBUG_EXPR_TREE_DECL (op0) = exp;
4284 SET_DECL_RTL (exp, op0);
4285
4286 return op0;
4287
4288 case VAR_DECL:
4289 case PARM_DECL:
4290 case FUNCTION_DECL:
4291 case LABEL_DECL:
4292 case CONST_DECL:
4293 case RESULT_DECL:
4294 op0 = DECL_RTL_IF_SET (exp);
4295
4296 /* This decl was probably optimized away. */
4297 if (!op0)
4298 {
4299 if (!VAR_P (exp)
4300 || DECL_EXTERNAL (exp)
4301 || !TREE_STATIC (exp)
4302 || !DECL_NAME (exp)
4303 || DECL_HARD_REGISTER (exp)
4304 || DECL_IN_CONSTANT_POOL (exp)
4305 || mode == VOIDmode)
4306 return NULL;
4307
4308 op0 = make_decl_rtl_for_debug (exp);
4309 if (!MEM_P (op0)
4310 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4311 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4312 return NULL;
4313 }
4314 else
4315 op0 = copy_rtx (op0);
4316
4317 if (GET_MODE (op0) == BLKmode
4318 /* If op0 is not BLKmode, but mode is, adjust_mode
4319 below would ICE. While it is likely a FE bug,
4320 try to be robust here. See PR43166. */
4321 || mode == BLKmode
4322 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4323 {
4324 gcc_assert (MEM_P (op0));
4325 op0 = adjust_address_nv (op0, mode, 0);
4326 return op0;
4327 }
4328
4329 /* Fall through. */
4330
4331 adjust_mode:
4332 case PAREN_EXPR:
4333 CASE_CONVERT:
4334 {
4335 inner_mode = GET_MODE (op0);
4336
4337 if (mode == inner_mode)
4338 return op0;
4339
4340 if (inner_mode == VOIDmode)
4341 {
4342 if (TREE_CODE (exp) == SSA_NAME)
4343 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4344 else
4345 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4346 if (mode == inner_mode)
4347 return op0;
4348 }
4349
4350 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4351 {
4352 if (GET_MODE_UNIT_BITSIZE (mode)
4353 == GET_MODE_UNIT_BITSIZE (inner_mode))
4354 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4355 else if (GET_MODE_UNIT_BITSIZE (mode)
4356 < GET_MODE_UNIT_BITSIZE (inner_mode))
4357 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4358 else
4359 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4360 }
4361 else if (FLOAT_MODE_P (mode))
4362 {
4363 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4364 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4365 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4366 else
4367 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4368 }
4369 else if (FLOAT_MODE_P (inner_mode))
4370 {
4371 if (unsignedp)
4372 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4373 else
4374 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4375 }
4376 else if (GET_MODE_UNIT_PRECISION (mode)
4377 == GET_MODE_UNIT_PRECISION (inner_mode))
4378 op0 = lowpart_subreg (mode, op0, inner_mode);
4379 else if (GET_MODE_UNIT_PRECISION (mode)
4380 < GET_MODE_UNIT_PRECISION (inner_mode))
4381 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4382 else if (UNARY_CLASS_P (exp)
4383 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4384 : unsignedp)
4385 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4386 else
4387 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4388
4389 return op0;
4390 }
4391
4392 case MEM_REF:
4393 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4394 {
4395 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4396 TREE_OPERAND (exp, 0),
4397 TREE_OPERAND (exp, 1));
4398 if (newexp)
4399 return expand_debug_expr (newexp);
4400 }
4401 /* FALLTHROUGH */
4402 case INDIRECT_REF:
4403 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4404 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4405 if (!op0)
4406 return NULL;
4407
4408 if (TREE_CODE (exp) == MEM_REF)
4409 {
4410 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4411 || (GET_CODE (op0) == PLUS
4412 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4413 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4414 Instead just use get_inner_reference. */
4415 goto component_ref;
4416
4417 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4418 if (!op1 || !CONST_INT_P (op1))
4419 return NULL;
4420
4421 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4422 }
4423
4424 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4425
4426 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4427 op0, as);
4428 if (op0 == NULL_RTX)
4429 return NULL;
4430
4431 op0 = gen_rtx_MEM (mode, op0);
4432 set_mem_attributes (op0, exp, 0);
4433 if (TREE_CODE (exp) == MEM_REF
4434 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4435 set_mem_expr (op0, NULL_TREE);
4436 set_mem_addr_space (op0, as);
4437
4438 return op0;
4439
4440 case TARGET_MEM_REF:
4441 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4442 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4443 return NULL;
4444
4445 op0 = expand_debug_expr
4446 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4447 if (!op0)
4448 return NULL;
4449
4450 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4451 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4452 op0, as);
4453 if (op0 == NULL_RTX)
4454 return NULL;
4455
4456 op0 = gen_rtx_MEM (mode, op0);
4457
4458 set_mem_attributes (op0, exp, 0);
4459 set_mem_addr_space (op0, as);
4460
4461 return op0;
4462
4463 component_ref:
4464 case ARRAY_REF:
4465 case ARRAY_RANGE_REF:
4466 case COMPONENT_REF:
4467 case BIT_FIELD_REF:
4468 case REALPART_EXPR:
4469 case IMAGPART_EXPR:
4470 case VIEW_CONVERT_EXPR:
4471 {
4472 machine_mode mode1;
4473 HOST_WIDE_INT bitsize, bitpos;
4474 tree offset;
4475 int reversep, volatilep = 0;
4476 tree tem
4477 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4478 &unsignedp, &reversep, &volatilep);
4479 rtx orig_op0;
4480
4481 if (bitsize == 0)
4482 return NULL;
4483
4484 orig_op0 = op0 = expand_debug_expr (tem);
4485
4486 if (!op0)
4487 return NULL;
4488
4489 if (offset)
4490 {
4491 machine_mode addrmode, offmode;
4492
4493 if (!MEM_P (op0))
4494 return NULL;
4495
4496 op0 = XEXP (op0, 0);
4497 addrmode = GET_MODE (op0);
4498 if (addrmode == VOIDmode)
4499 addrmode = Pmode;
4500
4501 op1 = expand_debug_expr (offset);
4502 if (!op1)
4503 return NULL;
4504
4505 offmode = GET_MODE (op1);
4506 if (offmode == VOIDmode)
4507 offmode = TYPE_MODE (TREE_TYPE (offset));
4508
4509 if (addrmode != offmode)
4510 op1 = lowpart_subreg (addrmode, op1, offmode);
4511
4512 /* Don't use offset_address here; we don't need a
4513 recognizable address, and we don't want to generate
4514 code. */
4515 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4516 op0, op1));
4517 }
4518
4519 if (MEM_P (op0))
4520 {
4521 if (mode1 == VOIDmode)
4522 /* Bitfield. */
4523 mode1 = smallest_int_mode_for_size (bitsize);
4524 if (bitpos >= BITS_PER_UNIT)
4525 {
4526 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4527 bitpos %= BITS_PER_UNIT;
4528 }
4529 else if (bitpos < 0)
4530 {
4531 HOST_WIDE_INT units
4532 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4533 op0 = adjust_address_nv (op0, mode1, -units);
4534 bitpos += units * BITS_PER_UNIT;
4535 }
4536 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4537 op0 = adjust_address_nv (op0, mode, 0);
4538 else if (GET_MODE (op0) != mode1)
4539 op0 = adjust_address_nv (op0, mode1, 0);
4540 else
4541 op0 = copy_rtx (op0);
4542 if (op0 == orig_op0)
4543 op0 = shallow_copy_rtx (op0);
4544 set_mem_attributes (op0, exp, 0);
4545 }
4546
4547 if (bitpos == 0 && mode == GET_MODE (op0))
4548 return op0;
4549
4550 if (bitpos < 0)
4551 return NULL;
4552
4553 if (GET_MODE (op0) == BLKmode)
4554 return NULL;
4555
4556 if ((bitpos % BITS_PER_UNIT) == 0
4557 && bitsize == GET_MODE_BITSIZE (mode1))
4558 {
4559 machine_mode opmode = GET_MODE (op0);
4560
4561 if (opmode == VOIDmode)
4562 opmode = TYPE_MODE (TREE_TYPE (tem));
4563
4564 /* This condition may hold if we're expanding the address
4565 right past the end of an array that turned out not to
4566 be addressable (i.e., the address was only computed in
4567 debug stmts). The gen_subreg below would rightfully
4568 crash, and the address doesn't really exist, so just
4569 drop it. */
4570 if (bitpos >= GET_MODE_BITSIZE (opmode))
4571 return NULL;
4572
4573 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4574 return simplify_gen_subreg (mode, op0, opmode,
4575 bitpos / BITS_PER_UNIT);
4576 }
4577
4578 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4579 && TYPE_UNSIGNED (TREE_TYPE (exp))
4580 ? SIGN_EXTRACT
4581 : ZERO_EXTRACT, mode,
4582 GET_MODE (op0) != VOIDmode
4583 ? GET_MODE (op0)
4584 : TYPE_MODE (TREE_TYPE (tem)),
4585 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4586 }
4587
4588 case ABS_EXPR:
4589 return simplify_gen_unary (ABS, mode, op0, mode);
4590
4591 case NEGATE_EXPR:
4592 return simplify_gen_unary (NEG, mode, op0, mode);
4593
4594 case BIT_NOT_EXPR:
4595 return simplify_gen_unary (NOT, mode, op0, mode);
4596
4597 case FLOAT_EXPR:
4598 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4599 0)))
4600 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4601 inner_mode);
4602
4603 case FIX_TRUNC_EXPR:
4604 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4605 inner_mode);
4606
4607 case POINTER_PLUS_EXPR:
4608 /* For the rare target where pointers are not the same size as
4609 size_t, we need to check for mis-matched modes and correct
4610 the addend. */
4611 if (op0 && op1
4612 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4613 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4614 && op0_mode != op1_mode)
4615 {
4616 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4617 /* If OP0 is a partial mode, then we must truncate, even
4618 if it has the same bitsize as OP1, since GCC's
4619 representation of partial modes is opaque. */
4620 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4621 && (GET_MODE_BITSIZE (op0_mode)
4622 == GET_MODE_BITSIZE (op1_mode))))
4623 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4624 else
4625 /* We always sign-extend, regardless of the signedness of
4626 the operand, because the operand is always unsigned
4627 here even if the original C expression is signed. */
4628 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4629 }
4630 /* Fall through. */
4631 case PLUS_EXPR:
4632 return simplify_gen_binary (PLUS, mode, op0, op1);
4633
4634 case MINUS_EXPR:
4635 case POINTER_DIFF_EXPR:
4636 return simplify_gen_binary (MINUS, mode, op0, op1);
4637
4638 case MULT_EXPR:
4639 return simplify_gen_binary (MULT, mode, op0, op1);
4640
4641 case RDIV_EXPR:
4642 case TRUNC_DIV_EXPR:
4643 case EXACT_DIV_EXPR:
4644 if (unsignedp)
4645 return simplify_gen_binary (UDIV, mode, op0, op1);
4646 else
4647 return simplify_gen_binary (DIV, mode, op0, op1);
4648
4649 case TRUNC_MOD_EXPR:
4650 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4651
4652 case FLOOR_DIV_EXPR:
4653 if (unsignedp)
4654 return simplify_gen_binary (UDIV, mode, op0, op1);
4655 else
4656 {
4657 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4658 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4659 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4660 return simplify_gen_binary (PLUS, mode, div, adj);
4661 }
4662
4663 case FLOOR_MOD_EXPR:
4664 if (unsignedp)
4665 return simplify_gen_binary (UMOD, mode, op0, op1);
4666 else
4667 {
4668 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4669 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4670 adj = simplify_gen_unary (NEG, mode,
4671 simplify_gen_binary (MULT, mode, adj, op1),
4672 mode);
4673 return simplify_gen_binary (PLUS, mode, mod, adj);
4674 }
4675
4676 case CEIL_DIV_EXPR:
4677 if (unsignedp)
4678 {
4679 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4680 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4681 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4682 return simplify_gen_binary (PLUS, mode, div, adj);
4683 }
4684 else
4685 {
4686 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4687 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4688 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4689 return simplify_gen_binary (PLUS, mode, div, adj);
4690 }
4691
4692 case CEIL_MOD_EXPR:
4693 if (unsignedp)
4694 {
4695 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4696 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4697 adj = simplify_gen_unary (NEG, mode,
4698 simplify_gen_binary (MULT, mode, adj, op1),
4699 mode);
4700 return simplify_gen_binary (PLUS, mode, mod, adj);
4701 }
4702 else
4703 {
4704 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4705 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4706 adj = simplify_gen_unary (NEG, mode,
4707 simplify_gen_binary (MULT, mode, adj, op1),
4708 mode);
4709 return simplify_gen_binary (PLUS, mode, mod, adj);
4710 }
4711
4712 case ROUND_DIV_EXPR:
4713 if (unsignedp)
4714 {
4715 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4716 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4717 rtx adj = round_udiv_adjust (mode, mod, op1);
4718 return simplify_gen_binary (PLUS, mode, div, adj);
4719 }
4720 else
4721 {
4722 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4723 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4724 rtx adj = round_sdiv_adjust (mode, mod, op1);
4725 return simplify_gen_binary (PLUS, mode, div, adj);
4726 }
4727
4728 case ROUND_MOD_EXPR:
4729 if (unsignedp)
4730 {
4731 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4732 rtx adj = round_udiv_adjust (mode, mod, op1);
4733 adj = simplify_gen_unary (NEG, mode,
4734 simplify_gen_binary (MULT, mode, adj, op1),
4735 mode);
4736 return simplify_gen_binary (PLUS, mode, mod, adj);
4737 }
4738 else
4739 {
4740 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4741 rtx adj = round_sdiv_adjust (mode, mod, op1);
4742 adj = simplify_gen_unary (NEG, mode,
4743 simplify_gen_binary (MULT, mode, adj, op1),
4744 mode);
4745 return simplify_gen_binary (PLUS, mode, mod, adj);
4746 }
4747
4748 case LSHIFT_EXPR:
4749 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4750
4751 case RSHIFT_EXPR:
4752 if (unsignedp)
4753 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4754 else
4755 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4756
4757 case LROTATE_EXPR:
4758 return simplify_gen_binary (ROTATE, mode, op0, op1);
4759
4760 case RROTATE_EXPR:
4761 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4762
4763 case MIN_EXPR:
4764 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4765
4766 case MAX_EXPR:
4767 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4768
4769 case BIT_AND_EXPR:
4770 case TRUTH_AND_EXPR:
4771 return simplify_gen_binary (AND, mode, op0, op1);
4772
4773 case BIT_IOR_EXPR:
4774 case TRUTH_OR_EXPR:
4775 return simplify_gen_binary (IOR, mode, op0, op1);
4776
4777 case BIT_XOR_EXPR:
4778 case TRUTH_XOR_EXPR:
4779 return simplify_gen_binary (XOR, mode, op0, op1);
4780
4781 case TRUTH_ANDIF_EXPR:
4782 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4783
4784 case TRUTH_ORIF_EXPR:
4785 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4786
4787 case TRUTH_NOT_EXPR:
4788 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4789
4790 case LT_EXPR:
4791 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4792 op0, op1);
4793
4794 case LE_EXPR:
4795 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4796 op0, op1);
4797
4798 case GT_EXPR:
4799 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4800 op0, op1);
4801
4802 case GE_EXPR:
4803 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4804 op0, op1);
4805
4806 case EQ_EXPR:
4807 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4808
4809 case NE_EXPR:
4810 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4811
4812 case UNORDERED_EXPR:
4813 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4814
4815 case ORDERED_EXPR:
4816 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4817
4818 case UNLT_EXPR:
4819 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4820
4821 case UNLE_EXPR:
4822 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4823
4824 case UNGT_EXPR:
4825 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4826
4827 case UNGE_EXPR:
4828 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4829
4830 case UNEQ_EXPR:
4831 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4832
4833 case LTGT_EXPR:
4834 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4835
4836 case COND_EXPR:
4837 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4838
4839 case COMPLEX_EXPR:
4840 gcc_assert (COMPLEX_MODE_P (mode));
4841 if (GET_MODE (op0) == VOIDmode)
4842 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4843 if (GET_MODE (op1) == VOIDmode)
4844 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4845 return gen_rtx_CONCAT (mode, op0, op1);
4846
4847 case CONJ_EXPR:
4848 if (GET_CODE (op0) == CONCAT)
4849 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4850 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4851 XEXP (op0, 1),
4852 GET_MODE_INNER (mode)));
4853 else
4854 {
4855 scalar_mode imode = GET_MODE_INNER (mode);
4856 rtx re, im;
4857
4858 if (MEM_P (op0))
4859 {
4860 re = adjust_address_nv (op0, imode, 0);
4861 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4862 }
4863 else
4864 {
4865 scalar_int_mode ifmode;
4866 scalar_int_mode ihmode;
4867 rtx halfsize;
4868 if (!int_mode_for_mode (mode).exists (&ifmode)
4869 || !int_mode_for_mode (imode).exists (&ihmode))
4870 return NULL;
4871 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4872 re = op0;
4873 if (mode != ifmode)
4874 re = gen_rtx_SUBREG (ifmode, re, 0);
4875 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4876 if (imode != ihmode)
4877 re = gen_rtx_SUBREG (imode, re, 0);
4878 im = copy_rtx (op0);
4879 if (mode != ifmode)
4880 im = gen_rtx_SUBREG (ifmode, im, 0);
4881 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4882 if (imode != ihmode)
4883 im = gen_rtx_SUBREG (imode, im, 0);
4884 }
4885 im = gen_rtx_NEG (imode, im);
4886 return gen_rtx_CONCAT (mode, re, im);
4887 }
4888
4889 case ADDR_EXPR:
4890 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4891 if (!op0 || !MEM_P (op0))
4892 {
4893 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4894 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4895 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4896 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4897 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4898 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4899
4900 if (handled_component_p (TREE_OPERAND (exp, 0)))
4901 {
4902 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4903 bool reverse;
4904 tree decl
4905 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4906 &bitsize, &maxsize, &reverse);
4907 if ((VAR_P (decl)
4908 || TREE_CODE (decl) == PARM_DECL
4909 || TREE_CODE (decl) == RESULT_DECL)
4910 && (!TREE_ADDRESSABLE (decl)
4911 || target_for_debug_bind (decl))
4912 && (bitoffset % BITS_PER_UNIT) == 0
4913 && bitsize > 0
4914 && bitsize == maxsize)
4915 {
4916 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4917 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4918 }
4919 }
4920
4921 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4922 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4923 == ADDR_EXPR)
4924 {
4925 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4926 0));
4927 if (op0 != NULL
4928 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4929 || (GET_CODE (op0) == PLUS
4930 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4931 && CONST_INT_P (XEXP (op0, 1)))))
4932 {
4933 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4934 1));
4935 if (!op1 || !CONST_INT_P (op1))
4936 return NULL;
4937
4938 return plus_constant (mode, op0, INTVAL (op1));
4939 }
4940 }
4941
4942 return NULL;
4943 }
4944
4945 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4946 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4947 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
4948
4949 return op0;
4950
4951 case VECTOR_CST:
4952 {
4953 unsigned i, nelts;
4954
4955 nelts = VECTOR_CST_NELTS (exp);
4956 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4957
4958 for (i = 0; i < nelts; ++i)
4959 {
4960 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4961 if (!op1)
4962 return NULL;
4963 XVECEXP (op0, 0, i) = op1;
4964 }
4965
4966 return op0;
4967 }
4968
4969 case CONSTRUCTOR:
4970 if (TREE_CLOBBER_P (exp))
4971 return NULL;
4972 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4973 {
4974 unsigned i;
4975 tree val;
4976
4977 op0 = gen_rtx_CONCATN
4978 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4979
4980 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4981 {
4982 op1 = expand_debug_expr (val);
4983 if (!op1)
4984 return NULL;
4985 XVECEXP (op0, 0, i) = op1;
4986 }
4987
4988 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4989 {
4990 op1 = expand_debug_expr
4991 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4992
4993 if (!op1)
4994 return NULL;
4995
4996 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4997 XVECEXP (op0, 0, i) = op1;
4998 }
4999
5000 return op0;
5001 }
5002 else
5003 goto flag_unsupported;
5004
5005 case CALL_EXPR:
5006 /* ??? Maybe handle some builtins? */
5007 return NULL;
5008
5009 case SSA_NAME:
5010 {
5011 gimple *g = get_gimple_for_ssa_name (exp);
5012 if (g)
5013 {
5014 tree t = NULL_TREE;
5015 if (deep_ter_debug_map)
5016 {
5017 tree *slot = deep_ter_debug_map->get (exp);
5018 if (slot)
5019 t = *slot;
5020 }
5021 if (t == NULL_TREE)
5022 t = gimple_assign_rhs_to_tree (g);
5023 op0 = expand_debug_expr (t);
5024 if (!op0)
5025 return NULL;
5026 }
5027 else
5028 {
5029 /* If this is a reference to the incoming value of a
5030 parameter, and that incoming value is never used
5031 anywhere in the code, use the PARM_DECL's DECL_RTL
5032 if set. */
5033 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5034 && SSA_NAME_VAR (exp)
5035 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5036 && has_zero_uses (exp))
5037 {
5038 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5039 if (op0)
5040 goto adjust_mode;
5041 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5042 if (op0)
5043 goto adjust_mode;
5044 }
5045
5046 int part = var_to_partition (SA.map, exp);
5047
5048 if (part == NO_PARTITION)
5049 return NULL;
5050
5051 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5052
5053 op0 = copy_rtx (SA.partition_to_pseudo[part]);
5054 }
5055 goto adjust_mode;
5056 }
5057
5058 case ERROR_MARK:
5059 return NULL;
5060
5061 /* Vector stuff. For most of these tree codes we don't have corresponding rtl codes. */
5062 case REALIGN_LOAD_EXPR:
5063 case VEC_COND_EXPR:
5064 case VEC_PACK_FIX_TRUNC_EXPR:
5065 case VEC_PACK_SAT_EXPR:
5066 case VEC_PACK_TRUNC_EXPR:
5067 case VEC_UNPACK_FLOAT_HI_EXPR:
5068 case VEC_UNPACK_FLOAT_LO_EXPR:
5069 case VEC_UNPACK_HI_EXPR:
5070 case VEC_UNPACK_LO_EXPR:
5071 case VEC_WIDEN_MULT_HI_EXPR:
5072 case VEC_WIDEN_MULT_LO_EXPR:
5073 case VEC_WIDEN_MULT_EVEN_EXPR:
5074 case VEC_WIDEN_MULT_ODD_EXPR:
5075 case VEC_WIDEN_LSHIFT_HI_EXPR:
5076 case VEC_WIDEN_LSHIFT_LO_EXPR:
5077 case VEC_PERM_EXPR:
5078 case VEC_DUPLICATE_EXPR:
5079 case VEC_SERIES_EXPR:
5080 return NULL;
5081
5082 /* Misc codes. */
5083 case ADDR_SPACE_CONVERT_EXPR:
5084 case FIXED_CONVERT_EXPR:
5085 case OBJ_TYPE_REF:
5086 case WITH_SIZE_EXPR:
5087 case BIT_INSERT_EXPR:
5088 return NULL;
5089
5090 case DOT_PROD_EXPR:
5091 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5092 && SCALAR_INT_MODE_P (mode))
5093 {
5094 op0
5095 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5096 0)))
5097 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5098 inner_mode);
5099 op1
5100 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5101 1)))
5102 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5103 inner_mode);
5104 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5105 return simplify_gen_binary (PLUS, mode, op0, op2);
5106 }
5107 return NULL;
5108
5109 case WIDEN_MULT_EXPR:
5110 case WIDEN_MULT_PLUS_EXPR:
5111 case WIDEN_MULT_MINUS_EXPR:
5112 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5113 && SCALAR_INT_MODE_P (mode))
5114 {
5115 inner_mode = GET_MODE (op0);
5116 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5117 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5118 else
5119 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5120 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5121 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5122 else
5123 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5124 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5125 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5126 return op0;
5127 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5128 return simplify_gen_binary (PLUS, mode, op0, op2);
5129 else
5130 return simplify_gen_binary (MINUS, mode, op2, op0);
5131 }
5132 return NULL;
5133
5134 case MULT_HIGHPART_EXPR:
5135 /* ??? Similar to the above. */
5136 return NULL;
5137
5138 case WIDEN_SUM_EXPR:
5139 case WIDEN_LSHIFT_EXPR:
5140 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5141 && SCALAR_INT_MODE_P (mode))
5142 {
5143 op0
5144 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5145 0)))
5146 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5147 inner_mode);
5148 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5149 ? ASHIFT : PLUS, mode, op0, op1);
5150 }
5151 return NULL;
5152
5153 case FMA_EXPR:
5154 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5155
5156 default:
5157 flag_unsupported:
5158 if (flag_checking)
5159 {
5160 debug_tree (exp);
5161 gcc_unreachable ();
5162 }
5163 return NULL;
5164 }
5165 }
5166
5167 /* Return an RTX equivalent to the source bind value of the tree expression
5168 EXP. */
5169
5170 static rtx
5171 expand_debug_source_expr (tree exp)
5172 {
5173 rtx op0 = NULL_RTX;
5174 machine_mode mode = VOIDmode, inner_mode;
5175
5176 switch (TREE_CODE (exp))
5177 {
5178 case PARM_DECL:
5179 {
5180 mode = DECL_MODE (exp);
5181 op0 = expand_debug_parm_decl (exp);
5182 if (op0)
5183 break;
5184 /* See if this isn't an argument that has been completely
5185 optimized out. */
5186 if (!DECL_RTL_SET_P (exp)
5187 && !DECL_INCOMING_RTL (exp)
5188 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5189 {
5190 tree aexp = DECL_ORIGIN (exp);
5191 if (DECL_CONTEXT (aexp)
5192 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5193 {
5194 vec<tree, va_gc> **debug_args;
5195 unsigned int ix;
5196 tree ddecl;
5197 debug_args = decl_debug_args_lookup (current_function_decl);
5198 if (debug_args != NULL)
5199 {
5200 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5201 ix += 2)
5202 if (ddecl == aexp)
5203 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5204 }
5205 }
5206 }
5207 break;
5208 }
5209 default:
5210 break;
5211 }
5212
5213 if (op0 == NULL_RTX)
5214 return NULL_RTX;
5215
5216 inner_mode = GET_MODE (op0);
5217 if (mode == inner_mode)
5218 return op0;
5219
5220 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5221 {
5222 if (GET_MODE_UNIT_BITSIZE (mode)
5223 == GET_MODE_UNIT_BITSIZE (inner_mode))
5224 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5225 else if (GET_MODE_UNIT_BITSIZE (mode)
5226 < GET_MODE_UNIT_BITSIZE (inner_mode))
5227 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5228 else
5229 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5230 }
5231 else if (FLOAT_MODE_P (mode))
5232 gcc_unreachable ();
5233 else if (FLOAT_MODE_P (inner_mode))
5234 {
5235 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5236 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5237 else
5238 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5239 }
5240 else if (GET_MODE_UNIT_PRECISION (mode)
5241 == GET_MODE_UNIT_PRECISION (inner_mode))
5242 op0 = lowpart_subreg (mode, op0, inner_mode);
5243 else if (GET_MODE_UNIT_PRECISION (mode)
5244 < GET_MODE_UNIT_PRECISION (inner_mode))
5245 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5246 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5247 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5248 else
5249 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5250
5251 return op0;
5252 }
5253
5254 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5255 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5256 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
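
/* For illustration (hypothetical): a variable location built from an
   expression such as ((((a * b) + c) * d) + e) * f nests five rtl
   operations; the innermost (mult a b) sits at depth 4, so it is split
   off into a new DEBUG_EXPR D#1 whose value is bound by a DEBUG_INSN
   emitted just before INSN, and the outer location then refers to
   D#1.  */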
5257
5258 static void
5259 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5260 {
5261 rtx exp = *exp_p;
5262
5263 if (exp == NULL_RTX)
5264 return;
5265
5266 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5267 return;
5268
5269 if (depth == 4)
5270 {
5271 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5272 rtx dval = make_debug_expr_from_rtl (exp);
5273
5274 /* Emit a debug bind insn before INSN. */
5275 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5276 DEBUG_EXPR_TREE_DECL (dval), exp,
5277 VAR_INIT_STATUS_INITIALIZED);
5278
5279 emit_debug_insn_before (bind, insn);
5280 *exp_p = dval;
5281 return;
5282 }
5283
5284 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5285 int i, j;
5286 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5287 switch (*format_ptr++)
5288 {
5289 case 'e':
5290 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5291 break;
5292
5293 case 'E':
5294 case 'V':
5295 for (j = 0; j < XVECLEN (exp, i); j++)
5296 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5297 break;
5298
5299 default:
5300 break;
5301 }
5302 }
5303
5304 /* Expand the _LOCs in debug insns. We run this after expanding all
5305 regular insns, so that any variables referenced in the function
5306 will have their DECL_RTLs set. */
5307
5308 static void
5309 expand_debug_locations (void)
5310 {
5311 rtx_insn *insn;
5312 rtx_insn *last = get_last_insn ();
5313 int save_strict_alias = flag_strict_aliasing;
5314
5315 /* New alias sets while setting up memory attributes cause
5316 -fcompare-debug failures, even though they don't bring about any
5317 codegen changes. */
5318 flag_strict_aliasing = 0;
5319
5320 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5321 if (DEBUG_BIND_INSN_P (insn))
5322 {
5323 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5324 rtx val;
5325 rtx_insn *prev_insn, *insn2;
5326 machine_mode mode;
5327
5328 if (value == NULL_TREE)
5329 val = NULL_RTX;
5330 else
5331 {
5332 if (INSN_VAR_LOCATION_STATUS (insn)
5333 == VAR_INIT_STATUS_UNINITIALIZED)
5334 val = expand_debug_source_expr (value);
5335 /* The avoid_deep_ter_for_debug function inserts
5336 debug bind stmts after an SSA_NAME definition, with the
5337 SSA_NAME as the whole bind location. Temporarily disable
5338 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5339 being defined in this DEBUG_INSN. */
5340 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5341 {
5342 tree *slot = deep_ter_debug_map->get (value);
5343 if (slot)
5344 {
5345 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5346 *slot = NULL_TREE;
5347 else
5348 slot = NULL;
5349 }
5350 val = expand_debug_expr (value);
5351 if (slot)
5352 *slot = INSN_VAR_LOCATION_DECL (insn);
5353 }
5354 else
5355 val = expand_debug_expr (value);
5356 gcc_assert (last == get_last_insn ());
5357 }
5358
5359 if (!val)
5360 val = gen_rtx_UNKNOWN_VAR_LOC ();
5361 else
5362 {
5363 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5364
5365 gcc_assert (mode == GET_MODE (val)
5366 || (GET_MODE (val) == VOIDmode
5367 && (CONST_SCALAR_INT_P (val)
5368 || GET_CODE (val) == CONST_FIXED
5369 || GET_CODE (val) == LABEL_REF)));
5370 }
5371
5372 INSN_VAR_LOCATION_LOC (insn) = val;
5373 prev_insn = PREV_INSN (insn);
5374 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5375 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5376 }
5377
5378 flag_strict_aliasing = save_strict_alias;
5379 }
5380
5381 /* Swap the operands of commutative operations so that the more
5382 expensive operand is expanded first. */
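
/* For illustration (hypothetical GIMPLE; costs come from
   estimate_num_insns plus the costs of TERed operand definitions):

	_1 = a_4 + 1;
	_2 = b_5 * c_6;
	_3 = _2 / d_7;
	_8 = _1 + _3;

   Because the defining chain of _3 is costlier than that of _1, the
   commutative addition is rewritten as _8 = _3 + _1 so that the
   expensive operand is expanded first.  */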
5383
5384 static void
5385 reorder_operands (basic_block bb)
5386 {
5387 unsigned int *lattice; /* Hold cost of each statement. */
5388 unsigned int i = 0, n = 0;
5389 gimple_stmt_iterator gsi;
5390 gimple_seq stmts;
5391 gimple *stmt;
5392 bool swap;
5393 tree op0, op1;
5394 ssa_op_iter iter;
5395 use_operand_p use_p;
5396 gimple *def0, *def1;
5397
5398 /* Compute cost of each statement using estimate_num_insns. */
5399 stmts = bb_seq (bb);
5400 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5401 {
5402 stmt = gsi_stmt (gsi);
5403 if (!is_gimple_debug (stmt))
5404 gimple_set_uid (stmt, n++);
5405 }
5406 lattice = XNEWVEC (unsigned int, n);
5407 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5408 {
5409 unsigned cost;
5410 stmt = gsi_stmt (gsi);
5411 if (is_gimple_debug (stmt))
5412 continue;
5413 cost = estimate_num_insns (stmt, &eni_size_weights);
5414 lattice[i] = cost;
5415 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5416 {
5417 tree use = USE_FROM_PTR (use_p);
5418 gimple *def_stmt;
5419 if (TREE_CODE (use) != SSA_NAME)
5420 continue;
5421 def_stmt = get_gimple_for_ssa_name (use);
5422 if (!def_stmt)
5423 continue;
5424 lattice[i] += lattice[gimple_uid (def_stmt)];
5425 }
5426 i++;
5427 if (!is_gimple_assign (stmt)
5428 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5429 continue;
5430 op0 = gimple_op (stmt, 1);
5431 op1 = gimple_op (stmt, 2);
5432 if (TREE_CODE (op0) != SSA_NAME
5433 || TREE_CODE (op1) != SSA_NAME)
5434 continue;
5435 /* Swap operands if the second one is more expensive. */
5436 def0 = get_gimple_for_ssa_name (op0);
5437 def1 = get_gimple_for_ssa_name (op1);
5438 if (!def1)
5439 continue;
5440 swap = false;
5441 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5442 swap = true;
5443 if (swap)
5444 {
5445 if (dump_file && (dump_flags & TDF_DETAILS))
5446 {
5447 fprintf (dump_file, "Swap operands in stmt:\n");
5448 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5449 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5450 def0 ? lattice[gimple_uid (def0)] : 0,
5451 lattice[gimple_uid (def1)]);
5452 }
5453 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5454 gimple_assign_rhs2_ptr (stmt));
5455 }
5456 }
5457 XDELETE (lattice);
5458 }
5459
5460 /* Expand basic block BB from GIMPLE trees to RTL. */
5461
5462 static basic_block
5463 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5464 {
5465 gimple_stmt_iterator gsi;
5466 gimple_seq stmts;
5467 gimple *stmt = NULL;
5468 rtx_note *note = NULL;
5469 rtx_insn *last;
5470 edge e;
5471 edge_iterator ei;
5472
5473 if (dump_file)
5474 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5475 bb->index);
5476
5477 /* Note that since we are now transitioning from GIMPLE to RTL, we
5478 cannot use the gsi_*_bb() routines because they expect the basic
5479 block to be in GIMPLE, instead of RTL. Therefore, we need to
5480 access the BB sequence directly. */
5481 if (optimize)
5482 reorder_operands (bb);
5483 stmts = bb_seq (bb);
5484 bb->il.gimple.seq = NULL;
5485 bb->il.gimple.phi_nodes = NULL;
5486 rtl_profile_for_bb (bb);
5487 init_rtl_bb_info (bb);
5488 bb->flags |= BB_RTL;
5489
5490 /* Remove the RETURN_EXPR if we may fall through to the exit
5491 instead. */
5492 gsi = gsi_last (stmts);
5493 if (!gsi_end_p (gsi)
5494 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5495 {
5496 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5497
5498 gcc_assert (single_succ_p (bb));
5499 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5500
5501 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5502 && !gimple_return_retval (ret_stmt))
5503 {
5504 gsi_remove (&gsi, false);
5505 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5506 }
5507 }
5508
5509 gsi = gsi_start_nondebug (stmts);
5510 if (!gsi_end_p (gsi))
5511 {
5512 stmt = gsi_stmt (gsi);
5513 if (gimple_code (stmt) != GIMPLE_LABEL)
5514 stmt = NULL;
5515 }
5516 gsi = gsi_start (stmts);
5517
5518 gimple *label_stmt = stmt;
5519 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5520
5521 if (stmt || elt)
5522 {
5523 gcc_checking_assert (!note);
5524 last = get_last_insn ();
5525
5526 if (stmt)
5527 {
5528 expand_gimple_stmt (stmt);
5529 if (gsi_stmt (gsi) == stmt)
5530 gsi_next (&gsi);
5531 }
5532
5533 if (elt)
5534 emit_label (*elt);
5535
5536 BB_HEAD (bb) = NEXT_INSN (last);
5537 if (NOTE_P (BB_HEAD (bb)))
5538 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5539 gcc_assert (LABEL_P (BB_HEAD (bb)));
5540 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5541
5542 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5543 }
5544 else
5545 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5546
5547 if (note)
5548 NOTE_BASIC_BLOCK (note) = bb;
5549
5550 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5551 {
5552 basic_block new_bb;
5553
5554 stmt = gsi_stmt (gsi);
5555
5556 if (stmt == label_stmt)
5557 continue;
5558
5559 /* If this statement is a non-debug one, and we generate debug
5560 insns, then this one might be the last real use of a TERed
5561 SSA_NAME, but where there are still some debug uses further
5562 down. Expanding the current SSA name in such further debug
5563 uses by their RHS might lead to wrong debug info, as coalescing
5564 might make the operands of such RHS be placed into the same
5565 pseudo as something else. Like so:
5566 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5567 use(a_1);
5568 a_2 = ...
5569 #DEBUG ... => a_1
5570 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5571 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5572 the write to a_2 would actually have clobbered the place which
5573 formerly held a_0.
5574
5575 So, instead of that, we recognize the situation, and generate
5576 debug temporaries at the last real use of TERed SSA names:
5577 a_1 = a_0 + 1;
5578 #DEBUG #D1 => a_1
5579 use(a_1);
5580 a_2 = ...
5581 #DEBUG ... => #D1
5582 */
5583 if (MAY_HAVE_DEBUG_BIND_INSNS
5584 && SA.values
5585 && !is_gimple_debug (stmt))
5586 {
5587 ssa_op_iter iter;
5588 tree op;
5589 gimple *def;
5590
5591 location_t sloc = curr_insn_location ();
5592
5593 /* Look for SSA names that have their last use here (TERed
5594 names always have only one real use). */
5595 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5596 if ((def = get_gimple_for_ssa_name (op)))
5597 {
5598 imm_use_iterator imm_iter;
5599 use_operand_p use_p;
5600 bool have_debug_uses = false;
5601
5602 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5603 {
5604 if (gimple_debug_bind_p (USE_STMT (use_p)))
5605 {
5606 have_debug_uses = true;
5607 break;
5608 }
5609 }
5610
5611 if (have_debug_uses)
5612 {
5613 /* OP is a TERed SSA name, with DEF its defining
5614 statement, and where OP is used in further debug
5615 instructions. Generate a debug temporary, and
5616 replace all uses of OP in debug insns with that
5617 temporary. */
5618 gimple *debugstmt;
5619 tree value = gimple_assign_rhs_to_tree (def);
5620 tree vexpr = make_node (DEBUG_EXPR_DECL);
5621 rtx val;
5622 machine_mode mode;
5623
5624 set_curr_insn_location (gimple_location (def));
5625
5626 DECL_ARTIFICIAL (vexpr) = 1;
5627 TREE_TYPE (vexpr) = TREE_TYPE (value);
5628 if (DECL_P (value))
5629 mode = DECL_MODE (value);
5630 else
5631 mode = TYPE_MODE (TREE_TYPE (value));
5632 SET_DECL_MODE (vexpr, mode);
5633
5634 val = gen_rtx_VAR_LOCATION
5635 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5636
5637 emit_debug_insn (val);
5638
5639 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5640 {
5641 if (!gimple_debug_bind_p (debugstmt))
5642 continue;
5643
5644 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5645 SET_USE (use_p, vexpr);
5646
5647 update_stmt (debugstmt);
5648 }
5649 }
5650 }
5651 set_curr_insn_location (sloc);
5652 }
5653
5654 currently_expanding_gimple_stmt = stmt;
5655
5656 /* Expand this statement, then evaluate the resulting RTL and
5657 fixup the CFG accordingly. */
5658 if (gimple_code (stmt) == GIMPLE_COND)
5659 {
5660 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5661 if (new_bb)
5662 return new_bb;
5663 }
5664 else if (is_gimple_debug (stmt))
5665 {
5666 location_t sloc = curr_insn_location ();
5667 gimple_stmt_iterator nsi = gsi;
5668
5669 for (;;)
5670 {
5671 tree var;
5672 tree value = NULL_TREE;
5673 rtx val = NULL_RTX;
5674 machine_mode mode;
5675
5676 if (!gimple_debug_nonbind_marker_p (stmt))
5677 {
5678 if (gimple_debug_bind_p (stmt))
5679 {
5680 var = gimple_debug_bind_get_var (stmt);
5681
5682 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5683 && TREE_CODE (var) != LABEL_DECL
5684 && !target_for_debug_bind (var))
5685 goto delink_debug_stmt;
5686
5687 if (DECL_P (var))
5688 mode = DECL_MODE (var);
5689 else
5690 mode = TYPE_MODE (TREE_TYPE (var));
5691
5692 if (gimple_debug_bind_has_value_p (stmt))
5693 value = gimple_debug_bind_get_value (stmt);
5694
5695 val = gen_rtx_VAR_LOCATION
5696 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5697 }
5698 else if (gimple_debug_source_bind_p (stmt))
5699 {
5700 var = gimple_debug_source_bind_get_var (stmt);
5701
5702 value = gimple_debug_source_bind_get_value (stmt);
5703
5704 mode = DECL_MODE (var);
5705
5706 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5707 VAR_INIT_STATUS_UNINITIALIZED);
5708 }
5709 else
5710 gcc_unreachable ();
5711 }
5712 /* If this function was first compiled with markers
5713 enabled, but they're now disabled (e.g. LTO), drop
5714 them on the floor. */
5715 else if (gimple_debug_nonbind_marker_p (stmt)
5716 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5717 goto delink_debug_stmt;
5718 else if (gimple_debug_begin_stmt_p (stmt))
5719 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5720 else
5721 gcc_unreachable ();
5722
5723 last = get_last_insn ();
5724
5725 set_curr_insn_location (gimple_location (stmt));
5726
5727 emit_debug_insn (val);
5728
5729 if (dump_file && (dump_flags & TDF_DETAILS))
5730 {
5731 /* We can't dump the insn with a TREE where an RTX
5732 is expected. */
5733 if (GET_CODE (val) == VAR_LOCATION)
5734 {
5735 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5736 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5737 }
5738 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5739 if (GET_CODE (val) == VAR_LOCATION)
5740 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5741 }
5742
5743 delink_debug_stmt:
5744 /* In order not to generate too many debug temporaries,
5745 we delink all uses of debug statements we already expanded.
5746 Therefore debug statements between definition and real
5747 use of TERed SSA names will continue to use the SSA name,
5748 and not be replaced with debug temps. */
5749 delink_stmt_imm_use (stmt);
5750
5751 gsi = nsi;
5752 gsi_next (&nsi);
5753 if (gsi_end_p (nsi))
5754 break;
5755 stmt = gsi_stmt (nsi);
5756 if (!is_gimple_debug (stmt))
5757 break;
5758 }
5759
5760 set_curr_insn_location (sloc);
5761 }
5762 else
5763 {
5764 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5765 if (call_stmt
5766 && gimple_call_tail_p (call_stmt)
5767 && disable_tail_calls)
5768 gimple_call_set_tail (call_stmt, false);
5769
5770 if (call_stmt && gimple_call_tail_p (call_stmt))
5771 {
5772 bool can_fallthru;
5773 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5774 if (new_bb)
5775 {
5776 if (can_fallthru)
5777 bb = new_bb;
5778 else
5779 return new_bb;
5780 }
5781 }
5782 else
5783 {
5784 def_operand_p def_p;
5785 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5786
5787 if (def_p != NULL)
5788 {
5789 /* Ignore this stmt if it is in the list of
5790 replaceable expressions. */
5791 if (SA.values
5792 && bitmap_bit_p (SA.values,
5793 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5794 continue;
5795 }
5796 last = expand_gimple_stmt (stmt);
5797 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5798 }
5799 }
5800 }
5801
5802 currently_expanding_gimple_stmt = NULL;
5803
5804 /* Expand implicit goto and convert goto_locus. */
5805 FOR_EACH_EDGE (e, ei, bb->succs)
5806 {
5807 if (e->goto_locus != UNKNOWN_LOCATION)
5808 set_curr_insn_location (e->goto_locus);
5809 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5810 {
5811 emit_jump (label_rtx_for_bb (e->dest));
5812 e->flags &= ~EDGE_FALLTHRU;
5813 }
5814 }
5815
5816 /* Expanded RTL can create a jump in the last instruction of the block.
5817 Such a jump might later be assumed to be a jump to the successor and
5818 break edge insertion.  We need to insert a dummy move to prevent this.  PR41440. */
5819 if (single_succ_p (bb)
5820 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5821 && (last = get_last_insn ())
5822 && (JUMP_P (last)
5823 || (DEBUG_INSN_P (last)
5824 && JUMP_P (prev_nondebug_insn (last)))))
5825 {
5826 rtx dummy = gen_reg_rtx (SImode);
5827 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5828 }
5829
5830 do_pending_stack_adjust ();
5831
5832 /* Find the block tail. The last insn in the block is the insn
5833 before a barrier and/or table jump insn. */
5834 last = get_last_insn ();
5835 if (BARRIER_P (last))
5836 last = PREV_INSN (last);
5837 if (JUMP_TABLE_DATA_P (last))
5838 last = PREV_INSN (PREV_INSN (last));
5839 BB_END (bb) = last;
5840
5841 update_bb_for_insn (bb);
5842
5843 return bb;
5844 }
5845
5846
5847 /* Create a basic block for initialization code. */
5848
5849 static basic_block
5850 construct_init_block (void)
5851 {
5852 basic_block init_block, first_block;
5853 edge e = NULL;
5854 int flags;
5855
5856 /* Multiple entry points not supported yet. */
5857 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5858 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5859 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5860 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5861 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5862
5863 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5864
5865 /* When the entry edge points to the first basic block, we don't need a
5866 jump; otherwise we have to jump to the proper target. */
5867 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5868 {
5869 tree label = gimple_block_label (e->dest);
5870
5871 emit_jump (jump_target_rtx (label));
5872 flags = 0;
5873 }
5874 else
5875 flags = EDGE_FALLTHRU;
5876
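/* Everything emitted so far, from just after the initial note through the
   last insn, becomes the init block, placed right after the entry block.  */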
5877 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5878 get_last_insn (),
5879 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5880 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5881 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5882 if (e)
5883 {
5884 first_block = e->dest;
5885 redirect_edge_succ (e, init_block);
5886 e = make_single_succ_edge (init_block, first_block, flags);
5887 }
5888 else
5889 e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5890 EDGE_FALLTHRU);
5891
5892 update_bb_for_insn (init_block);
5893 return init_block;
5894 }
5895
5896 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5897 found in the block tree. */
5898
5899 static void
5900 set_block_levels (tree block, int level)
5901 {
5902 while (block)
5903 {
5904 BLOCK_NUMBER (block) = level;
5905 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5906 block = BLOCK_CHAIN (block);
5907 }
5908 }
5909
5910 /* Create a block containing landing pads and similar stuff. */
5911
5912 static void
5913 construct_exit_block (void)
5914 {
5915 rtx_insn *head = get_last_insn ();
5916 rtx_insn *end;
5917 basic_block exit_block;
5918 edge e, e2;
5919 unsigned ix;
5920 edge_iterator ei;
5921 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5922 rtx_insn *orig_end = BB_END (prev_bb);
5923
5924 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5925
5926 /* Make sure the locus is set to the end of the function, so that
5927 epilogue line numbers and warnings are set properly. */
5928 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5929 input_location = cfun->function_end_locus;
5930
5931 /* Generate rtl for function exit. */
5932 expand_function_end ();
5933
5934 end = get_last_insn ();
5935 if (head == end)
5936 return;
5937 /* While emitting the function end we could have moved the end of the last
5938 basic block. */
5939 BB_END (prev_bb) = orig_end;
5940 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5941 head = NEXT_INSN (head);
5942 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5943 bb count accounting will be confused.  Any instructions before that
5944 label are emitted for the case where PREV_BB falls through into the
5945 exit block, so append those instructions to prev_bb in that case. */
5946 if (NEXT_INSN (head) != return_label)
5947 {
5948 while (NEXT_INSN (head) != return_label)
5949 {
5950 if (!NOTE_P (NEXT_INSN (head)))
5951 BB_END (prev_bb) = NEXT_INSN (head);
5952 head = NEXT_INSN (head);
5953 }
5954 }
5955 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5956 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5957 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5958
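/* Redirect every non-abnormal edge that used to enter the exit block to the
   newly created exit_block; abnormal edges keep their original destination.  */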
5959 ix = 0;
5960 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5961 {
5962 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5963 if (!(e->flags & EDGE_ABNORMAL))
5964 redirect_edge_succ (e, exit_block);
5965 else
5966 ix++;
5967 }
5968
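/* The new exit_block falls through into the real exit block.  Since abnormal
   predecessors of the exit block were not redirected, remove their counts
   from exit_block so its count reflects only the fallthru path.  */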
5969 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5970 EDGE_FALLTHRU);
5971 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5972 if (e2 != e)
5973 {
5974 exit_block->count -= e2->count ();
5975 }
5976 update_bb_for_insn (exit_block);
5977 }
5978
5979 /* Helper function for discover_nonconstant_array_refs.
5980 Look for ARRAY_REF nodes with non-constant indexes and mark them
5981 addressable. */
5982
5983 static tree
5984 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5985 void *data ATTRIBUTE_UNUSED)
5986 {
5987 tree t = *tp;
5988
5989 if (IS_TYPE_OR_DECL_P (t))
5990 *walk_subtrees = 0;
5991 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5992 {
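/* Peel off reference components whose offsets are known to be invariant;
   if what remains is still an ARRAY_REF with a variable index, its base
   will have to live in memory.  */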
5993 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5994 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5995 && (!TREE_OPERAND (t, 2)
5996 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5997 || (TREE_CODE (t) == COMPONENT_REF
5998 && (!TREE_OPERAND (t,2)
5999 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6000 || TREE_CODE (t) == BIT_FIELD_REF
6001 || TREE_CODE (t) == REALPART_EXPR
6002 || TREE_CODE (t) == IMAGPART_EXPR
6003 || TREE_CODE (t) == VIEW_CONVERT_EXPR
6004 || CONVERT_EXPR_P (t))
6005 t = TREE_OPERAND (t, 0);
6006
6007 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6008 {
6009 t = get_base_address (t);
6010 if (t && DECL_P (t)
6011 && DECL_MODE (t) != BLKmode)
6012 TREE_ADDRESSABLE (t) = 1;
6013 }
6014
6015 *walk_subtrees = 0;
6016 }
6017
6018 return NULL_TREE;
6019 }
6020
6021 /* RTL expansion is not able to compile array references with variable
6022 offsets for arrays stored in a single register.  Discover such
6023 expressions and mark variables as addressable to avoid this
6024 scenario. */
6025
6026 static void
6027 discover_nonconstant_array_refs (void)
6028 {
6029 basic_block bb;
6030 gimple_stmt_iterator gsi;
6031
6032 FOR_EACH_BB_FN (bb, cfun)
6033 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6034 {
6035 gimple *stmt = gsi_stmt (gsi);
6036 if (!is_gimple_debug (stmt))
6037 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6038 }
6039 }
6040
6041 /* This function sets crtl->args.internal_arg_pointer to a virtual
6042 register if DRAP is needed.  The local register allocator will replace
6043 virtual_incoming_args_rtx with the virtual register. */
6044
6045 static void
6046 expand_stack_alignment (void)
6047 {
6048 rtx drap_rtx;
6049 unsigned int preferred_stack_boundary;
6050
6051 if (! SUPPORTS_STACK_ALIGNMENT)
6052 return;
6053
6054 if (cfun->calls_alloca
6055 || cfun->has_nonlocal_label
6056 || crtl->has_nonlocal_goto)
6057 crtl->need_drap = true;
6058
6059 /* Call update_stack_boundary here again to update incoming stack
6060 boundary. It may set incoming stack alignment to a different
6061 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6062 use the minimum incoming stack alignment to check if it is OK
6063 to perform sibcall optimization since sibcall optimization will
6064 only align the outgoing stack to incoming stack boundary. */
6065 if (targetm.calls.update_stack_boundary)
6066 targetm.calls.update_stack_boundary ();
6067
6068 /* The incoming stack frame has to be aligned at least at
6069 parm_stack_boundary. */
6070 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6071
6072 /* Update crtl->stack_alignment_estimated and use it later to align
6073 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6074 exceptions since callgraph doesn't collect incoming stack alignment
6075 in this case. */
6076 if (cfun->can_throw_non_call_exceptions
6077 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6078 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6079 else
6080 preferred_stack_boundary = crtl->preferred_stack_boundary;
6081 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6082 crtl->stack_alignment_estimated = preferred_stack_boundary;
6083 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6084 crtl->stack_alignment_needed = preferred_stack_boundary;
6085
6086 gcc_assert (crtl->stack_alignment_needed
6087 <= crtl->stack_alignment_estimated);
6088
6089 crtl->stack_realign_needed
6090 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6091 crtl->stack_realign_tried = crtl->stack_realign_needed;
6092
6093 crtl->stack_realign_processed = true;
6094
6095 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6096 alignment. */
6097 gcc_assert (targetm.calls.get_drap_rtx != NULL);
6098 drap_rtx = targetm.calls.get_drap_rtx ();
6099
6100 /* stack_realign_drap and drap_rtx must match. */
6101 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6102
6103 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6104 if (drap_rtx != NULL)
6105 {
6106 crtl->args.internal_arg_pointer = drap_rtx;
6107
6108 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6109 needed. */
6110 fixup_tail_calls ();
6111 }
6112 }
6113 \f
6114
6115 static void
6116 expand_main_function (void)
6117 {
6118 #if (defined(INVOKE__main) \
6119 || (!defined(HAS_INIT_SECTION) \
6120 && !defined(INIT_SECTION_ASM_OP) \
6121 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6122 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6123 #endif
6124 }
6125 \f
6126
6127 /* Expand code to initialize the stack_protect_guard. This is invoked at
6128 the beginning of a function to be protected. */
6129
6130 static void
6131 stack_protect_prologue (void)
6132 {
6133 tree guard_decl = targetm.stack_protect_guard ();
6134 rtx x, y;
6135
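/* X is the stack slot holding the canary; Y is the value to seed it with,
   taken from the target's guard variable when the target provides one.  */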
6136 x = expand_normal (crtl->stack_protect_guard);
6137 if (guard_decl)
6138 y = expand_normal (guard_decl);
6139 else
6140 y = const0_rtx;
6141
6142 /* Allow the target to copy from Y to X without leaking Y into a
6143 register. */
6144 if (targetm.have_stack_protect_set ())
6145 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6146 {
6147 emit_insn (insn);
6148 return;
6149 }
6150
6151 /* Otherwise do a straight move. */
6152 emit_move_insn (x, y);
6153 }
6154
6155 /* Translate the intermediate representation contained in the CFG
6156 from GIMPLE trees to RTL.
6157
6158 We do conversion per basic block and preserve/update the tree CFG.
6159 This implies we have to do some magic as the CFG can simultaneously
6160 consist of basic blocks containing RTL and GIMPLE trees. This can
6161 confuse the CFG hooks, so be careful to not manipulate CFG during
6162 the expansion. */
6163
6164 namespace {
6165
6166 const pass_data pass_data_expand =
6167 {
6168 RTL_PASS, /* type */
6169 "expand", /* name */
6170 OPTGROUP_NONE, /* optinfo_flags */
6171 TV_EXPAND, /* tv_id */
6172 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6173 | PROP_gimple_lcx
6174 | PROP_gimple_lvec
6175 | PROP_gimple_lva), /* properties_required */
6176 PROP_rtl, /* properties_provided */
6177 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6178 0, /* todo_flags_start */
6179 0, /* todo_flags_finish */
6180 };
6181
6182 class pass_expand : public rtl_opt_pass
6183 {
6184 public:
6185 pass_expand (gcc::context *ctxt)
6186 : rtl_opt_pass (pass_data_expand, ctxt)
6187 {}
6188
6189 /* opt_pass methods: */
6190 virtual unsigned int execute (function *);
6191
6192 }; // class pass_expand
6193
6194 unsigned int
6195 pass_expand::execute (function *fun)
6196 {
6197 basic_block bb, init_block;
6198 edge_iterator ei;
6199 edge e;
6200 rtx_insn *var_seq, *var_ret_seq;
6201 unsigned i;
6202
6203 timevar_push (TV_OUT_OF_SSA);
6204 rewrite_out_of_ssa (&SA);
6205 timevar_pop (TV_OUT_OF_SSA);
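/* Allocate the array mapping each out-of-SSA partition to the rtx (usually
   a pseudo) it will be expanded to.  */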
6206 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6207
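/* With TER enabled, limit how deeply replaceable expressions are substituted
   into debug bind statements (see avoid_deep_ter_for_debug and
   deep_ter_debug_map).  */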
6208 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6209 {
6210 gimple_stmt_iterator gsi;
6211 FOR_EACH_BB_FN (bb, cfun)
6212 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6213 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6214 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6215 }
6216
6217 /* Make sure all values used by the optimization passes have sane
6218 defaults. */
6219 reg_renumber = 0;
6220
6221 /* Some backends want to know that we are expanding to RTL. */
6222 currently_expanding_to_rtl = 1;
6223 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6224 free_dominance_info (CDI_DOMINATORS);
6225
6226 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6227
6228 if (chkp_function_instrumented_p (current_function_decl))
6229 chkp_reset_rtl_bounds ();
6230
6231 insn_locations_init ();
6232 if (!DECL_IS_BUILTIN (current_function_decl))
6233 {
6234 /* Eventually, all FEs should explicitly set function_start_locus. */
6235 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6236 set_curr_insn_location
6237 (DECL_SOURCE_LOCATION (current_function_decl));
6238 else
6239 set_curr_insn_location (fun->function_start_locus);
6240 }
6241 else
6242 set_curr_insn_location (UNKNOWN_LOCATION);
6243 prologue_location = curr_insn_location ();
6244
6245 #ifdef INSN_SCHEDULING
6246 init_sched_attrs ();
6247 #endif
6248
6249 /* Make sure the first insn is a note even if we don't want linenums.
6250 This makes sure the first insn will never be deleted.
6251 Also, final expects a note to appear there. */
6252 emit_note (NOTE_INSN_DELETED);
6253
6254 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6255 discover_nonconstant_array_refs ();
6256
6257 targetm.expand_to_rtl_hook ();
6258 crtl->init_stack_alignment ();
6259 fun->cfg->max_jumptable_ents = 0;
6260
6261 /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
6262 of the function section at expansion time to predict the distance of calls. */
6263 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6264
6265 /* Expand the variables recorded during gimple lowering. */
6266 timevar_push (TV_VAR_EXPAND);
6267 start_sequence ();
6268
6269 var_ret_seq = expand_used_vars ();
6270
6271 var_seq = get_insns ();
6272 end_sequence ();
6273 timevar_pop (TV_VAR_EXPAND);
6274
6275 /* Honor stack protection warnings. */
6276 if (warn_stack_protect)
6277 {
6278 if (fun->calls_alloca)
6279 warning (OPT_Wstack_protector,
6280 "stack protector not protecting local variables: "
6281 "variable length buffer");
6282 if (has_short_buffer && !crtl->stack_protect_guard)
6283 warning (OPT_Wstack_protector,
6284 "stack protector not protecting function: "
6285 "all local arrays are less than %d bytes long",
6286 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6287 }
6288
6289 /* Set up parameters and prepare for return, for the function. */
6290 expand_function_start (current_function_decl);
6291
6292 /* If we emitted any instructions for setting up the variables,
6293 emit them before the FUNCTION_START note. */
6294 if (var_seq)
6295 {
6296 emit_insn_before (var_seq, parm_birth_insn);
6297
6298 /* In expand_function_end we'll insert the alloca save/restore
6299 before parm_birth_insn.  We've just inserted an alloca call.
6300 Adjust the pointer to match. */
6301 parm_birth_insn = var_seq;
6302 }
6303
6304 /* Now propagate the RTL assignment of each partition to the
6305 underlying var of each SSA_NAME. */
6306 tree name;
6307
6308 FOR_EACH_SSA_NAME (i, name, cfun)
6309 {
6310 /* We might have generated new SSA names in
6311 update_alias_info_with_stack_vars. They will have a NULL
6312 defining statement, and won't be part of the partitioning,
6313 so ignore those. */
6314 if (!SSA_NAME_DEF_STMT (name))
6315 continue;
6316
6317 adjust_one_expanded_partition_var (name);
6318 }
6319
6320 /* Clean up RTL of variables that straddle across multiple
6321 partitions, and check that the rtl of any PARM_DECLs that are not
6322 cleaned up is that of their default defs. */
6323 FOR_EACH_SSA_NAME (i, name, cfun)
6324 {
6325 int part;
6326
6327 /* We might have generated new SSA names in
6328 update_alias_info_with_stack_vars. They will have a NULL
6329 defining statement, and won't be part of the partitioning,
6330 so ignore those. */
6331 if (!SSA_NAME_DEF_STMT (name))
6332 continue;
6333 part = var_to_partition (SA.map, name);
6334 if (part == NO_PARTITION)
6335 continue;
6336
6337 /* If this decl was marked as living in multiple places, reset
6338 this now to NULL. */
6339 tree var = SSA_NAME_VAR (name);
6340 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6341 SET_DECL_RTL (var, NULL);
6342 /* Check that the pseudos chosen by assign_parms are those of
6343 the corresponding default defs. */
6344 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6345 && (TREE_CODE (var) == PARM_DECL
6346 || TREE_CODE (var) == RESULT_DECL))
6347 {
6348 rtx in = DECL_RTL_IF_SET (var);
6349 gcc_assert (in);
6350 rtx out = SA.partition_to_pseudo[part];
6351 gcc_assert (in == out);
6352
6353 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6354 those expected by debug backends for each parm and for
6355 the result. This is particularly important for stabs,
6356 whose register elimination from parm's DECL_RTL may cause
6357 -fcompare-debug differences as SET_DECL_RTL changes reg's
6358 attrs. So, make sure the RTL already has the parm as the
6359 EXPR, so that it won't change. */
6360 SET_DECL_RTL (var, NULL_RTX);
6361 if (MEM_P (in))
6362 set_mem_attributes (in, var, true);
6363 SET_DECL_RTL (var, in);
6364 }
6365 }
6366
6367 /* If this function is `main', emit a call to `__main'
6368 to run global initializers, etc. */
6369 if (DECL_NAME (current_function_decl)
6370 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6371 && DECL_FILE_SCOPE_P (current_function_decl))
6372 expand_main_function ();
6373
6374 /* Initialize the stack_protect_guard field. This must happen after the
6375 call to __main (if any) so that the external decl is initialized. */
6376 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6377 stack_protect_prologue ();
6378
6379 expand_phi_nodes (&SA);
6380
6381 /* Release any stale SSA redirection data. */
6382 redirect_edge_var_map_empty ();
6383
6384 /* Register rtl specific functions for cfg. */
6385 rtl_register_cfg_hooks ();
6386
6387 init_block = construct_init_block ();
6388
6389 /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared from the
6390 remaining edges later. */
6391 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6392 e->flags &= ~EDGE_EXECUTABLE;
6393
6394 /* If the function has too many markers, drop them while expanding. */
6395 if (cfun->debug_marker_count
6396 >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6397 cfun->debug_nonbind_markers = false;
6398
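/* Expand all basic blocks between the init block and the exit block.
   expand_gimple_basic_block may split the current block (for conditions and
   tail calls) and returns the block from which to continue.  */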
6399 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6400 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6401 next_bb)
6402 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6403
6404 if (MAY_HAVE_DEBUG_BIND_INSNS)
6405 expand_debug_locations ();
6406
6407 if (deep_ter_debug_map)
6408 {
6409 delete deep_ter_debug_map;
6410 deep_ter_debug_map = NULL;
6411 }
6412
6413 /* Free stuff we no longer need after GIMPLE optimizations. */
6414 free_dominance_info (CDI_DOMINATORS);
6415 free_dominance_info (CDI_POST_DOMINATORS);
6416 delete_tree_cfg_annotations (fun);
6417
6418 timevar_push (TV_OUT_OF_SSA);
6419 finish_out_of_ssa (&SA);
6420 timevar_pop (TV_OUT_OF_SSA);
6421
6422 timevar_push (TV_POST_EXPAND);
6423 /* We are no longer in SSA form. */
6424 fun->gimple_df->in_ssa_p = false;
6425 loops_state_clear (LOOP_CLOSED_SSA);
6426
6427 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6428 conservatively to true until they are all profile aware. */
6429 delete lab_rtx_for_bb;
6430 free_histograms (fun);
6431
6432 construct_exit_block ();
6433 insn_locations_finalize ();
6434
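/* If expand_used_vars queued a sequence to be run on function return, emit
   it right after the return label (after the basic block note, if one
   immediately follows the label).  */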
6435 if (var_ret_seq)
6436 {
6437 rtx_insn *after = return_label;
6438 rtx_insn *next = NEXT_INSN (after);
6439 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6440 after = next;
6441 emit_insn_after (var_ret_seq, after);
6442 }
6443
6444 /* Zap the tree EH table. */
6445 set_eh_throw_stmt_table (fun, NULL);
6446
6447 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6448 split edges which edge insertions might do. */
6449 rebuild_jump_labels (get_insns ());
6450
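/* Commit any instruction sequences still queued on edges (e.g. copies
   inserted by PHI elimination).  Sequences on the entry edge are emitted
   around parm_birth_insn instead of splitting that edge.  */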
6451 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6452 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6453 {
6454 edge e;
6455 edge_iterator ei;
6456 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6457 {
6458 if (e->insns.r)
6459 {
6460 rebuild_jump_labels_chain (e->insns.r);
6461 /* Put insns after parm birth, but before
6462 NOTE_INSN_FUNCTION_BEG. */
6463 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6464 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6465 {
6466 rtx_insn *insns = e->insns.r;
6467 e->insns.r = NULL;
6468 if (NOTE_P (parm_birth_insn)
6469 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6470 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6471 else
6472 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6473 }
6474 else
6475 commit_one_edge_insertion (e);
6476 }
6477 else
6478 ei_next (&ei);
6479 }
6480 }
6481
6482 /* We're done expanding trees to RTL. */
6483 currently_expanding_to_rtl = 0;
6484
6485 flush_mark_addressable_queue ();
6486
6487 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6488 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6489 {
6490 edge e;
6491 edge_iterator ei;
6492 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6493 {
6494 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6495 e->flags &= ~EDGE_EXECUTABLE;
6496
6497 /* At the moment not all abnormal edges match the RTL
6498 representation. It is safe to remove them here as
6499 find_many_sub_basic_blocks will rediscover them.
6500 In the future we should get this fixed properly. */
6501 if ((e->flags & EDGE_ABNORMAL)
6502 && !(e->flags & EDGE_SIBCALL))
6503 remove_edge (e);
6504 else
6505 ei_next (&ei);
6506 }
6507 }
6508
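/* Expansion may have created new control flow inside the blocks; split them
   into proper sub basic blocks and get rid of edges that became dead in the
   process.  */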
6509 auto_sbitmap blocks (last_basic_block_for_fn (fun));
6510 bitmap_ones (blocks);
6511 find_many_sub_basic_blocks (blocks);
6512 purge_all_dead_edges ();
6513
6514 expand_stack_alignment ();
6515
6516 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6517 function. */
6518 if (crtl->tail_call_emit)
6519 fixup_tail_calls ();
6520
6521 /* After initial rtl generation, call back to finish generating
6522 exception support code. We need to do this before cleaning up
6523 the CFG as the code does not expect dead landing pads. */
6524 if (fun->eh->region_tree != NULL)
6525 finish_eh_generation ();
6526
6527 /* BB subdivision may have created basic blocks that are only reachable
6528 from unlikely bbs but not marked as such in the profile. */
6529 if (optimize)
6530 propagate_unlikely_bbs_forward ();
6531
6532 /* Remove unreachable blocks, otherwise we cannot compute dominators
6533 which are needed for loop state verification. As a side-effect
6534 this also compacts blocks.
6535 ??? We cannot remove trivially dead insns here as for example
6536 the DRAP reg on i?86 is not magically live at this point.
6537 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6538 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6539
6540 checking_verify_flow_info ();
6541
6542 /* Initialize pseudos allocated for hard registers. */
6543 emit_initial_value_sets ();
6544
6545 /* And finally unshare all RTL. */
6546 unshare_all_rtl ();
6547
6548 /* There's no need to defer outputting this function any more; we
6549 know we want to output it. */
6550 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6551
6552 /* Now that we're done expanding trees to RTL, we shouldn't have any
6553 more CONCATs anywhere. */
6554 generating_concat_p = 0;
6555
6556 if (dump_file)
6557 {
6558 fprintf (dump_file,
6559 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6560 /* And the pass manager will dump RTL for us. */
6561 }
6562
6563 /* If we're emitting a nested function, make sure its parent gets
6564 emitted as well. Doing otherwise confuses debug info. */
6565 {
6566 tree parent;
6567 for (parent = DECL_CONTEXT (current_function_decl);
6568 parent != NULL_TREE;
6569 parent = get_containing_scope (parent))
6570 if (TREE_CODE (parent) == FUNCTION_DECL)
6571 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6572 }
6573
6574 TREE_ASM_WRITTEN (current_function_decl) = 1;
6575
6576 /* After expanding, the return labels are no longer needed. */
6577 return_label = NULL;
6578 naked_return_label = NULL;
6579
6580 /* After expanding, the tm_restart map is no longer needed. */
6581 if (fun->gimple_df->tm_restart)
6582 fun->gimple_df->tm_restart = NULL;
6583
6584 /* Tag the blocks with a depth number so that change_scope can find
6585 the common parent easily. */
6586 set_block_levels (DECL_INITIAL (fun->decl), 0);
6587 default_rtl_profile ();
6588
6589 /* For -dx discard loops now, otherwise IL verify in clean_state will
6590 ICE. */
6591 if (rtl_dump_and_exit)
6592 {
6593 cfun->curr_properties &= ~PROP_loops;
6594 loop_optimizer_finalize ();
6595 }
6596
6597 timevar_pop (TV_POST_EXPAND);
6598
6599 return 0;
6600 }
6601
6602 } // anon namespace
6603
6604 rtl_opt_pass *
6605 make_pass_expand (gcc::context *ctxt)
6606 {
6607 return new pass_expand (ctxt);
6608 }