/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs.h"
#include "regs.h" /* For reg_renumber.  */
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "cfgloop.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "tree-ssa-address.h"
#include "output.h"
#include "builtins.h"

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
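
/* For illustration (a hypothetical walk-through, not used by the pass):
   for the GIMPLE assignment

     x_1 = a_2 + b_3;

   gimple_expr_code yields PLUS_EXPR, which classifies as
   GIMPLE_BINARY_RHS, so the function rebuilds the RHS as

     build2 (PLUS_EXPR, TREE_TYPE (x_1), a_2, b_3);

   i.e. a GENERIC tree suitable for the expand machinery.  */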

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce the
   ambiguity that arises when the same user variable appears in
   multiple partitions (this is less likely for compiler-introduced
   temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}
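
/* For illustration (hypothetical decls, not produced here): if CUR is
   the user variable 'i' and NEXT is the ignored compiler temporary
   D.1234, leader_merge returns D.1234, keeping the ignored decl as the
   partition leader and the user variable out of the debug dumps.  */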

/* Associate declaration T with storage space X.  If T is not an
   SSA name, this is exactly SET_DECL_RTL; otherwise, associate the
   partition of T with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places", don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  poly_uint64 size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-increasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl, bool really_expand)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
         That is done before IPA and could bump alignment based on host
         backend even for offloaded code which wants different
         LOCAL_DECL_ALIGNMENT.  */
      if (really_expand)
        SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}

/* Align the given offset BASE to ALIGN.  Round up if ALIGN_UP is true,
   down otherwise.  Return the aligned BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
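
/* A worked example (for illustration only): with ALIGN == 16,

     align_base (35, 16, true)  == (35 + 15) & -16 == 48
     align_base (35, 16, false) == 35 & -16        == 32

   i.e. the next 16-byte boundary above, resp. below, offset 35,
   relying on -ALIGN being the mask ~(ALIGN - 1) for powers of two.  */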

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static poly_int64
alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
{
  poly_int64 offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = aligned_lower_bound (frame_offset - frame_phase - size,
                               align) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = aligned_upper_bound (frame_offset - frame_phase,
                               align) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
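
/* For illustration (a hypothetical frame, not actual target numbers):
   on a FRAME_GROWS_DOWNWARD target with frame_phase == 0 and
   frame_offset == -20, the call

     alloc_stack_frame_space (8, 8);

   computes aligned_lower_bound (-20 - 8, 8) == -32, so the new
   frame_offset and the returned offset are both -32: the 8 bytes
   live at [-32, -24) and the slot stays 8-byte aligned.  */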

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl, bool really_expand)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
              ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
              : DECL_SIZE_UNIT (decl);
  v->size = tree_to_poly_uint64 (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (known_eq (v->size, 0U))
    v->size = 1;
  v->alignb = align_local_variable (decl, really_expand);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luids X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return;
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luids X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true; otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (!VAR_P (lhs))
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in cases where we have, for instance, moved an
     address-taken operation upward without also moving a dereference of
     it upward.  But it is conservatively correct, as a variable can never
     hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  poly_int64 sizea = stack_vars[ia].size;
  poly_int64 sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  int diff = compare_sizes_for_sort (sizeb, sizea);
  if (diff != 0)
    return diff;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
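
/* For illustration (hypothetical variables, not produced by the pass):
   given a 64-byte-aligned var on a target whose
   MAX_SUPPORTED_STACK_ALIGNMENT is 16 bytes, a 100-byte buffer, and a
   4-byte int, the order produced by sorting with stack_var_cmp is

     64-byte-aligned var    ("large" alignment comes first)
     100-byte buffer        (then decreasing size)
     4-byte int

   with the ID comparison breaking any remaining ties stably.  */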

struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables, add all partition members to the
   pointed-to variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          struct ptr_info_def *pi;

          if (POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      poly_int64 isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          poly_int64 jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if (asan_sanitize_stack_p ()
              && maybe_ne (isize, jsize)
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
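
/* A small walk-through (for illustration; the decls are hypothetical):
   with three locals whose lifetimes never overlap, sorted by
   decreasing size as

     char buf[100];
     int i;
     short s;

   buf is the first representative; the inner loop unions 'i' and then
   's' into its partition, so all three later share one stack slot
   whose size and alignment are those recorded for the representative
   after the merges.  */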

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
      print_dec (stack_vars[i].size, dump_file);
      fprintf (dump_file, " align %u\n", stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         poly_int64 offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set the alignment we actually gave this decl if it isn't an SSA name.
         If it is, we generate stack slots only accidentally, so it isn't as
         important; we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = known_alignment (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order; the highest-offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  poly_uint64 large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size = aligned_upper_bound (large_size, alignb);
          large_size += stack_vars[i].size;
        }
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      poly_int64 offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          /* ASAN description strings don't yet have a syntax for expressing
             polynomial offsets.  */
          HOST_WIDE_INT prev_offset;
          if (asan_sanitize_stack_p ()
              && pred
              && frame_offset.is_constant (&prev_offset)
              && stack_vars[i].size.is_constant ())
            {
              if (data->asan_vec.is_empty ())
                {
                  alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
                  prev_offset = frame_offset.to_constant ();
                }
              prev_offset = align_base (prev_offset,
                                        ASAN_MIN_RED_ZONE_SIZE,
                                        !FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              unsigned HOST_WIDE_INT size
                = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
              if (data->asan_vec.is_empty ())
                size = MAX (size, ASAN_RED_ZONE_SIZE);

              unsigned HOST_WIDE_INT alignment = MAX (alignb,
                                                      ASAN_MIN_RED_ZONE_SIZE);
              offset = alloc_stack_frame_space (size, alignment);

              data->asan_vec.safe_push (prev_offset);
              /* Allocating a constant amount of space from a constant
                 starting offset must give a constant result.  */
              data->asan_vec.safe_push ((offset + stack_vars[i].size)
                                        .to_constant ());
              /* Find the best representative of the partition.  Prefer
                 those with DECL_NAME, and better yet those satisfying
                 the asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);

              /* Make sure a representative is unpoisoned if another
                 variable in the partition is handled by
                 use-after-scope sanitization.  */
              if (asan_handled_variables != NULL
                  && !asan_handled_variables->contains (repr_decl))
                {
                  for (j = i; j != EOC; j = stack_vars[j].next)
                    if (asan_handled_variables->contains (stack_vars[j].decl))
                      break;
                  if (j != EOC)
                    asan_handled_variables->add (repr_decl);
                }

              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;

          /* If there were any variables requiring "large" alignment, allocate
             space.  */
          if (maybe_ne (large_size, 0U) && ! large_allocation_done)
            {
              poly_int64 loffset;
              rtx large_allocsize;

              large_allocsize = gen_int_mode (large_size, Pmode);
              get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
              loffset = alloc_stack_frame_space
                (rtx_to_poly_int64 (large_allocsize),
                 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
              large_base = get_dynamic_stack_base (loffset, large_align);
              large_allocation_done = true;
            }
          gcc_assert (large_base != NULL);

          large_alloc = aligned_upper_bound (large_alloc, alignb);
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (known_eq (large_alloc, large_size));
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static poly_uint64
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  poly_uint64 size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* Record the RTL assignment X for the default def of PARM.  */

extern void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when Pmode alignment also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = GET_MODE_ALIGNMENT (Pmode);

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  poly_uint64 size;
  poly_int64 offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var, true);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* Record the alignment requirements of some variable assigned to a
   pseudo.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}

/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = GET_MODE_ALIGNMENT (Pmode);

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var, true);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* For a promoted variable, X will not be used directly but wrapped in a
     SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
     will assume that its upper bits can be inferred from its lower bits.
     Therefore, if X isn't initialized on every path from the entry, then
     we must do it manually in order to fulfill the above assumption.  */
  if (reg_mode != TYPE_MODE (TREE_TYPE (var))
      && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    emit_move_insn (x, CONST0_RTX (reg_mode));
}

/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (REG_P (x));
          return;
        }
      gcc_unreachable ();
    }

  tree decl = var;
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
                   ? TYPE_SIZE_UNIT (TREE_TYPE (var))
                   : DECL_SIZE_UNIT (var);
  poly_uint64 size;

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = (poly_int_tree_p (size_unit, &size)
       && (estimated_poly_value (size)
           < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
                       ? TYPE_ALIGN (TREE_TYPE (var))
                       : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
                 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
                 : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static poly_uint64
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
        return 0;

      /* Because we don't know if VAR will be in a register or on the
         stack, we conservatively assume it will be on the stack even if
         VAR is eventually put into a register after the RA pass.  For
         non-automatic variables, which won't be on the stack, we collect
         the alignment of the type and ignore user-specified alignment.
         Similarly for SSA_NAMEs for which use_register_for_decl returns
         true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
1640 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1641 align = GET_MODE_ALIGNMENT (Pmode);
1642 }
1643
1644 record_alignment_for_reg_var (align);
1645
1646 poly_uint64 size;
1647 if (TREE_CODE (origvar) == SSA_NAME)
1648 {
1649 gcc_assert (!VAR_P (var)
1650 || (!DECL_EXTERNAL (var)
1651 && !DECL_HAS_VALUE_EXPR_P (var)
1652 && !TREE_STATIC (var)
1653 && TREE_TYPE (var) != error_mark_node
1654 && !DECL_HARD_REGISTER (var)
1655 && really_expand));
1656 }
1657 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1658 ;
1659 else if (DECL_EXTERNAL (var))
1660 ;
1661 else if (DECL_HAS_VALUE_EXPR_P (var))
1662 ;
1663 else if (TREE_STATIC (var))
1664 ;
1665 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1666 ;
1667 else if (TREE_TYPE (var) == error_mark_node)
1668 {
1669 if (really_expand)
1670 expand_one_error_var (var);
1671 }
1672 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1673 {
1674 if (really_expand)
1675 {
1676 expand_one_hard_reg_var (var);
1677 if (!DECL_HARD_REGISTER (var))
1678 /* Invalid register specification. */
1679 expand_one_error_var (var);
1680 }
1681 }
1682 else if (use_register_for_decl (var))
1683 {
1684 if (really_expand)
1685 expand_one_register_var (origvar);
1686 }
1687 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1688 || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1689 {
1690 /* Reject variables which cover more than half of the address-space. */
1691 if (really_expand)
1692 {
1693 if (DECL_NONLOCAL_FRAME (var))
1694 error_at (DECL_SOURCE_LOCATION (current_function_decl),
1695 "total size of local objects is too large");
1696 else
1697 error_at (DECL_SOURCE_LOCATION (var),
1698 "size of variable %q+D is too large", var);
1699 expand_one_error_var (var);
1700 }
1701 }
1702 else if (defer_stack_allocation (var, toplevel))
1703 add_stack_var (origvar, really_expand);
1704 else
1705 {
1706 if (really_expand)
1707 {
1708 if (lookup_attribute ("naked",
1709 DECL_ATTRIBUTES (current_function_decl)))
1710 error ("cannot allocate stack for variable %q+D, naked function",
1711 var);
1712
1713 expand_one_stack_var (origvar);
1714 }
1715 return size;
1716 }
1717 return 0;
1718 }
1719
1720 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1721 expanding variables. Those variables that can be put into registers
1722 are allocated pseudos; those that can't are put on the stack.
1723
1724 TOPLEVEL is true if this is the outermost BLOCK. */
1725
1726 static void
1727 expand_used_vars_for_block (tree block, bool toplevel)
1728 {
1729 tree t;
1730
1731 /* Expand all variables at this level. */
1732 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1733 if (TREE_USED (t)
1734 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1735 || !DECL_NONSHAREABLE (t)))
1736 expand_one_var (t, toplevel, true);
1737
1738 /* Expand all variables at containing levels. */
1739 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1740 expand_used_vars_for_block (t, false);
1741 }
1742
1743 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1744 and clear TREE_USED on all local variables. */
1745
1746 static void
1747 clear_tree_used (tree block)
1748 {
1749 tree t;
1750
1751 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1752 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1753 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1754 || !DECL_NONSHAREABLE (t))
1755 TREE_USED (t) = 0;
1756
1757 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1758 clear_tree_used (t);
1759 }
1760
1761 enum {
1762 SPCT_FLAG_DEFAULT = 1,
1763 SPCT_FLAG_ALL = 2,
1764 SPCT_FLAG_STRONG = 3,
1765 SPCT_FLAG_EXPLICIT = 4
1766 };
1767
1768 /* Examine TYPE and determine a bit mask of the following features. */
1769
1770 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1771 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1772 #define SPCT_HAS_ARRAY 4
1773 #define SPCT_HAS_AGGREGATE 8
1774
1775 static unsigned int
1776 stack_protect_classify_type (tree type)
1777 {
1778 unsigned int ret = 0;
1779 tree t;
1780
1781 switch (TREE_CODE (type))
1782 {
1783 case ARRAY_TYPE:
1784 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1785 if (t == char_type_node
1786 || t == signed_char_type_node
1787 || t == unsigned_char_type_node)
1788 {
1789 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1790 unsigned HOST_WIDE_INT len;
1791
1792 if (!TYPE_SIZE_UNIT (type)
1793 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1794 len = max;
1795 else
1796 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1797
1798 if (len < max)
1799 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1800 else
1801 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1802 }
1803 else
1804 ret = SPCT_HAS_ARRAY;
1805 break;
1806
1807 case UNION_TYPE:
1808 case QUAL_UNION_TYPE:
1809 case RECORD_TYPE:
1810 ret = SPCT_HAS_AGGREGATE;
1811 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1812 if (TREE_CODE (t) == FIELD_DECL)
1813 ret |= stack_protect_classify_type (TREE_TYPE (t));
1814 break;
1815
1816 default:
1817 break;
1818 }
1819
1820 return ret;
1821 }
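/* For illustration, assuming the default --param ssp-buffer-size=8,
   the classification works out as:

     char small[4];             SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char big[64];              SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int nums[16];              SPCT_HAS_ARRAY
     struct { char b[64]; } s;  SPCT_HAS_AGGREGATE | SPCT_HAS_LARGE_CHAR_ARRAY
                                | SPCT_HAS_ARRAY

   The aggregate cases recurse into each FIELD_DECL, so a character
   buffer nested anywhere inside a struct or union still sets the
   char-array bits on the containing type.  */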
1822
1823 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1824 part of the local stack frame. Remember if we ever return nonzero for
1825 any variable in this function. The return value is the phase number in
1826 which the variable should be allocated. */
1827
1828 static int
1829 stack_protect_decl_phase (tree decl)
1830 {
1831 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1832 int ret = 0;
1833
1834 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1835 has_short_buffer = true;
1836
1837 if (flag_stack_protect == SPCT_FLAG_ALL
1838 || flag_stack_protect == SPCT_FLAG_STRONG
1839 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1840 && lookup_attribute ("stack_protect",
1841 DECL_ATTRIBUTES (current_function_decl))))
1842 {
1843 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1844 && !(bits & SPCT_HAS_AGGREGATE))
1845 ret = 1;
1846 else if (bits & SPCT_HAS_ARRAY)
1847 ret = 2;
1848 }
1849 else
1850 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1851
1852 if (ret)
1853 has_protected_decls = true;
1854
1855 return ret;
1856 }
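/* A sketch of the resulting layout policy: under -fstack-protector-all,
   -fstack-protector-strong, or an explicit stack_protect attribute,
   plain character arrays go in phase 1 (closest to the guard), other
   arrays and aggregates containing arrays in phase 2, and everything
   else in phase 0.  Under plain -fstack-protector, only types
   containing a large character array are segregated, into phase 1.  */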
1857
1858 /* Two helper routines that check for phase 1 and phase 2. These are used
1859 as callbacks for expand_stack_vars. */
1860
1861 static bool
1862 stack_protect_decl_phase_1 (size_t i)
1863 {
1864 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1865 }
1866
1867 static bool
1868 stack_protect_decl_phase_2 (size_t i)
1869 {
1870 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1871 }
1872
1873 /* A helper function that checks for the asan phase (with stack protector
1874 it is phase 3). This is used as a callback for expand_stack_vars.
1875 Returns true if any of the vars in the partition need to be protected. */
1876
1877 static bool
1878 asan_decl_phase_3 (size_t i)
1879 {
1880 while (i != EOC)
1881 {
1882 if (asan_protect_stack_decl (stack_vars[i].decl))
1883 return true;
1884 i = stack_vars[i].next;
1885 }
1886 return false;
1887 }
1888
1889 /* Ensure that variables in different stack protection phases conflict
1890 so that they are not merged into the same stack slot. */
1891
1892 static void
1893 add_stack_protection_conflicts (void)
1894 {
1895 size_t i, j, n = stack_vars_num;
1896 unsigned char *phase;
1897
1898 phase = XNEWVEC (unsigned char, n);
1899 for (i = 0; i < n; ++i)
1900 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1901
1902 for (i = 0; i < n; ++i)
1903 {
1904 unsigned char ph_i = phase[i];
1905 for (j = i + 1; j < n; ++j)
1906 if (ph_i != phase[j])
1907 add_stack_var_conflict (i, j);
1908 }
1909
1910 XDELETEVEC (phase);
1911 }
1912
1913 /* Create a decl for the guard at the top of the stack frame. */
1914
1915 static void
1916 create_stack_guard (void)
1917 {
1918 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1919 VAR_DECL, NULL, ptr_type_node);
1920 TREE_THIS_VOLATILE (guard) = 1;
1921 TREE_USED (guard) = 1;
1922 expand_one_stack_var (guard);
1923 crtl->stack_protect_guard = guard;
1924 }
1925
1926 /* Prepare for expanding variables. */
1927 static void
1928 init_vars_expansion (void)
1929 {
1930 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1931 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1932
1933 /* A map from decl to stack partition. */
1934 decl_to_stack_part = new hash_map<tree, size_t>;
1935
1936 /* Initialize local stack smashing state. */
1937 has_protected_decls = false;
1938 has_short_buffer = false;
1939 }
1940
1941 /* Free up stack variable graph data. */
1942 static void
1943 fini_vars_expansion (void)
1944 {
1945 bitmap_obstack_release (&stack_var_bitmap_obstack);
1946 if (stack_vars)
1947 XDELETEVEC (stack_vars);
1948 if (stack_vars_sorted)
1949 XDELETEVEC (stack_vars_sorted);
1950 stack_vars = NULL;
1951 stack_vars_sorted = NULL;
1952 stack_vars_alloc = stack_vars_num = 0;
1953 delete decl_to_stack_part;
1954 decl_to_stack_part = NULL;
1955 }
1956
1957 /* Make a fair guess for the size of the stack frame of the function
1958 in NODE. This doesn't have to be exact, the result is only used in
1959 the inline heuristics. So we don't want to run the full stack var
1960 packing algorithm (which is quadratic in the number of stack vars).
1961 Instead, we calculate the total size of all stack vars. This turns
1962 out to be a pretty fair estimate -- packing of stack vars doesn't
1963 happen very often. */
1964
1965 HOST_WIDE_INT
1966 estimated_stack_frame_size (struct cgraph_node *node)
1967 {
1968 poly_int64 size = 0;
1969 size_t i;
1970 tree var;
1971 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1972
1973 push_cfun (fn);
1974
1975 init_vars_expansion ();
1976
1977 FOR_EACH_LOCAL_DECL (fn, i, var)
1978 if (auto_var_in_fn_p (var, fn->decl))
1979 size += expand_one_var (var, true, false);
1980
1981 if (stack_vars_num > 0)
1982 {
1983 /* Fake sorting the stack vars for account_stack_vars (). */
1984 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1985 for (i = 0; i < stack_vars_num; ++i)
1986 stack_vars_sorted[i] = i;
1987 size += account_stack_vars ();
1988 }
1989
1990 fini_vars_expansion ();
1991 pop_cfun ();
1992 return estimated_poly_value (size);
1993 }
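/* As a concrete instance of the approximation: for a function whose
   locals are

     int a[100];    400 bytes (with 4-byte int)
     double d;        8 bytes

   the estimate is roughly 408 bytes plus alignment padding, even if
   the full packing algorithm could later let some of the variables
   share a slot.  */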
1994
1995 /* Helper routine to check if a record or union contains an array field. */
1996
1997 static int
1998 record_or_union_type_has_array_p (const_tree tree_type)
1999 {
2000 tree fields = TYPE_FIELDS (tree_type);
2001 tree f;
2002
2003 for (f = fields; f; f = DECL_CHAIN (f))
2004 if (TREE_CODE (f) == FIELD_DECL)
2005 {
2006 tree field_type = TREE_TYPE (f);
2007 if (RECORD_OR_UNION_TYPE_P (field_type)
2008 && record_or_union_type_has_array_p (field_type))
2009 return 1;
2010 if (TREE_CODE (field_type) == ARRAY_TYPE)
2011 return 1;
2012 }
2013 return 0;
2014 }
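/* E.g. both of the following are detected, the second through the
   recursion on RECORD_OR_UNION_TYPE_P fields:

     struct s1 { int len; char buf[32]; };
     struct s2 { struct s1 inner; };  */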
2015
2016 /* Check if the current function has local referenced variables that
2017 have their addresses taken, contain an array, or are arrays. */
2018
2019 static bool
2020 stack_protect_decl_p ()
2021 {
2022 unsigned i;
2023 tree var;
2024
2025 FOR_EACH_LOCAL_DECL (cfun, i, var)
2026 if (!is_global_var (var))
2027 {
2028 tree var_type = TREE_TYPE (var);
2029 if (VAR_P (var)
2030 && (TREE_CODE (var_type) == ARRAY_TYPE
2031 || TREE_ADDRESSABLE (var)
2032 || (RECORD_OR_UNION_TYPE_P (var_type)
2033 && record_or_union_type_has_array_p (var_type))))
2034 return true;
2035 }
2036 return false;
2037 }
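/* E.g. any one of these locals makes the function a candidate for
   -fstack-protector-strong instrumentation:

     char buf[8];              array type
     int x; int *p = &x;       address of x taken
     struct s1 v;              record containing an array (see above)  */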
2038
2039 /* Check if the current function has calls that use a return slot. */
2040
2041 static bool
2042 stack_protect_return_slot_p ()
2043 {
2044 basic_block bb;
2045
2046 FOR_ALL_BB_FN (bb, cfun)
2047 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2048 !gsi_end_p (gsi); gsi_next (&gsi))
2049 {
2050 gimple *stmt = gsi_stmt (gsi);
2051 /* This assumes that calls to internal-only functions never
2052 use a return slot. */
2053 if (is_gimple_call (stmt)
2054 && !gimple_call_internal_p (stmt)
2055 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2056 gimple_call_fndecl (stmt)))
2057 return true;
2058 }
2059 return false;
2060 }
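/* E.g. on typical targets a call whose aggregate result is returned
   in memory uses a return slot and is caught here:

     struct big { char data[128]; };
     struct big f (void);
     ...
     struct big b = f ();  */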
2061
2062 /* Expand all variables used in the function. */
2063
2064 static rtx_insn *
2065 expand_used_vars (void)
2066 {
2067 tree var, outer_block = DECL_INITIAL (current_function_decl);
2068 auto_vec<tree> maybe_local_decls;
2069 rtx_insn *var_end_seq = NULL;
2070 unsigned i;
2071 unsigned len;
2072 bool gen_stack_protect_signal = false;
2073
2074 /* Compute the phase of the stack frame for this function. */
2075 {
2076 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2077 int off = targetm.starting_frame_offset () % align;
2078 frame_phase = off ? align - off : 0;
2079 }
2080
2081 /* Set TREE_USED on all variables in the local_decls. */
2082 FOR_EACH_LOCAL_DECL (cfun, i, var)
2083 TREE_USED (var) = 1;
2084 /* Clear TREE_USED on all variables associated with a block scope. */
2085 clear_tree_used (DECL_INITIAL (current_function_decl));
2086
2087 init_vars_expansion ();
2088
2089 if (targetm.use_pseudo_pic_reg ())
2090 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2091
2092 for (i = 0; i < SA.map->num_partitions; i++)
2093 {
2094 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2095 continue;
2096
2097 tree var = partition_to_var (SA.map, i);
2098
2099 gcc_assert (!virtual_operand_p (var));
2100
2101 expand_one_ssa_partition (var);
2102 }
2103
2104 if (flag_stack_protect == SPCT_FLAG_STRONG)
2105 gen_stack_protect_signal
2106 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2107
2108 /* At this point all variables on the local_decls with TREE_USED
2109 set are not associated with any block scope. Lay them out. */
2110
2111 len = vec_safe_length (cfun->local_decls);
2112 FOR_EACH_LOCAL_DECL (cfun, i, var)
2113 {
2114 bool expand_now = false;
2115
2116 /* Expanded above already. */
2117 if (is_gimple_reg (var))
2118 {
2119 TREE_USED (var) = 0;
2120 goto next;
2121 }
2122 /* We didn't set a block for static or extern because it's hard
2123 to tell the difference between a global variable (re)declared
2124 in a local scope, and one that's really declared there to
2125 begin with. And it doesn't really matter much, since we're
2126 not giving them stack space. Expand them now. */
2127 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2128 expand_now = true;
2129
2130 /* Expand variables not associated with any block now. Those created by
2131 the optimizers could be live anywhere in the function. Those that
2132 could possibly have been scoped originally and detached from their
2133 block will have their allocation deferred so we coalesce them with
2134 others when optimization is enabled. */
2135 else if (TREE_USED (var))
2136 expand_now = true;
2137
2138 /* Finally, mark all variables on the list as used. We'll use
2139 this in a moment when we expand those associated with scopes. */
2140 TREE_USED (var) = 1;
2141
2142 if (expand_now)
2143 expand_one_var (var, true, true);
2144
2145 next:
2146 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2147 {
2148 rtx rtl = DECL_RTL_IF_SET (var);
2149
2150 /* Keep artificial non-ignored vars in cfun->local_decls
2151 chain until instantiate_decls. */
2152 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2153 add_local_decl (cfun, var);
2154 else if (rtl == NULL_RTX)
2155 /* If rtl isn't set yet, which can happen e.g. with
2156 -fstack-protector, retry before returning from this
2157 function. */
2158 maybe_local_decls.safe_push (var);
2159 }
2160 }
2161
2162 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2163
2164 +-----------------+-----------------+
2165 | ...processed... | ...duplicates...|
2166 +-----------------+-----------------+
2167 ^
2168 +-- LEN points here.
2169
2170 We just want the duplicates, as those are the artificial
2171 non-ignored vars that we want to keep until instantiate_decls.
2172 Move them down and truncate the array. */
2173 if (!vec_safe_is_empty (cfun->local_decls))
2174 cfun->local_decls->block_remove (0, len);
2175
2176 /* At this point, all variables within the block tree with TREE_USED
2177 set are actually used by the optimized function. Lay them out. */
2178 expand_used_vars_for_block (outer_block, true);
2179
2180 if (stack_vars_num > 0)
2181 {
2182 add_scope_conflicts ();
2183
2184 /* If stack protection is enabled, we don't share space between
2185 vulnerable data and non-vulnerable data. */
2186 if (flag_stack_protect != 0
2187 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2188 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2189 && lookup_attribute ("stack_protect",
2190 DECL_ATTRIBUTES (current_function_decl)))))
2191 add_stack_protection_conflicts ();
2192
2193 /* Now that we have collected all stack variables, and have computed a
2194 minimal interference graph, attempt to save some stack space. */
2195 partition_stack_vars ();
2196 if (dump_file)
2197 dump_stack_var_partition ();
2198 }
2199
2200 switch (flag_stack_protect)
2201 {
2202 case SPCT_FLAG_ALL:
2203 create_stack_guard ();
2204 break;
2205
2206 case SPCT_FLAG_STRONG:
2207 if (gen_stack_protect_signal
2208 || cfun->calls_alloca || has_protected_decls
2209 || lookup_attribute ("stack_protect",
2210 DECL_ATTRIBUTES (current_function_decl)))
2211 create_stack_guard ();
2212 break;
2213
2214 case SPCT_FLAG_DEFAULT:
2215 if (cfun->calls_alloca || has_protected_decls
2216 || lookup_attribute ("stack_protect",
2217 DECL_ATTRIBUTES (current_function_decl)))
2218 create_stack_guard ();
2219 break;
2220
2221 case SPCT_FLAG_EXPLICIT:
2222 if (lookup_attribute ("stack_protect",
2223 DECL_ATTRIBUTES (current_function_decl)))
2224 create_stack_guard ();
2225 break;
2226 default:
2227 ;
2228 }
2229
2230 /* Assign rtl to each variable based on these partitions. */
2231 if (stack_vars_num > 0)
2232 {
2233 struct stack_vars_data data;
2234
2235 data.asan_base = NULL_RTX;
2236 data.asan_alignb = 0;
2237
2238 /* Reorder decls to be protected by iterating over the variables
2239 array multiple times, and allocating out of each phase in turn. */
2240 /* ??? We could probably integrate this into the qsort we did
2241 earlier, such that we naturally see these variables first,
2242 and thus naturally allocate things in the right order. */
2243 if (has_protected_decls)
2244 {
2245 /* Phase 1 contains only character arrays. */
2246 expand_stack_vars (stack_protect_decl_phase_1, &data);
2247
2248 /* Phase 2 contains other kinds of arrays. */
2249 if (flag_stack_protect == SPCT_FLAG_ALL
2250 || flag_stack_protect == SPCT_FLAG_STRONG
2251 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2252 && lookup_attribute ("stack_protect",
2253 DECL_ATTRIBUTES (current_function_decl))))
2254 expand_stack_vars (stack_protect_decl_phase_2, &data);
2255 }
2256
2257 if (asan_sanitize_stack_p ())
2258 /* Phase 3, any partitions that need asan protection
2259 in addition to phase 1 and 2. */
2260 expand_stack_vars (asan_decl_phase_3, &data);
2261
2262 /* ASAN description strings don't yet have a syntax for expressing
2263 polynomial offsets. */
2264 HOST_WIDE_INT prev_offset;
2265 if (!data.asan_vec.is_empty ()
2266 && frame_offset.is_constant (&prev_offset))
2267 {
2268 HOST_WIDE_INT offset, sz, redzonesz;
2269 redzonesz = ASAN_RED_ZONE_SIZE;
2270 sz = data.asan_vec[0] - prev_offset;
2271 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2272 && data.asan_alignb <= 4096
2273 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2274 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2275 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2276 /* Allocating a constant amount of space from a constant
2277 starting offset must give a constant result. */
2278 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2279 .to_constant ());
2280 data.asan_vec.safe_push (prev_offset);
2281 data.asan_vec.safe_push (offset);
2282 /* Leave space for alignment if STRICT_ALIGNMENT. */
2283 if (STRICT_ALIGNMENT)
2284 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2285 << ASAN_SHADOW_SHIFT)
2286 / BITS_PER_UNIT, 1);
2287
2288 var_end_seq
2289 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2290 data.asan_base,
2291 data.asan_alignb,
2292 data.asan_vec.address (),
2293 data.asan_decl_vec.address (),
2294 data.asan_vec.length ());
2295 }
2296
2297 expand_stack_vars (NULL, &data);
2298 }
2299
2300 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2301 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2302 virtual_stack_vars_rtx,
2303 var_end_seq);
2304
2305 fini_vars_expansion ();
2306
2307 /* If there were any artificial non-ignored vars without rtl
2308 found earlier, see if deferred stack allocation hasn't assigned
2309 rtl to them. */
2310 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2311 {
2312 rtx rtl = DECL_RTL_IF_SET (var);
2313
2314 /* Keep artificial non-ignored vars in cfun->local_decls
2315 chain until instantiate_decls. */
2316 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2317 add_local_decl (cfun, var);
2318 }
2319
2320 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2321 if (STACK_ALIGNMENT_NEEDED)
2322 {
2323 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2324 if (FRAME_GROWS_DOWNWARD)
2325 frame_offset = aligned_lower_bound (frame_offset, align);
2326 else
2327 frame_offset = aligned_upper_bound (frame_offset, align);
2328 }
2329
2330 return var_end_seq;
2331 }
2332
2333
2334 /* If we need to produce a detailed dump, print the tree representation
2335 for STMT to the dump file. SINCE is the last RTX after which the RTL
2336 generated for STMT should have been appended. */
2337
2338 static void
2339 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2340 {
2341 if (dump_file && (dump_flags & TDF_DETAILS))
2342 {
2343 fprintf (dump_file, "\n;; ");
2344 print_gimple_stmt (dump_file, stmt, 0,
2345 TDF_SLIM | (dump_flags & TDF_LINENO));
2346 fprintf (dump_file, "\n");
2347
2348 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2349 }
2350 }
2351
2352 /* Maps the blocks that do not contain tree labels to rtx labels. */
2353
2354 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2355
2356 /* Returns the label_rtx expression for a label starting basic block BB. */
2357
2358 static rtx_code_label *
2359 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2360 {
2361 gimple_stmt_iterator gsi;
2362 tree lab;
2363
2364 if (bb->flags & BB_RTL)
2365 return block_label (bb);
2366
2367 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2368 if (elt)
2369 return *elt;
2370
2371 /* Find the tree label if it is present. */
2372
2373 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2374 {
2375 glabel *lab_stmt;
2376
2377 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2378 if (!lab_stmt)
2379 break;
2380
2381 lab = gimple_label_label (lab_stmt);
2382 if (DECL_NONLOCAL (lab))
2383 break;
2384
2385 return jump_target_rtx (lab);
2386 }
2387
2388 rtx_code_label *l = gen_label_rtx ();
2389 lab_rtx_for_bb->put (bb, l);
2390 return l;
2391 }
2392
2393
2394 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2395 of a basic block where we just expanded the conditional at the end,
2396 possibly clean up the CFG and instruction sequence. LAST is the
2397 last instruction before the just emitted jump sequence. */
2398
2399 static void
2400 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2401 {
2402 /* Special case: when jumpif decides that the condition is
2403 trivial it emits an unconditional jump (and the necessary
2404 barrier). But we still have two edges, the fallthru one is
2405 wrong. purge_dead_edges would clean this up later. Unfortunately
2406 we have to insert insns (and split edges) before
2407 find_many_sub_basic_blocks and hence before purge_dead_edges.
2408 But splitting edges might create new blocks which depend on the
2409 fact that if there are two edges there's no barrier. So the
2410 barrier would get lost and verify_flow_info would ICE. Instead
2411 of auditing all edge splitters to care for the barrier (which
2412 normally isn't there in a cleaned CFG), fix it here. */
2413 if (BARRIER_P (get_last_insn ()))
2414 {
2415 rtx_insn *insn;
2416 remove_edge (e);
2417 /* Now we have a single successor block; if we have insns to
2418 insert on the remaining edge, we potentially will insert
2419 them at the end of this block (if the dest block isn't feasible)
2420 in order to avoid splitting the edge. This insertion will take
2421 place in front of the last jump. But we might have emitted
2422 multiple jumps (conditional and one unconditional) to the
2423 same destination. Inserting in front of the last one then
2424 is a problem. See PR 40021. We fix this by deleting all
2425 jumps except the last unconditional one. */
2426 insn = PREV_INSN (get_last_insn ());
2427 /* Make sure we have an unconditional jump. Otherwise we're
2428 confused. */
2429 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2430 for (insn = PREV_INSN (insn); insn != last;)
2431 {
2432 insn = PREV_INSN (insn);
2433 if (JUMP_P (NEXT_INSN (insn)))
2434 {
2435 if (!any_condjump_p (NEXT_INSN (insn)))
2436 {
2437 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2438 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2439 }
2440 delete_insn (NEXT_INSN (insn));
2441 }
2442 }
2443 }
2444 }
2445
2446 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2447 Returns a new basic block if we've terminated the current basic
2448 block and created a new one. */
2449
2450 static basic_block
2451 expand_gimple_cond (basic_block bb, gcond *stmt)
2452 {
2453 basic_block new_bb, dest;
2454 edge true_edge;
2455 edge false_edge;
2456 rtx_insn *last2, *last;
2457 enum tree_code code;
2458 tree op0, op1;
2459
2460 code = gimple_cond_code (stmt);
2461 op0 = gimple_cond_lhs (stmt);
2462 op1 = gimple_cond_rhs (stmt);
2463 /* We're sometimes presented with such code:
2464 D.123_1 = x < y;
2465 if (D.123_1 != 0)
2466 ...
2467 This would expand to two comparisons which then later might
2468 be cleaned up by combine. But some pattern matchers like if-conversion
2469 work better when there's only one compare, so make up for this
2470 here as special exception if TER would have made the same change. */
2471 if (SA.values
2472 && TREE_CODE (op0) == SSA_NAME
2473 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2474 && TREE_CODE (op1) == INTEGER_CST
2475 && ((gimple_cond_code (stmt) == NE_EXPR
2476 && integer_zerop (op1))
2477 || (gimple_cond_code (stmt) == EQ_EXPR
2478 && integer_onep (op1)))
2479 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2480 {
2481 gimple *second = SSA_NAME_DEF_STMT (op0);
2482 if (gimple_code (second) == GIMPLE_ASSIGN)
2483 {
2484 enum tree_code code2 = gimple_assign_rhs_code (second);
2485 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2486 {
2487 code = code2;
2488 op0 = gimple_assign_rhs1 (second);
2489 op1 = gimple_assign_rhs2 (second);
2490 }
2491 /* If jumps are cheap and the target does not support conditional
2492 compare, turn some more codes into jumpy sequences. */
2493 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2494 && targetm.gen_ccmp_first == NULL)
2495 {
2496 if ((code2 == BIT_AND_EXPR
2497 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2498 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2499 || code2 == TRUTH_AND_EXPR)
2500 {
2501 code = TRUTH_ANDIF_EXPR;
2502 op0 = gimple_assign_rhs1 (second);
2503 op1 = gimple_assign_rhs2 (second);
2504 }
2505 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2506 {
2507 code = TRUTH_ORIF_EXPR;
2508 op0 = gimple_assign_rhs1 (second);
2509 op1 = gimple_assign_rhs2 (second);
2510 }
2511 }
2512 }
2513 }
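/* E.g. with cheap branches and no conditional-compare support,
   "if (a & b)" on two single-bit booleans can be emitted as the
   short-circuit sequence "if (a) if (b) ..." instead of
   materializing the AND in a register first.  */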
2514
2515 /* Optimize (x % C1) == C2 or (x % C1) != C2, if it is beneficial,
2516 into (x - C2) * C3 < C4. */
2517 if ((code == EQ_EXPR || code == NE_EXPR)
2518 && TREE_CODE (op0) == SSA_NAME
2519 && TREE_CODE (op1) == INTEGER_CST)
2520 code = maybe_optimize_mod_cmp (code, &op0, &op1);
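/* A worked instance of that transform (a sketch; the exact constants
   are chosen by maybe_optimize_mod_cmp): for a 32-bit unsigned x,
   "x % 3 == 0" holds iff x * 0xaaaaaaab <= 0x55555555, where
   0xaaaaaaab is the multiplicative inverse of 3 modulo 2^32 and
   0x55555555 is (2^32 - 1) / 3.  That replaces a division with a
   multiplication and a comparison.  */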
2521
2522 last2 = last = get_last_insn ();
2523
2524 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2525 set_curr_insn_location (gimple_location (stmt));
2526
2527 /* These flags have no purpose in RTL land. */
2528 true_edge->flags &= ~EDGE_TRUE_VALUE;
2529 false_edge->flags &= ~EDGE_FALSE_VALUE;
2530
2531 /* We can either have a pure conditional jump with one fallthru edge or
2532 a two-way jump that needs to be decomposed into two basic blocks. */
2533 if (false_edge->dest == bb->next_bb)
2534 {
2535 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2536 true_edge->probability);
2537 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2538 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2539 set_curr_insn_location (true_edge->goto_locus);
2540 false_edge->flags |= EDGE_FALLTHRU;
2541 maybe_cleanup_end_of_block (false_edge, last);
2542 return NULL;
2543 }
2544 if (true_edge->dest == bb->next_bb)
2545 {
2546 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2547 false_edge->probability);
2548 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2549 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2550 set_curr_insn_location (false_edge->goto_locus);
2551 true_edge->flags |= EDGE_FALLTHRU;
2552 maybe_cleanup_end_of_block (true_edge, last);
2553 return NULL;
2554 }
2555
2556 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2557 true_edge->probability);
2558 last = get_last_insn ();
2559 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2560 set_curr_insn_location (false_edge->goto_locus);
2561 emit_jump (label_rtx_for_bb (false_edge->dest));
2562
2563 BB_END (bb) = last;
2564 if (BARRIER_P (BB_END (bb)))
2565 BB_END (bb) = PREV_INSN (BB_END (bb));
2566 update_bb_for_insn (bb);
2567
2568 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2569 dest = false_edge->dest;
2570 redirect_edge_succ (false_edge, new_bb);
2571 false_edge->flags |= EDGE_FALLTHRU;
2572 new_bb->count = false_edge->count ();
2573 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2574 add_bb_to_loop (new_bb, loop);
2575 if (loop->latch == bb
2576 && loop->header == dest)
2577 loop->latch = new_bb;
2578 make_single_succ_edge (new_bb, dest, 0);
2579 if (BARRIER_P (BB_END (new_bb)))
2580 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2581 update_bb_for_insn (new_bb);
2582
2583 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2584
2585 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2586 {
2587 set_curr_insn_location (true_edge->goto_locus);
2588 true_edge->goto_locus = curr_insn_location ();
2589 }
2590
2591 return new_bb;
2592 }
2593
2594 /* Mark all calls that can have a transaction restart. */
2595
2596 static void
2597 mark_transaction_restart_calls (gimple *stmt)
2598 {
2599 struct tm_restart_node dummy;
2600 tm_restart_node **slot;
2601
2602 if (!cfun->gimple_df->tm_restart)
2603 return;
2604
2605 dummy.stmt = stmt;
2606 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2607 if (slot)
2608 {
2609 struct tm_restart_node *n = *slot;
2610 tree list = n->label_or_list;
2611 rtx_insn *insn;
2612
2613 for (insn = next_real_insn (get_last_insn ());
2614 !CALL_P (insn);
2615 insn = next_real_insn (insn))
2616 continue;
2617
2618 if (TREE_CODE (list) == LABEL_DECL)
2619 add_reg_note (insn, REG_TM, label_rtx (list));
2620 else
2621 for (; list ; list = TREE_CHAIN (list))
2622 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2623 }
2624 }
2625
2626 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2627 statement STMT. */
2628
2629 static void
2630 expand_call_stmt (gcall *stmt)
2631 {
2632 tree exp, decl, lhs;
2633 bool builtin_p;
2634 size_t i;
2635
2636 if (gimple_call_internal_p (stmt))
2637 {
2638 expand_internal_call (stmt);
2639 return;
2640 }
2641
2642 /* If this is a call to a built-in function and it has no effect other
2643 than setting the lhs, try to implement it using an internal function
2644 instead. */
2645 decl = gimple_call_fndecl (stmt);
2646 if (gimple_call_lhs (stmt)
2647 && !gimple_has_side_effects (stmt)
2648 && (optimize || (decl && called_as_built_in (decl))))
2649 {
2650 internal_fn ifn = replacement_internal_fn (stmt);
2651 if (ifn != IFN_LAST)
2652 {
2653 expand_internal_call (ifn, stmt);
2654 return;
2655 }
2656 }
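/* E.g. a call such as "x = __builtin_sqrt (y)" whose only effect is
   setting the lhs may be expanded directly as the internal function
   IFN_SQRT when the target provides a suitable optab, skipping the
   CALL_EXPR machinery below.  */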
2657
2658 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2659
2660 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2661 builtin_p = decl && fndecl_built_in_p (decl);
2662
2663 /* If this is not a builtin function, the function type through which the
2664 call is made may be different from the type of the function. */
2665 if (!builtin_p)
2666 CALL_EXPR_FN (exp)
2667 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2668 CALL_EXPR_FN (exp));
2669
2670 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2671 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2672
2673 for (i = 0; i < gimple_call_num_args (stmt); i++)
2674 {
2675 tree arg = gimple_call_arg (stmt, i);
2676 gimple *def;
2677 /* TER forwards taken addresses into the arguments of builtin functions
2678 so we have a chance to infer more correct alignment information. See PR39954. */
2679 if (builtin_p
2680 && TREE_CODE (arg) == SSA_NAME
2681 && (def = get_gimple_for_ssa_name (arg))
2682 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2683 arg = gimple_assign_rhs1 (def);
2684 CALL_EXPR_ARG (exp, i) = arg;
2685 }
2686
2687 if (gimple_has_side_effects (stmt))
2688 TREE_SIDE_EFFECTS (exp) = 1;
2689
2690 if (gimple_call_nothrow_p (stmt))
2691 TREE_NOTHROW (exp) = 1;
2692
2693 if (gimple_no_warning_p (stmt))
2694 TREE_NO_WARNING (exp) = 1;
2695
2696 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2697 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2698 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2699 if (decl
2700 && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2701 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2702 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2703 else
2704 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2705 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2706 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2707 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2708
2709 /* Ensure RTL is created for debug args. */
2710 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2711 {
2712 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2713 unsigned int ix;
2714 tree dtemp;
2715
2716 if (debug_args)
2717 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2718 {
2719 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2720 expand_debug_expr (dtemp);
2721 }
2722 }
2723
2724 rtx_insn *before_call = get_last_insn ();
2725 lhs = gimple_call_lhs (stmt);
2726 if (lhs)
2727 expand_assignment (lhs, exp, false);
2728 else
2729 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2730
2731 /* If the gimple call is an indirect call and has the 'nocf_check'
2732 attribute, find the generated CALL insn and mark it so that no
2733 control-flow verification is needed. */
2734 if (gimple_call_nocf_check_p (stmt)
2735 && !gimple_call_fndecl (stmt))
2736 {
2737 rtx_insn *last = get_last_insn ();
2738 while (!CALL_P (last)
2739 && last != before_call)
2740 last = PREV_INSN (last);
2741
2742 if (last != before_call)
2743 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2744 }
2745
2746 mark_transaction_restart_calls (stmt);
2747 }
2748
2749
2750 /* Generate RTL for an asm statement (explicit assembler code).
2751 STRING is a STRING_CST node containing the assembler code text,
2752 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2753 insn is volatile; don't optimize it. */
2754
2755 static void
2756 expand_asm_loc (tree string, int vol, location_t locus)
2757 {
2758 rtx body;
2759
2760 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2761 ggc_strdup (TREE_STRING_POINTER (string)),
2762 locus);
2763
2764 MEM_VOLATILE_P (body) = vol;
2765
2766 /* Non-empty basic ASM implicitly clobbers memory. */
2767 if (TREE_STRING_LENGTH (string) != 0)
2768 {
2769 rtx asm_op, clob;
2770 unsigned i, nclobbers;
2771 auto_vec<rtx> input_rvec, output_rvec;
2772 auto_vec<const char *> constraints;
2773 auto_vec<rtx> clobber_rvec;
2774 HARD_REG_SET clobbered_regs;
2775 CLEAR_HARD_REG_SET (clobbered_regs);
2776
2777 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2778 clobber_rvec.safe_push (clob);
2779
2780 if (targetm.md_asm_adjust)
2781 targetm.md_asm_adjust (output_rvec, input_rvec,
2782 constraints, clobber_rvec,
2783 clobbered_regs);
2784
2785 asm_op = body;
2786 nclobbers = clobber_rvec.length ();
2787 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2788
2789 XVECEXP (body, 0, 0) = asm_op;
2790 for (i = 0; i < nclobbers; i++)
2791 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2792 }
2793
2794 emit_insn (body);
2795 }
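/* E.g. a non-empty basic asm such as

     asm ("nop");

   is wrapped in a PARALLEL with a (clobber (mem:BLK (scratch))) so
   the optimizers treat it as touching memory, whereas an empty
   asm ("") gets no implicit clobber.  */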
2796
2797 /* Return the number of times character C occurs in string S. */
2798 static int
2799 n_occurrences (int c, const char *s)
2800 {
2801 int n = 0;
2802 while (*s)
2803 n += (*s++ == c);
2804 return n;
2805 }
2806
2807 /* A subroutine of expand_asm_operands. Check that all operands have
2808 the same number of alternatives. Return true if so. */
2809
2810 static bool
2811 check_operand_nalternatives (const vec<const char *> &constraints)
2812 {
2813 unsigned len = constraints.length();
2814 if (len > 0)
2815 {
2816 int nalternatives = n_occurrences (',', constraints[0]);
2817
2818 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2819 {
2820 error ("too many alternatives in %<asm%>");
2821 return false;
2822 }
2823
2824 for (unsigned i = 1; i < len; ++i)
2825 if (n_occurrences (',', constraints[i]) != nalternatives)
2826 {
2827 error ("operand constraints for %<asm%> differ "
2828 "in number of alternatives");
2829 return false;
2830 }
2831 }
2832 return true;
2833 }
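/* E.g. in

     asm ("..." : "=r,m" (x) : "r,r" (y));

   each constraint has one comma and hence two alternatives, so the
   counts match; mixing "=r,m" with a single-alternative "r" would be
   diagnosed here.  */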
2834
2835 /* Check for overlap between registers marked in CLOBBERED_REGS and
2836 anything inappropriate in T. Emit an error and return true
2837 if a conflict is found, false if T is ok. */
2838
2839 static bool
2840 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2841 {
2842 /* Conflicts between asm-declared register variables and the clobber
2843 list are not allowed. */
2844 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2845
2846 if (overlap)
2847 {
2848 error ("%<asm%> specifier for variable %qE conflicts with "
2849 "%<asm%> clobber list",
2850 DECL_NAME (overlap));
2851
2852 /* Reset registerness to stop multiple errors emitted for a single
2853 variable. */
2854 DECL_REGISTER (overlap) = 0;
2855 return true;
2856 }
2857
2858 return false;
2859 }
2860
2861 /* Check that the given REGNO spanning NREGS is a valid
2862 asm clobber operand. Some HW registers cannot be
2863 saved/restored, hence they should not be clobbered by
2864 asm statements. */
2865 static bool
2866 asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
2867 {
2868 bool is_valid = true;
2869 HARD_REG_SET regset;
2870
2871 CLEAR_HARD_REG_SET (regset);
2872
2873 add_range_to_hard_reg_set (&regset, regno, nregs);
2874
2875 /* Clobbering the PIC register is an error. */
2876 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2877 && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
2878 {
2879 /* ??? Diagnose during gimplification? */
2880 error ("PIC register clobbered by %qs in %<asm%>", regname);
2881 is_valid = false;
2882 }
2883 else if (!in_hard_reg_set_p
2884 (accessible_reg_set, reg_raw_mode[regno], regno))
2885 {
2886 /* ??? Diagnose during gimplification? */
2887 error ("the register %qs cannot be clobbered in %<asm%>"
2888 " for the current target", regname);
2889 is_valid = false;
2890 }
2891
2892 /* Clobbering the stack pointer register is deprecated. GCC expects
2893 the value of the stack pointer after an asm statement to be the same
2894 as it was before, so no asm can validly clobber the stack pointer in
2895 the usual sense. Adding the stack pointer to the clobber list has
2896 traditionally had some undocumented and somewhat obscure side-effects. */
2897 if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM)
2898 && warning (OPT_Wdeprecated, "listing the stack pointer register"
2899 " %qs in a clobber list is deprecated", regname))
2900 inform (input_location, "the value of the stack pointer after an %<asm%>"
2901 " statement must be the same as it was before the statement");
2902
2903 return is_valid;
2904 }
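/* E.g. on x86-64 a clobber list naming the stack pointer, as in

     asm volatile ("..." ::: "rsp");

   draws the -Wdeprecated warning above, while naming the PIC register
   or a register that is inaccessible on the current target is a hard
   error.  */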
2905
2906 /* Generate RTL for an asm statement with arguments.
2907 STRING is the instruction template.
2908 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2909 Each output or input has an expression in the TREE_VALUE and
2910 a tree list in TREE_PURPOSE which in turn contains a constraint
2911 name in TREE_VALUE (or NULL_TREE) and a constraint string
2912 in TREE_PURPOSE.
2913 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2914 that is clobbered by this insn.
2915
2916 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2917 should be the fallthru basic block of the asm goto.
2918
2919 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2920 Some elements of OUTPUTS may be replaced with trees representing temporary
2921 values. The caller should copy those temporary values to the originally
2922 specified lvalues.
2923
2924 VOL nonzero means the insn is volatile; don't optimize it. */
2925
2926 static void
2927 expand_asm_stmt (gasm *stmt)
2928 {
2929 class save_input_location
2930 {
2931 location_t old;
2932
2933 public:
2934 explicit save_input_location(location_t where)
2935 {
2936 old = input_location;
2937 input_location = where;
2938 }
2939
2940 ~save_input_location()
2941 {
2942 input_location = old;
2943 }
2944 };
2945
2946 location_t locus = gimple_location (stmt);
2947
2948 if (gimple_asm_input_p (stmt))
2949 {
2950 const char *s = gimple_asm_string (stmt);
2951 tree string = build_string (strlen (s), s);
2952 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2953 return;
2954 }
2955
2956 /* There are some legacy diagnostics in here, and saving/restoring
2957 input_location also avoids a sixth parameter to targetm.md_asm_adjust. */
2958 save_input_location s_i_l(locus);
2959
2960 unsigned noutputs = gimple_asm_noutputs (stmt);
2961 unsigned ninputs = gimple_asm_ninputs (stmt);
2962 unsigned nlabels = gimple_asm_nlabels (stmt);
2963 unsigned i;
2964
2965 /* ??? Diagnose during gimplification? */
2966 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2967 {
2968 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2969 return;
2970 }
2971
2972 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2973 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2974 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2975
2976 /* Copy the gimple vectors into new vectors that we can manipulate. */
2977
2978 output_tvec.safe_grow (noutputs);
2979 input_tvec.safe_grow (ninputs);
2980 constraints.safe_grow (noutputs + ninputs);
2981
2982 for (i = 0; i < noutputs; ++i)
2983 {
2984 tree t = gimple_asm_output_op (stmt, i);
2985 output_tvec[i] = TREE_VALUE (t);
2986 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2987 }
2988 for (i = 0; i < ninputs; i++)
2989 {
2990 tree t = gimple_asm_input_op (stmt, i);
2991 input_tvec[i] = TREE_VALUE (t);
2992 constraints[i + noutputs]
2993 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2994 }
2995
2996 /* ??? Diagnose during gimplification? */
2997 if (! check_operand_nalternatives (constraints))
2998 return;
2999
3000 /* Count the number of meaningful clobbered registers, ignoring what
3001 we would ignore later. */
3002 auto_vec<rtx> clobber_rvec;
3003 HARD_REG_SET clobbered_regs;
3004 CLEAR_HARD_REG_SET (clobbered_regs);
3005
3006 if (unsigned n = gimple_asm_nclobbers (stmt))
3007 {
3008 clobber_rvec.reserve (n);
3009 for (i = 0; i < n; i++)
3010 {
3011 tree t = gimple_asm_clobber_op (stmt, i);
3012 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
3013 int nregs, j;
3014
3015 j = decode_reg_name_and_count (regname, &nregs);
3016 if (j < 0)
3017 {
3018 if (j == -2)
3019 {
3020 /* ??? Diagnose during gimplification? */
3021 error ("unknown register name %qs in %<asm%>", regname);
3022 }
3023 else if (j == -4)
3024 {
3025 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3026 clobber_rvec.safe_push (x);
3027 }
3028 else
3029 {
3030 /* Otherwise we should have -1 == empty string
3031 or -3 == cc, which is not a register. */
3032 gcc_assert (j == -1 || j == -3);
3033 }
3034 }
3035 else
3036 for (int reg = j; reg < j + nregs; reg++)
3037 {
3038 if (!asm_clobber_reg_is_valid (reg, nregs, regname))
3039 return;
3040
3041 SET_HARD_REG_BIT (clobbered_regs, reg);
3042 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
3043 clobber_rvec.safe_push (x);
3044 }
3045 }
3046 }
3047 unsigned nclobbers = clobber_rvec.length();
3048
3049 /* First pass over inputs and outputs checks validity and sets
3050 mark_addressable if needed. */
3051 /* ??? Diagnose during gimplification? */
3052
3053 for (i = 0; i < noutputs; ++i)
3054 {
3055 tree val = output_tvec[i];
3056 tree type = TREE_TYPE (val);
3057 const char *constraint;
3058 bool is_inout;
3059 bool allows_reg;
3060 bool allows_mem;
3061
3062 /* Try to parse the output constraint. If that fails, there's
3063 no point in going further. */
3064 constraint = constraints[i];
3065 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3066 &allows_mem, &allows_reg, &is_inout))
3067 return;
3068
3069 /* If the output is a hard register, verify it doesn't conflict with
3070 any other operand's possible hard register use. */
3071 if (DECL_P (val)
3072 && REG_P (DECL_RTL (val))
3073 && HARD_REGISTER_P (DECL_RTL (val)))
3074 {
3075 unsigned j, output_hregno = REGNO (DECL_RTL (val));
3076 bool early_clobber_p = strchr (constraints[i], '&') != NULL;
3077 unsigned long match;
3078
3079 /* Verify the other outputs do not use the same hard register. */
3080 for (j = i + 1; j < noutputs; ++j)
3081 if (DECL_P (output_tvec[j])
3082 && REG_P (DECL_RTL (output_tvec[j]))
3083 && HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
3084 && output_hregno == REGNO (DECL_RTL (output_tvec[j])))
3085 error ("invalid hard register usage between output operands");
3086
3087 /* Verify matching constraint operands use the same hard register
3088 and that the non-matching constraint operands do not use the same
3089 hard register if the output is an early clobber operand. */
3090 for (j = 0; j < ninputs; ++j)
3091 if (DECL_P (input_tvec[j])
3092 && REG_P (DECL_RTL (input_tvec[j]))
3093 && HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
3094 {
3095 unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
3096 switch (*constraints[j + noutputs])
3097 {
3098 case '0': case '1': case '2': case '3': case '4':
3099 case '5': case '6': case '7': case '8': case '9':
3100 match = strtoul (constraints[j + noutputs], NULL, 10);
3101 break;
3102 default:
3103 match = ULONG_MAX;
3104 break;
3105 }
3106 if (i == match
3107 && output_hregno != input_hregno)
3108 error ("invalid hard register usage between output operand "
3109 "and matching constraint operand");
3110 else if (early_clobber_p
3111 && i != match
3112 && output_hregno == input_hregno)
3113 error ("invalid hard register usage between earlyclobber "
3114 "operand and input operand");
3115 }
3116 }
3117
3118 if (! allows_reg
3119 && (allows_mem
3120 || is_inout
3121 || (DECL_P (val)
3122 && REG_P (DECL_RTL (val))
3123 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3124 mark_addressable (val);
3125 }
3126
3127 for (i = 0; i < ninputs; ++i)
3128 {
3129 bool allows_reg, allows_mem;
3130 const char *constraint;
3131
3132 constraint = constraints[i + noutputs];
3133 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3134 constraints.address (),
3135 &allows_mem, &allows_reg))
3136 return;
3137
3138 if (! allows_reg && allows_mem)
3139 mark_addressable (input_tvec[i]);
3140 }
3141
3142 /* Second pass evaluates arguments. */
3143
3144 /* Make sure stack is consistent for asm goto. */
3145 if (nlabels > 0)
3146 do_pending_stack_adjust ();
3147 int old_generating_concat_p = generating_concat_p;
3148
3149 /* Vector of RTX's of evaluated output operands. */
3150 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3151 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3152 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3153
3154 output_rvec.safe_grow (noutputs);
3155
3156 for (i = 0; i < noutputs; ++i)
3157 {
3158 tree val = output_tvec[i];
3159 tree type = TREE_TYPE (val);
3160 bool is_inout, allows_reg, allows_mem, ok;
3161 rtx op;
3162
3163 ok = parse_output_constraint (&constraints[i], i, ninputs,
3164 noutputs, &allows_mem, &allows_reg,
3165 &is_inout);
3166 gcc_assert (ok);
3167
3168 /* If an output operand is not a decl or indirect ref and our constraint
3169 allows a register, make a temporary to act as an intermediate.
3170 Make the asm insn write into that, then we will copy it to
3171 the real output operand. Likewise for promoted variables. */
3172
3173 generating_concat_p = 0;
3174
3175 if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3176 || (DECL_P (val)
3177 && (allows_mem || REG_P (DECL_RTL (val)))
3178 && ! (REG_P (DECL_RTL (val))
3179 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3180 || ! allows_reg
3181 || is_inout
3182 || TREE_ADDRESSABLE (type))
3183 {
3184 op = expand_expr (val, NULL_RTX, VOIDmode,
3185 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3186 if (MEM_P (op))
3187 op = validize_mem (op);
3188
3189 if (! allows_reg && !MEM_P (op))
3190 error ("output number %d not directly addressable", i);
3191 if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3192 || GET_CODE (op) == CONCAT)
3193 {
3194 rtx old_op = op;
3195 op = gen_reg_rtx (GET_MODE (op));
3196
3197 generating_concat_p = old_generating_concat_p;
3198
3199 if (is_inout)
3200 emit_move_insn (op, old_op);
3201
3202 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3203 emit_move_insn (old_op, op);
3204 after_rtl_seq = get_insns ();
3205 after_rtl_end = get_last_insn ();
3206 end_sequence ();
3207 }
3208 }
3209 else
3210 {
3211 op = assign_temp (type, 0, 1);
3212 op = validize_mem (op);
3213 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3214 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3215
3216 generating_concat_p = old_generating_concat_p;
3217
3218 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3219 expand_assignment (val, make_tree (type, op), false);
3220 after_rtl_seq = get_insns ();
3221 after_rtl_end = get_last_insn ();
3222 end_sequence ();
3223 }
3224 output_rvec[i] = op;
3225
3226 if (is_inout)
3227 inout_opnum.safe_push (i);
3228 }
3229
3230 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3231 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3232
3233 input_rvec.safe_grow (ninputs);
3234 input_mode.safe_grow (ninputs);
3235
3236 generating_concat_p = 0;
3237
3238 for (i = 0; i < ninputs; ++i)
3239 {
3240 tree val = input_tvec[i];
3241 tree type = TREE_TYPE (val);
3242 bool allows_reg, allows_mem, ok;
3243 const char *constraint;
3244 rtx op;
3245
3246 constraint = constraints[i + noutputs];
3247 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3248 constraints.address (),
3249 &allows_mem, &allows_reg);
3250 gcc_assert (ok);
3251
3252 /* EXPAND_INITIALIZER will not generate code for valid initializer
3253 constants, but will still generate code for other types of operand.
3254 This is the behavior we want for constant constraints. */
3255 op = expand_expr (val, NULL_RTX, VOIDmode,
3256 allows_reg ? EXPAND_NORMAL
3257 : allows_mem ? EXPAND_MEMORY
3258 : EXPAND_INITIALIZER);
3259
3260 /* Never pass a CONCAT to an ASM. */
3261 if (GET_CODE (op) == CONCAT)
3262 op = force_reg (GET_MODE (op), op);
3263 else if (MEM_P (op))
3264 op = validize_mem (op);
3265
3266 if (asm_operand_ok (op, constraint, NULL) <= 0)
3267 {
3268 if (allows_reg && TYPE_MODE (type) != BLKmode)
3269 op = force_reg (TYPE_MODE (type), op);
3270 else if (!allows_mem)
3271 warning (0, "%<asm%> operand %d probably does not match "
3272 "constraints",
3273 i + noutputs);
3274 else if (MEM_P (op))
3275 {
3276 /* We won't recognize either volatile memory or memory
3277 with a queued address as an available memory_operand
3278 at this point. Ignore it: clearly this *is* memory. */
3279 }
3280 else
3281 gcc_unreachable ();
3282 }
3283 input_rvec[i] = op;
3284 input_mode[i] = TYPE_MODE (type);
3285 }
3286
3287 /* For in-out operands, copy output rtx to input rtx. */
3288 unsigned ninout = inout_opnum.length();
3289 for (i = 0; i < ninout; i++)
3290 {
3291 int j = inout_opnum[i];
3292 rtx o = output_rvec[j];
3293
3294 input_rvec.safe_push (o);
3295 input_mode.safe_push (GET_MODE (o));
3296
3297 char buffer[16];
3298 sprintf (buffer, "%d", j);
3299 constraints.safe_push (ggc_strdup (buffer));
3300 }
3301 ninputs += ninout;
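/* E.g. an in-out operand written "+r" (x) in source ends up as an
   output with constraint "=r" plus a matching input whose constraint
   is the decimal operand number ("0" for the first output), tying
   that input to the same location as the output; the loop above
   performs the duplication for any in-out operands still seen
   here.  */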
3302
3303 /* Sometimes we wish to automatically clobber registers across an asm.
3304 Case in point is when the i386 backend moved from cc0 to a hard reg --
3305 maintaining source-level compatibility means automatically clobbering
3306 the flags register. */
3307 rtx_insn *after_md_seq = NULL;
3308 if (targetm.md_asm_adjust)
3309 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3310 constraints, clobber_rvec,
3311 clobbered_regs);
3312
3313 /* Do not allow the hook to change the output and input count,
3314 lest it mess up the operand numbering. */
3315 gcc_assert (output_rvec.length() == noutputs);
3316 gcc_assert (input_rvec.length() == ninputs);
3317 gcc_assert (constraints.length() == noutputs + ninputs);
3318
3319 /* But it certainly can adjust the clobbers. */
3320 nclobbers = clobber_rvec.length();
3321
3322 /* Third pass checks for easy conflicts. */
3323 /* ??? Why are we doing this on trees instead of rtx? */
3324
3325 bool clobber_conflict_found = false;
3326 for (i = 0; i < noutputs; ++i)
3327 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3328 clobber_conflict_found = true;
3329 for (i = 0; i < ninputs - ninout; ++i)
3330 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3331 clobber_conflict_found = true;
3332
3333 /* Make vectors for the expression-rtx, constraint strings,
3334 and named operands. */
3335
3336 rtvec argvec = rtvec_alloc (ninputs);
3337 rtvec constraintvec = rtvec_alloc (ninputs);
3338 rtvec labelvec = rtvec_alloc (nlabels);
3339
3340 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3341 : GET_MODE (output_rvec[0])),
3342 ggc_strdup (gimple_asm_string (stmt)),
3343 "", 0, argvec, constraintvec,
3344 labelvec, locus);
3345 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3346
3347 for (i = 0; i < ninputs; ++i)
3348 {
3349 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3350 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3351 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3352 constraints[i + noutputs],
3353 locus);
3354 }
3355
3356 /* Copy labels to the vector. */
3357 rtx_code_label *fallthru_label = NULL;
3358 if (nlabels > 0)
3359 {
3360 basic_block fallthru_bb = NULL;
3361 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3362 if (fallthru)
3363 fallthru_bb = fallthru->dest;
3364
3365 for (i = 0; i < nlabels; ++i)
3366 {
3367 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3368 rtx_insn *r;
3369 /* If asm goto has any labels in the fallthru basic block, use
3370 a label that we emit immediately after the asm goto. Expansion
3371 may insert further instructions into the same basic block after
3372 asm goto and if we don't do this, insertion of instructions on
3373 the fallthru edge might misbehave. See PR58670. */
3374 if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
3375 {
3376 if (fallthru_label == NULL_RTX)
3377 fallthru_label = gen_label_rtx ();
3378 r = fallthru_label;
3379 }
3380 else
3381 r = label_rtx (label);
3382 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3383 }
3384 }
3385
3386 /* Now, for each output, construct an rtx
3387 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3388 ARGVEC CONSTRAINTS OPNAMES))
3389 If there is more than one, put them inside a PARALLEL. */
3390
3391 if (nlabels > 0 && nclobbers == 0)
3392 {
3393 gcc_assert (noutputs == 0);
3394 emit_jump_insn (body);
3395 }
3396 else if (noutputs == 0 && nclobbers == 0)
3397 {
3398 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3399 emit_insn (body);
3400 }
3401 else if (noutputs == 1 && nclobbers == 0)
3402 {
3403 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3404 emit_insn (gen_rtx_SET (output_rvec[0], body));
3405 }
3406 else
3407 {
3408 rtx obody = body;
3409 int num = noutputs;
3410
3411 if (num == 0)
3412 num = 1;
3413
3414 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3415
3416 /* For each output operand, store a SET. */
3417 for (i = 0; i < noutputs; ++i)
3418 {
3419 rtx src, o = output_rvec[i];
3420 if (i == 0)
3421 {
3422 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3423 src = obody;
3424 }
3425 else
3426 {
3427 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3428 ASM_OPERANDS_TEMPLATE (obody),
3429 constraints[i], i, argvec,
3430 constraintvec, labelvec, locus);
3431 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3432 }
3433 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3434 }
3435
3436 /* If there are no outputs (but there are some clobbers)
3437 store the bare ASM_OPERANDS into the PARALLEL. */
3438 if (i == 0)
3439 XVECEXP (body, 0, i++) = obody;
3440
3441 /* Store (clobber REG) for each clobbered register specified. */
3442 for (unsigned j = 0; j < nclobbers; ++j)
3443 {
3444 rtx clobbered_reg = clobber_rvec[j];
3445
3446 /* Do a sanity check for any overlap between clobbers and,
3447 respectively, inputs and outputs that hasn't been handled.
3448 Such overlap should have been detected and reported above. */
3449 if (!clobber_conflict_found && REG_P (clobbered_reg))
3450 {
3451 /* We test the old body (obody) contents to avoid
3452 tripping over the under-construction body. */
3453 for (unsigned k = 0; k < noutputs; ++k)
3454 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3455 internal_error ("%<asm%> clobber conflict with "
3456 "output operand");
3457
3458 for (unsigned k = 0; k < ninputs - ninout; ++k)
3459 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3460 internal_error ("%<asm%> clobber conflict with "
3461 "input operand");
3462 }
3463
3464 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3465 }
3466
3467 if (nlabels > 0)
3468 emit_jump_insn (body);
3469 else
3470 emit_insn (body);
3471 }
3472
3473 generating_concat_p = old_generating_concat_p;
3474
3475 if (fallthru_label)
3476 emit_label (fallthru_label);
3477
3478 if (after_md_seq)
3479 emit_insn (after_md_seq);
3480 if (after_rtl_seq)
3481 emit_insn (after_rtl_seq);
3482
3483 free_temp_slots ();
3484 crtl->has_asm_statement = 1;
3485 }
3486
3487 /* Emit code to jump to the address
3488 specified by the pointer expression EXP. */
3489
3490 static void
3491 expand_computed_goto (tree exp)
3492 {
3493 rtx x = expand_normal (exp);
3494
3495 do_pending_stack_adjust ();
3496 emit_indirect_jump (x);
3497 }
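/* E.g. the GNU labels-as-values extension

     void *targets[2] = { &&l0, &&l1 };
     goto *targets[i];

   funnels through here: the address expression is evaluated into a
   register and a single indirect jump is emitted.  */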
3498
3499 /* Generate RTL code for a `goto' statement with target label LABEL.
3500 LABEL should be a LABEL_DECL tree node that was or will later be
3501 defined with `expand_label'. */
3502
3503 static void
3504 expand_goto (tree label)
3505 {
3506 if (flag_checking)
3507 {
3508 /* Check for a nonlocal goto to a containing function. Should have
3509 gotten translated to __builtin_nonlocal_goto. */
3510 tree context = decl_function_context (label);
3511 gcc_assert (!context || context == current_function_decl);
3512 }
3513
3514 emit_jump (jump_target_rtx (label));
3515 }
3516
3517 /* Output a return with no value. */
3518
3519 static void
3520 expand_null_return_1 (void)
3521 {
3522 clear_pending_stack_adjust ();
3523 do_pending_stack_adjust ();
3524 emit_jump (return_label);
3525 }
3526
3527 /* Generate RTL to return from the current function, with no value.
3528 (That is, we do not do anything about returning any value.) */
3529
3530 void
3531 expand_null_return (void)
3532 {
3533 /* If this function was declared to return a value, but we
3534 didn't, clobber the return registers so that they are not
3535 propagated live to the rest of the function. */
3536 clobber_return_register ();
3537
3538 expand_null_return_1 ();
3539 }
3540
3541 /* Generate RTL to return from the current function, with value VAL. */
3542
3543 static void
3544 expand_value_return (rtx val)
3545 {
3546 /* Copy the value to the return location unless it's already there. */
3547
3548 tree decl = DECL_RESULT (current_function_decl);
3549 rtx return_reg = DECL_RTL (decl);
3550 if (return_reg != val)
3551 {
3552 tree funtype = TREE_TYPE (current_function_decl);
3553 tree type = TREE_TYPE (decl);
3554 int unsignedp = TYPE_UNSIGNED (type);
3555 machine_mode old_mode = DECL_MODE (decl);
3556 machine_mode mode;
3557 if (DECL_BY_REFERENCE (decl))
3558 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3559 else
3560 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3561
3562 if (mode != old_mode)
3563 val = convert_modes (mode, old_mode, val, unsignedp);
3564
3565 if (GET_CODE (return_reg) == PARALLEL)
3566 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3567 else
3568 emit_move_insn (return_reg, val);
3569 }
3570
3571 expand_null_return_1 ();
3572 }
3573
3574 /* Generate RTL to evaluate the expression RETVAL and return it
3575 from the current function. */
3576
3577 static void
3578 expand_return (tree retval)
3579 {
3580 rtx result_rtl;
3581 rtx val = 0;
3582 tree retval_rhs;
3583
3584 /* If function wants no value, give it none. */
3585 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3586 {
3587 expand_normal (retval);
3588 expand_null_return ();
3589 return;
3590 }
3591
3592 if (retval == error_mark_node)
3593 {
3594 /* Treat this like a return of no value from a function that
3595 returns a value. */
3596 expand_null_return ();
3597 return;
3598 }
3599 else if ((TREE_CODE (retval) == MODIFY_EXPR
3600 || TREE_CODE (retval) == INIT_EXPR)
3601 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3602 retval_rhs = TREE_OPERAND (retval, 1);
3603 else
3604 retval_rhs = retval;
3605
3606 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3607
3608 /* If we are returning the RESULT_DECL, then the value has already
3609 been stored into it, so we don't have to do anything special. */
3610 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3611 expand_value_return (result_rtl);
3612
3613 /* If the result is an aggregate that is being returned in one (or more)
3614 registers, load the registers here. */
3615
3616 else if (retval_rhs != 0
3617 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3618 && REG_P (result_rtl))
3619 {
3620 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3621 if (val)
3622 {
3623 /* Use the mode of the result value on the return register. */
3624 PUT_MODE (result_rtl, GET_MODE (val));
3625 expand_value_return (val);
3626 }
3627 else
3628 expand_null_return ();
3629 }
3630 else if (retval_rhs != 0
3631 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3632 && (REG_P (result_rtl)
3633 || (GET_CODE (result_rtl) == PARALLEL)))
3634 {
3635 /* Compute the return value into a temporary (usually a pseudo reg). */
3636 val
3637 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3638 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3639 val = force_not_mem (val);
3640 expand_value_return (val);
3641 }
3642 else
3643 {
3644 /* No hard reg used; calculate value into hard return reg. */
3645 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3646 expand_value_return (result_rtl);
3647 }
3648 }
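
/* An illustrative case (assuming a target that returns small
   aggregates in registers): for

       struct s { char c[3]; };
       struct s f (void) { struct s r = { "ab" }; return r; }

   TYPE_MODE of the return type is BLKmode while the result rtl is a
   REG, so the copy_blkmode_to_reg arm above assembles the bytes into
   a pseudo of the return register's mode. */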
3649
3650 /* Expand a clobber of LHS. If LHS is stored in a multi-part
3651 register, tell the rtl optimizers that its value is no longer
3652 needed. */
3653
3654 static void
3655 expand_clobber (tree lhs)
3656 {
3657 if (DECL_P (lhs))
3658 {
3659 rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3660 if (decl_rtl && REG_P (decl_rtl))
3661 {
3662 machine_mode decl_mode = GET_MODE (decl_rtl);
3663 if (maybe_gt (GET_MODE_SIZE (decl_mode),
3664 REGMODE_NATURAL_SIZE (decl_mode)))
3665 emit_clobber (decl_rtl);
3666 }
3667 }
3668 }
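
/* An illustrative case (hypothetical target sizes): if a 128-bit
   local lives in a register pair whose mode is wider than
   REGMODE_NATURAL_SIZE, the clobber emitted when it goes out of
   scope lets the RTL optimizers treat both halves as dead. */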
3669
3670 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3671 STMT that doesn't require special handling for outgoing edges.
3672 That is, no tailcalls and no GIMPLE_COND. */
3673
3674 static void
3675 expand_gimple_stmt_1 (gimple *stmt)
3676 {
3677 tree op0;
3678
3679 set_curr_insn_location (gimple_location (stmt));
3680
3681 switch (gimple_code (stmt))
3682 {
3683 case GIMPLE_GOTO:
3684 op0 = gimple_goto_dest (stmt);
3685 if (TREE_CODE (op0) == LABEL_DECL)
3686 expand_goto (op0);
3687 else
3688 expand_computed_goto (op0);
3689 break;
3690 case GIMPLE_LABEL:
3691 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3692 break;
3693 case GIMPLE_NOP:
3694 case GIMPLE_PREDICT:
3695 break;
3696 case GIMPLE_SWITCH:
3697 {
3698 gswitch *swtch = as_a <gswitch *> (stmt);
3699 if (gimple_switch_num_labels (swtch) == 1)
3700 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3701 else
3702 expand_case (swtch);
3703 }
3704 break;
3705 case GIMPLE_ASM:
3706 expand_asm_stmt (as_a <gasm *> (stmt));
3707 break;
3708 case GIMPLE_CALL:
3709 expand_call_stmt (as_a <gcall *> (stmt));
3710 break;
3711
3712 case GIMPLE_RETURN:
3713 {
3714 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3715
3716 if (op0 && op0 != error_mark_node)
3717 {
3718 tree result = DECL_RESULT (current_function_decl);
3719
3720 /* If we are not returning the current function's RESULT_DECL,
3721 build an assignment to it. */
3722 if (op0 != result)
3723 {
3724 /* I believe that a function's RESULT_DECL is unique. */
3725 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3726
3727 /* ??? We'd like to use simply expand_assignment here,
3728 but this fails if the value is of BLKmode but the return
3729 decl is a register. expand_return has special handling
3730 for this combination, which eventually should move
3731 to common code. See comments there. Until then, let's
3732 build a modify expression :-/ */
3733 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3734 result, op0);
3735 }
3736 }
3737
3738 if (!op0)
3739 expand_null_return ();
3740 else
3741 expand_return (op0);
3742 }
3743 break;
3744
3745 case GIMPLE_ASSIGN:
3746 {
3747 gassign *assign_stmt = as_a <gassign *> (stmt);
3748 tree lhs = gimple_assign_lhs (assign_stmt);
3749
3750 /* Tree expand used to fiddle with |= and &= of two bitfield
3751 COMPONENT_REFs here. This can't happen with gimple; the LHS
3752 of binary assigns must be a gimple reg. */
3753
3754 if (TREE_CODE (lhs) != SSA_NAME
3755 || get_gimple_rhs_class (gimple_expr_code (stmt))
3756 == GIMPLE_SINGLE_RHS)
3757 {
3758 tree rhs = gimple_assign_rhs1 (assign_stmt);
3759 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3760 == GIMPLE_SINGLE_RHS);
3761 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3762 /* Do not put locations on possibly shared trees. */
3763 && !is_gimple_min_invariant (rhs))
3764 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3765 if (TREE_CLOBBER_P (rhs))
3766 /* This is a clobber to mark the going out of scope for
3767 this LHS. */
3768 expand_clobber (lhs);
3769 else
3770 expand_assignment (lhs, rhs,
3771 gimple_assign_nontemporal_move_p (
3772 assign_stmt));
3773 }
3774 else
3775 {
3776 rtx target, temp;
3777 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3778 struct separate_ops ops;
3779 bool promoted = false;
3780
3781 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3782 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3783 promoted = true;
3784
3785 ops.code = gimple_assign_rhs_code (assign_stmt);
3786 ops.type = TREE_TYPE (lhs);
3787 switch (get_gimple_rhs_class (ops.code))
3788 {
3789 case GIMPLE_TERNARY_RHS:
3790 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3791 /* Fallthru */
3792 case GIMPLE_BINARY_RHS:
3793 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3794 /* Fallthru */
3795 case GIMPLE_UNARY_RHS:
3796 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3797 break;
3798 default:
3799 gcc_unreachable ();
3800 }
3801 ops.location = gimple_location (stmt);
3802
3803 /* If we want to use a nontemporal store, force the value to
3804 register first. If we store into a promoted register,
3805 don't directly expand to target. */
3806 temp = nontemporal || promoted ? NULL_RTX : target;
3807 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3808 EXPAND_NORMAL);
3809
3810 if (temp == target)
3811 ;
3812 else if (promoted)
3813 {
3814 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3815 /* If TEMP is a VOIDmode constant, use convert_modes to make
3816 sure that we properly convert it. */
3817 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3818 {
3819 temp = convert_modes (GET_MODE (target),
3820 TYPE_MODE (ops.type),
3821 temp, unsignedp);
3822 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3823 GET_MODE (target), temp, unsignedp);
3824 }
3825
3826 convert_move (SUBREG_REG (target), temp, unsignedp);
3827 }
3828 else if (nontemporal && emit_storent_insn (target, temp))
3829 ;
3830 else
3831 {
3832 temp = force_operand (temp, target);
3833 if (temp != target)
3834 emit_move_insn (target, temp);
3835 }
3836 }
3837 }
3838 break;
3839
3840 default:
3841 gcc_unreachable ();
3842 }
3843 }
3844
3845 /* Expand one gimple statement STMT and return the last RTL instruction
3846 before any of the newly generated ones.
3847
3848 In addition to generating the necessary RTL instructions this also
3849 sets REG_EH_REGION notes if necessary and sets the current source
3850 location for diagnostics. */
3851
3852 static rtx_insn *
3853 expand_gimple_stmt (gimple *stmt)
3854 {
3855 location_t saved_location = input_location;
3856 rtx_insn *last = get_last_insn ();
3857 int lp_nr;
3858
3859 gcc_assert (cfun);
3860
3861 /* We need to save and restore the current source location so that errors
3862 discovered during expansion are emitted with the right location. But
3863 it would be better if the diagnostic routines used the source location
3864 embedded in the tree nodes rather than globals. */
3865 if (gimple_has_location (stmt))
3866 input_location = gimple_location (stmt);
3867
3868 expand_gimple_stmt_1 (stmt);
3869
3870 /* Free any temporaries used to evaluate this statement. */
3871 free_temp_slots ();
3872
3873 input_location = saved_location;
3874
3875 /* Mark all insns that may trap. */
3876 lp_nr = lookup_stmt_eh_lp (stmt);
3877 if (lp_nr)
3878 {
3879 rtx_insn *insn;
3880 for (insn = next_real_insn (last); insn;
3881 insn = next_real_insn (insn))
3882 {
3883 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3884 /* If we want exceptions for non-call insns, any
3885 may_trap_p instruction may throw. */
3886 && GET_CODE (PATTERN (insn)) != CLOBBER
3887 && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH
3888 && GET_CODE (PATTERN (insn)) != USE
3889 && insn_could_throw_p (insn))
3890 make_reg_eh_region_note (insn, 0, lp_nr);
3891 }
3892 }
3893
3894 return last;
3895 }
3896
3897 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3898 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3899 generated a tail call (something that might be denied by the ABI
3900 rules governing the call; see calls.c).
3901
3902 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3903 can still reach the rest of BB. The case here is __builtin_sqrt,
3904 where the NaN result goes through the external function (with a
3905 tailcall) and the normal result happens via a sqrt instruction. */
3906
3907 static basic_block
3908 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3909 {
3910 rtx_insn *last2, *last;
3911 edge e;
3912 edge_iterator ei;
3913 profile_probability probability;
3914
3915 last2 = last = expand_gimple_stmt (stmt);
3916
3917 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3918 if (CALL_P (last) && SIBLING_CALL_P (last))
3919 goto found;
3920
3921 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3922
3923 *can_fallthru = true;
3924 return NULL;
3925
3926 found:
3927 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3928 Any instructions emitted here are about to be deleted. */
3929 do_pending_stack_adjust ();
3930
3931 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3932 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3933 EH or abnormal edges, we shouldn't have created a tail call in
3934 the first place. So it seems to me we should just be removing
3935 all edges here, or redirecting the existing fallthru edge to
3936 the exit block. */
3937
3938 probability = profile_probability::never ();
3939
3940 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3941 {
3942 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3943 {
3944 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3945 e->dest->count -= e->count ();
3946 probability += e->probability;
3947 remove_edge (e);
3948 }
3949 else
3950 ei_next (&ei);
3951 }
3952
3953 /* This is somewhat ugly: the call_expr expander often emits instructions
3954 after the sibcall (to perform the function return). These confuse the
3955 find_many_sub_basic_blocks code, so we need to get rid of them. */
3956 last = NEXT_INSN (last);
3957 gcc_assert (BARRIER_P (last));
3958
3959 *can_fallthru = false;
3960 while (NEXT_INSN (last))
3961 {
3962 /* For instance, the sqrt builtin expander may expand an if with a
3963 sibcall in the `then' arm and a label for the `else' arm. */
3964 if (LABEL_P (NEXT_INSN (last)))
3965 {
3966 *can_fallthru = true;
3967 break;
3968 }
3969 delete_insn (NEXT_INSN (last));
3970 }
3971
3972 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3973 | EDGE_SIBCALL);
3974 e->probability = probability;
3975 BB_END (bb) = last;
3976 update_bb_for_insn (bb);
3977
3978 if (NEXT_INSN (last))
3979 {
3980 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3981
3982 last = BB_END (bb);
3983 if (BARRIER_P (last))
3984 BB_END (bb) = PREV_INSN (last);
3985 }
3986
3987 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3988
3989 return bb;
3990 }
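
/* A sketch of the conditional-tailcall shape described above
   (hedged; the exact RTL is target-specific): for `return sqrt (x);'
   on a machine with a hardware sqrt insn, expansion may produce

       reg = sqrt-insn (x)
       if (reg is NaN) sibcall sqrt    ; let libm handle errno
     label:                            ; normal result falls through

   so the block both contains a sibcall and can still reach the
   following code, which is the *CAN_FALLTHRU = true case handled
   above. */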
3991
3992 /* Return the difference between the floor and the truncated result of
3993 a signed division by OP1 with remainder MOD. */
3994 static rtx
3995 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3996 {
3997 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3998 return gen_rtx_IF_THEN_ELSE
3999 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4000 gen_rtx_IF_THEN_ELSE
4001 (mode, gen_rtx_LT (BImode,
4002 gen_rtx_DIV (mode, op1, mod),
4003 const0_rtx),
4004 constm1_rtx, const0_rtx),
4005 const0_rtx);
4006 }
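
/* Worked example: -7 / 2 truncates to -3 with MOD = -1; OP1 / MOD
   = 2 / -1 = -2 < 0, so the adjustment is -1 and -3 + -1 = -4
   = floor (-7 / 2). */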
4007
4008 /* Return the difference between the ceil and the truncated result of
4009 a signed division by OP1 with remainder MOD. */
4010 static rtx
4011 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4012 {
4013 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
4014 return gen_rtx_IF_THEN_ELSE
4015 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4016 gen_rtx_IF_THEN_ELSE
4017 (mode, gen_rtx_GT (BImode,
4018 gen_rtx_DIV (mode, op1, mod),
4019 const0_rtx),
4020 const1_rtx, const0_rtx),
4021 const0_rtx);
4022 }
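
/* Worked example: 7 / 2 truncates to 3 with MOD = 1; OP1 / MOD
   = 2 / 1 = 2 > 0, so the adjustment is 1 and 3 + 1 = 4
   = ceil (7 / 2). For -7 / 2, OP1 / MOD = -2, so no adjustment
   is needed. */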
4023
4024 /* Return the difference between the ceil and the truncated result of
4025 an unsigned division by OP1 with remainder MOD. */
4026 static rtx
4027 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
4028 {
4029 /* (mod != 0 ? 1 : 0) */
4030 return gen_rtx_IF_THEN_ELSE
4031 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4032 const1_rtx, const0_rtx);
4033 }
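
/* Worked example: 7u / 2u truncates to 3 with MOD = 1 != 0, so the
   adjustment is 1 and 3 + 1 = 4 = ceil (7u / 2u). */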
4034
4035 /* Return the difference between the rounded and the truncated result
4036 of a signed division by OP1 with remainder MOD. Halfway cases are
4037 rounded away from zero, rather than to the nearest even number. */
4038 static rtx
4039 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4040 {
4041 /* (abs (mod) >= abs (op1) - abs (mod)
4042 ? (op1 / mod > 0 ? 1 : -1)
4043 : 0) */
4044 return gen_rtx_IF_THEN_ELSE
4045 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
4046 gen_rtx_MINUS (mode,
4047 gen_rtx_ABS (mode, op1),
4048 gen_rtx_ABS (mode, mod))),
4049 gen_rtx_IF_THEN_ELSE
4050 (mode, gen_rtx_GT (BImode,
4051 gen_rtx_DIV (mode, op1, mod),
4052 const0_rtx),
4053 const1_rtx, constm1_rtx),
4054 const0_rtx);
4055 }
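
/* Worked example: 7 / 2 truncates to 3 with MOD = 1; abs (MOD) = 1
   >= abs (OP1) - abs (MOD) = 1 and OP1 / MOD = 2 > 0, so the
   adjustment is 1 and 3 + 1 = 4, i.e. 3.5 rounded away from zero. */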
4056
4057 /* Return the difference between the rounded and the truncated result
4058 of an unsigned division by OP1 with remainder MOD. Halfway cases
4059 are rounded away from zero, rather than to the nearest even
4060 number. */
4061 static rtx
4062 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4063 {
4064 /* (mod >= op1 - mod ? 1 : 0) */
4065 return gen_rtx_IF_THEN_ELSE
4066 (mode, gen_rtx_GE (BImode, mod,
4067 gen_rtx_MINUS (mode, op1, mod)),
4068 const1_rtx, const0_rtx);
4069 }
4070
4071 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
4072 any rtl. */
4073
4074 static rtx
4075 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4076 addr_space_t as)
4077 {
4078 #ifndef POINTERS_EXTEND_UNSIGNED
4079 gcc_assert (mode == Pmode
4080 || mode == targetm.addr_space.address_mode (as));
4081 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4082 #else
4083 rtx temp;
4084
4085 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4086
4087 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4088 return x;
4089
4090 /* X must have some form of address mode already. */
4091 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4092 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4093 x = lowpart_subreg (mode, x, xmode);
4094 else if (POINTERS_EXTEND_UNSIGNED > 0)
4095 x = gen_rtx_ZERO_EXTEND (mode, x);
4096 else if (!POINTERS_EXTEND_UNSIGNED)
4097 x = gen_rtx_SIGN_EXTEND (mode, x);
4098 else
4099 {
4100 switch (GET_CODE (x))
4101 {
4102 case SUBREG:
4103 if ((SUBREG_PROMOTED_VAR_P (x)
4104 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4105 || (GET_CODE (SUBREG_REG (x)) == PLUS
4106 && REG_P (XEXP (SUBREG_REG (x), 0))
4107 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4108 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4109 && GET_MODE (SUBREG_REG (x)) == mode)
4110 return SUBREG_REG (x);
4111 break;
4112 case LABEL_REF:
4113 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4114 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4115 return temp;
4116 case SYMBOL_REF:
4117 temp = shallow_copy_rtx (x);
4118 PUT_MODE (temp, mode);
4119 return temp;
4120 case CONST:
4121 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4122 if (temp)
4123 temp = gen_rtx_CONST (mode, temp);
4124 return temp;
4125 case PLUS:
4126 case MINUS:
4127 if (CONST_INT_P (XEXP (x, 1)))
4128 {
4129 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4130 if (temp)
4131 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4132 }
4133 break;
4134 default:
4135 break;
4136 }
4137 /* Don't know how to express ptr_extend as an operation in debug info. */
4138 return NULL;
4139 }
4140 #endif /* POINTERS_EXTEND_UNSIGNED */
4141
4142 return x;
4143 }
4144
4145 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4146 by avoid_deep_ter_for_debug. */
4147
4148 static hash_map<tree, tree> *deep_ter_debug_map;
4149
4150 /* Split overly deep TER chains for debug stmts using debug temporaries. */
4151
4152 static void
4153 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4154 {
4155 use_operand_p use_p;
4156 ssa_op_iter iter;
4157 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4158 {
4159 tree use = USE_FROM_PTR (use_p);
4160 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4161 continue;
4162 gimple *g = get_gimple_for_ssa_name (use);
4163 if (g == NULL)
4164 continue;
4165 if (depth > 6 && !stmt_ends_bb_p (g))
4166 {
4167 if (deep_ter_debug_map == NULL)
4168 deep_ter_debug_map = new hash_map<tree, tree>;
4169
4170 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4171 if (vexpr != NULL)
4172 continue;
4173 vexpr = make_node (DEBUG_EXPR_DECL);
4174 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4175 DECL_ARTIFICIAL (vexpr) = 1;
4176 TREE_TYPE (vexpr) = TREE_TYPE (use);
4177 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4178 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4179 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4180 avoid_deep_ter_for_debug (def_temp, 0);
4181 }
4182 else
4183 avoid_deep_ter_for_debug (g, depth + 1);
4184 }
4185 }
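
/* An illustrative chain (hypothetical SSA names): if _1 = a + b,
   _2 = _1 * c, ..., _8 = _7 - d are all single-use and TERed, a
   debug stmt using _8 would otherwise expand into a tree nested
   eight levels deep. Once DEPTH exceeds 6, a DEBUG_EXPR_DECL is
   bound to the offending use right after its definition and the
   chain restarts from depth 0 there. */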
4186
4187 /* Return an RTX equivalent to the value of the parameter DECL. */
4188
4189 static rtx
4190 expand_debug_parm_decl (tree decl)
4191 {
4192 rtx incoming = DECL_INCOMING_RTL (decl);
4193
4194 if (incoming
4195 && GET_MODE (incoming) != BLKmode
4196 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4197 || (MEM_P (incoming)
4198 && REG_P (XEXP (incoming, 0))
4199 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4200 {
4201 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4202
4203 #ifdef HAVE_window_save
4204 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4205 If the target machine has an explicit window save instruction, the
4206 actual entry value is the corresponding OUTGOING_REGNO instead. */
4207 if (REG_P (incoming)
4208 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4209 incoming
4210 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4211 OUTGOING_REGNO (REGNO (incoming)), 0);
4212 else if (MEM_P (incoming))
4213 {
4214 rtx reg = XEXP (incoming, 0);
4215 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4216 {
4217 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4218 incoming = replace_equiv_address_nv (incoming, reg);
4219 }
4220 else
4221 incoming = copy_rtx (incoming);
4222 }
4223 #endif
4224
4225 ENTRY_VALUE_EXP (rtl) = incoming;
4226 return rtl;
4227 }
4228
4229 if (incoming
4230 && GET_MODE (incoming) != BLKmode
4231 && !TREE_ADDRESSABLE (decl)
4232 && MEM_P (incoming)
4233 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4234 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4235 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4236 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4237 return copy_rtx (incoming);
4238
4239 return NULL_RTX;
4240 }
4241
4242 /* Return an RTX equivalent to the value of the tree expression EXP. */
4243
4244 static rtx
4245 expand_debug_expr (tree exp)
4246 {
4247 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4248 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4249 machine_mode inner_mode = VOIDmode;
4250 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4251 addr_space_t as;
4252 scalar_int_mode op0_mode, op1_mode, addr_mode;
4253
4254 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4255 {
4256 case tcc_expression:
4257 switch (TREE_CODE (exp))
4258 {
4259 case COND_EXPR:
4260 case DOT_PROD_EXPR:
4261 case SAD_EXPR:
4262 case WIDEN_MULT_PLUS_EXPR:
4263 case WIDEN_MULT_MINUS_EXPR:
4264 goto ternary;
4265
4266 case TRUTH_ANDIF_EXPR:
4267 case TRUTH_ORIF_EXPR:
4268 case TRUTH_AND_EXPR:
4269 case TRUTH_OR_EXPR:
4270 case TRUTH_XOR_EXPR:
4271 goto binary;
4272
4273 case TRUTH_NOT_EXPR:
4274 goto unary;
4275
4276 default:
4277 break;
4278 }
4279 break;
4280
4281 ternary:
4282 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4283 if (!op2)
4284 return NULL_RTX;
4285 /* Fall through. */
4286
4287 binary:
4288 case tcc_binary:
4289 if (mode == BLKmode)
4290 return NULL_RTX;
4291 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4292 if (!op1)
4293 return NULL_RTX;
4294 switch (TREE_CODE (exp))
4295 {
4296 case LSHIFT_EXPR:
4297 case RSHIFT_EXPR:
4298 case LROTATE_EXPR:
4299 case RROTATE_EXPR:
4300 case WIDEN_LSHIFT_EXPR:
4301 /* Ensure the second operand isn't wider than the first one. */
4302 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4303 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4304 && (GET_MODE_UNIT_PRECISION (mode)
4305 < GET_MODE_PRECISION (op1_mode)))
4306 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4307 break;
4308 default:
4309 break;
4310 }
4311 /* Fall through. */
4312
4313 unary:
4314 case tcc_unary:
4315 if (mode == BLKmode)
4316 return NULL_RTX;
4317 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4318 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4319 if (!op0)
4320 return NULL_RTX;
4321 break;
4322
4323 case tcc_comparison:
4324 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4325 goto binary;
4326
4327 case tcc_type:
4328 case tcc_statement:
4329 gcc_unreachable ();
4330
4331 case tcc_constant:
4332 case tcc_exceptional:
4333 case tcc_declaration:
4334 case tcc_reference:
4335 case tcc_vl_exp:
4336 break;
4337 }
4338
4339 switch (TREE_CODE (exp))
4340 {
4341 case STRING_CST:
4342 if (!lookup_constant_def (exp))
4343 {
4344 if (strlen (TREE_STRING_POINTER (exp)) + 1
4345 != (size_t) TREE_STRING_LENGTH (exp))
4346 return NULL_RTX;
4347 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4348 op0 = gen_rtx_MEM (BLKmode, op0);
4349 set_mem_attributes (op0, exp, 0);
4350 return op0;
4351 }
4352 /* Fall through. */
4353
4354 case INTEGER_CST:
4355 case REAL_CST:
4356 case FIXED_CST:
4357 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4358 return op0;
4359
4360 case POLY_INT_CST:
4361 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4362
4363 case COMPLEX_CST:
4364 gcc_assert (COMPLEX_MODE_P (mode));
4365 op0 = expand_debug_expr (TREE_REALPART (exp));
4366 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4367 return gen_rtx_CONCAT (mode, op0, op1);
4368
4369 case DEBUG_EXPR_DECL:
4370 op0 = DECL_RTL_IF_SET (exp);
4371
4372 if (op0)
4373 return op0;
4374
4375 op0 = gen_rtx_DEBUG_EXPR (mode);
4376 DEBUG_EXPR_TREE_DECL (op0) = exp;
4377 SET_DECL_RTL (exp, op0);
4378
4379 return op0;
4380
4381 case VAR_DECL:
4382 case PARM_DECL:
4383 case FUNCTION_DECL:
4384 case LABEL_DECL:
4385 case CONST_DECL:
4386 case RESULT_DECL:
4387 op0 = DECL_RTL_IF_SET (exp);
4388
4389 /* This decl was probably optimized away. */
4390 if (!op0
4391 /* At least label RTXen are sometimes replaced by
4392 NOTE_INSN_DELETED_LABEL. Any notes here are not
4393 handled by copy_rtx. */
4394 || NOTE_P (op0))
4395 {
4396 if (!VAR_P (exp)
4397 || DECL_EXTERNAL (exp)
4398 || !TREE_STATIC (exp)
4399 || !DECL_NAME (exp)
4400 || DECL_HARD_REGISTER (exp)
4401 || DECL_IN_CONSTANT_POOL (exp)
4402 || mode == VOIDmode)
4403 return NULL;
4404
4405 op0 = make_decl_rtl_for_debug (exp);
4406 if (!MEM_P (op0)
4407 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4408 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4409 return NULL;
4410 }
4411 else
4412 op0 = copy_rtx (op0);
4413
4414 if (GET_MODE (op0) == BLKmode
4415 /* If op0 is not BLKmode, but mode is, adjust_mode
4416 below would ICE. While it is likely a FE bug,
4417 try to be robust here. See PR43166. */
4418 || mode == BLKmode
4419 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4420 {
4421 gcc_assert (MEM_P (op0));
4422 op0 = adjust_address_nv (op0, mode, 0);
4423 return op0;
4424 }
4425
4426 /* Fall through. */
4427
4428 adjust_mode:
4429 case PAREN_EXPR:
4430 CASE_CONVERT:
4431 {
4432 inner_mode = GET_MODE (op0);
4433
4434 if (mode == inner_mode)
4435 return op0;
4436
4437 if (inner_mode == VOIDmode)
4438 {
4439 if (TREE_CODE (exp) == SSA_NAME)
4440 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4441 else
4442 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4443 if (mode == inner_mode)
4444 return op0;
4445 }
4446
4447 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4448 {
4449 if (GET_MODE_UNIT_BITSIZE (mode)
4450 == GET_MODE_UNIT_BITSIZE (inner_mode))
4451 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4452 else if (GET_MODE_UNIT_BITSIZE (mode)
4453 < GET_MODE_UNIT_BITSIZE (inner_mode))
4454 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4455 else
4456 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4457 }
4458 else if (FLOAT_MODE_P (mode))
4459 {
4460 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4461 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4462 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4463 else
4464 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4465 }
4466 else if (FLOAT_MODE_P (inner_mode))
4467 {
4468 if (unsignedp)
4469 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4470 else
4471 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4472 }
4473 else if (GET_MODE_UNIT_PRECISION (mode)
4474 == GET_MODE_UNIT_PRECISION (inner_mode))
4475 op0 = lowpart_subreg (mode, op0, inner_mode);
4476 else if (GET_MODE_UNIT_PRECISION (mode)
4477 < GET_MODE_UNIT_PRECISION (inner_mode))
4478 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4479 else if (UNARY_CLASS_P (exp)
4480 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4481 : unsignedp)
4482 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4483 else
4484 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4485
4486 return op0;
4487 }
4488
4489 case MEM_REF:
4490 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4491 {
4492 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4493 TREE_OPERAND (exp, 0),
4494 TREE_OPERAND (exp, 1));
4495 if (newexp)
4496 return expand_debug_expr (newexp);
4497 }
4498 /* FALLTHROUGH */
4499 case INDIRECT_REF:
4500 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4501 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4502 if (!op0)
4503 return NULL;
4504
4505 if (TREE_CODE (exp) == MEM_REF)
4506 {
4507 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4508 || (GET_CODE (op0) == PLUS
4509 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4510 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4511 Instead just use get_inner_reference. */
4512 goto component_ref;
4513
4514 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4515 poly_int64 offset;
4516 if (!op1 || !poly_int_rtx_p (op1, &offset))
4517 return NULL;
4518
4519 op0 = plus_constant (inner_mode, op0, offset);
4520 }
4521
4522 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4523
4524 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4525 op0, as);
4526 if (op0 == NULL_RTX)
4527 return NULL;
4528
4529 op0 = gen_rtx_MEM (mode, op0);
4530 set_mem_attributes (op0, exp, 0);
4531 if (TREE_CODE (exp) == MEM_REF
4532 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4533 set_mem_expr (op0, NULL_TREE);
4534 set_mem_addr_space (op0, as);
4535
4536 return op0;
4537
4538 case TARGET_MEM_REF:
4539 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4540 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4541 return NULL;
4542
4543 op0 = expand_debug_expr
4544 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4545 if (!op0)
4546 return NULL;
4547
4548 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4549 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4550 op0, as);
4551 if (op0 == NULL_RTX)
4552 return NULL;
4553
4554 op0 = gen_rtx_MEM (mode, op0);
4555
4556 set_mem_attributes (op0, exp, 0);
4557 set_mem_addr_space (op0, as);
4558
4559 return op0;
4560
4561 component_ref:
4562 case ARRAY_REF:
4563 case ARRAY_RANGE_REF:
4564 case COMPONENT_REF:
4565 case BIT_FIELD_REF:
4566 case REALPART_EXPR:
4567 case IMAGPART_EXPR:
4568 case VIEW_CONVERT_EXPR:
4569 {
4570 machine_mode mode1;
4571 poly_int64 bitsize, bitpos;
4572 tree offset;
4573 int reversep, volatilep = 0;
4574 tree tem
4575 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4576 &unsignedp, &reversep, &volatilep);
4577 rtx orig_op0;
4578
4579 if (known_eq (bitsize, 0))
4580 return NULL;
4581
4582 orig_op0 = op0 = expand_debug_expr (tem);
4583
4584 if (!op0)
4585 return NULL;
4586
4587 if (offset)
4588 {
4589 machine_mode addrmode, offmode;
4590
4591 if (!MEM_P (op0))
4592 return NULL;
4593
4594 op0 = XEXP (op0, 0);
4595 addrmode = GET_MODE (op0);
4596 if (addrmode == VOIDmode)
4597 addrmode = Pmode;
4598
4599 op1 = expand_debug_expr (offset);
4600 if (!op1)
4601 return NULL;
4602
4603 offmode = GET_MODE (op1);
4604 if (offmode == VOIDmode)
4605 offmode = TYPE_MODE (TREE_TYPE (offset));
4606
4607 if (addrmode != offmode)
4608 op1 = lowpart_subreg (addrmode, op1, offmode);
4609
4610 /* Don't use offset_address here; we don't need a
4611 recognizable address, and we don't want to generate
4612 code. */
4613 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4614 op0, op1));
4615 }
4616
4617 if (MEM_P (op0))
4618 {
4619 if (mode1 == VOIDmode)
4620 {
4621 if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4622 return NULL;
4623 /* Bitfield. */
4624 mode1 = smallest_int_mode_for_size (bitsize);
4625 }
4626 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4627 if (maybe_ne (bytepos, 0))
4628 {
4629 op0 = adjust_address_nv (op0, mode1, bytepos);
4630 bitpos = num_trailing_bits (bitpos);
4631 }
4632 else if (known_eq (bitpos, 0)
4633 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4634 op0 = adjust_address_nv (op0, mode, 0);
4635 else if (GET_MODE (op0) != mode1)
4636 op0 = adjust_address_nv (op0, mode1, 0);
4637 else
4638 op0 = copy_rtx (op0);
4639 if (op0 == orig_op0)
4640 op0 = shallow_copy_rtx (op0);
4641 set_mem_attributes (op0, exp, 0);
4642 }
4643
4644 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4645 return op0;
4646
4647 if (maybe_lt (bitpos, 0))
4648 return NULL;
4649
4650 if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4651 return NULL;
4652
4653 poly_int64 bytepos;
4654 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4655 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4656 {
4657 machine_mode opmode = GET_MODE (op0);
4658
4659 if (opmode == VOIDmode)
4660 opmode = TYPE_MODE (TREE_TYPE (tem));
4661
4662 /* This condition may hold if we're expanding the address
4663 right past the end of an array that turned out not to
4664 be addressable (i.e., the address was only computed in
4665 debug stmts). The gen_subreg below would rightfully
4666 crash, and the address doesn't really exist, so just
4667 drop it. */
4668 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4669 return NULL;
4670
4671 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4672 return simplify_gen_subreg (mode, op0, opmode, bytepos);
4673 }
4674
4675 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4676 && TYPE_UNSIGNED (TREE_TYPE (exp))
4677 ? SIGN_EXTRACT
4678 : ZERO_EXTRACT, mode,
4679 GET_MODE (op0) != VOIDmode
4680 ? GET_MODE (op0)
4681 : TYPE_MODE (TREE_TYPE (tem)),
4682 op0, gen_int_mode (bitsize, word_mode),
4683 gen_int_mode (bitpos, word_mode));
4684 }
4685
4686 case ABS_EXPR:
4687 case ABSU_EXPR:
4688 return simplify_gen_unary (ABS, mode, op0, mode);
4689
4690 case NEGATE_EXPR:
4691 return simplify_gen_unary (NEG, mode, op0, mode);
4692
4693 case BIT_NOT_EXPR:
4694 return simplify_gen_unary (NOT, mode, op0, mode);
4695
4696 case FLOAT_EXPR:
4697 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4698 0)))
4699 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4700 inner_mode);
4701
4702 case FIX_TRUNC_EXPR:
4703 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4704 inner_mode);
4705
4706 case POINTER_PLUS_EXPR:
4707 /* For the rare target where pointers are not the same size as
4708 size_t, we need to check for mismatched modes and correct
4709 the addend. */
4710 if (op0 && op1
4711 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4712 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4713 && op0_mode != op1_mode)
4714 {
4715 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4716 /* If OP0 is a partial mode, then we must truncate, even
4717 if it has the same bitsize as OP1, as GCC's
4718 representation of partial modes is opaque. */
4719 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4720 && (GET_MODE_BITSIZE (op0_mode)
4721 == GET_MODE_BITSIZE (op1_mode))))
4722 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4723 else
4724 /* We always sign-extend, regardless of the signedness of
4725 the operand, because the operand is always unsigned
4726 here even if the original C expression is signed. */
4727 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4728 }
4729 /* Fall through. */
4730 case PLUS_EXPR:
4731 return simplify_gen_binary (PLUS, mode, op0, op1);
4732
4733 case MINUS_EXPR:
4734 case POINTER_DIFF_EXPR:
4735 return simplify_gen_binary (MINUS, mode, op0, op1);
4736
4737 case MULT_EXPR:
4738 return simplify_gen_binary (MULT, mode, op0, op1);
4739
4740 case RDIV_EXPR:
4741 case TRUNC_DIV_EXPR:
4742 case EXACT_DIV_EXPR:
4743 if (unsignedp)
4744 return simplify_gen_binary (UDIV, mode, op0, op1);
4745 else
4746 return simplify_gen_binary (DIV, mode, op0, op1);
4747
4748 case TRUNC_MOD_EXPR:
4749 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4750
4751 case FLOOR_DIV_EXPR:
4752 if (unsignedp)
4753 return simplify_gen_binary (UDIV, mode, op0, op1);
4754 else
4755 {
4756 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4757 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4758 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4759 return simplify_gen_binary (PLUS, mode, div, adj);
4760 }
4761
4762 case FLOOR_MOD_EXPR:
4763 if (unsignedp)
4764 return simplify_gen_binary (UMOD, mode, op0, op1);
4765 else
4766 {
4767 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4768 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4769 adj = simplify_gen_unary (NEG, mode,
4770 simplify_gen_binary (MULT, mode, adj, op1),
4771 mode);
4772 return simplify_gen_binary (PLUS, mode, mod, adj);
4773 }
4774
4775 case CEIL_DIV_EXPR:
4776 if (unsignedp)
4777 {
4778 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4779 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4780 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4781 return simplify_gen_binary (PLUS, mode, div, adj);
4782 }
4783 else
4784 {
4785 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4786 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4787 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4788 return simplify_gen_binary (PLUS, mode, div, adj);
4789 }
4790
4791 case CEIL_MOD_EXPR:
4792 if (unsignedp)
4793 {
4794 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4795 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4796 adj = simplify_gen_unary (NEG, mode,
4797 simplify_gen_binary (MULT, mode, adj, op1),
4798 mode);
4799 return simplify_gen_binary (PLUS, mode, mod, adj);
4800 }
4801 else
4802 {
4803 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4804 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4805 adj = simplify_gen_unary (NEG, mode,
4806 simplify_gen_binary (MULT, mode, adj, op1),
4807 mode);
4808 return simplify_gen_binary (PLUS, mode, mod, adj);
4809 }
4810
4811 case ROUND_DIV_EXPR:
4812 if (unsignedp)
4813 {
4814 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4815 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4816 rtx adj = round_udiv_adjust (mode, mod, op1);
4817 return simplify_gen_binary (PLUS, mode, div, adj);
4818 }
4819 else
4820 {
4821 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4822 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4823 rtx adj = round_sdiv_adjust (mode, mod, op1);
4824 return simplify_gen_binary (PLUS, mode, div, adj);
4825 }
4826
4827 case ROUND_MOD_EXPR:
4828 if (unsignedp)
4829 {
4830 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4831 rtx adj = round_udiv_adjust (mode, mod, op1);
4832 adj = simplify_gen_unary (NEG, mode,
4833 simplify_gen_binary (MULT, mode, adj, op1),
4834 mode);
4835 return simplify_gen_binary (PLUS, mode, mod, adj);
4836 }
4837 else
4838 {
4839 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4840 rtx adj = round_sdiv_adjust (mode, mod, op1);
4841 adj = simplify_gen_unary (NEG, mode,
4842 simplify_gen_binary (MULT, mode, adj, op1),
4843 mode);
4844 return simplify_gen_binary (PLUS, mode, mod, adj);
4845 }
4846
4847 case LSHIFT_EXPR:
4848 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4849
4850 case RSHIFT_EXPR:
4851 if (unsignedp)
4852 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4853 else
4854 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4855
4856 case LROTATE_EXPR:
4857 return simplify_gen_binary (ROTATE, mode, op0, op1);
4858
4859 case RROTATE_EXPR:
4860 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4861
4862 case MIN_EXPR:
4863 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4864
4865 case MAX_EXPR:
4866 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4867
4868 case BIT_AND_EXPR:
4869 case TRUTH_AND_EXPR:
4870 return simplify_gen_binary (AND, mode, op0, op1);
4871
4872 case BIT_IOR_EXPR:
4873 case TRUTH_OR_EXPR:
4874 return simplify_gen_binary (IOR, mode, op0, op1);
4875
4876 case BIT_XOR_EXPR:
4877 case TRUTH_XOR_EXPR:
4878 return simplify_gen_binary (XOR, mode, op0, op1);
4879
4880 case TRUTH_ANDIF_EXPR:
4881 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4882
4883 case TRUTH_ORIF_EXPR:
4884 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4885
4886 case TRUTH_NOT_EXPR:
4887 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4888
4889 case LT_EXPR:
4890 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4891 op0, op1);
4892
4893 case LE_EXPR:
4894 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4895 op0, op1);
4896
4897 case GT_EXPR:
4898 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4899 op0, op1);
4900
4901 case GE_EXPR:
4902 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4903 op0, op1);
4904
4905 case EQ_EXPR:
4906 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4907
4908 case NE_EXPR:
4909 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4910
4911 case UNORDERED_EXPR:
4912 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4913
4914 case ORDERED_EXPR:
4915 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4916
4917 case UNLT_EXPR:
4918 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4919
4920 case UNLE_EXPR:
4921 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4922
4923 case UNGT_EXPR:
4924 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4925
4926 case UNGE_EXPR:
4927 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4928
4929 case UNEQ_EXPR:
4930 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4931
4932 case LTGT_EXPR:
4933 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4934
4935 case COND_EXPR:
4936 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4937
4938 case COMPLEX_EXPR:
4939 gcc_assert (COMPLEX_MODE_P (mode));
4940 if (GET_MODE (op0) == VOIDmode)
4941 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4942 if (GET_MODE (op1) == VOIDmode)
4943 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4944 return gen_rtx_CONCAT (mode, op0, op1);
4945
4946 case CONJ_EXPR:
4947 if (GET_CODE (op0) == CONCAT)
4948 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4949 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4950 XEXP (op0, 1),
4951 GET_MODE_INNER (mode)));
4952 else
4953 {
4954 scalar_mode imode = GET_MODE_INNER (mode);
4955 rtx re, im;
4956
4957 if (MEM_P (op0))
4958 {
4959 re = adjust_address_nv (op0, imode, 0);
4960 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4961 }
4962 else
4963 {
4964 scalar_int_mode ifmode;
4965 scalar_int_mode ihmode;
4966 rtx halfsize;
4967 if (!int_mode_for_mode (mode).exists (&ifmode)
4968 || !int_mode_for_mode (imode).exists (&ihmode))
4969 return NULL;
4970 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4971 re = op0;
4972 if (mode != ifmode)
4973 re = gen_rtx_SUBREG (ifmode, re, 0);
4974 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4975 if (imode != ihmode)
4976 re = gen_rtx_SUBREG (imode, re, 0);
4977 im = copy_rtx (op0);
4978 if (mode != ifmode)
4979 im = gen_rtx_SUBREG (ifmode, im, 0);
4980 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4981 if (imode != ihmode)
4982 im = gen_rtx_SUBREG (imode, im, 0);
4983 }
4984 im = gen_rtx_NEG (imode, im);
4985 return gen_rtx_CONCAT (mode, re, im);
4986 }
4987
4988 case ADDR_EXPR:
4989 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4990 if (!op0 || !MEM_P (op0))
4991 {
4992 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4993 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4994 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4995 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4996 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4997 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4998
4999 if (handled_component_p (TREE_OPERAND (exp, 0)))
5000 {
5001 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
5002 bool reverse;
5003 tree decl
5004 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
5005 &bitsize, &maxsize, &reverse);
5006 if ((VAR_P (decl)
5007 || TREE_CODE (decl) == PARM_DECL
5008 || TREE_CODE (decl) == RESULT_DECL)
5009 && (!TREE_ADDRESSABLE (decl)
5010 || target_for_debug_bind (decl))
5011 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
5012 && known_gt (bitsize, 0)
5013 && known_eq (bitsize, maxsize))
5014 {
5015 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
5016 return plus_constant (mode, base, byteoffset);
5017 }
5018 }
5019
5020 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
5021 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5022 == ADDR_EXPR)
5023 {
5024 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5025 0));
5026 if (op0 != NULL
5027 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
5028 || (GET_CODE (op0) == PLUS
5029 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
5030 && CONST_INT_P (XEXP (op0, 1)))))
5031 {
5032 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5033 1));
5034 poly_int64 offset;
5035 if (!op1 || !poly_int_rtx_p (op1, &offset))
5036 return NULL;
5037
5038 return plus_constant (mode, op0, offset);
5039 }
5040 }
5041
5042 return NULL;
5043 }
5044
5045 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
5046 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5047 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
5048
5049 return op0;
5050
5051 case VECTOR_CST:
5052 {
5053 unsigned HOST_WIDE_INT i, nelts;
5054
5055 if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
5056 return NULL;
5057
5058 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5059
5060 for (i = 0; i < nelts; ++i)
5061 {
5062 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
5063 if (!op1)
5064 return NULL;
5065 XVECEXP (op0, 0, i) = op1;
5066 }
5067
5068 return op0;
5069 }
5070
5071 case CONSTRUCTOR:
5072 if (TREE_CLOBBER_P (exp))
5073 return NULL;
5074 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5075 {
5076 unsigned i;
5077 unsigned HOST_WIDE_INT nelts;
5078 tree val;
5079
5080 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5081 goto flag_unsupported;
5082
5083 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5084
5085 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5086 {
5087 op1 = expand_debug_expr (val);
5088 if (!op1)
5089 return NULL;
5090 XVECEXP (op0, 0, i) = op1;
5091 }
5092
5093 if (i < nelts)
5094 {
5095 op1 = expand_debug_expr
5096 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5097
5098 if (!op1)
5099 return NULL;
5100
5101 for (; i < nelts; i++)
5102 XVECEXP (op0, 0, i) = op1;
5103 }
5104
5105 return op0;
5106 }
5107 else
5108 goto flag_unsupported;
5109
5110 case CALL_EXPR:
5111 /* ??? Maybe handle some builtins? */
5112 return NULL;
5113
5114 case SSA_NAME:
5115 {
5116 gimple *g = get_gimple_for_ssa_name (exp);
5117 if (g)
5118 {
5119 tree t = NULL_TREE;
5120 if (deep_ter_debug_map)
5121 {
5122 tree *slot = deep_ter_debug_map->get (exp);
5123 if (slot)
5124 t = *slot;
5125 }
5126 if (t == NULL_TREE)
5127 t = gimple_assign_rhs_to_tree (g);
5128 op0 = expand_debug_expr (t);
5129 if (!op0)
5130 return NULL;
5131 }
5132 else
5133 {
5134 /* If this is a reference to the incoming value of a
5135 parameter that is never used in the code, or whose
5136 incoming value is never used in the code, use the
5137 PARM_DECL's DECL_RTL if set. */
5138 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5139 && SSA_NAME_VAR (exp)
5140 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5141 && has_zero_uses (exp))
5142 {
5143 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5144 if (op0)
5145 goto adjust_mode;
5146 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5147 if (op0)
5148 goto adjust_mode;
5149 }
5150
5151 int part = var_to_partition (SA.map, exp);
5152
5153 if (part == NO_PARTITION)
5154 return NULL;
5155
5156 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5157
5158 op0 = copy_rtx (SA.partition_to_pseudo[part]);
5159 }
5160 goto adjust_mode;
5161 }
5162
5163 case ERROR_MARK:
5164 return NULL;
5165
5166 /* Vector codes. For most of these there are no corresponding rtl codes. */
5167 case REALIGN_LOAD_EXPR:
5168 case VEC_COND_EXPR:
5169 case VEC_PACK_FIX_TRUNC_EXPR:
5170 case VEC_PACK_FLOAT_EXPR:
5171 case VEC_PACK_SAT_EXPR:
5172 case VEC_PACK_TRUNC_EXPR:
5173 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5174 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
5175 case VEC_UNPACK_FLOAT_HI_EXPR:
5176 case VEC_UNPACK_FLOAT_LO_EXPR:
5177 case VEC_UNPACK_HI_EXPR:
5178 case VEC_UNPACK_LO_EXPR:
5179 case VEC_WIDEN_MULT_HI_EXPR:
5180 case VEC_WIDEN_MULT_LO_EXPR:
5181 case VEC_WIDEN_MULT_EVEN_EXPR:
5182 case VEC_WIDEN_MULT_ODD_EXPR:
5183 case VEC_WIDEN_LSHIFT_HI_EXPR:
5184 case VEC_WIDEN_LSHIFT_LO_EXPR:
5185 case VEC_PERM_EXPR:
5186 case VEC_DUPLICATE_EXPR:
5187 case VEC_SERIES_EXPR:
5188 return NULL;
5189
5190 /* Misc codes. */
5191 case ADDR_SPACE_CONVERT_EXPR:
5192 case FIXED_CONVERT_EXPR:
5193 case OBJ_TYPE_REF:
5194 case WITH_SIZE_EXPR:
5195 case BIT_INSERT_EXPR:
5196 return NULL;
5197
5198 case DOT_PROD_EXPR:
5199 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5200 && SCALAR_INT_MODE_P (mode))
5201 {
5202 op0
5203 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5204 0)))
5205 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5206 inner_mode);
5207 op1
5208 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5209 1)))
5210 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5211 inner_mode);
5212 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5213 return simplify_gen_binary (PLUS, mode, op0, op2);
5214 }
5215 return NULL;
5216
5217 case WIDEN_MULT_EXPR:
5218 case WIDEN_MULT_PLUS_EXPR:
5219 case WIDEN_MULT_MINUS_EXPR:
5220 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5221 && SCALAR_INT_MODE_P (mode))
5222 {
5223 inner_mode = GET_MODE (op0);
5224 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5225 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5226 else
5227 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5228 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5229 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5230 else
5231 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5232 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5233 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5234 return op0;
5235 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5236 return simplify_gen_binary (PLUS, mode, op0, op2);
5237 else
5238 return simplify_gen_binary (MINUS, mode, op2, op0);
5239 }
5240 return NULL;
5241
5242 case MULT_HIGHPART_EXPR:
5243 /* ??? Similar to the above. */
5244 return NULL;
5245
5246 case WIDEN_SUM_EXPR:
5247 case WIDEN_LSHIFT_EXPR:
5248 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5249 && SCALAR_INT_MODE_P (mode))
5250 {
5251 op0
5252 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5253 0)))
5254 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5255 inner_mode);
5256 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5257 ? ASHIFT : PLUS, mode, op0, op1);
5258 }
5259 return NULL;
5260
5261 default:
5262 flag_unsupported:
5263 if (flag_checking)
5264 {
5265 debug_tree (exp);
5266 gcc_unreachable ();
5267 }
5268 return NULL;
5269 }
5270 }
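
/* An illustrative expansion (hypothetical SSA names): for a debug
   bind whose value is x_1, where x_1 = a_2 * b_3 was TERed, the
   SSA_NAME case above rebuilds the RHS as a MULT_EXPR tree and
   recurses, yielding something like (mult:SI (reg a) (reg b));
   everything is built through the simplify_* routines, so no insns
   are emitted. */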
5271
5272 /* Return an RTX equivalent to the source bind value of the tree expression
5273 EXP. */
5274
5275 static rtx
5276 expand_debug_source_expr (tree exp)
5277 {
5278 rtx op0 = NULL_RTX;
5279 machine_mode mode = VOIDmode, inner_mode;
5280
5281 switch (TREE_CODE (exp))
5282 {
5283 case VAR_DECL:
5284 if (DECL_ABSTRACT_ORIGIN (exp))
5285 return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5286 break;
5287 case PARM_DECL:
5288 {
5289 mode = DECL_MODE (exp);
5290 op0 = expand_debug_parm_decl (exp);
5291 if (op0)
5292 break;
5293 /* See if this is an argument that has been completely
5294 optimized out. */
5295 if (!DECL_RTL_SET_P (exp)
5296 && !DECL_INCOMING_RTL (exp)
5297 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5298 {
5299 tree aexp = DECL_ORIGIN (exp);
5300 if (DECL_CONTEXT (aexp)
5301 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5302 {
5303 vec<tree, va_gc> **debug_args;
5304 unsigned int ix;
5305 tree ddecl;
5306 debug_args = decl_debug_args_lookup (current_function_decl);
5307 if (debug_args != NULL)
5308 {
5309 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5310 ix += 2)
5311 if (ddecl == aexp)
5312 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5313 }
5314 }
5315 }
5316 break;
5317 }
5318 default:
5319 break;
5320 }
5321
5322 if (op0 == NULL_RTX)
5323 return NULL_RTX;
5324
5325 inner_mode = GET_MODE (op0);
5326 if (mode == inner_mode)
5327 return op0;
5328
5329 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5330 {
5331 if (GET_MODE_UNIT_BITSIZE (mode)
5332 == GET_MODE_UNIT_BITSIZE (inner_mode))
5333 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5334 else if (GET_MODE_UNIT_BITSIZE (mode)
5335 < GET_MODE_UNIT_BITSIZE (inner_mode))
5336 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5337 else
5338 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5339 }
5340 else if (FLOAT_MODE_P (mode))
5341 gcc_unreachable ();
5342 else if (FLOAT_MODE_P (inner_mode))
5343 {
5344 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5345 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5346 else
5347 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5348 }
5349 else if (GET_MODE_UNIT_PRECISION (mode)
5350 == GET_MODE_UNIT_PRECISION (inner_mode))
5351 op0 = lowpart_subreg (mode, op0, inner_mode);
5352 else if (GET_MODE_UNIT_PRECISION (mode)
5353 < GET_MODE_UNIT_PRECISION (inner_mode))
5354 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5355 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5356 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5357 else
5358 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5359
5360 return op0;
5361 }
5362
5363 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5364 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5365 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5366
5367 static void
5368 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5369 {
5370 rtx exp = *exp_p;
5371
5372 if (exp == NULL_RTX)
5373 return;
5374
5375 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5376 return;
5377
5378 if (depth == 4)
5379 {
5380 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5381 rtx dval = make_debug_expr_from_rtl (exp);
5382
5383 /* Emit a debug bind insn before INSN. */
5384 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5385 DEBUG_EXPR_TREE_DECL (dval), exp,
5386 VAR_INIT_STATUS_INITIALIZED);
5387
5388 emit_debug_insn_before (bind, insn);
5389 *exp_p = dval;
5390 return;
5391 }
5392
5393 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5394 int i, j;
5395 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5396 switch (*format_ptr++)
5397 {
5398 case 'e':
5399 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5400 break;
5401
5402 case 'E':
5403 case 'V':
5404 for (j = 0; j < XVECLEN (exp, i); j++)
5405 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5406 break;
5407
5408 default:
5409 break;
5410 }
5411 }
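
/* An illustrative split (hypothetical RTL shape): for a location
   nested like (plus (mult (minus (neg (mem ...)) ...) ...) ...),
   the walk reaches DEPTH == 4 at the innermost non-object operand,
   binds it to a fresh DEBUG_EXPR with a debug insn emitted before
   INSN, and substitutes the DEBUG_EXPR in place, bounding the
   nesting. */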
5412
5413 /* Expand the _LOCs in debug insns. We run this after expanding all
5414 regular insns, so that any variables referenced in the function
5415 will have their DECL_RTLs set. */
5416
5417 static void
5418 expand_debug_locations (void)
5419 {
5420 rtx_insn *insn;
5421 rtx_insn *last = get_last_insn ();
5422 int save_strict_alias = flag_strict_aliasing;
5423
5424 /* New alias sets while setting up memory attributes cause
5425 -fcompare-debug failures, even though they don't bring about any
5426 codegen changes. */
5427 flag_strict_aliasing = 0;
5428
5429 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5430 if (DEBUG_BIND_INSN_P (insn))
5431 {
5432 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5433 rtx val;
5434 rtx_insn *prev_insn, *insn2;
5435 machine_mode mode;
5436
5437 if (value == NULL_TREE)
5438 val = NULL_RTX;
5439 else
5440 {
5441 if (INSN_VAR_LOCATION_STATUS (insn)
5442 == VAR_INIT_STATUS_UNINITIALIZED)
5443 val = expand_debug_source_expr (value);
5444 /* The avoid_deep_ter_for_debug function inserts
5445 debug bind stmts after SSA_NAME definition, with the
5446 SSA_NAME as the whole bind location. Temporarily disable
5447 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5448 being defined in this DEBUG_INSN. */
5449 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5450 {
5451 tree *slot = deep_ter_debug_map->get (value);
5452 if (slot)
5453 {
5454 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5455 *slot = NULL_TREE;
5456 else
5457 slot = NULL;
5458 }
5459 val = expand_debug_expr (value);
5460 if (slot)
5461 *slot = INSN_VAR_LOCATION_DECL (insn);
5462 }
5463 else
5464 val = expand_debug_expr (value);
5465 gcc_assert (last == get_last_insn ());
5466 }
5467
5468 if (!val)
5469 val = gen_rtx_UNKNOWN_VAR_LOC ();
5470 else
5471 {
5472 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5473
5474 gcc_assert (mode == GET_MODE (val)
5475 || (GET_MODE (val) == VOIDmode
5476 && (CONST_SCALAR_INT_P (val)
5477 || GET_CODE (val) == CONST_FIXED
5478 || GET_CODE (val) == LABEL_REF)));
5479 }
5480
5481 INSN_VAR_LOCATION_LOC (insn) = val;
5482 prev_insn = PREV_INSN (insn);
5483 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5484 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5485 }
5486
5487 flag_strict_aliasing = save_strict_alias;
5488 }
5489
5490 /* Swap the operands of commutative operations so that the more
5491 expensive operand is expanded first. */
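/* A hypothetical GIMPLE sketch of the effect:
     t1_3 = a_1 * b_2;   <- expensive TERed subtree
     t2_5 = c_4 + 1;     <- cheap TERed subtree
     r_6 = t2_5 + t1_3;
   PLUS_EXPR is commutative and t1_3's definition is costlier, so the
   operands are swapped to r_6 = t1_3 + t2_5, making the expensive
   operand the one expanded first.  */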
5492
5493 static void
5494 reorder_operands (basic_block bb)
5495 {
5496 unsigned int *lattice; /* Hold cost of each statement. */
5497 unsigned int i = 0, n = 0;
5498 gimple_stmt_iterator gsi;
5499 gimple_seq stmts;
5500 gimple *stmt;
5501 bool swap;
5502 tree op0, op1;
5503 ssa_op_iter iter;
5504 use_operand_p use_p;
5505 gimple *def0, *def1;
5506
5507 /* Compute cost of each statement using estimate_num_insns. */
5508 stmts = bb_seq (bb);
5509 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5510 {
5511 stmt = gsi_stmt (gsi);
5512 if (!is_gimple_debug (stmt))
5513 gimple_set_uid (stmt, n++);
5514 }
5515 lattice = XNEWVEC (unsigned int, n);
5516 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5517 {
5518 unsigned cost;
5519 stmt = gsi_stmt (gsi);
5520 if (is_gimple_debug (stmt))
5521 continue;
5522 cost = estimate_num_insns (stmt, &eni_size_weights);
5523 lattice[i] = cost;
5524 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5525 {
5526 tree use = USE_FROM_PTR (use_p);
5527 gimple *def_stmt;
5528 if (TREE_CODE (use) != SSA_NAME)
5529 continue;
5530 def_stmt = get_gimple_for_ssa_name (use);
5531 if (!def_stmt)
5532 continue;
5533 lattice[i] += lattice[gimple_uid (def_stmt)];
5534 }
5535 i++;
5536 if (!is_gimple_assign (stmt)
5537 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5538 continue;
5539 op0 = gimple_op (stmt, 1);
5540 op1 = gimple_op (stmt, 2);
5541 if (TREE_CODE (op0) != SSA_NAME
5542 || TREE_CODE (op1) != SSA_NAME)
5543 continue;
5544 /* Swap operands if the second one is more expensive. */
5545 def0 = get_gimple_for_ssa_name (op0);
5546 def1 = get_gimple_for_ssa_name (op1);
5547 if (!def1)
5548 continue;
5549 swap = false;
5550 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5551 swap = true;
5552 if (swap)
5553 {
5554 if (dump_file && (dump_flags & TDF_DETAILS))
5555 {
5556 fprintf (dump_file, "Swap operands in stmt:\n");
5557 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5558 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5559 def0 ? lattice[gimple_uid (def0)] : 0,
5560 lattice[gimple_uid (def1)]);
5561 }
5562 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5563 gimple_assign_rhs2_ptr (stmt));
5564 }
5565 }
5566 XDELETE (lattice);
5567 }
5568
5569 /* Expand basic block BB from GIMPLE trees to RTL. */
5570
5571 static basic_block
5572 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5573 {
5574 gimple_stmt_iterator gsi;
5575 gimple_seq stmts;
5576 gimple *stmt = NULL;
5577 rtx_note *note = NULL;
5578 rtx_insn *last;
5579 edge e;
5580 edge_iterator ei;
5581
5582 if (dump_file)
5583 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5584 bb->index);
5585
5586 /* Note that since we are now transitioning from GIMPLE to RTL, we
5587 cannot use the gsi_*_bb() routines because they expect the basic
5588 block to be in GIMPLE, instead of RTL. Therefore, we need to
5589 access the BB sequence directly. */
5590 if (optimize)
5591 reorder_operands (bb);
5592 stmts = bb_seq (bb);
5593 bb->il.gimple.seq = NULL;
5594 bb->il.gimple.phi_nodes = NULL;
5595 rtl_profile_for_bb (bb);
5596 init_rtl_bb_info (bb);
5597 bb->flags |= BB_RTL;
5598
5599 /* Remove the RETURN_EXPR if we may fall through to the exit
5600 instead. */
5601 gsi = gsi_last (stmts);
5602 if (!gsi_end_p (gsi)
5603 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5604 {
5605 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5606
5607 gcc_assert (single_succ_p (bb));
5608 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5609
5610 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5611 && !gimple_return_retval (ret_stmt))
5612 {
5613 gsi_remove (&gsi, false);
5614 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5615 }
5616 }
5617
5618 gsi = gsi_start (stmts);
5619 if (!gsi_end_p (gsi))
5620 {
5621 stmt = gsi_stmt (gsi);
5622 if (gimple_code (stmt) != GIMPLE_LABEL)
5623 stmt = NULL;
5624 }
5625
5626 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5627
5628 if (stmt || elt)
5629 {
5630 gcc_checking_assert (!note);
5631 last = get_last_insn ();
5632
5633 if (stmt)
5634 {
5635 expand_gimple_stmt (stmt);
5636 gsi_next (&gsi);
5637 }
5638
5639 if (elt)
5640 emit_label (*elt);
5641
5642 BB_HEAD (bb) = NEXT_INSN (last);
5643 if (NOTE_P (BB_HEAD (bb)))
5644 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5645 gcc_assert (LABEL_P (BB_HEAD (bb)));
5646 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5647
5648 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5649 }
5650 else
5651 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5652
5653 if (note)
5654 NOTE_BASIC_BLOCK (note) = bb;
5655
5656 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5657 {
5658 basic_block new_bb;
5659
5660 stmt = gsi_stmt (gsi);
5661
5662 /* If this statement is a non-debug one, and we generate debug
5663 insns, then this one might be the last real use of a TERed
5664 SSA_NAME while there are still some debug uses further
5665 down. Expanding the current SSA name in such further debug
5666 uses by their RHS might lead to wrong debug info, as coalescing
5667 might make the operands of such RHS be placed into the same
5668 pseudo as something else. Like so:
5669 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5670 use(a_1);
5671 a_2 = ...
5672 #DEBUG ... => a_1
5673 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5674 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5675 the write to a_2 would actually have clobbered the place which
5676 formerly held a_0.
5677
5678 So, instead of that, we recognize the situation, and generate
5679 debug temporaries at the last real use of TERed SSA names:
5680 a_1 = a_0 + 1;
5681 #DEBUG #D1 => a_1
5682 use(a_1);
5683 a_2 = ...
5684 #DEBUG ... => #D1
5685 */
5686 if (MAY_HAVE_DEBUG_BIND_INSNS
5687 && SA.values
5688 && !is_gimple_debug (stmt))
5689 {
5690 ssa_op_iter iter;
5691 tree op;
5692 gimple *def;
5693
5694 location_t sloc = curr_insn_location ();
5695
5696 /* Look for SSA names that have their last use here (TERed
5697 names always have only one real use). */
5698 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5699 if ((def = get_gimple_for_ssa_name (op)))
5700 {
5701 imm_use_iterator imm_iter;
5702 use_operand_p use_p;
5703 bool have_debug_uses = false;
5704
5705 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5706 {
5707 if (gimple_debug_bind_p (USE_STMT (use_p)))
5708 {
5709 have_debug_uses = true;
5710 break;
5711 }
5712 }
5713
5714 if (have_debug_uses)
5715 {
5716 /* OP is a TERed SSA name, with DEF its defining
5717 statement, and where OP is used in further debug
5718 instructions. Generate a debug temporary, and
5719 replace all uses of OP in debug insns with that
5720 temporary. */
5721 gimple *debugstmt;
5722 tree value = gimple_assign_rhs_to_tree (def);
5723 tree vexpr = make_node (DEBUG_EXPR_DECL);
5724 rtx val;
5725 machine_mode mode;
5726
5727 set_curr_insn_location (gimple_location (def));
5728
5729 DECL_ARTIFICIAL (vexpr) = 1;
5730 TREE_TYPE (vexpr) = TREE_TYPE (value);
5731 if (DECL_P (value))
5732 mode = DECL_MODE (value);
5733 else
5734 mode = TYPE_MODE (TREE_TYPE (value));
5735 SET_DECL_MODE (vexpr, mode);
5736
5737 val = gen_rtx_VAR_LOCATION
5738 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5739
5740 emit_debug_insn (val);
5741
5742 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5743 {
5744 if (!gimple_debug_bind_p (debugstmt))
5745 continue;
5746
5747 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5748 SET_USE (use_p, vexpr);
5749
5750 update_stmt (debugstmt);
5751 }
5752 }
5753 }
5754 set_curr_insn_location (sloc);
5755 }
5756
5757 currently_expanding_gimple_stmt = stmt;
5758
5759 /* Expand this statement, then evaluate the resulting RTL and
5760 fixup the CFG accordingly. */
5761 if (gimple_code (stmt) == GIMPLE_COND)
5762 {
5763 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5764 if (new_bb)
5765 return new_bb;
5766 }
5767 else if (is_gimple_debug (stmt))
5768 {
5769 location_t sloc = curr_insn_location ();
5770 gimple_stmt_iterator nsi = gsi;
5771
5772 for (;;)
5773 {
5774 tree var;
5775 tree value = NULL_TREE;
5776 rtx val = NULL_RTX;
5777 machine_mode mode;
5778
5779 if (!gimple_debug_nonbind_marker_p (stmt))
5780 {
5781 if (gimple_debug_bind_p (stmt))
5782 {
5783 var = gimple_debug_bind_get_var (stmt);
5784
5785 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5786 && TREE_CODE (var) != LABEL_DECL
5787 && !target_for_debug_bind (var))
5788 goto delink_debug_stmt;
5789
5790 if (DECL_P (var))
5791 mode = DECL_MODE (var);
5792 else
5793 mode = TYPE_MODE (TREE_TYPE (var));
5794
5795 if (gimple_debug_bind_has_value_p (stmt))
5796 value = gimple_debug_bind_get_value (stmt);
5797
5798 val = gen_rtx_VAR_LOCATION
5799 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5800 }
5801 else if (gimple_debug_source_bind_p (stmt))
5802 {
5803 var = gimple_debug_source_bind_get_var (stmt);
5804
5805 value = gimple_debug_source_bind_get_value (stmt);
5806
5807 mode = DECL_MODE (var);
5808
5809 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5810 VAR_INIT_STATUS_UNINITIALIZED);
5811 }
5812 else
5813 gcc_unreachable ();
5814 }
5815 /* If this function was first compiled with markers
5816 enabled, but they're now disabled (e.g. LTO), drop
5817 them on the floor. */
5818 else if (gimple_debug_nonbind_marker_p (stmt)
5819 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5820 goto delink_debug_stmt;
5821 else if (gimple_debug_begin_stmt_p (stmt))
5822 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5823 else if (gimple_debug_inline_entry_p (stmt))
5824 {
5825 tree block = gimple_block (stmt);
5826
5827 if (block)
5828 val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5829 else
5830 goto delink_debug_stmt;
5831 }
5832 else
5833 gcc_unreachable ();
5834
5835 last = get_last_insn ();
5836
5837 set_curr_insn_location (gimple_location (stmt));
5838
5839 emit_debug_insn (val);
5840
5841 if (dump_file && (dump_flags & TDF_DETAILS))
5842 {
5843 /* We can't dump the insn with a TREE where an RTX
5844 is expected. */
5845 if (GET_CODE (val) == VAR_LOCATION)
5846 {
5847 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5848 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5849 }
5850 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5851 if (GET_CODE (val) == VAR_LOCATION)
5852 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5853 }
5854
5855 delink_debug_stmt:
5856 /* In order not to generate too many debug temporaries,
5857 we delink all uses of debug statements we already expanded.
5858 Therefore debug statements between definition and real
5859 use of TERed SSA names will continue to use the SSA name,
5860 and not be replaced with debug temps. */
5861 delink_stmt_imm_use (stmt);
5862
5863 gsi = nsi;
5864 gsi_next (&nsi);
5865 if (gsi_end_p (nsi))
5866 break;
5867 stmt = gsi_stmt (nsi);
5868 if (!is_gimple_debug (stmt))
5869 break;
5870 }
5871
5872 set_curr_insn_location (sloc);
5873 }
5874 else
5875 {
5876 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5877 if (call_stmt
5878 && gimple_call_tail_p (call_stmt)
5879 && disable_tail_calls)
5880 gimple_call_set_tail (call_stmt, false);
5881
5882 if (call_stmt && gimple_call_tail_p (call_stmt))
5883 {
5884 bool can_fallthru;
5885 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5886 if (new_bb)
5887 {
5888 if (can_fallthru)
5889 bb = new_bb;
5890 else
5891 return new_bb;
5892 }
5893 }
5894 else
5895 {
5896 def_operand_p def_p;
5897 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5898
5899 if (def_p != NULL)
5900 {
5901 /* Ignore this stmt if it is in the list of
5902 replaceable expressions. */
5903 if (SA.values
5904 && bitmap_bit_p (SA.values,
5905 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5906 continue;
5907 }
5908 last = expand_gimple_stmt (stmt);
5909 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5910 }
5911 }
5912 }
5913
5914 currently_expanding_gimple_stmt = NULL;
5915
5916 /* Expand implicit goto and convert goto_locus. */
5917 FOR_EACH_EDGE (e, ei, bb->succs)
5918 {
5919 if (e->goto_locus != UNKNOWN_LOCATION)
5920 set_curr_insn_location (e->goto_locus);
5921 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5922 {
5923 emit_jump (label_rtx_for_bb (e->dest));
5924 e->flags &= ~EDGE_FALLTHRU;
5925 }
5926 }
5927
5928 /* Expanded RTL can create a jump in the last instruction of the block.
5929 Later this might be assumed to be a jump to the successor, breaking
5930 edge insertion, so insert a dummy move to prevent this. PR41440. */
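/* The dummy emitted below is a self-move, conceptually
     (set (reg:SI N) (reg:SI N))
   which is enough to make the block no longer end in a jump.  */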
5931 if (single_succ_p (bb)
5932 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5933 && (last = get_last_insn ())
5934 && (JUMP_P (last)
5935 || (DEBUG_INSN_P (last)
5936 && JUMP_P (prev_nondebug_insn (last)))))
5937 {
5938 rtx dummy = gen_reg_rtx (SImode);
5939 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5940 }
5941
5942 do_pending_stack_adjust ();
5943
5944 /* Find the block tail. The last insn in the block is the insn
5945 before a barrier and/or table jump insn. */
5946 last = get_last_insn ();
5947 if (BARRIER_P (last))
5948 last = PREV_INSN (last);
5949 if (JUMP_TABLE_DATA_P (last))
5950 last = PREV_INSN (PREV_INSN (last));
5951 if (BARRIER_P (last))
5952 last = PREV_INSN (last);
5953 BB_END (bb) = last;
5954
5955 update_bb_for_insn (bb);
5956
5957 return bb;
5958 }
5959
5960
5961 /* Create a basic block for initialization code. */
5962
5963 static basic_block
5964 construct_init_block (void)
5965 {
5966 basic_block init_block, first_block;
5967 edge e = NULL;
5968 int flags;
5969
5970 /* Multiple entry points not supported yet. */
5971 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5972 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5973 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5974 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5975 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5976
5977 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5978
5979 /* When the entry edge points to the first basic block, we don't need
5980 a jump; otherwise we have to jump to the proper target. */
5981 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5982 {
5983 tree label = gimple_block_label (e->dest);
5984
5985 emit_jump (jump_target_rtx (label));
5986 flags = 0;
5987 }
5988 else
5989 flags = EDGE_FALLTHRU;
5990
5991 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5992 get_last_insn (),
5993 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5994 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5995 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5996 if (e)
5997 {
5998 first_block = e->dest;
5999 redirect_edge_succ (e, init_block);
6000 e = make_single_succ_edge (init_block, first_block, flags);
6001 }
6002 else
6003 e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6004 EDGE_FALLTHRU);
6005
6006 update_bb_for_insn (init_block);
6007 return init_block;
6008 }
6009
6010 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
6011 found in the block tree. */
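/* E.g. for the DECL_INITIAL of a function whose body contains nested
   scopes { ... { ... } ... }, the outermost block gets level 0, its
   subblocks level 1, their subblocks level 2, and so on, while sibling
   blocks on the BLOCK_CHAIN share a level.  */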
6012
6013 static void
6014 set_block_levels (tree block, int level)
6015 {
6016 while (block)
6017 {
6018 BLOCK_NUMBER (block) = level;
6019 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
6020 block = BLOCK_CHAIN (block);
6021 }
6022 }
6023
6024 /* Create a block containing landing pads and similar stuff. */
6025
6026 static void
6027 construct_exit_block (void)
6028 {
6029 rtx_insn *head = get_last_insn ();
6030 rtx_insn *end;
6031 basic_block exit_block;
6032 edge e, e2;
6033 unsigned ix;
6034 edge_iterator ei;
6035 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
6036 rtx_insn *orig_end = BB_END (prev_bb);
6037
6038 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6039
6040 /* Make sure the locus is set to the end of the function, so that
6041 epilogue line numbers and warnings are set properly. */
6042 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
6043 input_location = cfun->function_end_locus;
6044
6045 /* Generate rtl for function exit. */
6046 expand_function_end ();
6047
6048 end = get_last_insn ();
6049 if (head == end)
6050 return;
6051 /* While emitting the function end we could have moved the end of the
6052 last basic block. */
6053 BB_END (prev_bb) = orig_end;
6054 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
6055 head = NEXT_INSN (head);
6056 /* But make sure exit_block starts with RETURN_LABEL, otherwise
6057 basic-block counting will be confused. Any instructions before that
6058 label are emitted for the case where PREV_BB falls through into the
6059 exit block, so append those instructions to prev_bb in that case. */
6060 if (NEXT_INSN (head) != return_label)
6061 {
6062 while (NEXT_INSN (head) != return_label)
6063 {
6064 if (!NOTE_P (NEXT_INSN (head)))
6065 BB_END (prev_bb) = NEXT_INSN (head);
6066 head = NEXT_INSN (head);
6067 }
6068 }
6069 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
6070 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6071 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6072
6073 ix = 0;
6074 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6075 {
6076 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6077 if (!(e->flags & EDGE_ABNORMAL))
6078 redirect_edge_succ (e, exit_block);
6079 else
6080 ix++;
6081 }
6082
6083 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6084 EDGE_FALLTHRU);
6085 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6086 if (e2 != e)
6087 {
6088 exit_block->count -= e2->count ();
6089 }
6090 update_bb_for_insn (exit_block);
6091 }
6092
6093 /* Helper function for discover_nonconstant_array_refs.
6094 Look for ARRAY_REF nodes with non-constant indexes and mark the
6095 base decls addressable. */
6096
6097 static tree
6098 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6099 void *data ATTRIBUTE_UNUSED)
6100 {
6101 tree t = *tp;
6102
6103 if (IS_TYPE_OR_DECL_P (t))
6104 *walk_subtrees = 0;
6105 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6106 {
6107 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6108 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6109 && (!TREE_OPERAND (t, 2)
6110 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6111 || (TREE_CODE (t) == COMPONENT_REF
6112 && (!TREE_OPERAND (t,2)
6113 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6114 || TREE_CODE (t) == BIT_FIELD_REF
6115 || TREE_CODE (t) == REALPART_EXPR
6116 || TREE_CODE (t) == IMAGPART_EXPR
6117 || TREE_CODE (t) == VIEW_CONVERT_EXPR
6118 || CONVERT_EXPR_P (t))
6119 t = TREE_OPERAND (t, 0);
6120
6121 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6122 {
6123 t = get_base_address (t);
6124 if (t && DECL_P (t)
6125 && DECL_MODE (t) != BLKmode)
6126 TREE_ADDRESSABLE (t) = 1;
6127 }
6128
6129 *walk_subtrees = 0;
6130 }
6131
6132 return NULL_TREE;
6133 }
6134
6135 /* RTL expansion is not able to compile array references with variable
6136 offsets for arrays stored in a single register. Discover such
6137 expressions and mark variables as addressable to avoid this
6138 scenario. */
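/* A sketch of the situation this avoids (hypothetical source):
     short a[2];    <- may get a non-BLK mode and be kept in a register
     ... a[i] ...   <- i is not a compile-time constant
   The variable index cannot be expanded while A lives in a register, so
   A is marked TREE_ADDRESSABLE, forcing it into memory.  */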
6139
6140 static void
6141 discover_nonconstant_array_refs (void)
6142 {
6143 basic_block bb;
6144 gimple_stmt_iterator gsi;
6145
6146 FOR_EACH_BB_FN (bb, cfun)
6147 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6148 {
6149 gimple *stmt = gsi_stmt (gsi);
6150 if (!is_gimple_debug (stmt))
6151 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6152 }
6153 }
6154
6155 /* This function sets crtl->args.internal_arg_pointer to a virtual
6156 register if DRAP is needed. The local register allocator will replace
6157 virtual_incoming_args_rtx with the virtual register. */
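/* Roughly (a sketch of the DRAP mechanism, not target-specific code):
   when the incoming stack is not aligned enough, the prologue saves a
   pointer to the incoming argument area in the DRAP register, realigns
   the stack pointer, and argument accesses afterwards go through
   crtl->args.internal_arg_pointer rather than the now-invalid
   virtual_incoming_args_rtx.  */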
6158
6159 static void
6160 expand_stack_alignment (void)
6161 {
6162 rtx drap_rtx;
6163 unsigned int preferred_stack_boundary;
6164
6165 if (! SUPPORTS_STACK_ALIGNMENT)
6166 return;
6167
6168 if (cfun->calls_alloca
6169 || cfun->has_nonlocal_label
6170 || crtl->has_nonlocal_goto)
6171 crtl->need_drap = true;
6172
6173 /* Call update_stack_boundary here again to update incoming stack
6174 boundary. It may set incoming stack alignment to a different
6175 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6176 use the minimum incoming stack alignment to check if it is OK
6177 to perform sibcall optimization since sibcall optimization will
6178 only align the outgoing stack to incoming stack boundary. */
6179 if (targetm.calls.update_stack_boundary)
6180 targetm.calls.update_stack_boundary ();
6181
6182 /* The incoming stack frame has to be aligned at least at
6183 parm_stack_boundary. */
6184 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6185
6186 /* Update crtl->stack_alignment_estimated and use it later to align
6187 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6188 exceptions since callgraph doesn't collect incoming stack alignment
6189 in this case. */
6190 if (cfun->can_throw_non_call_exceptions
6191 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6192 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6193 else
6194 preferred_stack_boundary = crtl->preferred_stack_boundary;
6195 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6196 crtl->stack_alignment_estimated = preferred_stack_boundary;
6197 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6198 crtl->stack_alignment_needed = preferred_stack_boundary;
6199
6200 gcc_assert (crtl->stack_alignment_needed
6201 <= crtl->stack_alignment_estimated);
6202
6203 crtl->stack_realign_needed
6204 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6205 crtl->stack_realign_tried = crtl->stack_realign_needed;
6206
6207 crtl->stack_realign_processed = true;
6208
6209 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6210 alignment. */
6211 gcc_assert (targetm.calls.get_drap_rtx != NULL);
6212 drap_rtx = targetm.calls.get_drap_rtx ();
6213
6214 /* stack_realign_drap and drap_rtx must match. */
6215 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6216
6217 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6218 if (drap_rtx != NULL)
6219 {
6220 crtl->args.internal_arg_pointer = drap_rtx;
6221
6222 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6223 needed. */
6224 fixup_tail_calls ();
6225 }
6226 }
6227 \f
6228
6229 static void
6230 expand_main_function (void)
6231 {
6232 #if (defined(INVOKE__main) \
6233 || (!defined(HAS_INIT_SECTION) \
6234 && !defined(INIT_SECTION_ASM_OP) \
6235 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6236 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6237 #endif
6238 }
6239 \f
6240
6241 /* Expand code to initialize the stack_protect_guard. This is invoked at
6242 the beginning of a function to be protected. */
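/* Conceptually the prologue performs (a sketch):
     x = <this frame's canary slot, crtl->stack_protect_guard>;
     y = <the guard value, e.g. __stack_chk_guard or a TLS slot>;
     x = y;
   preferably through a target pattern that copies Y into X without
   leaking the guard value into an ordinary register.  */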
6243
6244 static void
6245 stack_protect_prologue (void)
6246 {
6247 tree guard_decl = targetm.stack_protect_guard ();
6248 rtx x, y;
6249
6250 crtl->stack_protect_guard_decl = guard_decl;
6251 x = expand_normal (crtl->stack_protect_guard);
6252
6253 if (targetm.have_stack_protect_combined_set () && guard_decl)
6254 {
6255 gcc_assert (DECL_P (guard_decl));
6256 y = DECL_RTL (guard_decl);
6257
6258 /* Allow the target to compute the address of Y and copy it to X without
6259 leaking Y into a register. This combined address + copy pattern
6260 allows the target to prevent spilling of any intermediate results by
6261 splitting it after the register allocator. */
6262 if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
6263 {
6264 emit_insn (insn);
6265 return;
6266 }
6267 }
6268
6269 if (guard_decl)
6270 y = expand_normal (guard_decl);
6271 else
6272 y = const0_rtx;
6273
6274 /* Allow the target to copy from Y to X without leaking Y into a
6275 register. */
6276 if (targetm.have_stack_protect_set ())
6277 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6278 {
6279 emit_insn (insn);
6280 return;
6281 }
6282
6283 /* Otherwise do a straight move. */
6284 emit_move_insn (x, y);
6285 }
6286
6287 /* Translate the intermediate representation contained in the CFG
6288 from GIMPLE trees to RTL.
6289
6290 We do conversion per basic block and preserve/update the tree CFG.
6291 This implies we have to do some magic as the CFG can simultaneously
6292 consist of basic blocks containing RTL and GIMPLE trees. This can
6293 confuse the CFG hooks, so be careful to not manipulate CFG during
6294 the expansion. */
6295
6296 namespace {
6297
6298 const pass_data pass_data_expand =
6299 {
6300 RTL_PASS, /* type */
6301 "expand", /* name */
6302 OPTGROUP_NONE, /* optinfo_flags */
6303 TV_EXPAND, /* tv_id */
6304 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6305 | PROP_gimple_lcx
6306 | PROP_gimple_lvec
6307 | PROP_gimple_lva), /* properties_required */
6308 PROP_rtl, /* properties_provided */
6309 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6310 0, /* todo_flags_start */
6311 0, /* todo_flags_finish */
6312 };
6313
6314 class pass_expand : public rtl_opt_pass
6315 {
6316 public:
6317 pass_expand (gcc::context *ctxt)
6318 : rtl_opt_pass (pass_data_expand, ctxt)
6319 {}
6320
6321 /* opt_pass methods: */
6322 virtual unsigned int execute (function *);
6323
6324 }; // class pass_expand
6325
6326 unsigned int
6327 pass_expand::execute (function *fun)
6328 {
6329 basic_block bb, init_block;
6330 edge_iterator ei;
6331 edge e;
6332 rtx_insn *var_seq, *var_ret_seq;
6333 unsigned i;
6334
6335 timevar_push (TV_OUT_OF_SSA);
6336 rewrite_out_of_ssa (&SA);
6337 timevar_pop (TV_OUT_OF_SSA);
6338 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6339
6340 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6341 {
6342 gimple_stmt_iterator gsi;
6343 FOR_EACH_BB_FN (bb, cfun)
6344 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6345 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6346 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6347 }
6348
6349 /* Make sure all values used by the optimization passes have sane
6350 defaults. */
6351 reg_renumber = 0;
6352
6353 /* Some backends want to know that we are expanding to RTL. */
6354 currently_expanding_to_rtl = 1;
6355 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6356 free_dominance_info (CDI_DOMINATORS);
6357
6358 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6359
6360 insn_locations_init ();
6361 if (!DECL_IS_BUILTIN (current_function_decl))
6362 {
6363 /* Eventually, all FEs should explicitly set function_start_locus. */
6364 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6365 set_curr_insn_location
6366 (DECL_SOURCE_LOCATION (current_function_decl));
6367 else
6368 set_curr_insn_location (fun->function_start_locus);
6369 }
6370 else
6371 set_curr_insn_location (UNKNOWN_LOCATION);
6372 prologue_location = curr_insn_location ();
6373
6374 #ifdef INSN_SCHEDULING
6375 init_sched_attrs ();
6376 #endif
6377
6378 /* Make sure first insn is a note even if we don't want linenums.
6379 This makes sure the first insn will never be deleted.
6380 Also, final expects a note to appear there. */
6381 emit_note (NOTE_INSN_DELETED);
6382
6383 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6384 discover_nonconstant_array_refs ();
6385
6386 targetm.expand_to_rtl_hook ();
6387 crtl->init_stack_alignment ();
6388 fun->cfg->max_jumptable_ents = 0;
6389
6390 /* Resolve the function section. Some targets, like ARM EABI, rely on
6391 knowledge of the function section at expansion time to predict the distance of calls. */
6392 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6393
6394 /* Expand the variables recorded during gimple lowering. */
6395 timevar_push (TV_VAR_EXPAND);
6396 start_sequence ();
6397
6398 var_ret_seq = expand_used_vars ();
6399
6400 var_seq = get_insns ();
6401 end_sequence ();
6402 timevar_pop (TV_VAR_EXPAND);
6403
6404 /* Honor stack protection warnings. */
6405 if (warn_stack_protect)
6406 {
6407 if (fun->calls_alloca)
6408 warning (OPT_Wstack_protector,
6409 "stack protector not protecting local variables: "
6410 "variable length buffer");
6411 if (has_short_buffer && !crtl->stack_protect_guard)
6412 warning (OPT_Wstack_protector,
6413 "stack protector not protecting function: "
6414 "all local arrays are less than %d bytes long",
6415 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6416 }
6417
6418 /* Set up parameters and prepare for return, for the function. */
6419 expand_function_start (current_function_decl);
6420
6421 /* If we emitted any instructions for setting up the variables,
6422 emit them before the FUNCTION_START note. */
6423 if (var_seq)
6424 {
6425 emit_insn_before (var_seq, parm_birth_insn);
6426
6427 /* In expand_function_end we'll insert the alloca save/restore
6428 before parm_birth_insn. We've just inserted an alloca call.
6429 Adjust the pointer to match. */
6430 parm_birth_insn = var_seq;
6431 }
6432
6433 /* Now propagate the RTL assignment of each partition to the
6434 underlying var of each SSA_NAME. */
6435 tree name;
6436
6437 FOR_EACH_SSA_NAME (i, name, cfun)
6438 {
6439 /* We might have generated new SSA names in
6440 update_alias_info_with_stack_vars. They will have NULL
6441 defining statements, and won't be part of the partitioning,
6442 so ignore those. */
6443 if (!SSA_NAME_DEF_STMT (name))
6444 continue;
6445
6446 adjust_one_expanded_partition_var (name);
6447 }
6448
6449 /* Clean up RTL of variables that straddle across multiple
6450 partitions, and check that the rtl of any PARM_DECLs that are not
6451 cleaned up is that of their default defs. */
6452 FOR_EACH_SSA_NAME (i, name, cfun)
6453 {
6454 int part;
6455
6456 /* We might have generated new SSA names in
6457 update_alias_info_with_stack_vars. They will have NULL
6458 defining statements, and won't be part of the partitioning,
6459 so ignore those. */
6460 if (!SSA_NAME_DEF_STMT (name))
6461 continue;
6462 part = var_to_partition (SA.map, name);
6463 if (part == NO_PARTITION)
6464 continue;
6465
6466 /* If this decl was marked as living in multiple places, reset
6467 this now to NULL. */
6468 tree var = SSA_NAME_VAR (name);
6469 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6470 SET_DECL_RTL (var, NULL);
6471 /* Check that the pseudos chosen by assign_parms are those of
6472 the corresponding default defs. */
6473 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6474 && (TREE_CODE (var) == PARM_DECL
6475 || TREE_CODE (var) == RESULT_DECL))
6476 {
6477 rtx in = DECL_RTL_IF_SET (var);
6478 gcc_assert (in);
6479 rtx out = SA.partition_to_pseudo[part];
6480 gcc_assert (in == out);
6481
6482 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6483 those expected by debug backends for each parm and for
6484 the result. This is particularly important for stabs,
6485 whose register elimination from parm's DECL_RTL may cause
6486 -fcompare-debug differences as SET_DECL_RTL changes reg's
6487 attrs. So, make sure the RTL already has the parm as the
6488 EXPR, so that it won't change. */
6489 SET_DECL_RTL (var, NULL_RTX);
6490 if (MEM_P (in))
6491 set_mem_attributes (in, var, true);
6492 SET_DECL_RTL (var, in);
6493 }
6494 }
6495
6496 /* If this function is `main', emit a call to `__main'
6497 to run global initializers, etc. */
6498 if (DECL_NAME (current_function_decl)
6499 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6500 && DECL_FILE_SCOPE_P (current_function_decl))
6501 expand_main_function ();
6502
6503 /* Initialize the stack_protect_guard field. This must happen after the
6504 call to __main (if any) so that the external decl is initialized. */
6505 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6506 stack_protect_prologue ();
6507
6508 expand_phi_nodes (&SA);
6509
6510 /* Release any stale SSA redirection data. */
6511 redirect_edge_var_map_empty ();
6512
6513 /* Register rtl specific functions for cfg. */
6514 rtl_register_cfg_hooks ();
6515
6516 init_block = construct_init_block ();
6517
6518 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
6519 remaining edges later. */
6520 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6521 e->flags &= ~EDGE_EXECUTABLE;
6522
6523 /* If the function has too many markers, drop them while expanding. */
6524 if (cfun->debug_marker_count
6525 >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6526 cfun->debug_nonbind_markers = false;
6527
6528 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6529 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6530 next_bb)
6531 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6532
6533 if (MAY_HAVE_DEBUG_BIND_INSNS)
6534 expand_debug_locations ();
6535
6536 if (deep_ter_debug_map)
6537 {
6538 delete deep_ter_debug_map;
6539 deep_ter_debug_map = NULL;
6540 }
6541
6542 /* Free stuff we no longer need after GIMPLE optimizations. */
6543 free_dominance_info (CDI_DOMINATORS);
6544 free_dominance_info (CDI_POST_DOMINATORS);
6545 delete_tree_cfg_annotations (fun);
6546
6547 timevar_push (TV_OUT_OF_SSA);
6548 finish_out_of_ssa (&SA);
6549 timevar_pop (TV_OUT_OF_SSA);
6550
6551 timevar_push (TV_POST_EXPAND);
6552 /* We are no longer in SSA form. */
6553 fun->gimple_df->in_ssa_p = false;
6554 loops_state_clear (LOOP_CLOSED_SSA);
6555
6556 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6557 conservatively to true until they are all profile aware. */
6558 delete lab_rtx_for_bb;
6559 free_histograms (fun);
6560
6561 construct_exit_block ();
6562 insn_locations_finalize ();
6563
6564 if (var_ret_seq)
6565 {
6566 rtx_insn *after = return_label;
6567 rtx_insn *next = NEXT_INSN (after);
6568 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6569 after = next;
6570 emit_insn_after (var_ret_seq, after);
6571 }
6572
6573 /* Zap the tree EH table. */
6574 set_eh_throw_stmt_table (fun, NULL);
6575
6576 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
6577 split edges which edge insertions might do. */
6578 rebuild_jump_labels (get_insns ());
6579
6580 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6581 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6582 {
6583 edge e;
6584 edge_iterator ei;
6585 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6586 {
6587 if (e->insns.r)
6588 {
6589 rebuild_jump_labels_chain (e->insns.r);
6590 /* Put insns after parm birth, but before
6591 NOTE_INSNS_FUNCTION_BEG. */
6592 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6593 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6594 {
6595 rtx_insn *insns = e->insns.r;
6596 e->insns.r = NULL;
6597 if (NOTE_P (parm_birth_insn)
6598 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6599 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6600 else
6601 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6602 }
6603 else
6604 commit_one_edge_insertion (e);
6605 }
6606 else
6607 ei_next (&ei);
6608 }
6609 }
6610
6611 /* We're done expanding trees to RTL. */
6612 currently_expanding_to_rtl = 0;
6613
6614 flush_mark_addressable_queue ();
6615
6616 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6617 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6618 {
6619 edge e;
6620 edge_iterator ei;
6621 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6622 {
6623 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6624 e->flags &= ~EDGE_EXECUTABLE;
6625
6626 /* At the moment not all abnormal edges match the RTL
6627 representation. It is safe to remove them here as
6628 find_many_sub_basic_blocks will rediscover them.
6629 In the future we should get this fixed properly. */
6630 if ((e->flags & EDGE_ABNORMAL)
6631 && !(e->flags & EDGE_SIBCALL))
6632 remove_edge (e);
6633 else
6634 ei_next (&ei);
6635 }
6636 }
6637
6638 auto_sbitmap blocks (last_basic_block_for_fn (fun));
6639 bitmap_ones (blocks);
6640 find_many_sub_basic_blocks (blocks);
6641 purge_all_dead_edges ();
6642
6643 /* After initial rtl generation, call back to finish generating
6644 exception support code. We need to do this before cleaning up
6645 the CFG as the code does not expect dead landing pads. */
6646 if (fun->eh->region_tree != NULL)
6647 finish_eh_generation ();
6648
6649 /* Call expand_stack_alignment after finishing all
6650 updates to crtl->preferred_stack_boundary. */
6651 expand_stack_alignment ();
6652
6653 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6654 function. */
6655 if (crtl->tail_call_emit)
6656 fixup_tail_calls ();
6657
6658 /* BB subdivision may have created basic blocks that are only reachable
6659 from unlikely bbs but are not marked as such in the profile. */
6660 if (optimize)
6661 propagate_unlikely_bbs_forward ();
6662
6663 /* Remove unreachable blocks, otherwise we cannot compute dominators
6664 which are needed for loop state verification. As a side-effect
6665 this also compacts blocks.
6666 ??? We cannot remove trivially dead insns here as for example
6667 the DRAP reg on i?86 is not magically live at this point.
6668 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6669 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6670
6671 checking_verify_flow_info ();
6672
6673 /* Initialize pseudos allocated for hard registers. */
6674 emit_initial_value_sets ();
6675
6676 /* And finally unshare all RTL. */
6677 unshare_all_rtl ();
6678
6679 /* There's no need to defer outputting this function any more; we
6680 know we want to output it. */
6681 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6682
6683 /* Now that we're done expanding trees to RTL, we shouldn't have any
6684 more CONCATs anywhere. */
6685 generating_concat_p = 0;
6686
6687 if (dump_file)
6688 {
6689 fprintf (dump_file,
6690 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6691 /* And the pass manager will dump RTL for us. */
6692 }
6693
6694 /* If we're emitting a nested function, make sure its parent gets
6695 emitted as well. Doing otherwise confuses debug info. */
6696 {
6697 tree parent;
6698 for (parent = DECL_CONTEXT (current_function_decl);
6699 parent != NULL_TREE;
6700 parent = get_containing_scope (parent))
6701 if (TREE_CODE (parent) == FUNCTION_DECL)
6702 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6703 }
6704
6705 TREE_ASM_WRITTEN (current_function_decl) = 1;
6706
6707 /* After expanding, the return labels are no longer needed. */
6708 return_label = NULL;
6709 naked_return_label = NULL;
6710
6711 /* After expanding, the tm_restart map is no longer needed. */
6712 if (fun->gimple_df->tm_restart)
6713 fun->gimple_df->tm_restart = NULL;
6714
6715 /* Tag the blocks with a depth number so that change_scope can find
6716 the common parent easily. */
6717 set_block_levels (DECL_INITIAL (fun->decl), 0);
6718 default_rtl_profile ();
6719
6720 /* For -dx discard loops now, otherwise IL verify in clean_state will
6721 ICE. */
6722 if (rtl_dump_and_exit)
6723 {
6724 cfun->curr_properties &= ~PROP_loops;
6725 loop_optimizer_finalize ();
6726 }
6727
6728 timevar_pop (TV_POST_EXPAND);
6729
6730 return 0;
6731 }
6732
6733 } // anon namespace
6734
6735 rtl_opt_pass *
6736 make_pass_expand (gcc::context *ctxt)
6737 {
6738 return new pass_expand (ctxt);
6739 }