]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cfgexpand.cc
c++, mingw: Fix up types of dtor hooks to __cxa_{,thread_}atexit/__cxa_throw on mingw...
[thirdparty/gcc.git] / gcc / cfgexpand.cc
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber. */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "tree-inline.h"
65 #include "value-prof.h"
66 #include "tree-ssa-live.h"
67 #include "tree-outof-ssa.h"
68 #include "cfgloop.h"
69 #include "insn-attr.h" /* For INSN_SCHEDULING. */
70 #include "stringpool.h"
71 #include "attribs.h"
72 #include "asan.h"
73 #include "tree-ssa-address.h"
74 #include "output.h"
75 #include "builtins.h"
76 #include "opts.h"
77
78 /* Some systems use __main in a way incompatible with its use in gcc, in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
85
86 /* This variable holds information helping the rewriting of SSA trees
87 into RTL. */
88 struct ssaexpand SA;
89
90 /* This variable holds the currently expanded gimple statement for purposes
91 of comminucating the profile info to the builtin expanders. */
92 gimple *currently_expanding_gimple_stmt;
93
94 static rtx expand_debug_expr (tree);
95
96 static bool defer_stack_allocation (tree, bool);
97
98 static void record_alignment_for_reg_var (unsigned int);
99
100 /* Return an expression tree corresponding to the RHS of GIMPLE
101 statement STMT. */
102
103 tree
104 gimple_assign_rhs_to_tree (gimple *stmt)
105 {
106 tree t;
107 switch (gimple_assign_rhs_class (stmt))
108 {
109 case GIMPLE_TERNARY_RHS:
110 t = build3 (gimple_assign_rhs_code (stmt),
111 TREE_TYPE (gimple_assign_lhs (stmt)),
112 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt),
113 gimple_assign_rhs3 (stmt));
114 break;
115 case GIMPLE_BINARY_RHS:
116 t = build2 (gimple_assign_rhs_code (stmt),
117 TREE_TYPE (gimple_assign_lhs (stmt)),
118 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
119 break;
120 case GIMPLE_UNARY_RHS:
121 t = build1 (gimple_assign_rhs_code (stmt),
122 TREE_TYPE (gimple_assign_lhs (stmt)),
123 gimple_assign_rhs1 (stmt));
124 break;
125 case GIMPLE_SINGLE_RHS:
126 {
127 t = gimple_assign_rhs1 (stmt);
128 /* Avoid modifying this tree in place below. */
129 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
130 && gimple_location (stmt) != EXPR_LOCATION (t))
131 || (gimple_block (stmt) && currently_expanding_to_rtl
132 && EXPR_P (t)))
133 t = copy_node (t);
134 break;
135 }
136 default:
137 gcc_unreachable ();
138 }
139
140 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
141 SET_EXPR_LOCATION (t, gimple_location (stmt));
142
143 return t;
144 }
145
146
147 #ifndef STACK_ALIGNMENT_NEEDED
148 #define STACK_ALIGNMENT_NEEDED 1
149 #endif
150
151 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
152
153 /* Choose either CUR or NEXT as the leader DECL for a partition.
154 Prefer ignored decls, to simplify debug dumps and reduce ambiguity
155 out of the same user variable being in multiple partitions (this is
156 less likely for compiler-introduced temps). */
157
158 static tree
159 leader_merge (tree cur, tree next)
160 {
161 if (cur == NULL || cur == next)
162 return next;
163
164 if (DECL_P (cur) && DECL_IGNORED_P (cur))
165 return cur;
166
167 if (DECL_P (next) && DECL_IGNORED_P (next))
168 return next;
169
170 return cur;
171 }
172
173 /* Associate declaration T with storage space X. If T is no
174 SSA name this is exactly SET_DECL_RTL, otherwise make the
175 partition of T associated with X. */
176 static inline void
177 set_rtl (tree t, rtx x)
178 {
179 gcc_checking_assert (!x
180 || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
181 || (use_register_for_decl (t)
182 ? (REG_P (x)
183 || (GET_CODE (x) == CONCAT
184 && (REG_P (XEXP (x, 0))
185 || SUBREG_P (XEXP (x, 0)))
186 && (REG_P (XEXP (x, 1))
187 || SUBREG_P (XEXP (x, 1))))
188 /* We need to accept PARALLELs for RESUT_DECLs
189 because of vector types with BLKmode returned
190 in multiple registers, but they are supposed
191 to be uncoalesced. */
192 || (GET_CODE (x) == PARALLEL
193 && SSAVAR (t)
194 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
195 && (GET_MODE (x) == BLKmode
196 || !flag_tree_coalesce_vars)))
197 : (MEM_P (x) || x == pc_rtx
198 || (GET_CODE (x) == CONCAT
199 && MEM_P (XEXP (x, 0))
200 && MEM_P (XEXP (x, 1))))));
201 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
202 RESULT_DECLs has the expected mode. For memory, we accept
203 unpromoted modes, since that's what we're likely to get. For
204 PARM_DECLs and RESULT_DECLs, we'll have been called by
205 set_parm_rtl, which will give us the default def, so we don't
206 have to compute it ourselves. For RESULT_DECLs, we accept mode
207 mismatches too, as long as we have BLKmode or are not coalescing
208 across variables, so that we don't reject BLKmode PARALLELs or
209 unpromoted REGs. */
210 gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
211 || (SSAVAR (t)
212 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
213 && (promote_ssa_mode (t, NULL) == BLKmode
214 || !flag_tree_coalesce_vars))
215 || !use_register_for_decl (t)
216 || GET_MODE (x) == promote_ssa_mode (t, NULL));
217
218 if (x)
219 {
220 bool skip = false;
221 tree cur = NULL_TREE;
222 rtx xm = x;
223
224 retry:
225 if (MEM_P (xm))
226 cur = MEM_EXPR (xm);
227 else if (REG_P (xm))
228 cur = REG_EXPR (xm);
229 else if (SUBREG_P (xm))
230 {
231 gcc_assert (subreg_lowpart_p (xm));
232 xm = SUBREG_REG (xm);
233 goto retry;
234 }
235 else if (GET_CODE (xm) == CONCAT)
236 {
237 xm = XEXP (xm, 0);
238 goto retry;
239 }
240 else if (GET_CODE (xm) == PARALLEL)
241 {
242 xm = XVECEXP (xm, 0, 0);
243 gcc_assert (GET_CODE (xm) == EXPR_LIST);
244 xm = XEXP (xm, 0);
245 goto retry;
246 }
247 else if (xm == pc_rtx)
248 skip = true;
249 else
250 gcc_unreachable ();
251
252 tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
253
254 if (cur != next)
255 {
256 if (MEM_P (x))
257 set_mem_attributes (x,
258 next && TREE_CODE (next) == SSA_NAME
259 ? TREE_TYPE (next)
260 : next, true);
261 else
262 set_reg_attrs_for_decl_rtl (next, x);
263 }
264 }
265
266 if (TREE_CODE (t) == SSA_NAME)
267 {
268 int part = var_to_partition (SA.map, t);
269 if (part != NO_PARTITION)
270 {
271 if (SA.partition_to_pseudo[part])
272 gcc_assert (SA.partition_to_pseudo[part] == x);
273 else if (x != pc_rtx)
274 SA.partition_to_pseudo[part] = x;
275 }
276 /* For the benefit of debug information at -O0 (where
277 vartracking doesn't run) record the place also in the base
278 DECL. For PARMs and RESULTs, do so only when setting the
279 default def. */
280 if (x && x != pc_rtx && SSA_NAME_VAR (t)
281 && (VAR_P (SSA_NAME_VAR (t))
282 || SSA_NAME_IS_DEFAULT_DEF (t)))
283 {
284 tree var = SSA_NAME_VAR (t);
285 /* If we don't yet have something recorded, just record it now. */
286 if (!DECL_RTL_SET_P (var))
287 SET_DECL_RTL (var, x);
288 /* If we have it set already to "multiple places" don't
289 change this. */
290 else if (DECL_RTL (var) == pc_rtx)
291 ;
292 /* If we have something recorded and it's not the same place
293 as we want to record now, we have multiple partitions for the
294 same base variable, with different places. We can't just
295 randomly chose one, hence we have to say that we don't know.
296 This only happens with optimization, and there var-tracking
297 will figure out the right thing. */
298 else if (DECL_RTL (var) != x)
299 SET_DECL_RTL (var, pc_rtx);
300 }
301 }
302 else
303 SET_DECL_RTL (t, x);
304 }
305
306 /* This structure holds data relevant to one variable that will be
307 placed in a stack slot. */
308 class stack_var
309 {
310 public:
311 /* The Variable. */
312 tree decl;
313
314 /* Initially, the size of the variable. Later, the size of the partition,
315 if this variable becomes it's partition's representative. */
316 poly_uint64 size;
317
318 /* The *byte* alignment required for this variable. Or as, with the
319 size, the alignment for this partition. */
320 unsigned int alignb;
321
322 /* The partition representative. */
323 unsigned representative;
324
325 /* The next stack variable in the partition, or EOC. */
326 unsigned next;
327
328 /* The numbers of conflicting stack variables. */
329 bitmap conflicts;
330 };
331
332 #define EOC ((unsigned)-1)
333
334 /* We have an array of such objects while deciding allocation. */
335 static class stack_var *stack_vars;
336 static unsigned stack_vars_alloc;
337 static unsigned stack_vars_num;
338 static hash_map<tree, unsigned> *decl_to_stack_part;
339
340 /* Conflict bitmaps go on this obstack. This allows us to destroy
341 all of them in one big sweep. */
342 static bitmap_obstack stack_var_bitmap_obstack;
343
344 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
345 is non-decreasing. */
346 static unsigned *stack_vars_sorted;
347
348 /* The phase of the stack frame. This is the known misalignment of
349 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
350 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
351 static int frame_phase;
352
353 /* Used during expand_used_vars to remember if we saw any decls for
354 which we'd like to enable stack smashing protection. */
355 static bool has_protected_decls;
356
357 /* Used during expand_used_vars. Remember if we say a character buffer
358 smaller than our cutoff threshold. Used for -Wstack-protector. */
359 static bool has_short_buffer;
360
361 /* Compute the byte alignment to use for DECL. Ignore alignment
362 we can't do with expected alignment of the stack boundary. */
363
364 static unsigned int
365 align_local_variable (tree decl, bool really_expand)
366 {
367 unsigned int align;
368
369 if (TREE_CODE (decl) == SSA_NAME)
370 {
371 tree type = TREE_TYPE (decl);
372 machine_mode mode = TYPE_MODE (type);
373
374 align = TYPE_ALIGN (type);
375 if (mode != BLKmode
376 && align < GET_MODE_ALIGNMENT (mode))
377 align = GET_MODE_ALIGNMENT (mode);
378 }
379 else
380 align = LOCAL_DECL_ALIGNMENT (decl);
381
382 if (hwasan_sanitize_stack_p ())
383 align = MAX (align, (unsigned) HWASAN_TAG_GRANULE_SIZE * BITS_PER_UNIT);
384
385 if (TREE_CODE (decl) != SSA_NAME && really_expand)
386 /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
387 That is done before IPA and could bump alignment based on host
388 backend even for offloaded code which wants different
389 LOCAL_DECL_ALIGNMENT. */
390 SET_DECL_ALIGN (decl, align);
391
392 return align / BITS_PER_UNIT;
393 }
394
395 /* Align given offset BASE with ALIGN. Truncate up if ALIGN_UP is true,
396 down otherwise. Return truncated BASE value. */
397
398 static inline unsigned HOST_WIDE_INT
399 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
400 {
401 return align_up ? (base + align - 1) & -align : base & -align;
402 }
403
404 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
405 Return the frame offset. */
406
407 static poly_int64
408 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
409 {
410 poly_int64 offset, new_frame_offset;
411
412 if (FRAME_GROWS_DOWNWARD)
413 {
414 new_frame_offset
415 = aligned_lower_bound (frame_offset - frame_phase - size,
416 align) + frame_phase;
417 offset = new_frame_offset;
418 }
419 else
420 {
421 new_frame_offset
422 = aligned_upper_bound (frame_offset - frame_phase,
423 align) + frame_phase;
424 offset = new_frame_offset;
425 new_frame_offset += size;
426 }
427 frame_offset = new_frame_offset;
428
429 if (frame_offset_overflow (frame_offset, cfun->decl))
430 frame_offset = offset = 0;
431
432 return offset;
433 }
434
435 /* Ensure that the stack is aligned to ALIGN bytes.
436 Return the new frame offset. */
437 static poly_int64
438 align_frame_offset (unsigned HOST_WIDE_INT align)
439 {
440 return alloc_stack_frame_space (0, align);
441 }
442
443 /* Accumulate DECL into STACK_VARS. */
444
445 static void
446 add_stack_var (tree decl, bool really_expand)
447 {
448 class stack_var *v;
449
450 if (stack_vars_num >= stack_vars_alloc)
451 {
452 if (stack_vars_alloc)
453 stack_vars_alloc = stack_vars_alloc * 3 / 2;
454 else
455 stack_vars_alloc = 32;
456 stack_vars
457 = XRESIZEVEC (class stack_var, stack_vars, stack_vars_alloc);
458 }
459 if (!decl_to_stack_part)
460 decl_to_stack_part = new hash_map<tree, unsigned>;
461
462 v = &stack_vars[stack_vars_num];
463 decl_to_stack_part->put (decl, stack_vars_num);
464
465 v->decl = decl;
466 tree size = TREE_CODE (decl) == SSA_NAME
467 ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
468 : DECL_SIZE_UNIT (decl);
469 v->size = tree_to_poly_uint64 (size);
470 /* Ensure that all variables have size, so that &a != &b for any two
471 variables that are simultaneously live. */
472 if (known_eq (v->size, 0U))
473 v->size = 1;
474 v->alignb = align_local_variable (decl, really_expand);
475 /* An alignment of zero can mightily confuse us later. */
476 gcc_assert (v->alignb != 0);
477
478 /* All variables are initially in their own partition. */
479 v->representative = stack_vars_num;
480 v->next = EOC;
481
482 /* All variables initially conflict with no other. */
483 v->conflicts = NULL;
484
485 /* Ensure that this decl doesn't get put onto the list twice. */
486 set_rtl (decl, pc_rtx);
487
488 stack_vars_num++;
489 }
490
491 /* Make the decls associated with luid's X and Y conflict. */
492
493 static void
494 add_stack_var_conflict (unsigned x, unsigned y)
495 {
496 class stack_var *a = &stack_vars[x];
497 class stack_var *b = &stack_vars[y];
498 if (x == y)
499 return;
500 if (!a->conflicts)
501 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
502 if (!b->conflicts)
503 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
504 bitmap_set_bit (a->conflicts, y);
505 bitmap_set_bit (b->conflicts, x);
506 }
507
508 /* Check whether the decls associated with luid's X and Y conflict. */
509
510 static bool
511 stack_var_conflict_p (unsigned x, unsigned y)
512 {
513 class stack_var *a = &stack_vars[x];
514 class stack_var *b = &stack_vars[y];
515 if (x == y)
516 return false;
517 /* Partitions containing an SSA name result from gimple registers
518 with things like unsupported modes. They are top-level and
519 hence conflict with everything else. */
520 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
521 return true;
522
523 if (!a->conflicts || !b->conflicts)
524 return false;
525 return bitmap_bit_p (a->conflicts, y);
526 }
527
528 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
529 enter its partition number into bitmap DATA. */
530
531 static bool
532 visit_op (gimple *, tree op, tree, void *data)
533 {
534 bitmap active = (bitmap)data;
535 op = get_base_address (op);
536 if (op
537 && DECL_P (op)
538 && DECL_RTL_IF_SET (op) == pc_rtx)
539 {
540 unsigned *v = decl_to_stack_part->get (op);
541 if (v)
542 bitmap_set_bit (active, *v);
543 }
544 return false;
545 }
546
547 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
548 record conflicts between it and all currently active other partitions
549 from bitmap DATA. */
550
551 static bool
552 visit_conflict (gimple *, tree op, tree, void *data)
553 {
554 bitmap active = (bitmap)data;
555 op = get_base_address (op);
556 if (op
557 && DECL_P (op)
558 && DECL_RTL_IF_SET (op) == pc_rtx)
559 {
560 unsigned *v = decl_to_stack_part->get (op);
561 if (v && bitmap_set_bit (active, *v))
562 {
563 unsigned num = *v;
564 bitmap_iterator bi;
565 unsigned i;
566 gcc_assert (num < stack_vars_num);
567 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
568 add_stack_var_conflict (num, i);
569 }
570 }
571 return false;
572 }
573
574 /* Helper function for add_scope_conflicts_1. For USE on
575 a stmt, if it is a SSA_NAME and in its SSA_NAME_DEF_STMT is known to be
576 based on some ADDR_EXPR, invoke VISIT on that ADDR_EXPR. */
577
578 static inline void
579 add_scope_conflicts_2 (tree use, bitmap work,
580 walk_stmt_load_store_addr_fn visit)
581 {
582 if (TREE_CODE (use) == SSA_NAME
583 && (POINTER_TYPE_P (TREE_TYPE (use))
584 || INTEGRAL_TYPE_P (TREE_TYPE (use))))
585 {
586 gimple *g = SSA_NAME_DEF_STMT (use);
587 if (is_gimple_assign (g))
588 if (tree op = gimple_assign_rhs1 (g))
589 if (TREE_CODE (op) == ADDR_EXPR)
590 visit (g, TREE_OPERAND (op, 0), op, work);
591 }
592 }
593
594 /* Helper routine for add_scope_conflicts, calculating the active partitions
595 at the end of BB, leaving the result in WORK. We're called to generate
596 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
597 liveness. */
598
599 static void
600 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
601 {
602 edge e;
603 edge_iterator ei;
604 gimple_stmt_iterator gsi;
605 walk_stmt_load_store_addr_fn visit;
606 use_operand_p use_p;
607 ssa_op_iter iter;
608
609 bitmap_clear (work);
610 FOR_EACH_EDGE (e, ei, bb->preds)
611 bitmap_ior_into (work, (bitmap)e->src->aux);
612
613 visit = visit_op;
614
615 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
616 {
617 gimple *stmt = gsi_stmt (gsi);
618 gphi *phi = as_a <gphi *> (stmt);
619 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
620 FOR_EACH_PHI_ARG (use_p, phi, iter, SSA_OP_USE)
621 add_scope_conflicts_2 (USE_FROM_PTR (use_p), work, visit);
622 }
623 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
624 {
625 gimple *stmt = gsi_stmt (gsi);
626
627 if (gimple_clobber_p (stmt))
628 {
629 tree lhs = gimple_assign_lhs (stmt);
630 unsigned *v;
631 /* Nested function lowering might introduce LHSs
632 that are COMPONENT_REFs. */
633 if (!VAR_P (lhs))
634 continue;
635 if (DECL_RTL_IF_SET (lhs) == pc_rtx
636 && (v = decl_to_stack_part->get (lhs)))
637 bitmap_clear_bit (work, *v);
638 }
639 else if (!is_gimple_debug (stmt))
640 {
641 if (for_conflict && visit == visit_op)
642 {
643 /* When we are inheriting live variables from our predecessors
644 through a CFG merge we might not see an actual mention of
645 the variables to record the approprate conflict as defs/uses
646 might be through indirect stores/loads. For this reason
647 we have to make sure each live variable conflicts with
648 each other. When there's just a single predecessor the
649 set of conflicts is already up-to-date.
650 We perform this delayed at the first real instruction to
651 allow clobbers starting this block to remove variables from
652 the set of live variables. */
653 bitmap_iterator bi;
654 unsigned i;
655 if (EDGE_COUNT (bb->preds) > 1)
656 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
657 {
658 class stack_var *a = &stack_vars[i];
659 if (!a->conflicts)
660 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
661 bitmap_ior_into (a->conflicts, work);
662 }
663 visit = visit_conflict;
664 }
665 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
666 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
667 add_scope_conflicts_2 (USE_FROM_PTR (use_p), work, visit);
668 }
669 }
670
671 /* When there was no real instruction but there's a CFG merge we need
672 to add the conflicts now. */
673 if (for_conflict && visit == visit_op && EDGE_COUNT (bb->preds) > 1)
674 {
675 bitmap_iterator bi;
676 unsigned i;
677 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
678 {
679 class stack_var *a = &stack_vars[i];
680 if (!a->conflicts)
681 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
682 bitmap_ior_into (a->conflicts, work);
683 }
684 }
685 }
686
687 /* Generate stack partition conflicts between all partitions that are
688 simultaneously live. */
689
690 static void
691 add_scope_conflicts (void)
692 {
693 basic_block bb;
694 bool changed;
695 bitmap work = BITMAP_ALLOC (NULL);
696 int *rpo;
697 int n_bbs;
698
699 /* We approximate the live range of a stack variable by taking the first
700 mention of its name as starting point(s), and by the end-of-scope
701 death clobber added by gimplify as ending point(s) of the range.
702 This overapproximates in the case we for instance moved an address-taken
703 operation upward, without also moving a dereference to it upwards.
704 But it's conservatively correct as a variable never can hold values
705 before its name is mentioned at least once.
706
707 We then do a mostly classical bitmap liveness algorithm. */
708
709 FOR_ALL_BB_FN (bb, cfun)
710 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
711
712 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
713 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
714
715 changed = true;
716 while (changed)
717 {
718 int i;
719 changed = false;
720 for (i = 0; i < n_bbs; i++)
721 {
722 bitmap active;
723 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
724 active = (bitmap)bb->aux;
725 add_scope_conflicts_1 (bb, work, false);
726 if (bitmap_ior_into (active, work))
727 changed = true;
728 }
729 }
730
731 FOR_EACH_BB_FN (bb, cfun)
732 add_scope_conflicts_1 (bb, work, true);
733
734 free (rpo);
735 BITMAP_FREE (work);
736 FOR_ALL_BB_FN (bb, cfun)
737 BITMAP_FREE (bb->aux);
738 }
739
740 /* A subroutine of partition_stack_vars. A comparison function for qsort,
741 sorting an array of indices by the properties of the object. */
742
743 static int
744 stack_var_cmp (const void *a, const void *b)
745 {
746 unsigned ia = *(const unsigned *)a;
747 unsigned ib = *(const unsigned *)b;
748 unsigned int aligna = stack_vars[ia].alignb;
749 unsigned int alignb = stack_vars[ib].alignb;
750 poly_int64 sizea = stack_vars[ia].size;
751 poly_int64 sizeb = stack_vars[ib].size;
752 tree decla = stack_vars[ia].decl;
753 tree declb = stack_vars[ib].decl;
754 bool largea, largeb;
755 unsigned int uida, uidb;
756
757 /* Primary compare on "large" alignment. Large comes first. */
758 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
759 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
760 if (largea != largeb)
761 return (int)largeb - (int)largea;
762
763 /* Secondary compare on size, decreasing */
764 int diff = compare_sizes_for_sort (sizeb, sizea);
765 if (diff != 0)
766 return diff;
767
768 /* Tertiary compare on true alignment, decreasing. */
769 if (aligna < alignb)
770 return -1;
771 if (aligna > alignb)
772 return 1;
773
774 /* Final compare on ID for sort stability, increasing.
775 Two SSA names are compared by their version, SSA names come before
776 non-SSA names, and two normal decls are compared by their DECL_UID. */
777 if (TREE_CODE (decla) == SSA_NAME)
778 {
779 if (TREE_CODE (declb) == SSA_NAME)
780 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
781 else
782 return -1;
783 }
784 else if (TREE_CODE (declb) == SSA_NAME)
785 return 1;
786 else
787 uida = DECL_UID (decla), uidb = DECL_UID (declb);
788 if (uida < uidb)
789 return 1;
790 if (uida > uidb)
791 return -1;
792 return 0;
793 }
794
795 struct part_traits : unbounded_int_hashmap_traits <unsigned , bitmap> {};
796 typedef hash_map<unsigned, bitmap, part_traits> part_hashmap;
797
798 /* If the points-to solution *PI points to variables that are in a partition
799 together with other variables add all partition members to the pointed-to
800 variables bitmap. */
801
802 static void
803 add_partitioned_vars_to_ptset (struct pt_solution *pt,
804 part_hashmap *decls_to_partitions,
805 hash_set<bitmap> *visited, bitmap temp)
806 {
807 bitmap_iterator bi;
808 unsigned i;
809 bitmap *part;
810
811 if (pt->anything
812 || pt->vars == NULL
813 /* The pointed-to vars bitmap is shared, it is enough to
814 visit it once. */
815 || visited->add (pt->vars))
816 return;
817
818 bitmap_clear (temp);
819
820 /* By using a temporary bitmap to store all members of the partitions
821 we have to add we make sure to visit each of the partitions only
822 once. */
823 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
824 if ((!temp
825 || !bitmap_bit_p (temp, i))
826 && (part = decls_to_partitions->get (i)))
827 bitmap_ior_into (temp, *part);
828 if (!bitmap_empty_p (temp))
829 bitmap_ior_into (pt->vars, temp);
830 }
831
832 /* Update points-to sets based on partition info, so we can use them on RTL.
833 The bitmaps representing stack partitions will be saved until expand,
834 where partitioned decls used as bases in memory expressions will be
835 rewritten.
836
837 It is not necessary to update TBAA info on accesses to the coalesced
838 storage since our memory model doesn't allow TBAA to be used for
839 WAW or WAR dependences. For RAW when the write is to an old object
840 the new object would not have been initialized at the point of the
841 read, invoking undefined behavior. */
842
843 static void
844 update_alias_info_with_stack_vars (void)
845 {
846 part_hashmap *decls_to_partitions = NULL;
847 unsigned i, j;
848 tree var = NULL_TREE;
849
850 for (i = 0; i < stack_vars_num; i++)
851 {
852 bitmap part = NULL;
853 tree name;
854 struct ptr_info_def *pi;
855
856 /* Not interested in partitions with single variable. */
857 if (stack_vars[i].representative != i
858 || stack_vars[i].next == EOC)
859 continue;
860
861 if (!decls_to_partitions)
862 {
863 decls_to_partitions = new part_hashmap;
864 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
865 }
866
867 /* Create an SSA_NAME that points to the partition for use
868 as base during alias-oracle queries on RTL for bases that
869 have been partitioned. */
870 if (var == NULL_TREE)
871 var = create_tmp_var (ptr_type_node);
872 name = make_ssa_name (var);
873
874 /* Create bitmaps representing partitions. They will be used for
875 points-to sets later, so use GGC alloc. */
876 part = BITMAP_GGC_ALLOC ();
877 for (j = i; j != EOC; j = stack_vars[j].next)
878 {
879 tree decl = stack_vars[j].decl;
880 unsigned int uid = DECL_PT_UID (decl);
881 bitmap_set_bit (part, uid);
882 decls_to_partitions->put (uid, part);
883 cfun->gimple_df->decls_to_pointers->put (decl, name);
884 if (TREE_ADDRESSABLE (decl))
885 TREE_ADDRESSABLE (name) = 1;
886 }
887
888 /* Make the SSA name point to all partition members. */
889 pi = get_ptr_info (name);
890 pt_solution_set (&pi->pt, part, false);
891 }
892
893 /* Make all points-to sets that contain one member of a partition
894 contain all members of the partition. */
895 if (decls_to_partitions)
896 {
897 unsigned i;
898 tree name;
899 hash_set<bitmap> visited;
900 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
901
902 FOR_EACH_SSA_NAME (i, name, cfun)
903 {
904 struct ptr_info_def *pi;
905
906 if (POINTER_TYPE_P (TREE_TYPE (name))
907 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
908 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
909 &visited, temp);
910 }
911
912 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
913 decls_to_partitions, &visited, temp);
914 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped_return,
915 decls_to_partitions, &visited, temp);
916 delete decls_to_partitions;
917 BITMAP_FREE (temp);
918 }
919 }
920
921 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
922 partitioning algorithm. Partitions A and B are known to be non-conflicting.
923 Merge them into a single partition A. */
924
925 static void
926 union_stack_vars (unsigned a, unsigned b)
927 {
928 class stack_var *vb = &stack_vars[b];
929 bitmap_iterator bi;
930 unsigned u;
931
932 gcc_assert (stack_vars[b].next == EOC);
933 /* Add B to A's partition. */
934 stack_vars[b].next = stack_vars[a].next;
935 stack_vars[b].representative = a;
936 stack_vars[a].next = b;
937
938 /* Make sure A is big enough to hold B. */
939 stack_vars[a].size = upper_bound (stack_vars[a].size, stack_vars[b].size);
940
941 /* Update the required alignment of partition A to account for B. */
942 if (stack_vars[a].alignb < stack_vars[b].alignb)
943 stack_vars[a].alignb = stack_vars[b].alignb;
944
945 /* Update the interference graph and merge the conflicts. */
946 if (vb->conflicts)
947 {
948 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
949 add_stack_var_conflict (a, stack_vars[u].representative);
950 BITMAP_FREE (vb->conflicts);
951 }
952 }
953
954 /* A subroutine of expand_used_vars. Binpack the variables into
955 partitions constrained by the interference graph. The overall
956 algorithm used is as follows:
957
958 Sort the objects by size in descending order.
959 For each object A {
960 S = size(A)
961 O = 0
962 loop {
963 Look for the largest non-conflicting object B with size <= S.
964 UNION (A, B)
965 }
966 }
967 */
968
969 static void
970 partition_stack_vars (void)
971 {
972 unsigned si, sj, n = stack_vars_num;
973
974 stack_vars_sorted = XNEWVEC (unsigned, stack_vars_num);
975 for (si = 0; si < n; ++si)
976 stack_vars_sorted[si] = si;
977
978 if (n == 1)
979 return;
980
981 qsort (stack_vars_sorted, n, sizeof (unsigned), stack_var_cmp);
982
983 for (si = 0; si < n; ++si)
984 {
985 unsigned i = stack_vars_sorted[si];
986 unsigned int ialign = stack_vars[i].alignb;
987 poly_int64 isize = stack_vars[i].size;
988
989 /* Ignore objects that aren't partition representatives. If we
990 see a var that is not a partition representative, it must
991 have been merged earlier. */
992 if (stack_vars[i].representative != i)
993 continue;
994
995 for (sj = si + 1; sj < n; ++sj)
996 {
997 unsigned j = stack_vars_sorted[sj];
998 unsigned int jalign = stack_vars[j].alignb;
999 poly_int64 jsize = stack_vars[j].size;
1000
1001 /* Ignore objects that aren't partition representatives. */
1002 if (stack_vars[j].representative != j)
1003 continue;
1004
1005 /* Do not mix objects of "small" (supported) alignment
1006 and "large" (unsupported) alignment. */
1007 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1008 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
1009 break;
1010
1011 /* For Address Sanitizer do not mix objects with different
1012 sizes, as the shorter vars wouldn't be adequately protected.
1013 Don't do that for "large" (unsupported) alignment objects,
1014 those aren't protected anyway. */
1015 if (asan_sanitize_stack_p ()
1016 && maybe_ne (isize, jsize)
1017 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1018 break;
1019
1020 /* Ignore conflicting objects. */
1021 if (stack_var_conflict_p (i, j))
1022 continue;
1023
1024 /* UNION the objects, placing J at OFFSET. */
1025 union_stack_vars (i, j);
1026 }
1027 }
1028
1029 update_alias_info_with_stack_vars ();
1030 }
1031
1032 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
1033
1034 static void
1035 dump_stack_var_partition (void)
1036 {
1037 unsigned si, i, j, n = stack_vars_num;
1038
1039 for (si = 0; si < n; ++si)
1040 {
1041 i = stack_vars_sorted[si];
1042
1043 /* Skip variables that aren't partition representatives, for now. */
1044 if (stack_vars[i].representative != i)
1045 continue;
1046
1047 fprintf (dump_file, "Partition %u: size ", i);
1048 print_dec (stack_vars[i].size, dump_file);
1049 fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
1050
1051 for (j = i; j != EOC; j = stack_vars[j].next)
1052 {
1053 fputc ('\t', dump_file);
1054 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
1055 }
1056 fputc ('\n', dump_file);
1057 }
1058 }
1059
1060 /* Assign rtl to DECL at BASE + OFFSET. */
1061
1062 static void
1063 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
1064 poly_int64 offset)
1065 {
1066 unsigned align;
1067 rtx x;
1068
1069 /* If this fails, we've overflowed the stack frame. Error nicely? */
1070 gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
1071
1072 if (hwasan_sanitize_stack_p ())
1073 x = targetm.memtag.add_tag (base, offset,
1074 hwasan_current_frame_tag ());
1075 else
1076 x = plus_constant (Pmode, base, offset);
1077
1078 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
1079 ? TYPE_MODE (TREE_TYPE (decl))
1080 : DECL_MODE (decl), x);
1081
1082 /* Set alignment we actually gave this decl if it isn't an SSA name.
1083 If it is we generate stack slots only accidentally so it isn't as
1084 important, we'll simply set the alignment directly on the MEM. */
1085
1086 if (stack_vars_base_reg_p (base))
1087 offset -= frame_phase;
1088 align = known_alignment (offset);
1089 align *= BITS_PER_UNIT;
1090 if (align == 0 || align > base_align)
1091 align = base_align;
1092
1093 if (TREE_CODE (decl) != SSA_NAME)
1094 {
1095 /* One would think that we could assert that we're not decreasing
1096 alignment here, but (at least) the i386 port does exactly this
1097 via the MINIMUM_ALIGNMENT hook. */
1098
1099 SET_DECL_ALIGN (decl, align);
1100 DECL_USER_ALIGN (decl) = 0;
1101 }
1102
1103 set_rtl (decl, x);
1104
1105 set_mem_align (x, align);
1106 }
1107
1108 class stack_vars_data
1109 {
1110 public:
1111 /* Vector of offset pairs, always end of some padding followed
1112 by start of the padding that needs Address Sanitizer protection.
1113 The vector is in reversed, highest offset pairs come first. */
1114 auto_vec<HOST_WIDE_INT> asan_vec;
1115
1116 /* Vector of partition representative decls in between the paddings. */
1117 auto_vec<tree> asan_decl_vec;
1118
1119 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1120 rtx asan_base;
1121
1122 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1123 unsigned int asan_alignb;
1124 };
1125
1126 /* A subroutine of expand_used_vars. Give each partition representative
1127 a unique location within the stack frame. Update each partition member
1128 with that location. */
1129 static void
1130 expand_stack_vars (bool (*pred) (unsigned), class stack_vars_data *data)
1131 {
1132 unsigned si, i, j, n = stack_vars_num;
1133 poly_uint64 large_size = 0, large_alloc = 0;
1134 rtx large_base = NULL;
1135 rtx large_untagged_base = NULL;
1136 unsigned large_align = 0;
1137 bool large_allocation_done = false;
1138 tree decl;
1139
1140 /* Determine if there are any variables requiring "large" alignment.
1141 Since these are dynamically allocated, we only process these if
1142 no predicate involved. */
1143 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1144 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1145 {
1146 /* Find the total size of these variables. */
1147 for (si = 0; si < n; ++si)
1148 {
1149 unsigned alignb;
1150
1151 i = stack_vars_sorted[si];
1152 alignb = stack_vars[i].alignb;
1153
1154 /* All "large" alignment decls come before all "small" alignment
1155 decls, but "large" alignment decls are not sorted based on
1156 their alignment. Increase large_align to track the largest
1157 required alignment. */
1158 if ((alignb * BITS_PER_UNIT) > large_align)
1159 large_align = alignb * BITS_PER_UNIT;
1160
1161 /* Stop when we get to the first decl with "small" alignment. */
1162 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1163 break;
1164
1165 /* Skip variables that aren't partition representatives. */
1166 if (stack_vars[i].representative != i)
1167 continue;
1168
1169 /* Skip variables that have already had rtl assigned. See also
1170 add_stack_var where we perpetrate this pc_rtx hack. */
1171 decl = stack_vars[i].decl;
1172 if (TREE_CODE (decl) == SSA_NAME
1173 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1174 : DECL_RTL (decl) != pc_rtx)
1175 continue;
1176
1177 large_size = aligned_upper_bound (large_size, alignb);
1178 large_size += stack_vars[i].size;
1179 }
1180 }
1181
1182 for (si = 0; si < n; ++si)
1183 {
1184 rtx base;
1185 unsigned base_align, alignb;
1186 poly_int64 offset = 0;
1187
1188 i = stack_vars_sorted[si];
1189
1190 /* Skip variables that aren't partition representatives, for now. */
1191 if (stack_vars[i].representative != i)
1192 continue;
1193
1194 /* Skip variables that have already had rtl assigned. See also
1195 add_stack_var where we perpetrate this pc_rtx hack. */
1196 decl = stack_vars[i].decl;
1197 if (TREE_CODE (decl) == SSA_NAME
1198 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1199 : DECL_RTL (decl) != pc_rtx)
1200 continue;
1201
1202 /* Check the predicate to see whether this variable should be
1203 allocated in this pass. */
1204 if (pred && !pred (i))
1205 continue;
1206
1207 base = (hwasan_sanitize_stack_p ()
1208 ? hwasan_frame_base ()
1209 : virtual_stack_vars_rtx);
1210 alignb = stack_vars[i].alignb;
1211 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1212 {
1213 poly_int64 hwasan_orig_offset;
1214 if (hwasan_sanitize_stack_p ())
1215 {
1216 /* There must be no tag granule "shared" between different
1217 objects. This means that no HWASAN_TAG_GRANULE_SIZE byte
1218 chunk can have more than one object in it.
1219
1220 We ensure this by forcing the end of the last bit of data to
1221 be aligned to HWASAN_TAG_GRANULE_SIZE bytes here, and setting
1222 the start of each variable to be aligned to
1223 HWASAN_TAG_GRANULE_SIZE bytes in `align_local_variable`.
1224
1225 We can't align just one of the start or end, since there are
1226 untagged things stored on the stack which we do not align to
1227 HWASAN_TAG_GRANULE_SIZE bytes. If we only aligned the start
1228 or the end of tagged objects then untagged objects could end
1229 up sharing the first granule of a tagged object or sharing the
1230 last granule of a tagged object respectively. */
1231 hwasan_orig_offset = align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
1232 gcc_assert (stack_vars[i].alignb >= HWASAN_TAG_GRANULE_SIZE);
1233 }
1234 /* ASAN description strings don't yet have a syntax for expressing
1235 polynomial offsets. */
1236 HOST_WIDE_INT prev_offset;
1237 if (asan_sanitize_stack_p ()
1238 && pred
1239 && frame_offset.is_constant (&prev_offset)
1240 && stack_vars[i].size.is_constant ())
1241 {
1242 if (data->asan_vec.is_empty ())
1243 {
1244 align_frame_offset (ASAN_RED_ZONE_SIZE);
1245 prev_offset = frame_offset.to_constant ();
1246 }
1247 prev_offset = align_base (prev_offset,
1248 ASAN_MIN_RED_ZONE_SIZE,
1249 !FRAME_GROWS_DOWNWARD);
1250 tree repr_decl = NULL_TREE;
1251 unsigned HOST_WIDE_INT size
1252 = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
1253 if (data->asan_vec.is_empty ())
1254 size = MAX (size, ASAN_RED_ZONE_SIZE);
1255
1256 unsigned HOST_WIDE_INT alignment = MAX (alignb,
1257 ASAN_MIN_RED_ZONE_SIZE);
1258 offset = alloc_stack_frame_space (size, alignment);
1259
1260 data->asan_vec.safe_push (prev_offset);
1261 /* Allocating a constant amount of space from a constant
1262 starting offset must give a constant result. */
1263 data->asan_vec.safe_push ((offset + stack_vars[i].size)
1264 .to_constant ());
1265 /* Find best representative of the partition.
1266 Prefer those with DECL_NAME, even better
1267 satisfying asan_protect_stack_decl predicate. */
1268 for (j = i; j != EOC; j = stack_vars[j].next)
1269 if (asan_protect_stack_decl (stack_vars[j].decl)
1270 && DECL_NAME (stack_vars[j].decl))
1271 {
1272 repr_decl = stack_vars[j].decl;
1273 break;
1274 }
1275 else if (repr_decl == NULL_TREE
1276 && DECL_P (stack_vars[j].decl)
1277 && DECL_NAME (stack_vars[j].decl))
1278 repr_decl = stack_vars[j].decl;
1279 if (repr_decl == NULL_TREE)
1280 repr_decl = stack_vars[i].decl;
1281 data->asan_decl_vec.safe_push (repr_decl);
1282
1283 /* Make sure a representative is unpoison if another
1284 variable in the partition is handled by
1285 use-after-scope sanitization. */
1286 if (asan_handled_variables != NULL
1287 && !asan_handled_variables->contains (repr_decl))
1288 {
1289 for (j = i; j != EOC; j = stack_vars[j].next)
1290 if (asan_handled_variables->contains (stack_vars[j].decl))
1291 break;
1292 if (j != EOC)
1293 asan_handled_variables->add (repr_decl);
1294 }
1295
1296 data->asan_alignb = MAX (data->asan_alignb, alignb);
1297 if (data->asan_base == NULL)
1298 data->asan_base = gen_reg_rtx (Pmode);
1299 base = data->asan_base;
1300
1301 if (!STRICT_ALIGNMENT)
1302 base_align = crtl->max_used_stack_slot_alignment;
1303 else
1304 base_align = MAX (crtl->max_used_stack_slot_alignment,
1305 GET_MODE_ALIGNMENT (SImode)
1306 << ASAN_SHADOW_SHIFT);
1307 }
1308 else
1309 {
1310 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1311 base_align = crtl->max_used_stack_slot_alignment;
1312
1313 if (hwasan_sanitize_stack_p ())
1314 {
1315 /* Align again since the point of this alignment is to handle
1316 the "end" of the object (i.e. smallest address after the
1317 stack object). For FRAME_GROWS_DOWNWARD that requires
1318 aligning the stack before allocating, but for a frame that
1319 grows upwards that requires aligning the stack after
1320 allocation.
1321
1322 Use `frame_offset` to record the offset value rather than
1323 `offset` since the `frame_offset` describes the extent
1324 allocated for this particular variable while `offset`
1325 describes the address that this variable starts at. */
1326 align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
1327 hwasan_record_stack_var (virtual_stack_vars_rtx, base,
1328 hwasan_orig_offset, frame_offset);
1329 }
1330 }
1331 }
1332 else
1333 {
1334 /* Large alignment is only processed in the last pass. */
1335 if (pred)
1336 continue;
1337
1338 /* If there were any variables requiring "large" alignment, allocate
1339 space. */
1340 if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1341 {
1342 poly_int64 loffset;
1343 rtx large_allocsize;
1344
1345 large_allocsize = gen_int_mode (large_size, Pmode);
1346 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1347 loffset = alloc_stack_frame_space
1348 (rtx_to_poly_int64 (large_allocsize),
1349 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1350 large_base = get_dynamic_stack_base (loffset, large_align, base);
1351 large_allocation_done = true;
1352 }
1353
1354 gcc_assert (large_base != NULL);
1355 large_alloc = aligned_upper_bound (large_alloc, alignb);
1356 offset = large_alloc;
1357 large_alloc += stack_vars[i].size;
1358 if (hwasan_sanitize_stack_p ())
1359 {
1360 /* An object with a large alignment requirement means that the
1361 alignment requirement is greater than the required alignment
1362 for tags. */
1363 if (!large_untagged_base)
1364 large_untagged_base
1365 = targetm.memtag.untagged_pointer (large_base, NULL_RTX);
1366 /* Ensure the end of the variable is also aligned correctly. */
1367 poly_int64 align_again
1368 = aligned_upper_bound (large_alloc, HWASAN_TAG_GRANULE_SIZE);
1369 /* For large allocations we always allocate a chunk of space
1370 (which is addressed by large_untagged_base/large_base) and
1371 then use positive offsets from that. Hence the farthest
1372 offset is `align_again` and the nearest offset from the base
1373 is `offset`. */
1374 hwasan_record_stack_var (large_untagged_base, large_base,
1375 offset, align_again);
1376 }
1377
1378 base = large_base;
1379 base_align = large_align;
1380 }
1381
1382 /* Create rtl for each variable based on their location within the
1383 partition. */
1384 for (j = i; j != EOC; j = stack_vars[j].next)
1385 {
1386 expand_one_stack_var_at (stack_vars[j].decl,
1387 base, base_align, offset);
1388 }
1389 if (hwasan_sanitize_stack_p ())
1390 hwasan_increment_frame_tag ();
1391 }
1392
1393 gcc_assert (known_eq (large_alloc, large_size));
1394 }
1395
1396 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1397 static poly_uint64
1398 account_stack_vars (void)
1399 {
1400 unsigned si, j, i, n = stack_vars_num;
1401 poly_uint64 size = 0;
1402
1403 for (si = 0; si < n; ++si)
1404 {
1405 i = stack_vars_sorted[si];
1406
1407 /* Skip variables that aren't partition representatives, for now. */
1408 if (stack_vars[i].representative != i)
1409 continue;
1410
1411 size += stack_vars[i].size;
1412 for (j = i; j != EOC; j = stack_vars[j].next)
1413 set_rtl (stack_vars[j].decl, NULL);
1414 }
1415 return size;
1416 }
1417
1418 /* Record the RTL assignment X for the default def of PARM. */
1419
1420 extern void
1421 set_parm_rtl (tree parm, rtx x)
1422 {
1423 gcc_assert (TREE_CODE (parm) == PARM_DECL
1424 || TREE_CODE (parm) == RESULT_DECL);
1425
1426 if (x && !MEM_P (x))
1427 {
1428 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1429 TYPE_MODE (TREE_TYPE (parm)),
1430 TYPE_ALIGN (TREE_TYPE (parm)));
1431
1432 /* If the variable alignment is very large we'll dynamicaly
1433 allocate it, which means that in-frame portion is just a
1434 pointer. ??? We've got a pseudo for sure here, do we
1435 actually dynamically allocate its spilling area if needed?
1436 ??? Isn't it a problem when Pmode alignment also exceeds
1437 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32? */
1438 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1439 align = GET_MODE_ALIGNMENT (Pmode);
1440
1441 record_alignment_for_reg_var (align);
1442 }
1443
1444 tree ssa = ssa_default_def (cfun, parm);
1445 if (!ssa)
1446 return set_rtl (parm, x);
1447
1448 int part = var_to_partition (SA.map, ssa);
1449 gcc_assert (part != NO_PARTITION);
1450
1451 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1452 gcc_assert (changed);
1453
1454 set_rtl (ssa, x);
1455 gcc_assert (DECL_RTL (parm) == x);
1456 }
1457
1458 /* A subroutine of expand_one_var. Called to immediately assign rtl
1459 to a variable to be allocated in the stack frame. */
1460
1461 static void
1462 expand_one_stack_var_1 (tree var)
1463 {
1464 poly_uint64 size;
1465 poly_int64 offset;
1466 unsigned byte_align;
1467
1468 if (TREE_CODE (var) == SSA_NAME)
1469 {
1470 tree type = TREE_TYPE (var);
1471 size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1472 }
1473 else
1474 size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1475
1476 byte_align = align_local_variable (var, true);
1477
1478 /* We handle highly aligned variables in expand_stack_vars. */
1479 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1480
1481 rtx base;
1482 if (hwasan_sanitize_stack_p ())
1483 {
1484 /* Allocate zero bytes to align the stack. */
1485 poly_int64 hwasan_orig_offset
1486 = align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
1487 offset = alloc_stack_frame_space (size, byte_align);
1488 align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
1489 base = hwasan_frame_base ();
1490 /* Use `frame_offset` to automatically account for machines where the
1491 frame grows upwards.
1492
1493 `offset` will always point to the "start" of the stack object, which
1494 will be the smallest address, for ! FRAME_GROWS_DOWNWARD this is *not*
1495 the "furthest" offset from the base delimiting the current stack
1496 object. `frame_offset` will always delimit the extent that the frame.
1497 */
1498 hwasan_record_stack_var (virtual_stack_vars_rtx, base,
1499 hwasan_orig_offset, frame_offset);
1500 }
1501 else
1502 {
1503 offset = alloc_stack_frame_space (size, byte_align);
1504 base = virtual_stack_vars_rtx;
1505 }
1506
1507 expand_one_stack_var_at (var, base,
1508 crtl->max_used_stack_slot_alignment, offset);
1509
1510 if (hwasan_sanitize_stack_p ())
1511 hwasan_increment_frame_tag ();
1512 }
1513
1514 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1515 already assigned some MEM. */
1516
1517 static void
1518 expand_one_stack_var (tree var)
1519 {
1520 if (TREE_CODE (var) == SSA_NAME)
1521 {
1522 int part = var_to_partition (SA.map, var);
1523 if (part != NO_PARTITION)
1524 {
1525 rtx x = SA.partition_to_pseudo[part];
1526 gcc_assert (x);
1527 gcc_assert (MEM_P (x));
1528 return;
1529 }
1530 }
1531
1532 return expand_one_stack_var_1 (var);
1533 }
1534
1535 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1536 that will reside in a hard register. */
1537
1538 static void
1539 expand_one_hard_reg_var (tree var)
1540 {
1541 rest_of_decl_compilation (var, 0, 0);
1542 }
1543
1544 /* Record the alignment requirements of some variable assigned to a
1545 pseudo. */
1546
1547 static void
1548 record_alignment_for_reg_var (unsigned int align)
1549 {
1550 if (SUPPORTS_STACK_ALIGNMENT
1551 && crtl->stack_alignment_estimated < align)
1552 {
1553 /* stack_alignment_estimated shouldn't change after stack
1554 realign decision made */
1555 gcc_assert (!crtl->stack_realign_processed);
1556 crtl->stack_alignment_estimated = align;
1557 }
1558
1559 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1560 So here we only make sure stack_alignment_needed >= align. */
1561 if (crtl->stack_alignment_needed < align)
1562 crtl->stack_alignment_needed = align;
1563 if (crtl->max_used_stack_slot_alignment < align)
1564 crtl->max_used_stack_slot_alignment = align;
1565 }
1566
1567 /* Create RTL for an SSA partition. */
1568
1569 static void
1570 expand_one_ssa_partition (tree var)
1571 {
1572 int part = var_to_partition (SA.map, var);
1573 gcc_assert (part != NO_PARTITION);
1574
1575 if (SA.partition_to_pseudo[part])
1576 return;
1577
1578 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1579 TYPE_MODE (TREE_TYPE (var)),
1580 TYPE_ALIGN (TREE_TYPE (var)));
1581
1582 /* If the variable alignment is very large we'll dynamicaly allocate
1583 it, which means that in-frame portion is just a pointer. */
1584 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1585 align = GET_MODE_ALIGNMENT (Pmode);
1586
1587 record_alignment_for_reg_var (align);
1588
1589 if (!use_register_for_decl (var))
1590 {
1591 if (defer_stack_allocation (var, true))
1592 add_stack_var (var, true);
1593 else
1594 expand_one_stack_var_1 (var);
1595 return;
1596 }
1597
1598 machine_mode reg_mode = promote_ssa_mode (var, NULL);
1599 rtx x = gen_reg_rtx (reg_mode);
1600
1601 set_rtl (var, x);
1602
1603 /* For a promoted variable, X will not be used directly but wrapped in a
1604 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1605 will assume that its upper bits can be inferred from its lower bits.
1606 Therefore, if X isn't initialized on every path from the entry, then
1607 we must do it manually in order to fulfill the above assumption. */
1608 if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1609 && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1610 emit_move_insn (x, CONST0_RTX (reg_mode));
1611 }
1612
1613 /* Record the association between the RTL generated for partition PART
1614 and the underlying variable of the SSA_NAME VAR. */
1615
1616 static void
1617 adjust_one_expanded_partition_var (tree var)
1618 {
1619 if (!var)
1620 return;
1621
1622 tree decl = SSA_NAME_VAR (var);
1623
1624 int part = var_to_partition (SA.map, var);
1625 if (part == NO_PARTITION)
1626 return;
1627
1628 rtx x = SA.partition_to_pseudo[part];
1629
1630 gcc_assert (x);
1631
1632 set_rtl (var, x);
1633
1634 if (!REG_P (x))
1635 return;
1636
1637 /* Note if the object is a user variable. */
1638 if (decl && !DECL_ARTIFICIAL (decl))
1639 mark_user_reg (x);
1640
1641 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1642 mark_reg_pointer (x, get_pointer_alignment (var));
1643 }
1644
1645 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1646 that will reside in a pseudo register. */
1647
1648 static void
1649 expand_one_register_var (tree var)
1650 {
1651 if (TREE_CODE (var) == SSA_NAME)
1652 {
1653 int part = var_to_partition (SA.map, var);
1654 if (part != NO_PARTITION)
1655 {
1656 rtx x = SA.partition_to_pseudo[part];
1657 gcc_assert (x);
1658 gcc_assert (REG_P (x));
1659 return;
1660 }
1661 gcc_unreachable ();
1662 }
1663
1664 tree decl = var;
1665 tree type = TREE_TYPE (decl);
1666 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1667 rtx x = gen_reg_rtx (reg_mode);
1668
1669 set_rtl (var, x);
1670
1671 /* Note if the object is a user variable. */
1672 if (!DECL_ARTIFICIAL (decl))
1673 mark_user_reg (x);
1674
1675 if (POINTER_TYPE_P (type))
1676 mark_reg_pointer (x, get_pointer_alignment (var));
1677 }
1678
1679 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1680 has some associated error, e.g. its type is error-mark. We just need
1681 to pick something that won't crash the rest of the compiler. */
1682
1683 static void
1684 expand_one_error_var (tree var)
1685 {
1686 machine_mode mode = DECL_MODE (var);
1687 rtx x;
1688
1689 if (mode == BLKmode)
1690 x = gen_rtx_MEM (BLKmode, const0_rtx);
1691 else if (mode == VOIDmode)
1692 x = const0_rtx;
1693 else
1694 x = gen_reg_rtx (mode);
1695
1696 SET_DECL_RTL (var, x);
1697 }
1698
1699 /* A subroutine of expand_one_var. VAR is a variable that will be
1700 allocated to the local stack frame. Return true if we wish to
1701 add VAR to STACK_VARS so that it will be coalesced with other
1702 variables. Return false to allocate VAR immediately.
1703
1704 This function is used to reduce the number of variables considered
1705 for coalescing, which reduces the size of the quadratic problem. */
1706
1707 static bool
1708 defer_stack_allocation (tree var, bool toplevel)
1709 {
1710 tree size_unit = TREE_CODE (var) == SSA_NAME
1711 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1712 : DECL_SIZE_UNIT (var);
1713 poly_uint64 size;
1714
1715 /* Whether the variable is small enough for immediate allocation not to be
1716 a problem with regard to the frame size. */
1717 bool smallish
1718 = (poly_int_tree_p (size_unit, &size)
1719 && (estimated_poly_value (size)
1720 < param_min_size_for_stack_sharing));
1721
1722 /* If stack protection is enabled, *all* stack variables must be deferred,
1723 so that we can re-order the strings to the top of the frame.
1724 Similarly for Address Sanitizer. */
1725 if (flag_stack_protect || asan_sanitize_stack_p ())
1726 return true;
1727
1728 unsigned int align = TREE_CODE (var) == SSA_NAME
1729 ? TYPE_ALIGN (TREE_TYPE (var))
1730 : DECL_ALIGN (var);
1731
1732 /* We handle "large" alignment via dynamic allocation. We want to handle
1733 this extra complication in only one place, so defer them. */
1734 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1735 return true;
1736
1737 bool ignored = TREE_CODE (var) == SSA_NAME
1738 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1739 : DECL_IGNORED_P (var);
1740
1741 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1742 might be detached from their block and appear at toplevel when we reach
1743 here. We want to coalesce them with variables from other blocks when
1744 the immediate contribution to the frame size would be noticeable. */
1745 if (toplevel && optimize > 0 && ignored && !smallish)
1746 return true;
1747
1748 /* Variables declared in the outermost scope automatically conflict
1749 with every other variable. The only reason to want to defer them
1750 at all is that, after sorting, we can more efficiently pack
1751 small variables in the stack frame. Continue to defer at -O2. */
1752 if (toplevel && optimize < 2)
1753 return false;
1754
1755 /* Without optimization, *most* variables are allocated from the
1756 stack, which makes the quadratic problem large exactly when we
1757 want compilation to proceed as quickly as possible. On the
1758 other hand, we don't want the function's stack frame size to
1759 get completely out of hand. So we avoid adding scalars and
1760 "small" aggregates to the list at all. */
1761 if (optimize == 0 && smallish)
1762 return false;
1763
1764 return true;
1765 }
1766
1767 /* A subroutine of expand_used_vars. Expand one variable according to
1768 its flavor. Variables to be placed on the stack are not actually
1769 expanded yet, merely recorded.
1770 When REALLY_EXPAND is false, only add stack values to be allocated.
1771 Return stack usage this variable is supposed to take.
1772 */
1773
1774 static poly_uint64
1775 expand_one_var (tree var, bool toplevel, bool really_expand,
1776 bitmap forced_stack_var = NULL)
1777 {
1778 unsigned int align = BITS_PER_UNIT;
1779 tree origvar = var;
1780
1781 var = SSAVAR (var);
1782
1783 if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1784 {
1785 if (is_global_var (var))
1786 return 0;
1787
1788 /* Because we don't know if VAR will be in register or on stack,
1789 we conservatively assume it will be on stack even if VAR is
1790 eventually put into register after RA pass. For non-automatic
1791 variables, which won't be on stack, we collect alignment of
1792 type and ignore user specified alignment. Similarly for
1793 SSA_NAMEs for which use_register_for_decl returns true. */
1794 if (TREE_STATIC (var)
1795 || DECL_EXTERNAL (var)
1796 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1797 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1798 TYPE_MODE (TREE_TYPE (var)),
1799 TYPE_ALIGN (TREE_TYPE (var)));
1800 else if (DECL_HAS_VALUE_EXPR_P (var)
1801 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1802 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1803 or variables which were assigned a stack slot already by
1804 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1805 changed from the offset chosen to it. */
1806 align = crtl->stack_alignment_estimated;
1807 else
1808 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1809
1810 /* If the variable alignment is very large we'll dynamicaly allocate
1811 it, which means that in-frame portion is just a pointer. */
1812 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1813 align = GET_MODE_ALIGNMENT (Pmode);
1814 }
1815
1816 record_alignment_for_reg_var (align);
1817
1818 poly_uint64 size;
1819 if (TREE_CODE (origvar) == SSA_NAME)
1820 {
1821 gcc_assert (!VAR_P (var)
1822 || (!DECL_EXTERNAL (var)
1823 && !DECL_HAS_VALUE_EXPR_P (var)
1824 && !TREE_STATIC (var)
1825 && TREE_TYPE (var) != error_mark_node
1826 && !DECL_HARD_REGISTER (var)
1827 && really_expand));
1828 }
1829 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1830 ;
1831 else if (DECL_EXTERNAL (var))
1832 ;
1833 else if (DECL_HAS_VALUE_EXPR_P (var))
1834 ;
1835 else if (TREE_STATIC (var))
1836 ;
1837 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1838 ;
1839 else if (TREE_TYPE (var) == error_mark_node)
1840 {
1841 if (really_expand)
1842 expand_one_error_var (var);
1843 }
1844 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1845 {
1846 if (really_expand)
1847 {
1848 expand_one_hard_reg_var (var);
1849 if (!DECL_HARD_REGISTER (var))
1850 /* Invalid register specification. */
1851 expand_one_error_var (var);
1852 }
1853 }
1854 else if (use_register_for_decl (var)
1855 && (!forced_stack_var
1856 || !bitmap_bit_p (forced_stack_var, DECL_UID (var))))
1857 {
1858 if (really_expand)
1859 expand_one_register_var (origvar);
1860 }
1861 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1862 || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1863 {
1864 /* Reject variables which cover more than half of the address-space. */
1865 if (really_expand)
1866 {
1867 if (DECL_NONLOCAL_FRAME (var))
1868 error_at (DECL_SOURCE_LOCATION (current_function_decl),
1869 "total size of local objects is too large");
1870 else
1871 error_at (DECL_SOURCE_LOCATION (var),
1872 "size of variable %q+D is too large", var);
1873 expand_one_error_var (var);
1874 }
1875 }
1876 else if (defer_stack_allocation (var, toplevel))
1877 add_stack_var (origvar, really_expand);
1878 else
1879 {
1880 if (really_expand)
1881 {
1882 if (lookup_attribute ("naked",
1883 DECL_ATTRIBUTES (current_function_decl)))
1884 error ("cannot allocate stack for variable %q+D, naked function",
1885 var);
1886
1887 expand_one_stack_var (origvar);
1888 }
1889 return size;
1890 }
1891 return 0;
1892 }
1893
1894 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1895 expanding variables. Those variables that can be put into registers
1896 are allocated pseudos; those that can't are put on the stack.
1897
1898 TOPLEVEL is true if this is the outermost BLOCK. */
1899
1900 static void
1901 expand_used_vars_for_block (tree block, bool toplevel, bitmap forced_stack_vars)
1902 {
1903 tree t;
1904
1905 /* Expand all variables at this level. */
1906 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1907 if (TREE_USED (t)
1908 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1909 || !DECL_NONSHAREABLE (t)))
1910 expand_one_var (t, toplevel, true, forced_stack_vars);
1911
1912 /* Expand all variables at containing levels. */
1913 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1914 expand_used_vars_for_block (t, false, forced_stack_vars);
1915 }
1916
1917 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1918 and clear TREE_USED on all local variables. */
1919
1920 static void
1921 clear_tree_used (tree block)
1922 {
1923 tree t;
1924
1925 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1926 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1927 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1928 || !DECL_NONSHAREABLE (t))
1929 TREE_USED (t) = 0;
1930
1931 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1932 clear_tree_used (t);
1933 }
1934
1935 /* Examine TYPE and determine a bit mask of the following features. */
1936
1937 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1938 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1939 #define SPCT_HAS_ARRAY 4
1940 #define SPCT_HAS_AGGREGATE 8
1941
1942 static unsigned int
1943 stack_protect_classify_type (tree type)
1944 {
1945 unsigned int ret = 0;
1946 tree t;
1947
1948 switch (TREE_CODE (type))
1949 {
1950 case ARRAY_TYPE:
1951 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1952 if (t == char_type_node
1953 || t == signed_char_type_node
1954 || t == unsigned_char_type_node)
1955 {
1956 unsigned HOST_WIDE_INT max = param_ssp_buffer_size;
1957 unsigned HOST_WIDE_INT len;
1958
1959 if (!TYPE_SIZE_UNIT (type)
1960 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1961 len = max;
1962 else
1963 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1964
1965 if (len < max)
1966 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1967 else
1968 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1969 }
1970 else
1971 ret = SPCT_HAS_ARRAY;
1972 break;
1973
1974 case UNION_TYPE:
1975 case QUAL_UNION_TYPE:
1976 case RECORD_TYPE:
1977 ret = SPCT_HAS_AGGREGATE;
1978 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1979 if (TREE_CODE (t) == FIELD_DECL)
1980 ret |= stack_protect_classify_type (TREE_TYPE (t));
1981 break;
1982
1983 default:
1984 break;
1985 }
1986
1987 return ret;
1988 }
1989
1990 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1991 part of the local stack frame. Remember if we ever return nonzero for
1992 any variable in this function. The return value is the phase number in
1993 which the variable should be allocated. */
1994
1995 static int
1996 stack_protect_decl_phase (tree decl)
1997 {
1998 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1999 int ret = 0;
2000
2001 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
2002 has_short_buffer = true;
2003
2004 tree attribs = DECL_ATTRIBUTES (current_function_decl);
2005 if (!lookup_attribute ("no_stack_protector", attribs)
2006 && (flag_stack_protect == SPCT_FLAG_ALL
2007 || flag_stack_protect == SPCT_FLAG_STRONG
2008 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2009 && lookup_attribute ("stack_protect", attribs))))
2010 {
2011 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
2012 && !(bits & SPCT_HAS_AGGREGATE))
2013 ret = 1;
2014 else if (bits & SPCT_HAS_ARRAY)
2015 ret = 2;
2016 }
2017 else
2018 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
2019
2020 if (ret)
2021 has_protected_decls = true;
2022
2023 return ret;
2024 }
2025
2026 /* Two helper routines that check for phase 1 and phase 2. These are used
2027 as callbacks for expand_stack_vars. */
2028
2029 static bool
2030 stack_protect_decl_phase_1 (unsigned i)
2031 {
2032 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
2033 }
2034
2035 static bool
2036 stack_protect_decl_phase_2 (unsigned i)
2037 {
2038 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
2039 }
2040
2041 /* And helper function that checks for asan phase (with stack protector
2042 it is phase 3). This is used as callback for expand_stack_vars.
2043 Returns true if any of the vars in the partition need to be protected. */
2044
2045 static bool
2046 asan_decl_phase_3 (unsigned i)
2047 {
2048 while (i != EOC)
2049 {
2050 if (asan_protect_stack_decl (stack_vars[i].decl))
2051 return true;
2052 i = stack_vars[i].next;
2053 }
2054 return false;
2055 }
2056
2057 /* Ensure that variables in different stack protection phases conflict
2058 so that they are not merged and share the same stack slot.
2059 Return true if there are any address taken variables. */
2060
2061 static bool
2062 add_stack_protection_conflicts (void)
2063 {
2064 unsigned i, j, n = stack_vars_num;
2065 unsigned char *phase;
2066 bool ret = false;
2067
2068 phase = XNEWVEC (unsigned char, n);
2069 for (i = 0; i < n; ++i)
2070 {
2071 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
2072 if (TREE_ADDRESSABLE (stack_vars[i].decl))
2073 ret = true;
2074 }
2075
2076 for (i = 0; i < n; ++i)
2077 {
2078 unsigned char ph_i = phase[i];
2079 for (j = i + 1; j < n; ++j)
2080 if (ph_i != phase[j])
2081 add_stack_var_conflict (i, j);
2082 }
2083
2084 XDELETEVEC (phase);
2085 return ret;
2086 }
2087
2088 /* Create a decl for the guard at the top of the stack frame. */
2089
2090 static void
2091 create_stack_guard (void)
2092 {
2093 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2094 VAR_DECL, NULL, ptr_type_node);
2095 TREE_THIS_VOLATILE (guard) = 1;
2096 TREE_USED (guard) = 1;
2097 expand_one_stack_var (guard);
2098 crtl->stack_protect_guard = guard;
2099 }
2100
2101 /* Prepare for expanding variables. */
2102 static void
2103 init_vars_expansion (void)
2104 {
2105 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
2106 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
2107
2108 /* A map from decl to stack partition. */
2109 decl_to_stack_part = new hash_map<tree, unsigned>;
2110
2111 /* Initialize local stack smashing state. */
2112 has_protected_decls = false;
2113 has_short_buffer = false;
2114 if (hwasan_sanitize_stack_p ())
2115 hwasan_record_frame_init ();
2116 }
2117
2118 /* Free up stack variable graph data. */
2119 static void
2120 fini_vars_expansion (void)
2121 {
2122 bitmap_obstack_release (&stack_var_bitmap_obstack);
2123 if (stack_vars)
2124 XDELETEVEC (stack_vars);
2125 if (stack_vars_sorted)
2126 XDELETEVEC (stack_vars_sorted);
2127 stack_vars = NULL;
2128 stack_vars_sorted = NULL;
2129 stack_vars_alloc = stack_vars_num = 0;
2130 delete decl_to_stack_part;
2131 decl_to_stack_part = NULL;
2132 }
2133
2134 /* Make a fair guess for the size of the stack frame of the function
2135 in NODE. This doesn't have to be exact, the result is only used in
2136 the inline heuristics. So we don't want to run the full stack var
2137 packing algorithm (which is quadratic in the number of stack vars).
2138 Instead, we calculate the total size of all stack vars. This turns
2139 out to be a pretty fair estimate -- packing of stack vars doesn't
2140 happen very often. */
2141
2142 HOST_WIDE_INT
2143 estimated_stack_frame_size (struct cgraph_node *node)
2144 {
2145 poly_int64 size = 0;
2146 unsigned i;
2147 tree var;
2148 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2149
2150 push_cfun (fn);
2151
2152 init_vars_expansion ();
2153
2154 FOR_EACH_LOCAL_DECL (fn, i, var)
2155 if (auto_var_in_fn_p (var, fn->decl))
2156 size += expand_one_var (var, true, false);
2157
2158 if (stack_vars_num > 0)
2159 {
2160 /* Fake sorting the stack vars for account_stack_vars (). */
2161 stack_vars_sorted = XNEWVEC (unsigned , stack_vars_num);
2162 for (i = 0; i < stack_vars_num; ++i)
2163 stack_vars_sorted[i] = i;
2164 size += account_stack_vars ();
2165 }
2166
2167 fini_vars_expansion ();
2168 pop_cfun ();
2169 return estimated_poly_value (size);
2170 }
2171
2172 /* Check if the current function has calls that use a return slot. */
2173
2174 static bool
2175 stack_protect_return_slot_p ()
2176 {
2177 basic_block bb;
2178
2179 FOR_ALL_BB_FN (bb, cfun)
2180 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2181 !gsi_end_p (gsi); gsi_next (&gsi))
2182 {
2183 gimple *stmt = gsi_stmt (gsi);
2184 /* This assumes that calls to internal-only functions never
2185 use a return slot. */
2186 if (is_gimple_call (stmt)
2187 && !gimple_call_internal_p (stmt)
2188 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2189 gimple_call_fndecl (stmt)))
2190 return true;
2191 }
2192 return false;
2193 }
2194
2195 /* Expand all variables used in the function. */
2196
2197 static rtx_insn *
2198 expand_used_vars (bitmap forced_stack_vars)
2199 {
2200 tree var, outer_block = DECL_INITIAL (current_function_decl);
2201 auto_vec<tree> maybe_local_decls;
2202 rtx_insn *var_end_seq = NULL;
2203 unsigned i;
2204 unsigned len;
2205 bool gen_stack_protect_signal = false;
2206
2207 /* Compute the phase of the stack frame for this function. */
2208 {
2209 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2210 int off = targetm.starting_frame_offset () % align;
2211 frame_phase = off ? align - off : 0;
2212 }
2213
2214 /* Set TREE_USED on all variables in the local_decls. */
2215 FOR_EACH_LOCAL_DECL (cfun, i, var)
2216 TREE_USED (var) = 1;
2217 /* Clear TREE_USED on all variables associated with a block scope. */
2218 clear_tree_used (DECL_INITIAL (current_function_decl));
2219
2220 init_vars_expansion ();
2221
2222 if (targetm.use_pseudo_pic_reg ())
2223 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2224
2225 for (i = 0; i < SA.map->num_partitions; i++)
2226 {
2227 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2228 continue;
2229
2230 tree var = partition_to_var (SA.map, i);
2231
2232 gcc_assert (!virtual_operand_p (var));
2233
2234 expand_one_ssa_partition (var);
2235 }
2236
2237 if (flag_stack_protect == SPCT_FLAG_STRONG)
2238 gen_stack_protect_signal = stack_protect_return_slot_p ();
2239
2240 /* At this point all variables on the local_decls with TREE_USED
2241 set are not associated with any block scope. Lay them out. */
2242
2243 len = vec_safe_length (cfun->local_decls);
2244 FOR_EACH_LOCAL_DECL (cfun, i, var)
2245 {
2246 bool expand_now = false;
2247
2248 /* Expanded above already. */
2249 if (is_gimple_reg (var))
2250 {
2251 TREE_USED (var) = 0;
2252 goto next;
2253 }
2254 /* We didn't set a block for static or extern because it's hard
2255 to tell the difference between a global variable (re)declared
2256 in a local scope, and one that's really declared there to
2257 begin with. And it doesn't really matter much, since we're
2258 not giving them stack space. Expand them now. */
2259 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2260 expand_now = true;
2261
2262 /* Expand variables not associated with any block now. Those created by
2263 the optimizers could be live anywhere in the function. Those that
2264 could possibly have been scoped originally and detached from their
2265 block will have their allocation deferred so we coalesce them with
2266 others when optimization is enabled. */
2267 else if (TREE_USED (var))
2268 expand_now = true;
2269
2270 /* Finally, mark all variables on the list as used. We'll use
2271 this in a moment when we expand those associated with scopes. */
2272 TREE_USED (var) = 1;
2273
2274 if (expand_now)
2275 expand_one_var (var, true, true, forced_stack_vars);
2276
2277 next:
2278 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2279 {
2280 rtx rtl = DECL_RTL_IF_SET (var);
2281
2282 /* Keep artificial non-ignored vars in cfun->local_decls
2283 chain until instantiate_decls. */
2284 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2285 add_local_decl (cfun, var);
2286 else if (rtl == NULL_RTX)
2287 /* If rtl isn't set yet, which can happen e.g. with
2288 -fstack-protector, retry before returning from this
2289 function. */
2290 maybe_local_decls.safe_push (var);
2291 }
2292 }
2293
2294 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2295
2296 +-----------------+-----------------+
2297 | ...processed... | ...duplicates...|
2298 +-----------------+-----------------+
2299 ^
2300 +-- LEN points here.
2301
2302 We just want the duplicates, as those are the artificial
2303 non-ignored vars that we want to keep until instantiate_decls.
2304 Move them down and truncate the array. */
2305 if (!vec_safe_is_empty (cfun->local_decls))
2306 cfun->local_decls->block_remove (0, len);
2307
2308 /* At this point, all variables within the block tree with TREE_USED
2309 set are actually used by the optimized function. Lay them out. */
2310 expand_used_vars_for_block (outer_block, true, forced_stack_vars);
2311
2312 tree attribs = DECL_ATTRIBUTES (current_function_decl);
2313 if (stack_vars_num > 0)
2314 {
2315 bool has_addressable_vars = false;
2316
2317 add_scope_conflicts ();
2318
2319 /* If stack protection is enabled, we don't share space between
2320 vulnerable data and non-vulnerable data. */
2321 if (flag_stack_protect != 0
2322 && !lookup_attribute ("no_stack_protector", attribs)
2323 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2324 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2325 && lookup_attribute ("stack_protect", attribs))))
2326 has_addressable_vars = add_stack_protection_conflicts ();
2327
2328 if (flag_stack_protect == SPCT_FLAG_STRONG && has_addressable_vars)
2329 gen_stack_protect_signal = true;
2330
2331 /* Now that we have collected all stack variables, and have computed a
2332 minimal interference graph, attempt to save some stack space. */
2333 partition_stack_vars ();
2334 if (dump_file)
2335 dump_stack_var_partition ();
2336 }
2337
2338
2339 if (!lookup_attribute ("no_stack_protector", attribs))
2340 switch (flag_stack_protect)
2341 {
2342 case SPCT_FLAG_ALL:
2343 create_stack_guard ();
2344 break;
2345
2346 case SPCT_FLAG_STRONG:
2347 if (gen_stack_protect_signal
2348 || cfun->calls_alloca
2349 || has_protected_decls
2350 || lookup_attribute ("stack_protect", attribs))
2351 create_stack_guard ();
2352 break;
2353
2354 case SPCT_FLAG_DEFAULT:
2355 if (cfun->calls_alloca
2356 || has_protected_decls
2357 || lookup_attribute ("stack_protect", attribs))
2358 create_stack_guard ();
2359 break;
2360
2361 case SPCT_FLAG_EXPLICIT:
2362 if (lookup_attribute ("stack_protect", attribs))
2363 create_stack_guard ();
2364 break;
2365
2366 default:
2367 break;
2368 }
2369
2370 /* Assign rtl to each variable based on these partitions. */
2371 if (stack_vars_num > 0)
2372 {
2373 class stack_vars_data data;
2374
2375 data.asan_base = NULL_RTX;
2376 data.asan_alignb = 0;
2377
2378 /* Reorder decls to be protected by iterating over the variables
2379 array multiple times, and allocating out of each phase in turn. */
2380 /* ??? We could probably integrate this into the qsort we did
2381 earlier, such that we naturally see these variables first,
2382 and thus naturally allocate things in the right order. */
2383 if (has_protected_decls)
2384 {
2385 /* Phase 1 contains only character arrays. */
2386 expand_stack_vars (stack_protect_decl_phase_1, &data);
2387
2388 /* Phase 2 contains other kinds of arrays. */
2389 if (!lookup_attribute ("no_stack_protector", attribs)
2390 && (flag_stack_protect == SPCT_FLAG_ALL
2391 || flag_stack_protect == SPCT_FLAG_STRONG
2392 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2393 && lookup_attribute ("stack_protect", attribs))))
2394 expand_stack_vars (stack_protect_decl_phase_2, &data);
2395 }
2396
2397 if (asan_sanitize_stack_p ())
2398 /* Phase 3, any partitions that need asan protection
2399 in addition to phase 1 and 2. */
2400 expand_stack_vars (asan_decl_phase_3, &data);
2401
2402 /* ASAN description strings don't yet have a syntax for expressing
2403 polynomial offsets. */
2404 HOST_WIDE_INT prev_offset;
2405 if (!data.asan_vec.is_empty ()
2406 && frame_offset.is_constant (&prev_offset))
2407 {
2408 HOST_WIDE_INT offset, sz, redzonesz;
2409 redzonesz = ASAN_RED_ZONE_SIZE;
2410 sz = data.asan_vec[0] - prev_offset;
2411 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2412 && data.asan_alignb <= 4096
2413 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2414 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2415 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2416 /* Allocating a constant amount of space from a constant
2417 starting offset must give a constant result. */
2418 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2419 .to_constant ());
2420 data.asan_vec.safe_push (prev_offset);
2421 data.asan_vec.safe_push (offset);
2422 /* Leave space for alignment if STRICT_ALIGNMENT. */
2423 if (STRICT_ALIGNMENT)
2424 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2425 << ASAN_SHADOW_SHIFT)
2426 / BITS_PER_UNIT, 1);
2427
2428 var_end_seq
2429 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2430 data.asan_base,
2431 data.asan_alignb,
2432 data.asan_vec.address (),
2433 data.asan_decl_vec.address (),
2434 data.asan_vec.length ());
2435 }
2436
2437 expand_stack_vars (NULL, &data);
2438 }
2439
2440 if (hwasan_sanitize_stack_p ())
2441 hwasan_emit_prologue ();
2442 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2443 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2444 virtual_stack_vars_rtx,
2445 var_end_seq);
2446 else if (hwasan_sanitize_allocas_p () && cfun->calls_alloca)
2447 /* When using out-of-line instrumentation we only want to emit one function
2448 call for clearing the tags in a region of shadow stack. When there are
2449 alloca calls in this frame we want to emit a call using the
2450 virtual_stack_dynamic_rtx, but when not we use the hwasan_frame_extent
2451 rtx we created in expand_stack_vars. */
2452 var_end_seq = hwasan_emit_untag_frame (virtual_stack_dynamic_rtx,
2453 virtual_stack_vars_rtx);
2454 else if (hwasan_sanitize_stack_p ())
2455 /* If no variables were stored on the stack, `hwasan_get_frame_extent`
2456 will return NULL_RTX and hence `hwasan_emit_untag_frame` will return
2457 NULL (i.e. an empty sequence). */
2458 var_end_seq = hwasan_emit_untag_frame (hwasan_get_frame_extent (),
2459 virtual_stack_vars_rtx);
2460
2461 fini_vars_expansion ();
2462
2463 /* If there were any artificial non-ignored vars without rtl
2464 found earlier, see if deferred stack allocation hasn't assigned
2465 rtl to them. */
2466 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2467 {
2468 rtx rtl = DECL_RTL_IF_SET (var);
2469
2470 /* Keep artificial non-ignored vars in cfun->local_decls
2471 chain until instantiate_decls. */
2472 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2473 add_local_decl (cfun, var);
2474 }
2475
2476 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2477 if (STACK_ALIGNMENT_NEEDED)
2478 {
2479 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2480 if (FRAME_GROWS_DOWNWARD)
2481 frame_offset = aligned_lower_bound (frame_offset, align);
2482 else
2483 frame_offset = aligned_upper_bound (frame_offset, align);
2484 }
2485
2486 return var_end_seq;
2487 }
2488
2489
2490 /* If we need to produce a detailed dump, print the tree representation
2491 for STMT to the dump file. SINCE is the last RTX after which the RTL
2492 generated for STMT should have been appended. */
2493
2494 static void
2495 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2496 {
2497 if (dump_file && (dump_flags & TDF_DETAILS))
2498 {
2499 fprintf (dump_file, "\n;; ");
2500 print_gimple_stmt (dump_file, stmt, 0,
2501 TDF_SLIM | (dump_flags & TDF_LINENO));
2502 fprintf (dump_file, "\n");
2503
2504 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2505 }
2506 }
2507
2508 /* Maps the blocks that do not contain tree labels to rtx labels. */
2509
2510 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2511
2512 /* Returns the label_rtx expression for a label starting basic block BB. */
2513
2514 static rtx_code_label *
2515 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2516 {
2517 if (bb->flags & BB_RTL)
2518 return block_label (bb);
2519
2520 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2521 if (elt)
2522 return *elt;
2523
2524 /* Find the tree label if it is present. */
2525 gimple_stmt_iterator gsi = gsi_start_bb (bb);
2526 glabel *lab_stmt;
2527 if (!gsi_end_p (gsi)
2528 && (lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
2529 && !DECL_NONLOCAL (gimple_label_label (lab_stmt)))
2530 return jump_target_rtx (gimple_label_label (lab_stmt));
2531
2532 rtx_code_label *l = gen_label_rtx ();
2533 lab_rtx_for_bb->put (bb, l);
2534 return l;
2535 }
2536
2537
2538 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2539 of a basic block where we just expanded the conditional at the end,
2540 possibly clean up the CFG and instruction sequence. LAST is the
2541 last instruction before the just emitted jump sequence. */
2542
2543 static void
2544 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2545 {
2546 /* Special case: when jumpif decides that the condition is
2547 trivial it emits an unconditional jump (and the necessary
2548 barrier). But we still have two edges, the fallthru one is
2549 wrong. purge_dead_edges would clean this up later. Unfortunately
2550 we have to insert insns (and split edges) before
2551 find_many_sub_basic_blocks and hence before purge_dead_edges.
2552 But splitting edges might create new blocks which depend on the
2553 fact that if there are two edges there's no barrier. So the
2554 barrier would get lost and verify_flow_info would ICE. Instead
2555 of auditing all edge splitters to care for the barrier (which
2556 normally isn't there in a cleaned CFG), fix it here. */
2557 if (BARRIER_P (get_last_insn ()))
2558 {
2559 rtx_insn *insn;
2560 remove_edge (e);
2561 /* Now, we have a single successor block, if we have insns to
2562 insert on the remaining edge we potentially will insert
2563 it at the end of this block (if the dest block isn't feasible)
2564 in order to avoid splitting the edge. This insertion will take
2565 place in front of the last jump. But we might have emitted
2566 multiple jumps (conditional and one unconditional) to the
2567 same destination. Inserting in front of the last one then
2568 is a problem. See PR 40021. We fix this by deleting all
2569 jumps except the last unconditional one. */
2570 insn = PREV_INSN (get_last_insn ());
2571 /* Make sure we have an unconditional jump. Otherwise we're
2572 confused. */
2573 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2574 for (insn = PREV_INSN (insn); insn != last;)
2575 {
2576 insn = PREV_INSN (insn);
2577 if (JUMP_P (NEXT_INSN (insn)))
2578 {
2579 if (!any_condjump_p (NEXT_INSN (insn)))
2580 {
2581 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2582 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2583 }
2584 delete_insn (NEXT_INSN (insn));
2585 }
2586 }
2587 }
2588 }
2589
2590 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2591 Returns a new basic block if we've terminated the current basic
2592 block and created a new one. */
2593
2594 static basic_block
2595 expand_gimple_cond (basic_block bb, gcond *stmt)
2596 {
2597 basic_block new_bb, dest;
2598 edge true_edge;
2599 edge false_edge;
2600 rtx_insn *last2, *last;
2601 enum tree_code code;
2602 tree op0, op1;
2603
2604 code = gimple_cond_code (stmt);
2605 op0 = gimple_cond_lhs (stmt);
2606 op1 = gimple_cond_rhs (stmt);
2607 /* We're sometimes presented with such code:
2608 D.123_1 = x < y;
2609 if (D.123_1 != 0)
2610 ...
2611 This would expand to two comparisons which then later might
2612 be cleaned up by combine. But some pattern matchers like if-conversion
2613 work better when there's only one compare, so make up for this
2614 here as special exception if TER would have made the same change. */
2615 if (SA.values
2616 && TREE_CODE (op0) == SSA_NAME
2617 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2618 && TREE_CODE (op1) == INTEGER_CST
2619 && ((gimple_cond_code (stmt) == NE_EXPR
2620 && integer_zerop (op1))
2621 || (gimple_cond_code (stmt) == EQ_EXPR
2622 && integer_onep (op1)))
2623 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2624 {
2625 gimple *second = SSA_NAME_DEF_STMT (op0);
2626 if (gimple_code (second) == GIMPLE_ASSIGN)
2627 {
2628 enum tree_code code2 = gimple_assign_rhs_code (second);
2629 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2630 {
2631 code = code2;
2632 op0 = gimple_assign_rhs1 (second);
2633 op1 = gimple_assign_rhs2 (second);
2634 }
2635 /* If jumps are cheap and the target does not support conditional
2636 compare, turn some more codes into jumpy sequences. */
2637 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2638 && targetm.gen_ccmp_first == NULL)
2639 {
2640 if ((code2 == BIT_AND_EXPR
2641 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2642 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2643 || code2 == TRUTH_AND_EXPR)
2644 {
2645 code = TRUTH_ANDIF_EXPR;
2646 op0 = gimple_assign_rhs1 (second);
2647 op1 = gimple_assign_rhs2 (second);
2648 }
2649 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2650 {
2651 code = TRUTH_ORIF_EXPR;
2652 op0 = gimple_assign_rhs1 (second);
2653 op1 = gimple_assign_rhs2 (second);
2654 }
2655 }
2656 }
2657 }
2658
2659 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2660 into (x - C2) * C3 < C4. */
2661 if ((code == EQ_EXPR || code == NE_EXPR)
2662 && TREE_CODE (op0) == SSA_NAME
2663 && TREE_CODE (op1) == INTEGER_CST)
2664 code = maybe_optimize_mod_cmp (code, &op0, &op1);
2665
2666 /* Optimize (x - y) < 0 into x < y if x - y has undefined overflow. */
2667 if (!TYPE_UNSIGNED (TREE_TYPE (op0))
2668 && (code == LT_EXPR || code == LE_EXPR
2669 || code == GT_EXPR || code == GE_EXPR)
2670 && integer_zerop (op1)
2671 && TREE_CODE (op0) == SSA_NAME)
2672 maybe_optimize_sub_cmp_0 (code, &op0, &op1);
2673
2674 last2 = last = get_last_insn ();
2675
2676 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2677 set_curr_insn_location (gimple_location (stmt));
2678
2679 /* These flags have no purpose in RTL land. */
2680 true_edge->flags &= ~EDGE_TRUE_VALUE;
2681 false_edge->flags &= ~EDGE_FALSE_VALUE;
2682
2683 /* We can either have a pure conditional jump with one fallthru edge or
2684 two-way jump that needs to be decomposed into two basic blocks. */
2685 if (false_edge->dest == bb->next_bb)
2686 {
2687 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2688 true_edge->probability);
2689 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2690 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2691 set_curr_insn_location (true_edge->goto_locus);
2692 false_edge->flags |= EDGE_FALLTHRU;
2693 maybe_cleanup_end_of_block (false_edge, last);
2694 return NULL;
2695 }
2696 if (true_edge->dest == bb->next_bb)
2697 {
2698 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2699 false_edge->probability);
2700 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2701 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2702 set_curr_insn_location (false_edge->goto_locus);
2703 true_edge->flags |= EDGE_FALLTHRU;
2704 maybe_cleanup_end_of_block (true_edge, last);
2705 return NULL;
2706 }
2707
2708 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2709 true_edge->probability);
2710 last = get_last_insn ();
2711 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2712 set_curr_insn_location (false_edge->goto_locus);
2713 emit_jump (label_rtx_for_bb (false_edge->dest));
2714
2715 BB_END (bb) = last;
2716 if (BARRIER_P (BB_END (bb)))
2717 BB_END (bb) = PREV_INSN (BB_END (bb));
2718 update_bb_for_insn (bb);
2719
2720 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2721 dest = false_edge->dest;
2722 redirect_edge_succ (false_edge, new_bb);
2723 false_edge->flags |= EDGE_FALLTHRU;
2724 new_bb->count = false_edge->count ();
2725 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2726 add_bb_to_loop (new_bb, loop);
2727 if (loop->latch == bb
2728 && loop->header == dest)
2729 loop->latch = new_bb;
2730 make_single_succ_edge (new_bb, dest, 0);
2731 if (BARRIER_P (BB_END (new_bb)))
2732 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2733 update_bb_for_insn (new_bb);
2734
2735 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2736
2737 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2738 {
2739 set_curr_insn_location (true_edge->goto_locus);
2740 true_edge->goto_locus = curr_insn_location ();
2741 }
2742
2743 return new_bb;
2744 }
2745
2746 /* Mark all calls that can have a transaction restart. */
2747
2748 static void
2749 mark_transaction_restart_calls (gimple *stmt)
2750 {
2751 struct tm_restart_node dummy;
2752 tm_restart_node **slot;
2753
2754 if (!cfun->gimple_df->tm_restart)
2755 return;
2756
2757 dummy.stmt = stmt;
2758 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2759 if (slot)
2760 {
2761 struct tm_restart_node *n = *slot;
2762 tree list = n->label_or_list;
2763 rtx_insn *insn;
2764
2765 for (insn = next_real_insn (get_last_insn ());
2766 !CALL_P (insn);
2767 insn = next_real_insn (insn))
2768 continue;
2769
2770 if (TREE_CODE (list) == LABEL_DECL)
2771 add_reg_note (insn, REG_TM, label_rtx (list));
2772 else
2773 for (; list ; list = TREE_CHAIN (list))
2774 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2775 }
2776 }
2777
2778 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2779 statement STMT. */
2780
2781 static void
2782 expand_call_stmt (gcall *stmt)
2783 {
2784 tree exp, decl, lhs;
2785 bool builtin_p;
2786 size_t i;
2787
2788 if (gimple_call_internal_p (stmt))
2789 {
2790 expand_internal_call (stmt);
2791 return;
2792 }
2793
2794 /* If this is a call to a built-in function and it has no effect other
2795 than setting the lhs, try to implement it using an internal function
2796 instead. */
2797 decl = gimple_call_fndecl (stmt);
2798 if (gimple_call_lhs (stmt)
2799 && !gimple_has_side_effects (stmt)
2800 && (optimize || (decl && called_as_built_in (decl))))
2801 {
2802 internal_fn ifn = replacement_internal_fn (stmt);
2803 if (ifn != IFN_LAST)
2804 {
2805 expand_internal_call (ifn, stmt);
2806 return;
2807 }
2808 }
2809
2810 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2811
2812 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2813 builtin_p = decl && fndecl_built_in_p (decl);
2814
2815 /* If this is not a builtin function, the function type through which the
2816 call is made may be different from the type of the function. */
2817 if (!builtin_p)
2818 CALL_EXPR_FN (exp)
2819 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2820 CALL_EXPR_FN (exp));
2821
2822 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2823 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2824
2825 for (i = 0; i < gimple_call_num_args (stmt); i++)
2826 {
2827 tree arg = gimple_call_arg (stmt, i);
2828 gimple *def;
2829 /* TER addresses into arguments of builtin functions so we have a
2830 chance to infer more correct alignment information. See PR39954. */
2831 if (builtin_p
2832 && TREE_CODE (arg) == SSA_NAME
2833 && (def = get_gimple_for_ssa_name (arg))
2834 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2835 arg = gimple_assign_rhs1 (def);
2836 CALL_EXPR_ARG (exp, i) = arg;
2837 }
2838
2839 if (gimple_has_side_effects (stmt)
2840 /* ??? Downstream in expand_expr_real_1 we assume that expressions
2841 w/o side-effects do not throw so work around this here. */
2842 || stmt_could_throw_p (cfun, stmt))
2843 TREE_SIDE_EFFECTS (exp) = 1;
2844
2845 if (gimple_call_nothrow_p (stmt))
2846 TREE_NOTHROW (exp) = 1;
2847
2848 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2849 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2850 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2851 if (decl
2852 && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2853 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2854 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2855 else
2856 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2857 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2858 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2859 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2860
2861 /* Must come after copying location. */
2862 copy_warning (exp, stmt);
2863
2864 /* Ensure RTL is created for debug args. */
2865 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2866 {
2867 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2868 unsigned int ix;
2869 tree dtemp;
2870
2871 if (debug_args)
2872 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2873 {
2874 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2875 expand_debug_expr (dtemp);
2876 }
2877 }
2878
2879 rtx_insn *before_call = get_last_insn ();
2880 lhs = gimple_call_lhs (stmt);
2881 if (lhs)
2882 expand_assignment (lhs, exp, false);
2883 else
2884 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2885
2886 /* If the gimple call is an indirect call and has 'nocf_check'
2887 attribute find a generated CALL insn to mark it as no
2888 control-flow verification is needed. */
2889 if (gimple_call_nocf_check_p (stmt)
2890 && !gimple_call_fndecl (stmt))
2891 {
2892 rtx_insn *last = get_last_insn ();
2893 while (!CALL_P (last)
2894 && last != before_call)
2895 last = PREV_INSN (last);
2896
2897 if (last != before_call)
2898 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2899 }
2900
2901 mark_transaction_restart_calls (stmt);
2902 }
2903
2904
2905 /* Generate RTL for an asm statement (explicit assembler code).
2906 STRING is a STRING_CST node containing the assembler code text,
2907 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2908 insn is volatile; don't optimize it. */
2909
2910 static void
2911 expand_asm_loc (tree string, int vol, location_t locus)
2912 {
2913 rtx body;
2914
2915 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2916 ggc_strdup (TREE_STRING_POINTER (string)),
2917 locus);
2918
2919 MEM_VOLATILE_P (body) = vol;
2920
2921 /* Non-empty basic ASM implicitly clobbers memory. */
2922 if (TREE_STRING_LENGTH (string) != 0)
2923 {
2924 rtx asm_op, clob;
2925 unsigned i, nclobbers;
2926 auto_vec<rtx> input_rvec, output_rvec;
2927 auto_vec<machine_mode> input_mode;
2928 auto_vec<const char *> constraints;
2929 auto_vec<rtx> use_rvec;
2930 auto_vec<rtx> clobber_rvec;
2931 HARD_REG_SET clobbered_regs;
2932 CLEAR_HARD_REG_SET (clobbered_regs);
2933
2934 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2935 clobber_rvec.safe_push (clob);
2936
2937 if (targetm.md_asm_adjust)
2938 targetm.md_asm_adjust (output_rvec, input_rvec, input_mode,
2939 constraints, use_rvec, clobber_rvec,
2940 clobbered_regs, locus);
2941
2942 asm_op = body;
2943 nclobbers = clobber_rvec.length ();
2944 auto nuses = use_rvec.length ();
2945 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nuses + nclobbers));
2946
2947 i = 0;
2948 XVECEXP (body, 0, i++) = asm_op;
2949 for (rtx use : use_rvec)
2950 XVECEXP (body, 0, i++) = gen_rtx_USE (VOIDmode, use);
2951 for (rtx clobber : clobber_rvec)
2952 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobber);
2953 }
2954
2955 emit_insn (body);
2956 }
2957
2958 /* Return the number of times character C occurs in string S. */
2959 static int
2960 n_occurrences (int c, const char *s)
2961 {
2962 int n = 0;
2963 while (*s)
2964 n += (*s++ == c);
2965 return n;
2966 }
2967
2968 /* A subroutine of expand_asm_operands. Check that all operands have
2969 the same number of alternatives. Return true if so. */
2970
2971 static bool
2972 check_operand_nalternatives (const vec<const char *> &constraints)
2973 {
2974 unsigned len = constraints.length();
2975 if (len > 0)
2976 {
2977 int nalternatives = n_occurrences (',', constraints[0]);
2978
2979 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2980 {
2981 error ("too many alternatives in %<asm%>");
2982 return false;
2983 }
2984
2985 for (unsigned i = 1; i < len; ++i)
2986 if (n_occurrences (',', constraints[i]) != nalternatives)
2987 {
2988 error ("operand constraints for %<asm%> differ "
2989 "in number of alternatives");
2990 return false;
2991 }
2992 }
2993 return true;
2994 }
2995
2996 /* Check for overlap between registers marked in CLOBBERED_REGS and
2997 anything inappropriate in T. Emit error and return the register
2998 variable definition for error, NULL_TREE for ok. */
2999
3000 static bool
3001 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs,
3002 location_t loc)
3003 {
3004 /* Conflicts between asm-declared register variables and the clobber
3005 list are not allowed. */
3006 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
3007
3008 if (overlap)
3009 {
3010 error_at (loc, "%<asm%> specifier for variable %qE conflicts with "
3011 "%<asm%> clobber list", DECL_NAME (overlap));
3012
3013 /* Reset registerness to stop multiple errors emitted for a single
3014 variable. */
3015 DECL_REGISTER (overlap) = 0;
3016 return true;
3017 }
3018
3019 return false;
3020 }
3021
3022 /* Check that the given REGNO spanning NREGS is a valid
3023 asm clobber operand. Some HW registers cannot be
3024 saved/restored, hence they should not be clobbered by
3025 asm statements. */
3026 static bool
3027 asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
3028 {
3029 bool is_valid = true;
3030 HARD_REG_SET regset;
3031
3032 CLEAR_HARD_REG_SET (regset);
3033
3034 add_range_to_hard_reg_set (&regset, regno, nregs);
3035
3036 /* Clobbering the PIC register is an error. */
3037 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3038 && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
3039 {
3040 /* ??? Diagnose during gimplification? */
3041 error ("PIC register clobbered by %qs in %<asm%>", regname);
3042 is_valid = false;
3043 }
3044 else if (!in_hard_reg_set_p
3045 (accessible_reg_set, reg_raw_mode[regno], regno))
3046 {
3047 /* ??? Diagnose during gimplification? */
3048 error ("the register %qs cannot be clobbered in %<asm%>"
3049 " for the current target", regname);
3050 is_valid = false;
3051 }
3052
3053 /* Clobbering the stack pointer register is deprecated. GCC expects
3054 the value of the stack pointer after an asm statement to be the same
3055 as it was before, so no asm can validly clobber the stack pointer in
3056 the usual sense. Adding the stack pointer to the clobber list has
3057 traditionally had some undocumented and somewhat obscure side-effects. */
3058 if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM))
3059 {
3060 crtl->sp_is_clobbered_by_asm = true;
3061 if (warning (OPT_Wdeprecated, "listing the stack pointer register"
3062 " %qs in a clobber list is deprecated", regname))
3063 inform (input_location, "the value of the stack pointer after"
3064 " an %<asm%> statement must be the same as it was before"
3065 " the statement");
3066 }
3067
3068 return is_valid;
3069 }
3070
3071 /* Generate RTL for an asm statement with arguments.
3072 STRING is the instruction template.
3073 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
3074 Each output or input has an expression in the TREE_VALUE and
3075 a tree list in TREE_PURPOSE which in turn contains a constraint
3076 name in TREE_VALUE (or NULL_TREE) and a constraint string
3077 in TREE_PURPOSE.
3078 CLOBBERS is a list of STRING_CST nodes each naming a hard register
3079 that is clobbered by this insn.
3080
3081 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
3082 should be the fallthru basic block of the asm goto.
3083
3084 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
3085 Some elements of OUTPUTS may be replaced with trees representing temporary
3086 values. The caller should copy those temporary values to the originally
3087 specified lvalues.
3088
3089 VOL nonzero means the insn is volatile; don't optimize it. */
3090
3091 static void
3092 expand_asm_stmt (gasm *stmt)
3093 {
3094 class save_input_location
3095 {
3096 location_t old;
3097
3098 public:
3099 explicit save_input_location(location_t where)
3100 {
3101 old = input_location;
3102 input_location = where;
3103 }
3104
3105 ~save_input_location()
3106 {
3107 input_location = old;
3108 }
3109 };
3110
3111 location_t locus = gimple_location (stmt);
3112
3113 if (gimple_asm_input_p (stmt))
3114 {
3115 const char *s = gimple_asm_string (stmt);
3116 tree string = build_string (strlen (s), s);
3117 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
3118 return;
3119 }
3120
3121 /* There are some legacy diagnostics in here. */
3122 save_input_location s_i_l(locus);
3123
3124 unsigned noutputs = gimple_asm_noutputs (stmt);
3125 unsigned ninputs = gimple_asm_ninputs (stmt);
3126 unsigned nlabels = gimple_asm_nlabels (stmt);
3127 unsigned i;
3128 bool error_seen = false;
3129
3130 /* ??? Diagnose during gimplification? */
3131 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
3132 {
3133 error_at (locus, "more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
3134 return;
3135 }
3136
3137 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
3138 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
3139 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
3140
3141 /* Copy the gimple vectors into new vectors that we can manipulate. */
3142
3143 output_tvec.safe_grow (noutputs, true);
3144 input_tvec.safe_grow (ninputs, true);
3145 constraints.safe_grow (noutputs + ninputs, true);
3146
3147 for (i = 0; i < noutputs; ++i)
3148 {
3149 tree t = gimple_asm_output_op (stmt, i);
3150 output_tvec[i] = TREE_VALUE (t);
3151 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
3152 }
3153 for (i = 0; i < ninputs; i++)
3154 {
3155 tree t = gimple_asm_input_op (stmt, i);
3156 input_tvec[i] = TREE_VALUE (t);
3157 constraints[i + noutputs]
3158 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
3159 }
3160
3161 /* ??? Diagnose during gimplification? */
3162 if (! check_operand_nalternatives (constraints))
3163 return;
3164
3165 /* Count the number of meaningful clobbered registers, ignoring what
3166 we would ignore later. */
3167 auto_vec<rtx> clobber_rvec;
3168 HARD_REG_SET clobbered_regs;
3169 CLEAR_HARD_REG_SET (clobbered_regs);
3170
3171 if (unsigned n = gimple_asm_nclobbers (stmt))
3172 {
3173 clobber_rvec.reserve (n);
3174 for (i = 0; i < n; i++)
3175 {
3176 tree t = gimple_asm_clobber_op (stmt, i);
3177 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
3178 int nregs, j;
3179
3180 j = decode_reg_name_and_count (regname, &nregs);
3181 if (j < 0)
3182 {
3183 if (j == -2)
3184 {
3185 /* ??? Diagnose during gimplification? */
3186 error_at (locus, "unknown register name %qs in %<asm%>",
3187 regname);
3188 error_seen = true;
3189 }
3190 else if (j == -4)
3191 {
3192 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3193 clobber_rvec.safe_push (x);
3194 }
3195 else
3196 {
3197 /* Otherwise we should have -1 == empty string
3198 or -3 == cc, which is not a register. */
3199 gcc_assert (j == -1 || j == -3);
3200 }
3201 }
3202 else
3203 for (int reg = j; reg < j + nregs; reg++)
3204 {
3205 if (!asm_clobber_reg_is_valid (reg, nregs, regname))
3206 return;
3207
3208 SET_HARD_REG_BIT (clobbered_regs, reg);
3209 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
3210 clobber_rvec.safe_push (x);
3211 }
3212 }
3213 }
3214
3215 /* First pass over inputs and outputs checks validity and sets
3216 mark_addressable if needed. */
3217 /* ??? Diagnose during gimplification? */
3218
3219 for (i = 0; i < noutputs; ++i)
3220 {
3221 tree val = output_tvec[i];
3222 tree type = TREE_TYPE (val);
3223 const char *constraint;
3224 bool is_inout;
3225 bool allows_reg;
3226 bool allows_mem;
3227
3228 /* Try to parse the output constraint. If that fails, there's
3229 no point in going further. */
3230 constraint = constraints[i];
3231 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3232 &allows_mem, &allows_reg, &is_inout))
3233 return;
3234
3235 /* If the output is a hard register, verify it doesn't conflict with
3236 any other operand's possible hard register use. */
3237 if (DECL_P (val)
3238 && REG_P (DECL_RTL (val))
3239 && HARD_REGISTER_P (DECL_RTL (val)))
3240 {
3241 unsigned j, output_hregno = REGNO (DECL_RTL (val));
3242 bool early_clobber_p = strchr (constraints[i], '&') != NULL;
3243 unsigned long match;
3244
3245 /* Verify the other outputs do not use the same hard register. */
3246 for (j = i + 1; j < noutputs; ++j)
3247 if (DECL_P (output_tvec[j])
3248 && REG_P (DECL_RTL (output_tvec[j]))
3249 && HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
3250 && output_hregno == REGNO (DECL_RTL (output_tvec[j])))
3251 {
3252 error_at (locus, "invalid hard register usage between output "
3253 "operands");
3254 error_seen = true;
3255 }
3256
3257 /* Verify matching constraint operands use the same hard register
3258 and that the non-matching constraint operands do not use the same
3259 hard register if the output is an early clobber operand. */
3260 for (j = 0; j < ninputs; ++j)
3261 if (DECL_P (input_tvec[j])
3262 && REG_P (DECL_RTL (input_tvec[j]))
3263 && HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
3264 {
3265 unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
3266 switch (*constraints[j + noutputs])
3267 {
3268 case '0': case '1': case '2': case '3': case '4':
3269 case '5': case '6': case '7': case '8': case '9':
3270 match = strtoul (constraints[j + noutputs], NULL, 10);
3271 break;
3272 default:
3273 match = ULONG_MAX;
3274 break;
3275 }
3276 if (i == match
3277 && output_hregno != input_hregno)
3278 {
3279 error_at (locus, "invalid hard register usage between "
3280 "output operand and matching constraint operand");
3281 error_seen = true;
3282 }
3283 else if (early_clobber_p
3284 && i != match
3285 && output_hregno == input_hregno)
3286 {
3287 error_at (locus, "invalid hard register usage between "
3288 "earlyclobber operand and input operand");
3289 error_seen = true;
3290 }
3291 }
3292 }
3293
3294 if (! allows_reg
3295 && (allows_mem
3296 || is_inout
3297 || (DECL_P (val)
3298 && REG_P (DECL_RTL (val))
3299 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3300 mark_addressable (val);
3301 }
3302
3303 for (i = 0; i < ninputs; ++i)
3304 {
3305 bool allows_reg, allows_mem;
3306 const char *constraint;
3307
3308 constraint = constraints[i + noutputs];
3309 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3310 constraints.address (),
3311 &allows_mem, &allows_reg))
3312 return;
3313
3314 if (! allows_reg && allows_mem)
3315 mark_addressable (input_tvec[i]);
3316 }
3317
3318 /* Second pass evaluates arguments. */
3319
3320 /* Make sure stack is consistent for asm goto. */
3321 if (nlabels > 0)
3322 do_pending_stack_adjust ();
3323 int old_generating_concat_p = generating_concat_p;
3324
3325 /* Vector of RTX's of evaluated output operands. */
3326 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3327 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3328 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3329
3330 output_rvec.safe_grow (noutputs, true);
3331
3332 for (i = 0; i < noutputs; ++i)
3333 {
3334 tree val = output_tvec[i];
3335 tree type = TREE_TYPE (val);
3336 bool is_inout, allows_reg, allows_mem, ok;
3337 rtx op;
3338
3339 ok = parse_output_constraint (&constraints[i], i, ninputs,
3340 noutputs, &allows_mem, &allows_reg,
3341 &is_inout);
3342 gcc_assert (ok);
3343
3344 /* If an output operand is not a decl or indirect ref and our constraint
3345 allows a register, make a temporary to act as an intermediate.
3346 Make the asm insn write into that, then we will copy it to
3347 the real output operand. Likewise for promoted variables. */
3348
3349 generating_concat_p = 0;
3350
3351 gcc_assert (TREE_CODE (val) != INDIRECT_REF);
3352 if (((TREE_CODE (val) == MEM_REF
3353 && TREE_CODE (TREE_OPERAND (val, 0)) != ADDR_EXPR)
3354 && allows_mem)
3355 || (DECL_P (val)
3356 && (allows_mem || REG_P (DECL_RTL (val)))
3357 && ! (REG_P (DECL_RTL (val))
3358 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3359 || ! allows_reg
3360 || is_inout
3361 || TREE_ADDRESSABLE (type)
3362 || (!tree_fits_poly_int64_p (TYPE_SIZE (type))
3363 && !known_size_p (max_int_size_in_bytes (type))))
3364 {
3365 op = expand_expr (val, NULL_RTX, VOIDmode,
3366 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3367 if (MEM_P (op))
3368 op = validize_mem (op);
3369
3370 if (! allows_reg && !MEM_P (op))
3371 {
3372 error_at (locus, "output number %d not directly addressable", i);
3373 error_seen = true;
3374 }
3375 if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3376 || GET_CODE (op) == CONCAT)
3377 {
3378 rtx old_op = op;
3379 op = gen_reg_rtx (GET_MODE (op));
3380
3381 generating_concat_p = old_generating_concat_p;
3382
3383 if (is_inout)
3384 emit_move_insn (op, old_op);
3385
3386 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3387 emit_move_insn (old_op, op);
3388 after_rtl_seq = get_insns ();
3389 after_rtl_end = get_last_insn ();
3390 end_sequence ();
3391 }
3392 }
3393 else
3394 {
3395 op = assign_temp (type, 0, 1);
3396 op = validize_mem (op);
3397 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3398 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3399
3400 generating_concat_p = old_generating_concat_p;
3401
3402 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3403 expand_assignment (val, make_tree (type, op), false);
3404 after_rtl_seq = get_insns ();
3405 after_rtl_end = get_last_insn ();
3406 end_sequence ();
3407 }
3408 output_rvec[i] = op;
3409
3410 if (is_inout)
3411 inout_opnum.safe_push (i);
3412 }
3413
3414 const char *str = gimple_asm_string (stmt);
3415 if (error_seen)
3416 {
3417 ninputs = 0;
3418 noutputs = 0;
3419 inout_opnum.truncate (0);
3420 output_rvec.truncate (0);
3421 clobber_rvec.truncate (0);
3422 constraints.truncate (0);
3423 CLEAR_HARD_REG_SET (clobbered_regs);
3424 str = "";
3425 }
3426
3427 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3428 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3429
3430 input_rvec.safe_grow (ninputs, true);
3431 input_mode.safe_grow (ninputs, true);
3432
3433 generating_concat_p = 0;
3434
3435 for (i = 0; i < ninputs; ++i)
3436 {
3437 tree val = input_tvec[i];
3438 tree type = TREE_TYPE (val);
3439 bool allows_reg, allows_mem, ok;
3440 const char *constraint;
3441 rtx op;
3442
3443 constraint = constraints[i + noutputs];
3444 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3445 constraints.address (),
3446 &allows_mem, &allows_reg);
3447 gcc_assert (ok);
3448
3449 /* EXPAND_INITIALIZER will not generate code for valid initializer
3450 constants, but will still generate code for other types of operand.
3451 This is the behavior we want for constant constraints. */
3452 op = expand_expr (val, NULL_RTX, VOIDmode,
3453 allows_reg ? EXPAND_NORMAL
3454 : allows_mem ? EXPAND_MEMORY
3455 : EXPAND_INITIALIZER);
3456
3457 /* Never pass a CONCAT to an ASM. */
3458 if (GET_CODE (op) == CONCAT)
3459 op = force_reg (GET_MODE (op), op);
3460 else if (MEM_P (op))
3461 op = validize_mem (op);
3462
3463 if (asm_operand_ok (op, constraint, NULL) <= 0)
3464 {
3465 if (allows_reg && TYPE_MODE (type) != BLKmode)
3466 op = force_reg (TYPE_MODE (type), op);
3467 else if (!allows_mem)
3468 warning_at (locus, 0, "%<asm%> operand %d probably does not match "
3469 "constraints", i + noutputs);
3470 else if (MEM_P (op))
3471 {
3472 /* We won't recognize either volatile memory or memory
3473 with a queued address as available a memory_operand
3474 at this point. Ignore it: clearly this *is* a memory. */
3475 }
3476 else
3477 gcc_unreachable ();
3478 }
3479 input_rvec[i] = op;
3480 input_mode[i] = TYPE_MODE (type);
3481 }
3482
3483 /* For in-out operands, copy output rtx to input rtx. */
3484 unsigned ninout = inout_opnum.length ();
3485 for (i = 0; i < ninout; i++)
3486 {
3487 int j = inout_opnum[i];
3488 rtx o = output_rvec[j];
3489
3490 input_rvec.safe_push (o);
3491 input_mode.safe_push (GET_MODE (o));
3492
3493 char buffer[16];
3494 sprintf (buffer, "%d", j);
3495 constraints.safe_push (ggc_strdup (buffer));
3496 }
3497 ninputs += ninout;
3498
3499 /* Sometimes we wish to automatically clobber registers across an asm.
3500 Case in point is when the i386 backend moved from cc0 to a hard reg --
3501 maintaining source-level compatibility means automatically clobbering
3502 the flags register. */
3503 rtx_insn *after_md_seq = NULL;
3504 auto_vec<rtx> use_rvec;
3505 if (targetm.md_asm_adjust)
3506 after_md_seq
3507 = targetm.md_asm_adjust (output_rvec, input_rvec, input_mode,
3508 constraints, use_rvec, clobber_rvec,
3509 clobbered_regs, locus);
3510
3511 /* Do not allow the hook to change the output and input count,
3512 lest it mess up the operand numbering. */
3513 gcc_assert (output_rvec.length() == noutputs);
3514 gcc_assert (input_rvec.length() == ninputs);
3515 gcc_assert (constraints.length() == noutputs + ninputs);
3516
3517 /* But it certainly can adjust the uses and clobbers. */
3518 unsigned nuses = use_rvec.length ();
3519 unsigned nclobbers = clobber_rvec.length ();
3520
3521 /* Third pass checks for easy conflicts. */
3522 /* ??? Why are we doing this on trees instead of rtx. */
3523
3524 bool clobber_conflict_found = 0;
3525 for (i = 0; i < noutputs; ++i)
3526 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs, locus))
3527 clobber_conflict_found = 1;
3528 for (i = 0; i < ninputs - ninout; ++i)
3529 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs, locus))
3530 clobber_conflict_found = 1;
3531
3532 /* Make vectors for the expression-rtx, constraint strings,
3533 and named operands. */
3534
3535 rtvec argvec = rtvec_alloc (ninputs);
3536 rtvec constraintvec = rtvec_alloc (ninputs);
3537 rtvec labelvec = rtvec_alloc (nlabels);
3538
3539 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3540 : GET_MODE (output_rvec[0])),
3541 ggc_strdup (str),
3542 "", 0, argvec, constraintvec,
3543 labelvec, locus);
3544 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3545
3546 for (i = 0; i < ninputs; ++i)
3547 {
3548 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3549 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3550 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3551 constraints[i + noutputs],
3552 locus);
3553 }
3554
3555 /* Copy labels to the vector. */
3556 rtx_code_label *fallthru_label = NULL;
3557 if (nlabels > 0)
3558 {
3559 basic_block fallthru_bb = NULL;
3560 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3561 if (fallthru)
3562 fallthru_bb = fallthru->dest;
3563
3564 for (i = 0; i < nlabels; ++i)
3565 {
3566 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3567 rtx_insn *r;
3568 /* If asm goto has any labels in the fallthru basic block, use
3569 a label that we emit immediately after the asm goto. Expansion
3570 may insert further instructions into the same basic block after
3571 asm goto and if we don't do this, insertion of instructions on
3572 the fallthru edge might misbehave. See PR58670. */
3573 if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
3574 {
3575 if (fallthru_label == NULL_RTX)
3576 fallthru_label = gen_label_rtx ();
3577 r = fallthru_label;
3578 }
3579 else
3580 r = label_rtx (label);
3581 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3582 }
3583 }
3584
3585 /* Now, for each output, construct an rtx
3586 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3587 ARGVEC CONSTRAINTS OPNAMES))
3588 If there is more than one, put them inside a PARALLEL. */
3589
3590 if (noutputs == 0 && nuses == 0 && nclobbers == 0)
3591 {
3592 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3593 if (nlabels > 0)
3594 emit_jump_insn (body);
3595 else
3596 emit_insn (body);
3597 }
3598 else if (noutputs == 1 && nuses == 0 && nclobbers == 0)
3599 {
3600 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3601 if (nlabels > 0)
3602 emit_jump_insn (gen_rtx_SET (output_rvec[0], body));
3603 else
3604 emit_insn (gen_rtx_SET (output_rvec[0], body));
3605 }
3606 else
3607 {
3608 rtx obody = body;
3609 int num = noutputs;
3610
3611 if (num == 0)
3612 num = 1;
3613
3614 body = gen_rtx_PARALLEL (VOIDmode,
3615 rtvec_alloc (num + nuses + nclobbers));
3616
3617 /* For each output operand, store a SET. */
3618 for (i = 0; i < noutputs; ++i)
3619 {
3620 rtx src, o = output_rvec[i];
3621 if (i == 0)
3622 {
3623 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3624 src = obody;
3625 }
3626 else
3627 {
3628 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3629 ASM_OPERANDS_TEMPLATE (obody),
3630 constraints[i], i, argvec,
3631 constraintvec, labelvec, locus);
3632 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3633 }
3634 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3635 }
3636
3637 /* If there are no outputs (but there are some clobbers)
3638 store the bare ASM_OPERANDS into the PARALLEL. */
3639 if (i == 0)
3640 XVECEXP (body, 0, i++) = obody;
3641
3642 /* Add the uses specified by the target hook. No checking should
3643 be needed since this doesn't come directly from user code. */
3644 for (rtx use : use_rvec)
3645 XVECEXP (body, 0, i++) = gen_rtx_USE (VOIDmode, use);
3646
3647 /* Store (clobber REG) for each clobbered register specified. */
3648 for (unsigned j = 0; j < nclobbers; ++j)
3649 {
3650 rtx clobbered_reg = clobber_rvec[j];
3651
3652 /* Do sanity check for overlap between clobbers and respectively
3653 input and outputs that hasn't been handled. Such overlap
3654 should have been detected and reported above. */
3655 if (!clobber_conflict_found && REG_P (clobbered_reg))
3656 {
3657 /* We test the old body (obody) contents to avoid
3658 tripping over the under-construction body. */
3659 for (unsigned k = 0; k < noutputs; ++k)
3660 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3661 internal_error ("%<asm%> clobber conflict with "
3662 "output operand");
3663
3664 for (unsigned k = 0; k < ninputs - ninout; ++k)
3665 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3666 internal_error ("%<asm%> clobber conflict with "
3667 "input operand");
3668 }
3669
3670 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3671 }
3672
3673 if (nlabels > 0)
3674 emit_jump_insn (body);
3675 else
3676 emit_insn (body);
3677 }
3678
3679 generating_concat_p = old_generating_concat_p;
3680
3681 if (fallthru_label)
3682 emit_label (fallthru_label);
3683
3684 if (after_md_seq)
3685 emit_insn (after_md_seq);
3686 if (after_rtl_seq)
3687 {
3688 if (nlabels == 0)
3689 emit_insn (after_rtl_seq);
3690 else
3691 {
3692 edge e;
3693 edge_iterator ei;
3694 unsigned int cnt = EDGE_COUNT (gimple_bb (stmt)->succs);
3695
3696 FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
3697 {
3698 rtx_insn *copy;
3699 if (--cnt == 0)
3700 copy = after_rtl_seq;
3701 else
3702 {
3703 start_sequence ();
3704 duplicate_insn_chain (after_rtl_seq, after_rtl_end,
3705 NULL, NULL);
3706 copy = get_insns ();
3707 end_sequence ();
3708 }
3709 prepend_insn_to_edge (copy, e);
3710 }
3711 }
3712 }
3713
3714 free_temp_slots ();
3715 crtl->has_asm_statement = 1;
3716 }
3717
3718 /* Emit code to jump to the address
3719 specified by the pointer expression EXP. */
3720
3721 static void
3722 expand_computed_goto (tree exp)
3723 {
3724 rtx x = expand_normal (exp);
3725
3726 do_pending_stack_adjust ();
3727 emit_indirect_jump (x);
3728 }
3729
3730 /* Generate RTL code for a `goto' statement with target label LABEL.
3731 LABEL should be a LABEL_DECL tree node that was or will later be
3732 defined with `expand_label'. */
3733
3734 static void
3735 expand_goto (tree label)
3736 {
3737 if (flag_checking)
3738 {
3739 /* Check for a nonlocal goto to a containing function. Should have
3740 gotten translated to __builtin_nonlocal_goto. */
3741 tree context = decl_function_context (label);
3742 gcc_assert (!context || context == current_function_decl);
3743 }
3744
3745 emit_jump (jump_target_rtx (label));
3746 }
3747
3748 /* Output a return with no value. */
3749
3750 static void
3751 expand_null_return_1 (void)
3752 {
3753 clear_pending_stack_adjust ();
3754 do_pending_stack_adjust ();
3755 emit_jump (return_label);
3756 }
3757
3758 /* Generate RTL to return from the current function, with no value.
3759 (That is, we do not do anything about returning any value.) */
3760
3761 void
3762 expand_null_return (void)
3763 {
3764 /* If this function was declared to return a value, but we
3765 didn't, clobber the return registers so that they are not
3766 propagated live to the rest of the function. */
3767 clobber_return_register ();
3768
3769 expand_null_return_1 ();
3770 }
3771
3772 /* Generate RTL to return from the current function, with value VAL. */
3773
3774 static void
3775 expand_value_return (rtx val)
3776 {
3777 /* Copy the value to the return location unless it's already there. */
3778
3779 tree decl = DECL_RESULT (current_function_decl);
3780 rtx return_reg = DECL_RTL (decl);
3781 if (return_reg != val)
3782 {
3783 tree funtype = TREE_TYPE (current_function_decl);
3784 tree type = TREE_TYPE (decl);
3785 int unsignedp = TYPE_UNSIGNED (type);
3786 machine_mode old_mode = DECL_MODE (decl);
3787 machine_mode mode;
3788 if (DECL_BY_REFERENCE (decl))
3789 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3790 else
3791 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3792
3793 if (mode != old_mode)
3794 {
3795 /* Some ABIs require scalar floating point modes to be returned
3796 in a wider scalar integer mode. We need to explicitly
3797 reinterpret to an integer mode of the correct precision
3798 before extending to the desired result. */
3799 if (SCALAR_INT_MODE_P (mode)
3800 && SCALAR_FLOAT_MODE_P (old_mode)
3801 && known_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (old_mode)))
3802 val = convert_float_to_wider_int (mode, old_mode, val);
3803 else
3804 val = convert_modes (mode, old_mode, val, unsignedp);
3805 }
3806
3807 if (GET_CODE (return_reg) == PARALLEL)
3808 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3809 else
3810 emit_move_insn (return_reg, val);
3811 }
3812
3813 expand_null_return_1 ();
3814 }
3815
3816 /* Generate RTL to evaluate the expression RETVAL and return it
3817 from the current function. */
3818
3819 static void
3820 expand_return (tree retval)
3821 {
3822 rtx result_rtl;
3823 rtx val = 0;
3824 tree retval_rhs;
3825
3826 /* If function wants no value, give it none. */
3827 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
3828 {
3829 expand_normal (retval);
3830 expand_null_return ();
3831 return;
3832 }
3833
3834 if (retval == error_mark_node)
3835 {
3836 /* Treat this like a return of no value from a function that
3837 returns a value. */
3838 expand_null_return ();
3839 return;
3840 }
3841 else if ((TREE_CODE (retval) == MODIFY_EXPR
3842 || TREE_CODE (retval) == INIT_EXPR)
3843 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3844 retval_rhs = TREE_OPERAND (retval, 1);
3845 else
3846 retval_rhs = retval;
3847
3848 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3849
3850 /* If we are returning the RESULT_DECL, then the value has already
3851 been stored into it, so we don't have to do anything special. */
3852 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3853 expand_value_return (result_rtl);
3854
3855 /* If the result is an aggregate that is being returned in one (or more)
3856 registers, load the registers here. */
3857
3858 else if (retval_rhs != 0
3859 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3860 && REG_P (result_rtl))
3861 {
3862 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3863 if (val)
3864 {
3865 /* Use the mode of the result value on the return register. */
3866 PUT_MODE (result_rtl, GET_MODE (val));
3867 expand_value_return (val);
3868 }
3869 else
3870 expand_null_return ();
3871 }
3872 else if (retval_rhs != 0
3873 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3874 && (REG_P (result_rtl)
3875 || (GET_CODE (result_rtl) == PARALLEL)))
3876 {
3877 /* Compute the return value into a temporary (usually a pseudo reg). */
3878 val
3879 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3880 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3881 val = force_not_mem (val);
3882 expand_value_return (val);
3883 }
3884 else
3885 {
3886 /* No hard reg used; calculate value into hard return reg. */
3887 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3888 expand_value_return (result_rtl);
3889 }
3890 }
3891
3892 /* Expand a clobber of LHS. If LHS is stored it in a multi-part
3893 register, tell the rtl optimizers that its value is no longer
3894 needed. */
3895
3896 static void
3897 expand_clobber (tree lhs)
3898 {
3899 if (DECL_P (lhs))
3900 {
3901 rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3902 if (decl_rtl && REG_P (decl_rtl))
3903 {
3904 machine_mode decl_mode = GET_MODE (decl_rtl);
3905 if (maybe_gt (GET_MODE_SIZE (decl_mode),
3906 REGMODE_NATURAL_SIZE (decl_mode)))
3907 emit_clobber (decl_rtl);
3908 }
3909 }
3910 }
3911
3912 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3913 STMT that doesn't require special handling for outgoing edges. That
3914 is no tailcalls and no GIMPLE_COND. */
3915
3916 static void
3917 expand_gimple_stmt_1 (gimple *stmt)
3918 {
3919 tree op0;
3920
3921 set_curr_insn_location (gimple_location (stmt));
3922
3923 switch (gimple_code (stmt))
3924 {
3925 case GIMPLE_GOTO:
3926 op0 = gimple_goto_dest (stmt);
3927 if (TREE_CODE (op0) == LABEL_DECL)
3928 expand_goto (op0);
3929 else
3930 expand_computed_goto (op0);
3931 break;
3932 case GIMPLE_LABEL:
3933 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3934 break;
3935 case GIMPLE_NOP:
3936 case GIMPLE_PREDICT:
3937 break;
3938 case GIMPLE_SWITCH:
3939 {
3940 gswitch *swtch = as_a <gswitch *> (stmt);
3941 if (gimple_switch_num_labels (swtch) == 1)
3942 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3943 else
3944 expand_case (swtch);
3945 }
3946 break;
3947 case GIMPLE_ASM:
3948 expand_asm_stmt (as_a <gasm *> (stmt));
3949 break;
3950 case GIMPLE_CALL:
3951 expand_call_stmt (as_a <gcall *> (stmt));
3952 break;
3953
3954 case GIMPLE_RETURN:
3955 {
3956 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3957
3958 /* If a return doesn't have a location, it very likely represents
3959 multiple user returns so we cannot let it inherit the location
3960 of the last statement of the previous basic block in RTL. */
3961 if (!gimple_has_location (stmt))
3962 set_curr_insn_location (cfun->function_end_locus);
3963
3964 if (op0 && op0 != error_mark_node)
3965 {
3966 tree result = DECL_RESULT (current_function_decl);
3967
3968 /* If we are not returning the current function's RESULT_DECL,
3969 build an assignment to it. */
3970 if (op0 != result)
3971 {
3972 /* I believe that a function's RESULT_DECL is unique. */
3973 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3974
3975 /* ??? We'd like to use simply expand_assignment here,
3976 but this fails if the value is of BLKmode but the return
3977 decl is a register. expand_return has special handling
3978 for this combination, which eventually should move
3979 to common code. See comments there. Until then, let's
3980 build a modify expression :-/ */
3981 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3982 result, op0);
3983 }
3984 }
3985
3986 if (!op0)
3987 expand_null_return ();
3988 else
3989 expand_return (op0);
3990 }
3991 break;
3992
3993 case GIMPLE_ASSIGN:
3994 {
3995 gassign *assign_stmt = as_a <gassign *> (stmt);
3996 tree lhs = gimple_assign_lhs (assign_stmt);
3997
3998 /* Tree expand used to fiddle with |= and &= of two bitfield
3999 COMPONENT_REFs here. This can't happen with gimple, the LHS
4000 of binary assigns must be a gimple reg. */
4001
4002 if (TREE_CODE (lhs) != SSA_NAME
4003 || gimple_assign_rhs_class (assign_stmt) == GIMPLE_SINGLE_RHS)
4004 {
4005 tree rhs = gimple_assign_rhs1 (assign_stmt);
4006 gcc_assert (gimple_assign_rhs_class (assign_stmt)
4007 == GIMPLE_SINGLE_RHS);
4008 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
4009 /* Do not put locations on possibly shared trees. */
4010 && !is_gimple_min_invariant (rhs))
4011 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
4012 if (TREE_CLOBBER_P (rhs))
4013 /* This is a clobber to mark the going out of scope for
4014 this LHS. */
4015 expand_clobber (lhs);
4016 else
4017 expand_assignment (lhs, rhs,
4018 gimple_assign_nontemporal_move_p (
4019 assign_stmt));
4020 }
4021 else
4022 {
4023 rtx target, temp;
4024 gcc_assert (!gimple_assign_nontemporal_move_p (assign_stmt));
4025 bool promoted = false;
4026
4027 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
4028 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4029 promoted = true;
4030
4031 /* If we store into a promoted register, don't directly
4032 expand to target. */
4033 temp = promoted ? NULL_RTX : target;
4034 temp = expand_expr_real_gassign (assign_stmt, temp,
4035 GET_MODE (target), EXPAND_NORMAL);
4036
4037 if (temp == target)
4038 ;
4039 else if (promoted)
4040 {
4041 int unsignedp = SUBREG_PROMOTED_SIGN (target);
4042 /* If TEMP is a VOIDmode constant, use convert_modes to make
4043 sure that we properly convert it. */
4044 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4045 {
4046 temp = convert_modes (GET_MODE (target),
4047 TYPE_MODE (TREE_TYPE (lhs)),
4048 temp, unsignedp);
4049 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4050 GET_MODE (target), temp, unsignedp);
4051 }
4052
4053 convert_move (SUBREG_REG (target), temp, unsignedp);
4054 }
4055 else
4056 {
4057 temp = force_operand (temp, target);
4058 if (temp != target)
4059 emit_move_insn (target, temp);
4060 }
4061 }
4062 }
4063 break;
4064
4065 default:
4066 gcc_unreachable ();
4067 }
4068 }
4069
4070 /* Expand one gimple statement STMT and return the last RTL instruction
4071 before any of the newly generated ones.
4072
4073 In addition to generating the necessary RTL instructions this also
4074 sets REG_EH_REGION notes if necessary and sets the current source
4075 location for diagnostics. */
4076
4077 static rtx_insn *
4078 expand_gimple_stmt (gimple *stmt)
4079 {
4080 location_t saved_location = input_location;
4081 rtx_insn *last = get_last_insn ();
4082 int lp_nr;
4083
4084 gcc_assert (cfun);
4085
4086 /* We need to save and restore the current source location so that errors
4087 discovered during expansion are emitted with the right location. But
4088 it would be better if the diagnostic routines used the source location
4089 embedded in the tree nodes rather than globals. */
4090 if (gimple_has_location (stmt))
4091 input_location = gimple_location (stmt);
4092
4093 expand_gimple_stmt_1 (stmt);
4094
4095 /* Free any temporaries used to evaluate this statement. */
4096 free_temp_slots ();
4097
4098 input_location = saved_location;
4099
4100 /* Mark all insns that may trap. */
4101 lp_nr = lookup_stmt_eh_lp (stmt);
4102 if (lp_nr)
4103 {
4104 rtx_insn *insn;
4105 for (insn = next_real_insn (last); insn;
4106 insn = next_real_insn (insn))
4107 {
4108 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
4109 /* If we want exceptions for non-call insns, any
4110 may_trap_p instruction may throw. */
4111 && GET_CODE (PATTERN (insn)) != CLOBBER
4112 && GET_CODE (PATTERN (insn)) != USE
4113 && insn_could_throw_p (insn))
4114 make_reg_eh_region_note (insn, 0, lp_nr);
4115 }
4116 }
4117
4118 return last;
4119 }
4120
4121 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
4122 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
4123 generated a tail call (something that might be denied by the ABI
4124 rules governing the call; see calls.cc).
4125
4126 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
4127 can still reach the rest of BB. The case here is __builtin_sqrt,
4128 where the NaN result goes through the external function (with a
4129 tailcall) and the normal result happens via a sqrt instruction. */
4130
4131 static basic_block
4132 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
4133 {
4134 rtx_insn *last2, *last;
4135 edge e;
4136 edge_iterator ei;
4137 profile_probability probability;
4138
4139 last2 = last = expand_gimple_stmt (stmt);
4140
4141 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
4142 if (CALL_P (last) && SIBLING_CALL_P (last))
4143 goto found;
4144
4145 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
4146
4147 *can_fallthru = true;
4148 return NULL;
4149
4150 found:
4151 /* ??? Wouldn't it be better to just reset any pending stack adjust?
4152 Any instructions emitted here are about to be deleted. */
4153 do_pending_stack_adjust ();
4154
4155 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
4156 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
4157 EH or abnormal edges, we shouldn't have created a tail call in
4158 the first place. So it seems to me we should just be removing
4159 all edges here, or redirecting the existing fallthru edge to
4160 the exit block. */
4161
4162 probability = profile_probability::never ();
4163
4164 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4165 {
4166 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
4167 {
4168 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
4169 e->dest->count -= e->count ();
4170 probability += e->probability;
4171 remove_edge (e);
4172 }
4173 else
4174 ei_next (&ei);
4175 }
4176
4177 /* This is somewhat ugly: the call_expr expander often emits instructions
4178 after the sibcall (to perform the function return). These confuse the
4179 find_many_sub_basic_blocks code, so we need to get rid of these. */
4180 last = NEXT_INSN (last);
4181 gcc_assert (BARRIER_P (last));
4182
4183 *can_fallthru = false;
4184 while (NEXT_INSN (last))
4185 {
4186 /* For instance an sqrt builtin expander expands if with
4187 sibcall in the then and label for `else`. */
4188 if (LABEL_P (NEXT_INSN (last)))
4189 {
4190 *can_fallthru = true;
4191 break;
4192 }
4193 delete_insn (NEXT_INSN (last));
4194 }
4195
4196 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
4197 | EDGE_SIBCALL);
4198 e->probability = probability;
4199 BB_END (bb) = last;
4200 update_bb_for_insn (bb);
4201
4202 if (NEXT_INSN (last))
4203 {
4204 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
4205
4206 last = BB_END (bb);
4207 if (BARRIER_P (last))
4208 BB_END (bb) = PREV_INSN (last);
4209 }
4210
4211 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
4212
4213 return bb;
4214 }
4215
4216 /* Return the difference between the floor and the truncated result of
4217 a signed division by OP1 with remainder MOD. */
4218 static rtx
4219 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4220 {
4221 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
4222 return gen_rtx_IF_THEN_ELSE
4223 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4224 gen_rtx_IF_THEN_ELSE
4225 (mode, gen_rtx_LT (BImode,
4226 gen_rtx_DIV (mode, op1, mod),
4227 const0_rtx),
4228 constm1_rtx, const0_rtx),
4229 const0_rtx);
4230 }
4231
4232 /* Return the difference between the ceil and the truncated result of
4233 a signed division by OP1 with remainder MOD. */
4234 static rtx
4235 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4236 {
4237 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
4238 return gen_rtx_IF_THEN_ELSE
4239 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4240 gen_rtx_IF_THEN_ELSE
4241 (mode, gen_rtx_GT (BImode,
4242 gen_rtx_DIV (mode, op1, mod),
4243 const0_rtx),
4244 const1_rtx, const0_rtx),
4245 const0_rtx);
4246 }
4247
4248 /* Return the difference between the ceil and the truncated result of
4249 an unsigned division by OP1 with remainder MOD. */
4250 static rtx
4251 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
4252 {
4253 /* (mod != 0 ? 1 : 0) */
4254 return gen_rtx_IF_THEN_ELSE
4255 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4256 const1_rtx, const0_rtx);
4257 }
4258
4259 /* Return the difference between the rounded and the truncated result
4260 of a signed division by OP1 with remainder MOD. Halfway cases are
4261 rounded away from zero, rather than to the nearest even number. */
4262 static rtx
4263 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4264 {
4265 /* (abs (mod) >= abs (op1) - abs (mod)
4266 ? (op1 / mod > 0 ? 1 : -1)
4267 : 0) */
4268 return gen_rtx_IF_THEN_ELSE
4269 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
4270 gen_rtx_MINUS (mode,
4271 gen_rtx_ABS (mode, op1),
4272 gen_rtx_ABS (mode, mod))),
4273 gen_rtx_IF_THEN_ELSE
4274 (mode, gen_rtx_GT (BImode,
4275 gen_rtx_DIV (mode, op1, mod),
4276 const0_rtx),
4277 const1_rtx, constm1_rtx),
4278 const0_rtx);
4279 }
4280
4281 /* Return the difference between the rounded and the truncated result
4282 of a unsigned division by OP1 with remainder MOD. Halfway cases
4283 are rounded away from zero, rather than to the nearest even
4284 number. */
4285 static rtx
4286 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4287 {
4288 /* (mod >= op1 - mod ? 1 : 0) */
4289 return gen_rtx_IF_THEN_ELSE
4290 (mode, gen_rtx_GE (BImode, mod,
4291 gen_rtx_MINUS (mode, op1, mod)),
4292 const1_rtx, const0_rtx);
4293 }
4294
4295 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
4296 any rtl. */
4297
4298 static rtx
4299 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4300 addr_space_t as)
4301 {
4302 #ifndef POINTERS_EXTEND_UNSIGNED
4303 gcc_assert (mode == Pmode
4304 || mode == targetm.addr_space.address_mode (as));
4305 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4306 #else
4307 rtx temp;
4308
4309 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4310
4311 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4312 return x;
4313
4314 /* X must have some form of address mode already. */
4315 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4316 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4317 x = lowpart_subreg (mode, x, xmode);
4318 else if (POINTERS_EXTEND_UNSIGNED > 0)
4319 x = gen_rtx_ZERO_EXTEND (mode, x);
4320 else if (!POINTERS_EXTEND_UNSIGNED)
4321 x = gen_rtx_SIGN_EXTEND (mode, x);
4322 else
4323 {
4324 switch (GET_CODE (x))
4325 {
4326 case SUBREG:
4327 if ((SUBREG_PROMOTED_VAR_P (x)
4328 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4329 || (GET_CODE (SUBREG_REG (x)) == PLUS
4330 && REG_P (XEXP (SUBREG_REG (x), 0))
4331 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4332 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4333 && GET_MODE (SUBREG_REG (x)) == mode)
4334 return SUBREG_REG (x);
4335 break;
4336 case LABEL_REF:
4337 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4338 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4339 return temp;
4340 case SYMBOL_REF:
4341 temp = shallow_copy_rtx (x);
4342 PUT_MODE (temp, mode);
4343 return temp;
4344 case CONST:
4345 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4346 if (temp)
4347 temp = gen_rtx_CONST (mode, temp);
4348 return temp;
4349 case PLUS:
4350 case MINUS:
4351 if (CONST_INT_P (XEXP (x, 1)))
4352 {
4353 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4354 if (temp)
4355 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4356 }
4357 break;
4358 default:
4359 break;
4360 }
4361 /* Don't know how to express ptr_extend as operation in debug info. */
4362 return NULL;
4363 }
4364 #endif /* POINTERS_EXTEND_UNSIGNED */
4365
4366 return x;
4367 }
4368
4369 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4370 by avoid_deep_ter_for_debug. */
4371
4372 static hash_map<tree, tree> *deep_ter_debug_map;
4373
4374 /* Split too deep TER chains for debug stmts using debug temporaries. */
4375
4376 static void
4377 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4378 {
4379 use_operand_p use_p;
4380 ssa_op_iter iter;
4381 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4382 {
4383 tree use = USE_FROM_PTR (use_p);
4384 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4385 continue;
4386 gimple *g = get_gimple_for_ssa_name (use);
4387 if (g == NULL)
4388 continue;
4389 if (depth > 6 && !stmt_ends_bb_p (g))
4390 {
4391 if (deep_ter_debug_map == NULL)
4392 deep_ter_debug_map = new hash_map<tree, tree>;
4393
4394 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4395 if (vexpr != NULL)
4396 continue;
4397 vexpr = build_debug_expr_decl (TREE_TYPE (use));
4398 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4399 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4400 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4401 avoid_deep_ter_for_debug (def_temp, 0);
4402 }
4403 else
4404 avoid_deep_ter_for_debug (g, depth + 1);
4405 }
4406 }
4407
4408 /* Return an RTX equivalent to the value of the parameter DECL. */
4409
4410 static rtx
4411 expand_debug_parm_decl (tree decl)
4412 {
4413 rtx incoming = DECL_INCOMING_RTL (decl);
4414
4415 if (incoming
4416 && GET_MODE (incoming) != BLKmode
4417 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4418 || (MEM_P (incoming)
4419 && REG_P (XEXP (incoming, 0))
4420 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4421 {
4422 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4423
4424 #ifdef HAVE_window_save
4425 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4426 If the target machine has an explicit window save instruction, the
4427 actual entry value is the corresponding OUTGOING_REGNO instead. */
4428 if (REG_P (incoming)
4429 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4430 incoming
4431 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4432 OUTGOING_REGNO (REGNO (incoming)), 0);
4433 else if (MEM_P (incoming))
4434 {
4435 rtx reg = XEXP (incoming, 0);
4436 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4437 {
4438 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4439 incoming = replace_equiv_address_nv (incoming, reg);
4440 }
4441 else
4442 incoming = copy_rtx (incoming);
4443 }
4444 #endif
4445
4446 ENTRY_VALUE_EXP (rtl) = incoming;
4447 return rtl;
4448 }
4449
4450 if (incoming
4451 && GET_MODE (incoming) != BLKmode
4452 && !TREE_ADDRESSABLE (decl)
4453 && MEM_P (incoming)
4454 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4455 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4456 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4457 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4458 return copy_rtx (incoming);
4459
4460 return NULL_RTX;
4461 }
4462
4463 /* Return an RTX equivalent to the value of the tree expression EXP. */
4464
4465 static rtx
4466 expand_debug_expr (tree exp)
4467 {
4468 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4469 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4470 machine_mode inner_mode = VOIDmode;
4471 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4472 addr_space_t as;
4473 scalar_int_mode op0_mode, op1_mode, addr_mode;
4474
4475 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4476 {
4477 case tcc_expression:
4478 switch (TREE_CODE (exp))
4479 {
4480 case COND_EXPR:
4481 case DOT_PROD_EXPR:
4482 case SAD_EXPR:
4483 case WIDEN_MULT_PLUS_EXPR:
4484 case WIDEN_MULT_MINUS_EXPR:
4485 goto ternary;
4486
4487 case TRUTH_ANDIF_EXPR:
4488 case TRUTH_ORIF_EXPR:
4489 case TRUTH_AND_EXPR:
4490 case TRUTH_OR_EXPR:
4491 case TRUTH_XOR_EXPR:
4492 goto binary;
4493
4494 case TRUTH_NOT_EXPR:
4495 goto unary;
4496
4497 default:
4498 break;
4499 }
4500 break;
4501
4502 ternary:
4503 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4504 if (!op2)
4505 return NULL_RTX;
4506 /* Fall through. */
4507
4508 binary:
4509 case tcc_binary:
4510 if (mode == BLKmode)
4511 return NULL_RTX;
4512 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4513 if (!op1)
4514 return NULL_RTX;
4515 switch (TREE_CODE (exp))
4516 {
4517 case LSHIFT_EXPR:
4518 case RSHIFT_EXPR:
4519 case LROTATE_EXPR:
4520 case RROTATE_EXPR:
4521 case WIDEN_LSHIFT_EXPR:
4522 /* Ensure second operand isn't wider than the first one. */
4523 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4524 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4525 && (GET_MODE_UNIT_PRECISION (mode)
4526 < GET_MODE_PRECISION (op1_mode)))
4527 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4528 break;
4529 default:
4530 break;
4531 }
4532 /* Fall through. */
4533
4534 unary:
4535 case tcc_unary:
4536 if (mode == BLKmode)
4537 return NULL_RTX;
4538 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4539 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4540 if (!op0)
4541 return NULL_RTX;
4542 break;
4543
4544 case tcc_comparison:
4545 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4546 goto binary;
4547
4548 case tcc_type:
4549 case tcc_statement:
4550 gcc_unreachable ();
4551
4552 case tcc_constant:
4553 case tcc_exceptional:
4554 case tcc_declaration:
4555 case tcc_reference:
4556 case tcc_vl_exp:
4557 break;
4558 }
4559
4560 switch (TREE_CODE (exp))
4561 {
4562 case STRING_CST:
4563 if (!lookup_constant_def (exp))
4564 {
4565 if (strlen (TREE_STRING_POINTER (exp)) + 1
4566 != (size_t) TREE_STRING_LENGTH (exp))
4567 return NULL_RTX;
4568 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4569 op0 = gen_rtx_MEM (BLKmode, op0);
4570 set_mem_attributes (op0, exp, 0);
4571 return op0;
4572 }
4573 /* Fall through. */
4574
4575 case INTEGER_CST:
4576 if (TREE_CODE (TREE_TYPE (exp)) == BITINT_TYPE
4577 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4578 return NULL;
4579 /* FALLTHRU */
4580 case REAL_CST:
4581 case FIXED_CST:
4582 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4583 return op0;
4584
4585 case POLY_INT_CST:
4586 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4587
4588 case COMPLEX_CST:
4589 gcc_assert (COMPLEX_MODE_P (mode));
4590 op0 = expand_debug_expr (TREE_REALPART (exp));
4591 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4592 return gen_rtx_CONCAT (mode, op0, op1);
4593
4594 case DEBUG_EXPR_DECL:
4595 op0 = DECL_RTL_IF_SET (exp);
4596
4597 if (op0)
4598 {
4599 if (GET_MODE (op0) != mode)
4600 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (exp)));
4601 else
4602 return op0;
4603 }
4604
4605 op0 = gen_rtx_DEBUG_EXPR (mode);
4606 DEBUG_EXPR_TREE_DECL (op0) = exp;
4607 SET_DECL_RTL (exp, op0);
4608
4609 return op0;
4610
4611 case VAR_DECL:
4612 case PARM_DECL:
4613 case FUNCTION_DECL:
4614 case LABEL_DECL:
4615 case CONST_DECL:
4616 case RESULT_DECL:
4617 op0 = DECL_RTL_IF_SET (exp);
4618
4619 /* This decl was probably optimized away. */
4620 if (!op0
4621 /* At least label RTXen are sometimes replaced by
4622 NOTE_INSN_DELETED_LABEL. Any notes here are not
4623 handled by copy_rtx. */
4624 || NOTE_P (op0))
4625 {
4626 if (!VAR_P (exp)
4627 || DECL_EXTERNAL (exp)
4628 || !TREE_STATIC (exp)
4629 || !DECL_NAME (exp)
4630 || DECL_HARD_REGISTER (exp)
4631 || DECL_IN_CONSTANT_POOL (exp)
4632 || mode == VOIDmode
4633 || symtab_node::get (exp) == NULL)
4634 return NULL;
4635
4636 op0 = make_decl_rtl_for_debug (exp);
4637 if (!MEM_P (op0)
4638 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4639 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4640 return NULL;
4641 }
4642 else if (VAR_P (exp)
4643 && is_global_var (exp)
4644 && symtab_node::get (exp) == NULL)
4645 return NULL;
4646 else
4647 op0 = copy_rtx (op0);
4648
4649 if (GET_MODE (op0) == BLKmode
4650 /* If op0 is not BLKmode, but mode is, adjust_mode
4651 below would ICE. While it is likely a FE bug,
4652 try to be robust here. See PR43166. */
4653 || mode == BLKmode
4654 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4655 {
4656 gcc_assert (MEM_P (op0));
4657 op0 = adjust_address_nv (op0, mode, 0);
4658 return op0;
4659 }
4660
4661 /* Fall through. */
4662
4663 adjust_mode:
4664 case PAREN_EXPR:
4665 CASE_CONVERT:
4666 {
4667 inner_mode = GET_MODE (op0);
4668
4669 if (mode == inner_mode)
4670 return op0;
4671
4672 if (inner_mode == VOIDmode)
4673 {
4674 if (TREE_CODE (exp) == SSA_NAME)
4675 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4676 else
4677 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4678 if (mode == inner_mode)
4679 return op0;
4680 }
4681
4682 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4683 {
4684 if (GET_MODE_UNIT_BITSIZE (mode)
4685 == GET_MODE_UNIT_BITSIZE (inner_mode))
4686 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4687 else if (GET_MODE_UNIT_BITSIZE (mode)
4688 < GET_MODE_UNIT_BITSIZE (inner_mode))
4689 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4690 else
4691 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4692 }
4693 else if (FLOAT_MODE_P (mode))
4694 {
4695 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4696 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4697 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4698 else
4699 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4700 }
4701 else if (FLOAT_MODE_P (inner_mode))
4702 {
4703 if (unsignedp)
4704 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4705 else
4706 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4707 }
4708 else if (GET_MODE_UNIT_PRECISION (mode)
4709 == GET_MODE_UNIT_PRECISION (inner_mode))
4710 op0 = lowpart_subreg (mode, op0, inner_mode);
4711 else if (GET_MODE_UNIT_PRECISION (mode)
4712 < GET_MODE_UNIT_PRECISION (inner_mode))
4713 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4714 else if (UNARY_CLASS_P (exp)
4715 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4716 : unsignedp)
4717 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4718 else
4719 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4720
4721 return op0;
4722 }
4723
4724 case MEM_REF:
4725 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4726 {
4727 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4728 TREE_OPERAND (exp, 0),
4729 TREE_OPERAND (exp, 1));
4730 if (newexp)
4731 return expand_debug_expr (newexp);
4732 }
4733 /* FALLTHROUGH */
4734 case INDIRECT_REF:
4735 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4736 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4737 if (!op0)
4738 return NULL;
4739
4740 if (TREE_CODE (exp) == MEM_REF)
4741 {
4742 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4743 || (GET_CODE (op0) == PLUS
4744 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4745 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4746 Instead just use get_inner_reference. */
4747 goto component_ref;
4748
4749 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4750 poly_int64 offset;
4751 if (!op1 || !poly_int_rtx_p (op1, &offset))
4752 return NULL;
4753
4754 op0 = plus_constant (inner_mode, op0, offset);
4755 }
4756
4757 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4758
4759 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4760 op0, as);
4761 if (op0 == NULL_RTX)
4762 return NULL;
4763
4764 op0 = gen_rtx_MEM (mode, op0);
4765 set_mem_attributes (op0, exp, 0);
4766 if (TREE_CODE (exp) == MEM_REF
4767 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4768 set_mem_expr (op0, NULL_TREE);
4769 set_mem_addr_space (op0, as);
4770
4771 return op0;
4772
4773 case TARGET_MEM_REF:
4774 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4775 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4776 return NULL;
4777
4778 op0 = expand_debug_expr
4779 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4780 if (!op0)
4781 return NULL;
4782
4783 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4784 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4785 op0, as);
4786 if (op0 == NULL_RTX)
4787 return NULL;
4788
4789 op0 = gen_rtx_MEM (mode, op0);
4790
4791 set_mem_attributes (op0, exp, 0);
4792 set_mem_addr_space (op0, as);
4793
4794 return op0;
4795
4796 component_ref:
4797 case ARRAY_REF:
4798 case ARRAY_RANGE_REF:
4799 case COMPONENT_REF:
4800 case BIT_FIELD_REF:
4801 case REALPART_EXPR:
4802 case IMAGPART_EXPR:
4803 case VIEW_CONVERT_EXPR:
4804 {
4805 machine_mode mode1;
4806 poly_int64 bitsize, bitpos;
4807 tree offset;
4808 int reversep, volatilep = 0;
4809 tree tem
4810 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4811 &unsignedp, &reversep, &volatilep);
4812 rtx orig_op0;
4813
4814 if (known_eq (bitsize, 0))
4815 return NULL;
4816
4817 orig_op0 = op0 = expand_debug_expr (tem);
4818
4819 if (!op0)
4820 return NULL;
4821
4822 if (offset)
4823 {
4824 machine_mode addrmode, offmode;
4825
4826 if (!MEM_P (op0))
4827 return NULL;
4828
4829 op0 = XEXP (op0, 0);
4830 addrmode = GET_MODE (op0);
4831 if (addrmode == VOIDmode)
4832 addrmode = Pmode;
4833
4834 op1 = expand_debug_expr (offset);
4835 if (!op1)
4836 return NULL;
4837
4838 offmode = GET_MODE (op1);
4839 if (offmode == VOIDmode)
4840 offmode = TYPE_MODE (TREE_TYPE (offset));
4841
4842 if (addrmode != offmode)
4843 op1 = lowpart_subreg (addrmode, op1, offmode);
4844
4845 /* Don't use offset_address here, we don't need a
4846 recognizable address, and we don't want to generate
4847 code. */
4848 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4849 op0, op1));
4850 }
4851
4852 if (MEM_P (op0))
4853 {
4854 if (mode1 == VOIDmode)
4855 {
4856 if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4857 return NULL;
4858 /* Bitfield. */
4859 mode1 = smallest_int_mode_for_size (bitsize);
4860 }
4861 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4862 if (maybe_ne (bytepos, 0))
4863 {
4864 op0 = adjust_address_nv (op0, mode1, bytepos);
4865 bitpos = num_trailing_bits (bitpos);
4866 }
4867 else if (known_eq (bitpos, 0)
4868 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4869 op0 = adjust_address_nv (op0, mode, 0);
4870 else if (GET_MODE (op0) != mode1)
4871 op0 = adjust_address_nv (op0, mode1, 0);
4872 else
4873 op0 = copy_rtx (op0);
4874 if (op0 == orig_op0)
4875 op0 = shallow_copy_rtx (op0);
4876 if (TREE_CODE (tem) != SSA_NAME)
4877 set_mem_attributes (op0, exp, 0);
4878 }
4879
4880 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4881 return op0;
4882
4883 if (maybe_lt (bitpos, 0))
4884 return NULL;
4885
4886 if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4887 return NULL;
4888
4889 poly_int64 bytepos;
4890 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4891 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4892 {
4893 machine_mode opmode = GET_MODE (op0);
4894
4895 if (opmode == VOIDmode)
4896 opmode = TYPE_MODE (TREE_TYPE (tem));
4897
4898 /* This condition may hold if we're expanding the address
4899 right past the end of an array that turned out not to
4900 be addressable (i.e., the address was only computed in
4901 debug stmts). The gen_subreg below would rightfully
4902 crash, and the address doesn't really exist, so just
4903 drop it. */
4904 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4905 return NULL;
4906
4907 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4908 return simplify_gen_subreg (mode, op0, opmode, bytepos);
4909 }
4910
4911 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4912 && TYPE_UNSIGNED (TREE_TYPE (exp))
4913 ? SIGN_EXTRACT
4914 : ZERO_EXTRACT, mode,
4915 GET_MODE (op0) != VOIDmode
4916 ? GET_MODE (op0)
4917 : TYPE_MODE (TREE_TYPE (tem)),
4918 op0, gen_int_mode (bitsize, word_mode),
4919 gen_int_mode (bitpos, word_mode));
4920 }
4921
4922 case ABS_EXPR:
4923 case ABSU_EXPR:
4924 return simplify_gen_unary (ABS, mode, op0, mode);
4925
4926 case NEGATE_EXPR:
4927 return simplify_gen_unary (NEG, mode, op0, mode);
4928
4929 case BIT_NOT_EXPR:
4930 return simplify_gen_unary (NOT, mode, op0, mode);
4931
4932 case FLOAT_EXPR:
4933 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4934 0)))
4935 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4936 inner_mode);
4937
4938 case FIX_TRUNC_EXPR:
4939 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4940 inner_mode);
4941
4942 case POINTER_PLUS_EXPR:
4943 /* For the rare target where pointers are not the same size as
4944 size_t, we need to check for mis-matched modes and correct
4945 the addend. */
4946 if (op0 && op1
4947 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4948 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4949 && op0_mode != op1_mode)
4950 {
4951 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4952 /* If OP0 is a partial mode, then we must truncate, even
4953 if it has the same bitsize as OP1 as GCC's
4954 representation of partial modes is opaque. */
4955 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4956 && (GET_MODE_BITSIZE (op0_mode)
4957 == GET_MODE_BITSIZE (op1_mode))))
4958 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4959 else
4960 /* We always sign-extend, regardless of the signedness of
4961 the operand, because the operand is always unsigned
4962 here even if the original C expression is signed. */
4963 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4964 }
4965 /* Fall through. */
4966 case PLUS_EXPR:
4967 return simplify_gen_binary (PLUS, mode, op0, op1);
4968
4969 case MINUS_EXPR:
4970 case POINTER_DIFF_EXPR:
4971 return simplify_gen_binary (MINUS, mode, op0, op1);
4972
4973 case MULT_EXPR:
4974 return simplify_gen_binary (MULT, mode, op0, op1);
4975
4976 case RDIV_EXPR:
4977 case TRUNC_DIV_EXPR:
4978 case EXACT_DIV_EXPR:
4979 if (unsignedp)
4980 return simplify_gen_binary (UDIV, mode, op0, op1);
4981 else
4982 return simplify_gen_binary (DIV, mode, op0, op1);
4983
4984 case TRUNC_MOD_EXPR:
4985 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4986
4987 case FLOOR_DIV_EXPR:
4988 if (unsignedp)
4989 return simplify_gen_binary (UDIV, mode, op0, op1);
4990 else
4991 {
4992 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4993 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4994 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4995 return simplify_gen_binary (PLUS, mode, div, adj);
4996 }
4997
4998 case FLOOR_MOD_EXPR:
4999 if (unsignedp)
5000 return simplify_gen_binary (UMOD, mode, op0, op1);
5001 else
5002 {
5003 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
5004 rtx adj = floor_sdiv_adjust (mode, mod, op1);
5005 adj = simplify_gen_unary (NEG, mode,
5006 simplify_gen_binary (MULT, mode, adj, op1),
5007 mode);
5008 return simplify_gen_binary (PLUS, mode, mod, adj);
5009 }
5010
5011 case CEIL_DIV_EXPR:
5012 if (unsignedp)
5013 {
5014 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
5015 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
5016 rtx adj = ceil_udiv_adjust (mode, mod, op1);
5017 return simplify_gen_binary (PLUS, mode, div, adj);
5018 }
5019 else
5020 {
5021 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
5022 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
5023 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
5024 return simplify_gen_binary (PLUS, mode, div, adj);
5025 }
5026
5027 case CEIL_MOD_EXPR:
5028 if (unsignedp)
5029 {
5030 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
5031 rtx adj = ceil_udiv_adjust (mode, mod, op1);
5032 adj = simplify_gen_unary (NEG, mode,
5033 simplify_gen_binary (MULT, mode, adj, op1),
5034 mode);
5035 return simplify_gen_binary (PLUS, mode, mod, adj);
5036 }
5037 else
5038 {
5039 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
5040 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
5041 adj = simplify_gen_unary (NEG, mode,
5042 simplify_gen_binary (MULT, mode, adj, op1),
5043 mode);
5044 return simplify_gen_binary (PLUS, mode, mod, adj);
5045 }
5046
5047 case ROUND_DIV_EXPR:
5048 if (unsignedp)
5049 {
5050 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
5051 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
5052 rtx adj = round_udiv_adjust (mode, mod, op1);
5053 return simplify_gen_binary (PLUS, mode, div, adj);
5054 }
5055 else
5056 {
5057 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
5058 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
5059 rtx adj = round_sdiv_adjust (mode, mod, op1);
5060 return simplify_gen_binary (PLUS, mode, div, adj);
5061 }
5062
5063 case ROUND_MOD_EXPR:
5064 if (unsignedp)
5065 {
5066 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
5067 rtx adj = round_udiv_adjust (mode, mod, op1);
5068 adj = simplify_gen_unary (NEG, mode,
5069 simplify_gen_binary (MULT, mode, adj, op1),
5070 mode);
5071 return simplify_gen_binary (PLUS, mode, mod, adj);
5072 }
5073 else
5074 {
5075 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
5076 rtx adj = round_sdiv_adjust (mode, mod, op1);
5077 adj = simplify_gen_unary (NEG, mode,
5078 simplify_gen_binary (MULT, mode, adj, op1),
5079 mode);
5080 return simplify_gen_binary (PLUS, mode, mod, adj);
5081 }
5082
5083 case LSHIFT_EXPR:
5084 return simplify_gen_binary (ASHIFT, mode, op0, op1);
5085
5086 case RSHIFT_EXPR:
5087 if (unsignedp)
5088 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
5089 else
5090 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
5091
5092 case LROTATE_EXPR:
5093 return simplify_gen_binary (ROTATE, mode, op0, op1);
5094
5095 case RROTATE_EXPR:
5096 return simplify_gen_binary (ROTATERT, mode, op0, op1);
5097
5098 case MIN_EXPR:
5099 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
5100
5101 case MAX_EXPR:
5102 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
5103
5104 case BIT_AND_EXPR:
5105 case TRUTH_AND_EXPR:
5106 return simplify_gen_binary (AND, mode, op0, op1);
5107
5108 case BIT_IOR_EXPR:
5109 case TRUTH_OR_EXPR:
5110 return simplify_gen_binary (IOR, mode, op0, op1);
5111
5112 case BIT_XOR_EXPR:
5113 case TRUTH_XOR_EXPR:
5114 return simplify_gen_binary (XOR, mode, op0, op1);
5115
5116 case TRUTH_ANDIF_EXPR:
5117 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
5118
5119 case TRUTH_ORIF_EXPR:
5120 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
5121
5122 case TRUTH_NOT_EXPR:
5123 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
5124
5125 case LT_EXPR:
5126 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
5127 op0, op1);
5128
5129 case LE_EXPR:
5130 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
5131 op0, op1);
5132
5133 case GT_EXPR:
5134 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
5135 op0, op1);
5136
5137 case GE_EXPR:
5138 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
5139 op0, op1);
5140
5141 case EQ_EXPR:
5142 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
5143
5144 case NE_EXPR:
5145 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
5146
5147 case UNORDERED_EXPR:
5148 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
5149
5150 case ORDERED_EXPR:
5151 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
5152
5153 case UNLT_EXPR:
5154 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
5155
5156 case UNLE_EXPR:
5157 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
5158
5159 case UNGT_EXPR:
5160 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
5161
5162 case UNGE_EXPR:
5163 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
5164
5165 case UNEQ_EXPR:
5166 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
5167
5168 case LTGT_EXPR:
5169 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
5170
5171 case COND_EXPR:
5172 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
5173
5174 case COMPLEX_EXPR:
5175 gcc_assert (COMPLEX_MODE_P (mode));
5176 if (GET_MODE (op0) == VOIDmode)
5177 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
5178 if (GET_MODE (op1) == VOIDmode)
5179 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
5180 return gen_rtx_CONCAT (mode, op0, op1);
5181
5182 case CONJ_EXPR:
5183 if (GET_CODE (op0) == CONCAT)
5184 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
5185 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
5186 XEXP (op0, 1),
5187 GET_MODE_INNER (mode)));
5188 else
5189 {
5190 scalar_mode imode = GET_MODE_INNER (mode);
5191 rtx re, im;
5192
5193 if (MEM_P (op0))
5194 {
5195 re = adjust_address_nv (op0, imode, 0);
5196 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
5197 }
5198 else
5199 {
5200 scalar_int_mode ifmode;
5201 scalar_int_mode ihmode;
5202 rtx halfsize;
5203 if (!int_mode_for_mode (mode).exists (&ifmode)
5204 || !int_mode_for_mode (imode).exists (&ihmode))
5205 return NULL;
5206 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
5207 re = op0;
5208 if (mode != ifmode)
5209 re = gen_rtx_SUBREG (ifmode, re, 0);
5210 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
5211 if (imode != ihmode)
5212 re = gen_rtx_SUBREG (imode, re, 0);
5213 im = copy_rtx (op0);
5214 if (mode != ifmode)
5215 im = gen_rtx_SUBREG (ifmode, im, 0);
5216 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
5217 if (imode != ihmode)
5218 im = gen_rtx_SUBREG (imode, im, 0);
5219 }
5220 im = gen_rtx_NEG (imode, im);
5221 return gen_rtx_CONCAT (mode, re, im);
5222 }
5223
5224 case ADDR_EXPR:
5225 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
5226 if (!op0 || !MEM_P (op0))
5227 {
5228 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
5229 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
5230 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
5231 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
5232 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
5233 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
5234
5235 if (handled_component_p (TREE_OPERAND (exp, 0)))
5236 {
5237 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
5238 bool reverse;
5239 tree decl
5240 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
5241 &bitsize, &maxsize, &reverse);
5242 if ((VAR_P (decl)
5243 || TREE_CODE (decl) == PARM_DECL
5244 || TREE_CODE (decl) == RESULT_DECL)
5245 && (!TREE_ADDRESSABLE (decl)
5246 || target_for_debug_bind (decl))
5247 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
5248 && known_gt (bitsize, 0)
5249 && known_eq (bitsize, maxsize))
5250 {
5251 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
5252 return plus_constant (mode, base, byteoffset);
5253 }
5254 }
5255
5256 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
5257 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5258 == ADDR_EXPR)
5259 {
5260 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5261 0));
5262 if (op0 != NULL
5263 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
5264 || (GET_CODE (op0) == PLUS
5265 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
5266 && CONST_INT_P (XEXP (op0, 1)))))
5267 {
5268 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5269 1));
5270 poly_int64 offset;
5271 if (!op1 || !poly_int_rtx_p (op1, &offset))
5272 return NULL;
5273
5274 return plus_constant (mode, op0, offset);
5275 }
5276 }
5277
5278 return NULL;
5279 }
5280
5281 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
5282 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5283 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
5284
5285 return op0;
5286
5287 case VECTOR_CST:
5288 {
5289 unsigned HOST_WIDE_INT i, nelts;
5290
5291 if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
5292 return NULL;
5293
5294 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5295
5296 for (i = 0; i < nelts; ++i)
5297 {
5298 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
5299 if (!op1)
5300 return NULL;
5301 XVECEXP (op0, 0, i) = op1;
5302 }
5303
5304 return op0;
5305 }
5306
5307 case CONSTRUCTOR:
5308 if (TREE_CLOBBER_P (exp))
5309 return NULL;
5310 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5311 {
5312 unsigned i;
5313 unsigned HOST_WIDE_INT nelts;
5314 tree val;
5315
5316 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5317 goto flag_unsupported;
5318
5319 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5320
5321 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5322 {
5323 op1 = expand_debug_expr (val);
5324 if (!op1)
5325 return NULL;
5326 XVECEXP (op0, 0, i) = op1;
5327 }
5328
5329 if (i < nelts)
5330 {
5331 op1 = expand_debug_expr
5332 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5333
5334 if (!op1)
5335 return NULL;
5336
5337 for (; i < nelts; i++)
5338 XVECEXP (op0, 0, i) = op1;
5339 }
5340
5341 return op0;
5342 }
5343 else
5344 goto flag_unsupported;
5345
5346 case CALL_EXPR:
5347 /* ??? Maybe handle some builtins? */
5348 return NULL;
5349
5350 case SSA_NAME:
5351 {
5352 gimple *g = get_gimple_for_ssa_name (exp);
5353 if (g)
5354 {
5355 tree t = NULL_TREE;
5356 if (deep_ter_debug_map)
5357 {
5358 tree *slot = deep_ter_debug_map->get (exp);
5359 if (slot)
5360 t = *slot;
5361 }
5362 if (t == NULL_TREE)
5363 t = gimple_assign_rhs_to_tree (g);
5364 op0 = expand_debug_expr (t);
5365 if (!op0)
5366 return NULL;
5367 }
5368 else
5369 {
5370 /* If this is a reference to an incoming value of
5371 parameter that is never used in the code or where the
5372 incoming value is never used in the code, use
5373 PARM_DECL's DECL_RTL if set. */
5374 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5375 && SSA_NAME_VAR (exp)
5376 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5377 && has_zero_uses (exp))
5378 {
5379 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5380 if (op0)
5381 goto adjust_mode;
5382 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5383 if (op0)
5384 goto adjust_mode;
5385 }
5386
5387 int part = var_to_partition (SA.map, exp);
5388
5389 if (part == NO_PARTITION)
5390 return NULL;
5391
5392 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5393
5394 op0 = copy_rtx (SA.partition_to_pseudo[part]);
5395 }
5396 goto adjust_mode;
5397 }
5398
5399 case ERROR_MARK:
5400 return NULL;
5401
5402 /* Vector stuff. For most of the codes we don't have rtl codes. */
5403 case REALIGN_LOAD_EXPR:
5404 case VEC_COND_EXPR:
5405 case VEC_PACK_FIX_TRUNC_EXPR:
5406 case VEC_PACK_FLOAT_EXPR:
5407 case VEC_PACK_SAT_EXPR:
5408 case VEC_PACK_TRUNC_EXPR:
5409 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5410 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
5411 case VEC_UNPACK_FLOAT_HI_EXPR:
5412 case VEC_UNPACK_FLOAT_LO_EXPR:
5413 case VEC_UNPACK_HI_EXPR:
5414 case VEC_UNPACK_LO_EXPR:
5415 case VEC_WIDEN_MULT_HI_EXPR:
5416 case VEC_WIDEN_MULT_LO_EXPR:
5417 case VEC_WIDEN_MULT_EVEN_EXPR:
5418 case VEC_WIDEN_MULT_ODD_EXPR:
5419 case VEC_WIDEN_LSHIFT_HI_EXPR:
5420 case VEC_WIDEN_LSHIFT_LO_EXPR:
5421 case VEC_PERM_EXPR:
5422 case VEC_DUPLICATE_EXPR:
5423 case VEC_SERIES_EXPR:
5424 case SAD_EXPR:
5425 return NULL;
5426
5427 /* Misc codes. */
5428 case ADDR_SPACE_CONVERT_EXPR:
5429 case FIXED_CONVERT_EXPR:
5430 case OBJ_TYPE_REF:
5431 case WITH_SIZE_EXPR:
5432 case BIT_INSERT_EXPR:
5433 return NULL;
5434
5435 case DOT_PROD_EXPR:
5436 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5437 && SCALAR_INT_MODE_P (mode))
5438 {
5439 op0
5440 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5441 0)))
5442 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5443 inner_mode);
5444 op1
5445 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5446 1)))
5447 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5448 inner_mode);
5449 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5450 return simplify_gen_binary (PLUS, mode, op0, op2);
5451 }
5452 return NULL;
5453
5454 case WIDEN_MULT_EXPR:
5455 case WIDEN_MULT_PLUS_EXPR:
5456 case WIDEN_MULT_MINUS_EXPR:
5457 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5458 && SCALAR_INT_MODE_P (mode))
5459 {
5460 inner_mode = GET_MODE (op0);
5461 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5462 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5463 else
5464 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5465 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5466 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5467 else
5468 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5469 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5470 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5471 return op0;
5472 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5473 return simplify_gen_binary (PLUS, mode, op0, op2);
5474 else
5475 return simplify_gen_binary (MINUS, mode, op2, op0);
5476 }
5477 return NULL;
5478
5479 case MULT_HIGHPART_EXPR:
5480 /* ??? Similar to the above. */
5481 return NULL;
5482
5483 case WIDEN_SUM_EXPR:
5484 case WIDEN_LSHIFT_EXPR:
5485 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5486 && SCALAR_INT_MODE_P (mode))
5487 {
5488 op0
5489 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5490 0)))
5491 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5492 inner_mode);
5493 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5494 ? ASHIFT : PLUS, mode, op0, op1);
5495 }
5496 return NULL;
5497
5498 default:
5499 flag_unsupported:
5500 if (flag_checking)
5501 {
5502 debug_tree (exp);
5503 gcc_unreachable ();
5504 }
5505 return NULL;
5506 }
5507 }
5508
5509 /* Return an RTX equivalent to the source bind value of the tree expression
5510 EXP. */
5511
5512 static rtx
5513 expand_debug_source_expr (tree exp)
5514 {
5515 rtx op0 = NULL_RTX;
5516 machine_mode mode = VOIDmode, inner_mode;
5517
5518 switch (TREE_CODE (exp))
5519 {
5520 case VAR_DECL:
5521 if (DECL_ABSTRACT_ORIGIN (exp))
5522 return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5523 break;
5524 case PARM_DECL:
5525 {
5526 mode = DECL_MODE (exp);
5527 op0 = expand_debug_parm_decl (exp);
5528 if (op0)
5529 break;
5530 /* See if this isn't an argument that has been completely
5531 optimized out. */
5532 if (!DECL_RTL_SET_P (exp)
5533 && !DECL_INCOMING_RTL (exp)
5534 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5535 {
5536 tree aexp = DECL_ORIGIN (exp);
5537 if (DECL_CONTEXT (aexp)
5538 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5539 {
5540 vec<tree, va_gc> **debug_args;
5541 unsigned int ix;
5542 tree ddecl;
5543 debug_args = decl_debug_args_lookup (current_function_decl);
5544 if (debug_args != NULL)
5545 {
5546 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5547 ix += 2)
5548 if (ddecl == aexp)
5549 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5550 }
5551 }
5552 }
5553 break;
5554 }
5555 default:
5556 break;
5557 }
5558
5559 if (op0 == NULL_RTX)
5560 return NULL_RTX;
5561
5562 inner_mode = GET_MODE (op0);
5563 if (mode == inner_mode)
5564 return op0;
5565
5566 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5567 {
5568 if (GET_MODE_UNIT_BITSIZE (mode)
5569 == GET_MODE_UNIT_BITSIZE (inner_mode))
5570 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5571 else if (GET_MODE_UNIT_BITSIZE (mode)
5572 < GET_MODE_UNIT_BITSIZE (inner_mode))
5573 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5574 else
5575 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5576 }
5577 else if (FLOAT_MODE_P (mode))
5578 gcc_unreachable ();
5579 else if (FLOAT_MODE_P (inner_mode))
5580 {
5581 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5582 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5583 else
5584 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5585 }
5586 else if (GET_MODE_UNIT_PRECISION (mode)
5587 == GET_MODE_UNIT_PRECISION (inner_mode))
5588 op0 = lowpart_subreg (mode, op0, inner_mode);
5589 else if (GET_MODE_UNIT_PRECISION (mode)
5590 < GET_MODE_UNIT_PRECISION (inner_mode))
5591 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5592 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5593 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5594 else
5595 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5596
5597 return op0;
5598 }
5599
5600 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
5601 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5602 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5603
5604 static void
5605 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5606 {
5607 rtx exp = *exp_p;
5608
5609 if (exp == NULL_RTX)
5610 return;
5611
5612 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5613 return;
5614
5615 if (depth == 4)
5616 {
5617 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5618 rtx dval = make_debug_expr_from_rtl (exp);
5619
5620 /* Emit a debug bind insn before INSN. */
5621 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5622 DEBUG_EXPR_TREE_DECL (dval), exp,
5623 VAR_INIT_STATUS_INITIALIZED);
5624
5625 emit_debug_insn_before (bind, insn);
5626 *exp_p = dval;
5627 return;
5628 }
5629
5630 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5631 int i, j;
5632 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5633 switch (*format_ptr++)
5634 {
5635 case 'e':
5636 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5637 break;
5638
5639 case 'E':
5640 case 'V':
5641 for (j = 0; j < XVECLEN (exp, i); j++)
5642 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5643 break;
5644
5645 default:
5646 break;
5647 }
5648 }
5649
5650 /* Expand the _LOCs in debug insns. We run this after expanding all
5651 regular insns, so that any variables referenced in the function
5652 will have their DECL_RTLs set. */
5653
5654 static void
5655 expand_debug_locations (void)
5656 {
5657 rtx_insn *insn;
5658 rtx_insn *last = get_last_insn ();
5659 int save_strict_alias = flag_strict_aliasing;
5660
5661 /* New alias sets while setting up memory attributes cause
5662 -fcompare-debug failures, even though it doesn't bring about any
5663 codegen changes. */
5664 flag_strict_aliasing = 0;
5665
5666 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5667 if (DEBUG_BIND_INSN_P (insn))
5668 {
5669 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5670 rtx val;
5671 rtx_insn *prev_insn, *insn2;
5672 machine_mode mode;
5673
5674 if (value == NULL_TREE)
5675 val = NULL_RTX;
5676 else
5677 {
5678 if (INSN_VAR_LOCATION_STATUS (insn)
5679 == VAR_INIT_STATUS_UNINITIALIZED)
5680 val = expand_debug_source_expr (value);
5681 /* The avoid_deep_ter_for_debug function inserts
5682 debug bind stmts after SSA_NAME definition, with the
5683 SSA_NAME as the whole bind location. Disable temporarily
5684 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5685 being defined in this DEBUG_INSN. */
5686 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5687 {
5688 tree *slot = deep_ter_debug_map->get (value);
5689 if (slot)
5690 {
5691 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5692 *slot = NULL_TREE;
5693 else
5694 slot = NULL;
5695 }
5696 val = expand_debug_expr (value);
5697 if (slot)
5698 *slot = INSN_VAR_LOCATION_DECL (insn);
5699 }
5700 else
5701 val = expand_debug_expr (value);
5702 gcc_assert (last == get_last_insn ());
5703 }
5704
5705 if (!val)
5706 val = gen_rtx_UNKNOWN_VAR_LOC ();
5707 else
5708 {
5709 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5710
5711 gcc_assert (mode == GET_MODE (val)
5712 || (GET_MODE (val) == VOIDmode
5713 && (CONST_SCALAR_INT_P (val)
5714 || GET_CODE (val) == CONST_FIXED
5715 || GET_CODE (val) == LABEL_REF)));
5716 }
5717
5718 INSN_VAR_LOCATION_LOC (insn) = val;
5719 prev_insn = PREV_INSN (insn);
5720 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5721 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5722 }
5723
5724 flag_strict_aliasing = save_strict_alias;
5725 }
5726
5727 /* Performs swapping operands of commutative operations to expand
5728 the expensive one first. */
5729
5730 static void
5731 reorder_operands (basic_block bb)
5732 {
5733 unsigned int *lattice; /* Hold cost of each statement. */
5734 unsigned int i = 0, n = 0;
5735 gimple_stmt_iterator gsi;
5736 gimple_seq stmts;
5737 gimple *stmt;
5738 bool swap;
5739 tree op0, op1;
5740 ssa_op_iter iter;
5741 use_operand_p use_p;
5742 gimple *def0, *def1;
5743
5744 /* Compute cost of each statement using estimate_num_insns. */
5745 stmts = bb_seq (bb);
5746 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5747 {
5748 stmt = gsi_stmt (gsi);
5749 if (!is_gimple_debug (stmt))
5750 gimple_set_uid (stmt, n++);
5751 }
5752 lattice = XNEWVEC (unsigned int, n);
5753 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5754 {
5755 unsigned cost;
5756 stmt = gsi_stmt (gsi);
5757 if (is_gimple_debug (stmt))
5758 continue;
5759 cost = estimate_num_insns (stmt, &eni_size_weights);
5760 lattice[i] = cost;
5761 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5762 {
5763 tree use = USE_FROM_PTR (use_p);
5764 gimple *def_stmt;
5765 if (TREE_CODE (use) != SSA_NAME)
5766 continue;
5767 def_stmt = get_gimple_for_ssa_name (use);
5768 if (!def_stmt)
5769 continue;
5770 lattice[i] += lattice[gimple_uid (def_stmt)];
5771 }
5772 i++;
5773 if (!is_gimple_assign (stmt)
5774 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5775 continue;
5776 op0 = gimple_op (stmt, 1);
5777 op1 = gimple_op (stmt, 2);
5778 if (TREE_CODE (op0) != SSA_NAME
5779 || TREE_CODE (op1) != SSA_NAME)
5780 continue;
5781 /* Swap operands if the second one is more expensive. */
5782 def0 = get_gimple_for_ssa_name (op0);
5783 def1 = get_gimple_for_ssa_name (op1);
5784 if (!def1)
5785 continue;
5786 swap = false;
5787 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5788 swap = true;
5789 if (swap)
5790 {
5791 if (dump_file && (dump_flags & TDF_DETAILS))
5792 {
5793 fprintf (dump_file, "Swap operands in stmt:\n");
5794 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5795 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5796 def0 ? lattice[gimple_uid (def0)] : 0,
5797 lattice[gimple_uid (def1)]);
5798 }
5799 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5800 gimple_assign_rhs2_ptr (stmt));
5801 }
5802 }
5803 XDELETE (lattice);
5804 }
5805
5806 /* Expand basic block BB from GIMPLE trees to RTL. */
5807
5808 static basic_block
5809 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5810 {
5811 gimple_stmt_iterator gsi;
5812 gimple_seq stmts;
5813 gimple *stmt = NULL;
5814 rtx_note *note = NULL;
5815 rtx_insn *last;
5816 edge e;
5817 edge_iterator ei;
5818 bool nondebug_stmt_seen = false;
5819
5820 if (dump_file)
5821 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5822 bb->index);
5823
5824 /* Note that since we are now transitioning from GIMPLE to RTL, we
5825 cannot use the gsi_*_bb() routines because they expect the basic
5826 block to be in GIMPLE, instead of RTL. Therefore, we need to
5827 access the BB sequence directly. */
5828 if (optimize)
5829 reorder_operands (bb);
5830 stmts = bb_seq (bb);
5831 bb->il.gimple.seq = NULL;
5832 bb->il.gimple.phi_nodes = NULL;
5833 rtl_profile_for_bb (bb);
5834 init_rtl_bb_info (bb);
5835 bb->flags |= BB_RTL;
5836
5837 /* Remove the RETURN_EXPR if we may fall though to the exit
5838 instead. */
5839 gsi = gsi_last (stmts);
5840 if (!gsi_end_p (gsi)
5841 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5842 {
5843 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5844
5845 gcc_assert (single_succ_p (bb));
5846 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5847
5848 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5849 && !gimple_return_retval (ret_stmt))
5850 {
5851 gsi_remove (&gsi, false);
5852 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5853 }
5854 }
5855
5856 gsi = gsi_start (stmts);
5857 if (!gsi_end_p (gsi))
5858 {
5859 stmt = gsi_stmt (gsi);
5860 if (gimple_code (stmt) != GIMPLE_LABEL)
5861 stmt = NULL;
5862 }
5863
5864 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5865
5866 if (stmt || elt)
5867 {
5868 gcc_checking_assert (!note);
5869 last = get_last_insn ();
5870
5871 if (stmt)
5872 {
5873 expand_gimple_stmt (stmt);
5874 gsi_next (&gsi);
5875 }
5876
5877 if (elt)
5878 emit_label (*elt);
5879
5880 BB_HEAD (bb) = NEXT_INSN (last);
5881 if (NOTE_P (BB_HEAD (bb)))
5882 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5883 gcc_assert (LABEL_P (BB_HEAD (bb)));
5884 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5885
5886 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5887 }
5888 else
5889 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5890
5891 if (note)
5892 NOTE_BASIC_BLOCK (note) = bb;
5893
5894 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5895 {
5896 basic_block new_bb;
5897
5898 stmt = gsi_stmt (gsi);
5899 if (!is_gimple_debug (stmt))
5900 nondebug_stmt_seen = true;
5901
5902 /* If this statement is a non-debug one, and we generate debug
5903 insns, then this one might be the last real use of a TERed
5904 SSA_NAME, but where there are still some debug uses further
5905 down. Expanding the current SSA name in such further debug
5906 uses by their RHS might lead to wrong debug info, as coalescing
5907 might make the operands of such RHS be placed into the same
5908 pseudo as something else. Like so:
5909 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5910 use(a_1);
5911 a_2 = ...
5912 #DEBUG ... => a_1
5913 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5914 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
5915 the write to a_2 would actually have clobbered the place which
5916 formerly held a_0.
5917
5918 So, instead of that, we recognize the situation, and generate
5919 debug temporaries at the last real use of TERed SSA names:
5920 a_1 = a_0 + 1;
5921 #DEBUG #D1 => a_1
5922 use(a_1);
5923 a_2 = ...
5924 #DEBUG ... => #D1
5925 */
5926 if (MAY_HAVE_DEBUG_BIND_INSNS
5927 && SA.values
5928 && !is_gimple_debug (stmt))
5929 {
5930 ssa_op_iter iter;
5931 tree op;
5932 gimple *def;
5933
5934 location_t sloc = curr_insn_location ();
5935
5936 /* Look for SSA names that have their last use here (TERed
5937 names always have only one real use). */
5938 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5939 if ((def = get_gimple_for_ssa_name (op)))
5940 {
5941 imm_use_iterator imm_iter;
5942 use_operand_p use_p;
5943 bool have_debug_uses = false;
5944
5945 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5946 {
5947 if (gimple_debug_bind_p (USE_STMT (use_p)))
5948 {
5949 have_debug_uses = true;
5950 break;
5951 }
5952 }
5953
5954 if (have_debug_uses)
5955 {
5956 /* OP is a TERed SSA name, with DEF its defining
5957 statement, and where OP is used in further debug
5958 instructions. Generate a debug temporary, and
5959 replace all uses of OP in debug insns with that
5960 temporary. */
5961 gimple *debugstmt;
5962 tree value = gimple_assign_rhs_to_tree (def);
5963 tree vexpr = build_debug_expr_decl (TREE_TYPE (value));
5964 rtx val;
5965 machine_mode mode;
5966
5967 set_curr_insn_location (gimple_location (def));
5968
5969 if (DECL_P (value))
5970 mode = DECL_MODE (value);
5971 else
5972 mode = TYPE_MODE (TREE_TYPE (value));
5973 /* FIXME: Is setting the mode really necessary? */
5974 SET_DECL_MODE (vexpr, mode);
5975
5976 val = gen_rtx_VAR_LOCATION
5977 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5978
5979 emit_debug_insn (val);
5980
5981 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5982 {
5983 if (!gimple_debug_bind_p (debugstmt))
5984 continue;
5985
5986 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5987 SET_USE (use_p, vexpr);
5988
5989 update_stmt (debugstmt);
5990 }
5991 }
5992 }
5993 set_curr_insn_location (sloc);
5994 }
5995
5996 currently_expanding_gimple_stmt = stmt;
5997
5998 /* Expand this statement, then evaluate the resulting RTL and
5999 fixup the CFG accordingly. */
6000 if (gimple_code (stmt) == GIMPLE_COND)
6001 {
6002 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
6003 if (new_bb)
6004 {
6005 currently_expanding_gimple_stmt = NULL;
6006 return new_bb;
6007 }
6008 }
6009 else if (is_gimple_debug (stmt))
6010 {
6011 location_t sloc = curr_insn_location ();
6012 gimple_stmt_iterator nsi = gsi;
6013
6014 for (;;)
6015 {
6016 tree var;
6017 tree value = NULL_TREE;
6018 rtx val = NULL_RTX;
6019 machine_mode mode;
6020
6021 if (!gimple_debug_nonbind_marker_p (stmt))
6022 {
6023 if (gimple_debug_bind_p (stmt))
6024 {
6025 var = gimple_debug_bind_get_var (stmt);
6026
6027 if (TREE_CODE (var) != DEBUG_EXPR_DECL
6028 && TREE_CODE (var) != LABEL_DECL
6029 && !target_for_debug_bind (var))
6030 goto delink_debug_stmt;
6031
6032 if (DECL_P (var) && !VECTOR_TYPE_P (TREE_TYPE (var)))
6033 mode = DECL_MODE (var);
6034 else
6035 mode = TYPE_MODE (TREE_TYPE (var));
6036
6037 if (gimple_debug_bind_has_value_p (stmt))
6038 value = gimple_debug_bind_get_value (stmt);
6039
6040 val = gen_rtx_VAR_LOCATION
6041 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
6042 }
6043 else if (gimple_debug_source_bind_p (stmt))
6044 {
6045 var = gimple_debug_source_bind_get_var (stmt);
6046
6047 value = gimple_debug_source_bind_get_value (stmt);
6048
6049 if (!VECTOR_TYPE_P (TREE_TYPE (var)))
6050 mode = DECL_MODE (var);
6051 else
6052 mode = TYPE_MODE (TREE_TYPE (var));
6053
6054 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
6055 VAR_INIT_STATUS_UNINITIALIZED);
6056 }
6057 else
6058 gcc_unreachable ();
6059 }
6060 /* If this function was first compiled with markers
6061 enabled, but they're now disable (e.g. LTO), drop
6062 them on the floor. */
6063 else if (gimple_debug_nonbind_marker_p (stmt)
6064 && !MAY_HAVE_DEBUG_MARKER_INSNS)
6065 goto delink_debug_stmt;
6066 else if (gimple_debug_begin_stmt_p (stmt))
6067 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
6068 else if (gimple_debug_inline_entry_p (stmt))
6069 val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
6070 else
6071 gcc_unreachable ();
6072
6073 last = get_last_insn ();
6074
6075 set_curr_insn_location (gimple_location (stmt));
6076
6077 emit_debug_insn (val);
6078
6079 if (dump_file && (dump_flags & TDF_DETAILS))
6080 {
6081 /* We can't dump the insn with a TREE where an RTX
6082 is expected. */
6083 if (GET_CODE (val) == VAR_LOCATION)
6084 {
6085 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
6086 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
6087 }
6088 maybe_dump_rtl_for_gimple_stmt (stmt, last);
6089 if (GET_CODE (val) == VAR_LOCATION)
6090 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
6091 }
6092
6093 delink_debug_stmt:
6094 /* In order not to generate too many debug temporaries,
6095 we delink all uses of debug statements we already expanded.
6096 Therefore debug statements between definition and real
6097 use of TERed SSA names will continue to use the SSA name,
6098 and not be replaced with debug temps. */
6099 delink_stmt_imm_use (stmt);
6100
6101 gsi = nsi;
6102 gsi_next (&nsi);
6103 if (gsi_end_p (nsi))
6104 break;
6105 stmt = gsi_stmt (nsi);
6106 if (!is_gimple_debug (stmt))
6107 break;
6108 }
6109
6110 set_curr_insn_location (sloc);
6111 }
6112 else
6113 {
6114 gcall *call_stmt = dyn_cast <gcall *> (stmt);
6115 if (call_stmt
6116 && gimple_call_tail_p (call_stmt)
6117 && disable_tail_calls)
6118 gimple_call_set_tail (call_stmt, false);
6119
6120 if (call_stmt && gimple_call_tail_p (call_stmt))
6121 {
6122 bool can_fallthru;
6123 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
6124 if (new_bb)
6125 {
6126 if (can_fallthru)
6127 bb = new_bb;
6128 else
6129 {
6130 currently_expanding_gimple_stmt = NULL;
6131 return new_bb;
6132 }
6133 }
6134 }
6135 else
6136 {
6137 def_operand_p def_p;
6138 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
6139
6140 if (def_p != NULL)
6141 {
6142 /* Ignore this stmt if it is in the list of
6143 replaceable expressions. */
6144 if (SA.values
6145 && bitmap_bit_p (SA.values,
6146 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
6147 continue;
6148 }
6149 last = expand_gimple_stmt (stmt);
6150 maybe_dump_rtl_for_gimple_stmt (stmt, last);
6151 }
6152 }
6153 }
6154
6155 currently_expanding_gimple_stmt = NULL;
6156
6157 /* Expand implicit goto and convert goto_locus. */
6158 FOR_EACH_EDGE (e, ei, bb->succs)
6159 {
6160 if (e->goto_locus != UNKNOWN_LOCATION || !nondebug_stmt_seen)
6161 set_curr_insn_location (e->goto_locus);
6162 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
6163 {
6164 emit_jump (label_rtx_for_bb (e->dest));
6165 e->flags &= ~EDGE_FALLTHRU;
6166 }
6167 }
6168
6169 /* Expanded RTL can create a jump in the last instruction of block.
6170 This later might be assumed to be a jump to successor and break edge insertion.
6171 We need to insert dummy move to prevent this. PR41440. */
6172 if (single_succ_p (bb)
6173 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
6174 && (last = get_last_insn ())
6175 && (JUMP_P (last)
6176 || (DEBUG_INSN_P (last)
6177 && JUMP_P (prev_nondebug_insn (last)))))
6178 {
6179 rtx dummy = gen_reg_rtx (SImode);
6180 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
6181 }
6182
6183 do_pending_stack_adjust ();
6184
6185 /* Find the block tail. The last insn in the block is the insn
6186 before a barrier and/or table jump insn. */
6187 last = get_last_insn ();
6188 if (BARRIER_P (last))
6189 last = PREV_INSN (last);
6190 if (JUMP_TABLE_DATA_P (last))
6191 last = PREV_INSN (PREV_INSN (last));
6192 if (BARRIER_P (last))
6193 last = PREV_INSN (last);
6194 BB_END (bb) = last;
6195
6196 update_bb_for_insn (bb);
6197
6198 return bb;
6199 }
6200
6201
6202 /* Create a basic block for initialization code. */
6203
6204 static basic_block
6205 construct_init_block (void)
6206 {
6207 basic_block init_block, first_block;
6208 edge e = NULL;
6209 int flags;
6210
6211 /* Multiple entry points not supported yet. */
6212 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
6213 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6214 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
6215 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
6216 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
6217
6218 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
6219
6220 /* When entry edge points to first basic block, we don't need jump,
6221 otherwise we have to jump into proper target. */
6222 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
6223 {
6224 tree label = gimple_block_label (e->dest);
6225
6226 emit_jump (jump_target_rtx (label));
6227 flags = 0;
6228 }
6229 else
6230 flags = EDGE_FALLTHRU;
6231
6232 init_block = create_basic_block (NEXT_INSN (get_insns ()),
6233 get_last_insn (),
6234 ENTRY_BLOCK_PTR_FOR_FN (cfun));
6235 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6236 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6237 if (e)
6238 {
6239 first_block = e->dest;
6240 redirect_edge_succ (e, init_block);
6241 make_single_succ_edge (init_block, first_block, flags);
6242 }
6243 else
6244 make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6245 EDGE_FALLTHRU);
6246
6247 update_bb_for_insn (init_block);
6248 return init_block;
6249 }
6250
6251 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
6252 found in the block tree. */
6253
6254 static void
6255 set_block_levels (tree block, int level)
6256 {
6257 while (block)
6258 {
6259 BLOCK_NUMBER (block) = level;
6260 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
6261 block = BLOCK_CHAIN (block);
6262 }
6263 }
6264
6265 /* Create a block containing landing pads and similar stuff. */
6266
6267 static void
6268 construct_exit_block (void)
6269 {
6270 rtx_insn *head = get_last_insn ();
6271 rtx_insn *end;
6272 basic_block exit_block;
6273 edge e, e2;
6274 unsigned ix;
6275 edge_iterator ei;
6276 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
6277 rtx_insn *orig_end = BB_END (prev_bb);
6278
6279 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6280
6281 /* Make sure the locus is set to the end of the function, so that
6282 epilogue line numbers and warnings are set properly. */
6283 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
6284 input_location = cfun->function_end_locus;
6285
6286 /* Generate rtl for function exit. */
6287 expand_function_end ();
6288
6289 end = get_last_insn ();
6290 if (head == end)
6291 return;
6292 /* While emitting the function end we could move end of the last basic
6293 block. */
6294 BB_END (prev_bb) = orig_end;
6295 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
6296 head = NEXT_INSN (head);
6297 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
6298 bb count counting will be confused. Any instructions before that
6299 label are emitted for the case where PREV_BB falls through into the
6300 exit block, so append those instructions to prev_bb in that case. */
6301 if (NEXT_INSN (head) != return_label)
6302 {
6303 while (NEXT_INSN (head) != return_label)
6304 {
6305 if (!NOTE_P (NEXT_INSN (head)))
6306 BB_END (prev_bb) = NEXT_INSN (head);
6307 head = NEXT_INSN (head);
6308 }
6309 }
6310 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
6311 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6312 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6313
6314 ix = 0;
6315 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6316 {
6317 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6318 if (!(e->flags & EDGE_ABNORMAL))
6319 redirect_edge_succ (e, exit_block);
6320 else
6321 ix++;
6322 }
6323
6324 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6325 EDGE_FALLTHRU);
6326 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6327 if (e2 != e)
6328 {
6329 exit_block->count -= e2->count ();
6330 }
6331 update_bb_for_insn (exit_block);
6332 }
6333
6334 /* Helper function for discover_nonconstant_array_refs.
6335 Look for ARRAY_REF nodes with non-constant indexes and mark them
6336 addressable. */
6337
6338 static tree
6339 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6340 void *data)
6341 {
6342 tree t = *tp;
6343 bitmap forced_stack_vars = (bitmap)((walk_stmt_info *)data)->info;
6344
6345 if (IS_TYPE_OR_DECL_P (t))
6346 *walk_subtrees = 0;
6347 else if (REFERENCE_CLASS_P (t) && TREE_THIS_VOLATILE (t))
6348 {
6349 t = get_base_address (t);
6350 if (t && DECL_P (t)
6351 && DECL_MODE (t) != BLKmode
6352 && !TREE_ADDRESSABLE (t))
6353 bitmap_set_bit (forced_stack_vars, DECL_UID (t));
6354 *walk_subtrees = 0;
6355 }
6356 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6357 {
6358 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6359 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6360 && (!TREE_OPERAND (t, 2)
6361 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6362 || (TREE_CODE (t) == COMPONENT_REF
6363 && (!TREE_OPERAND (t,2)
6364 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6365 || TREE_CODE (t) == BIT_FIELD_REF
6366 || TREE_CODE (t) == REALPART_EXPR
6367 || TREE_CODE (t) == IMAGPART_EXPR
6368 || TREE_CODE (t) == VIEW_CONVERT_EXPR
6369 || CONVERT_EXPR_P (t))
6370 t = TREE_OPERAND (t, 0);
6371
6372 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6373 {
6374 t = get_base_address (t);
6375 if (t && DECL_P (t)
6376 && DECL_MODE (t) != BLKmode
6377 && !TREE_ADDRESSABLE (t))
6378 bitmap_set_bit (forced_stack_vars, DECL_UID (t));
6379 }
6380
6381 *walk_subtrees = 0;
6382 }
6383 /* References of size POLY_INT_CST to a fixed-size object must go
6384 through memory. It's more efficient to force that here than
6385 to create temporary slots on the fly.
6386 RTL expansion expectes TARGET_MEM_REF to always address actual memory.
6387 Also, force to stack non-BLKmode vars accessed through VIEW_CONVERT_EXPR
6388 to BLKmode type. */
6389 else if (TREE_CODE (t) == TARGET_MEM_REF
6390 || (TREE_CODE (t) == MEM_REF
6391 && TYPE_SIZE (TREE_TYPE (t))
6392 && POLY_INT_CST_P (TYPE_SIZE (TREE_TYPE (t))))
6393 || (TREE_CODE (t) == VIEW_CONVERT_EXPR
6394 && TYPE_MODE (TREE_TYPE (t)) == BLKmode))
6395 {
6396 tree base = get_base_address (t);
6397 if (base
6398 && DECL_P (base)
6399 && !TREE_ADDRESSABLE (base)
6400 && DECL_MODE (base) != BLKmode
6401 && GET_MODE_SIZE (DECL_MODE (base)).is_constant ())
6402 bitmap_set_bit (forced_stack_vars, DECL_UID (base));
6403 *walk_subtrees = 0;
6404 }
6405
6406 return NULL_TREE;
6407 }
6408
6409 /* If there's a chance to get a pseudo for t then if it would be of float mode
6410 and the actual access is via an integer mode (lowered memcpy or similar
6411 access) then avoid the register expansion if the mode likely is not storage
6412 suitable for raw bits processing (like XFmode on i?86). */
6413
6414 static void
6415 avoid_type_punning_on_regs (tree t, bitmap forced_stack_vars)
6416 {
6417 machine_mode access_mode = TYPE_MODE (TREE_TYPE (t));
6418 if (access_mode != BLKmode
6419 && !SCALAR_INT_MODE_P (access_mode))
6420 return;
6421 tree base = get_base_address (t);
6422 if (DECL_P (base)
6423 && !TREE_ADDRESSABLE (base)
6424 && FLOAT_MODE_P (DECL_MODE (base))
6425 && maybe_lt (GET_MODE_PRECISION (DECL_MODE (base)),
6426 GET_MODE_BITSIZE (GET_MODE_INNER (DECL_MODE (base))))
6427 /* Double check in the expensive way we really would get a pseudo. */
6428 && use_register_for_decl (base))
6429 bitmap_set_bit (forced_stack_vars, DECL_UID (base));
6430 }
6431
6432 /* RTL expansion is not able to compile array references with variable
6433 offsets for arrays stored in single register. Discover such
6434 expressions and mark variables as addressable to avoid this
6435 scenario. */
6436
6437 static void
6438 discover_nonconstant_array_refs (bitmap forced_stack_vars)
6439 {
6440 basic_block bb;
6441 gimple_stmt_iterator gsi;
6442
6443 walk_stmt_info wi = {};
6444 wi.info = forced_stack_vars;
6445 FOR_EACH_BB_FN (bb, cfun)
6446 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6447 {
6448 gimple *stmt = gsi_stmt (gsi);
6449 if (!is_gimple_debug (stmt))
6450 {
6451 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, &wi);
6452 gcall *call = dyn_cast <gcall *> (stmt);
6453 if (call && gimple_call_internal_p (call))
6454 {
6455 tree cand = NULL_TREE;
6456 switch (gimple_call_internal_fn (call))
6457 {
6458 case IFN_LOAD_LANES:
6459 /* The source must be a MEM. */
6460 cand = gimple_call_arg (call, 0);
6461 break;
6462 case IFN_STORE_LANES:
6463 /* The destination must be a MEM. */
6464 cand = gimple_call_lhs (call);
6465 break;
6466 default:
6467 break;
6468 }
6469 if (cand)
6470 cand = get_base_address (cand);
6471 if (cand
6472 && DECL_P (cand)
6473 && use_register_for_decl (cand))
6474 bitmap_set_bit (forced_stack_vars, DECL_UID (cand));
6475 }
6476 if (gimple_vdef (stmt))
6477 {
6478 tree t = gimple_get_lhs (stmt);
6479 if (t && REFERENCE_CLASS_P (t))
6480 avoid_type_punning_on_regs (t, forced_stack_vars);
6481 }
6482 }
6483 }
6484 }
6485
6486 /* This function sets crtl->args.internal_arg_pointer to a virtual
6487 register if DRAP is needed. Local register allocator will replace
6488 virtual_incoming_args_rtx with the virtual register. */
6489
6490 static void
6491 expand_stack_alignment (void)
6492 {
6493 rtx drap_rtx;
6494 unsigned int preferred_stack_boundary;
6495
6496 if (! SUPPORTS_STACK_ALIGNMENT)
6497 return;
6498
6499 if (cfun->calls_alloca
6500 || cfun->has_nonlocal_label
6501 || crtl->has_nonlocal_goto)
6502 crtl->need_drap = true;
6503
6504 /* Call update_stack_boundary here again to update incoming stack
6505 boundary. It may set incoming stack alignment to a different
6506 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6507 use the minimum incoming stack alignment to check if it is OK
6508 to perform sibcall optimization since sibcall optimization will
6509 only align the outgoing stack to incoming stack boundary. */
6510 if (targetm.calls.update_stack_boundary)
6511 targetm.calls.update_stack_boundary ();
6512
6513 /* The incoming stack frame has to be aligned at least at
6514 parm_stack_boundary. */
6515 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6516
6517 /* Update crtl->stack_alignment_estimated and use it later to align
6518 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6519 exceptions since callgraph doesn't collect incoming stack alignment
6520 in this case. */
6521 if (cfun->can_throw_non_call_exceptions
6522 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6523 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6524 else
6525 preferred_stack_boundary = crtl->preferred_stack_boundary;
6526 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6527 crtl->stack_alignment_estimated = preferred_stack_boundary;
6528 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6529 crtl->stack_alignment_needed = preferred_stack_boundary;
6530
6531 gcc_assert (crtl->stack_alignment_needed
6532 <= crtl->stack_alignment_estimated);
6533
6534 crtl->stack_realign_needed
6535 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6536 crtl->stack_realign_tried = crtl->stack_realign_needed;
6537
6538 crtl->stack_realign_processed = true;
6539
6540 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6541 alignment. */
6542 gcc_assert (targetm.calls.get_drap_rtx != NULL);
6543 drap_rtx = targetm.calls.get_drap_rtx ();
6544
6545 /* stack_realign_drap and drap_rtx must match. */
6546 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6547
6548 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6549 if (drap_rtx != NULL)
6550 {
6551 crtl->args.internal_arg_pointer = drap_rtx;
6552
6553 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6554 needed. */
6555 fixup_tail_calls ();
6556 }
6557 }
6558 \f
6559
6560 static void
6561 expand_main_function (void)
6562 {
6563 #if (defined(INVOKE__main) \
6564 || (!defined(HAS_INIT_SECTION) \
6565 && !defined(INIT_SECTION_ASM_OP) \
6566 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6567 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6568 #endif
6569 }
6570 \f
6571
6572 /* Expand code to initialize the stack_protect_guard. This is invoked at
6573 the beginning of a function to be protected. */
6574
6575 static void
6576 stack_protect_prologue (void)
6577 {
6578 tree guard_decl = targetm.stack_protect_guard ();
6579 rtx x, y;
6580
6581 crtl->stack_protect_guard_decl = guard_decl;
6582 x = expand_normal (crtl->stack_protect_guard);
6583
6584 if (targetm.have_stack_protect_combined_set () && guard_decl)
6585 {
6586 gcc_assert (DECL_P (guard_decl));
6587 y = DECL_RTL (guard_decl);
6588
6589 /* Allow the target to compute address of Y and copy it to X without
6590 leaking Y into a register. This combined address + copy pattern
6591 allows the target to prevent spilling of any intermediate results by
6592 splitting it after register allocator. */
6593 if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
6594 {
6595 emit_insn (insn);
6596 return;
6597 }
6598 }
6599
6600 if (guard_decl)
6601 y = expand_normal (guard_decl);
6602 else
6603 y = const0_rtx;
6604
6605 /* Allow the target to copy from Y to X without leaking Y into a
6606 register. */
6607 if (targetm.have_stack_protect_set ())
6608 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6609 {
6610 emit_insn (insn);
6611 return;
6612 }
6613
6614 /* Otherwise do a straight move. */
6615 emit_move_insn (x, y);
6616 }
6617
6618 /* Translate the intermediate representation contained in the CFG
6619 from GIMPLE trees to RTL.
6620
6621 We do conversion per basic block and preserve/update the tree CFG.
6622 This implies we have to do some magic as the CFG can simultaneously
6623 consist of basic blocks containing RTL and GIMPLE trees. This can
6624 confuse the CFG hooks, so be careful to not manipulate CFG during
6625 the expansion. */
6626
6627 namespace {
6628
6629 const pass_data pass_data_expand =
6630 {
6631 RTL_PASS, /* type */
6632 "expand", /* name */
6633 OPTGROUP_NONE, /* optinfo_flags */
6634 TV_EXPAND, /* tv_id */
6635 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6636 | PROP_gimple_lcx
6637 | PROP_gimple_lvec
6638 | PROP_gimple_lva), /* properties_required */
6639 PROP_rtl, /* properties_provided */
6640 ( PROP_ssa | PROP_gimple ), /* properties_destroyed */
6641 0, /* todo_flags_start */
6642 0, /* todo_flags_finish */
6643 };
6644
6645 class pass_expand : public rtl_opt_pass
6646 {
6647 public:
6648 pass_expand (gcc::context *ctxt)
6649 : rtl_opt_pass (pass_data_expand, ctxt)
6650 {}
6651
6652 /* opt_pass methods: */
6653 unsigned int execute (function *) final override;
6654
6655 }; // class pass_expand
6656
6657 unsigned int
6658 pass_expand::execute (function *fun)
6659 {
6660 basic_block bb, init_block;
6661 edge_iterator ei;
6662 edge e;
6663 rtx_insn *var_seq, *var_ret_seq;
6664 unsigned i;
6665
6666 timevar_push (TV_OUT_OF_SSA);
6667 rewrite_out_of_ssa (&SA);
6668 timevar_pop (TV_OUT_OF_SSA);
6669 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6670
6671 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6672 {
6673 gimple_stmt_iterator gsi;
6674 FOR_EACH_BB_FN (bb, cfun)
6675 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6676 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6677 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6678 }
6679
6680 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6681 auto_bitmap forced_stack_vars;
6682 discover_nonconstant_array_refs (forced_stack_vars);
6683
6684 /* Make sure all values used by the optimization passes have sane
6685 defaults. */
6686 reg_renumber = 0;
6687
6688 /* Some backends want to know that we are expanding to RTL. */
6689 currently_expanding_to_rtl = 1;
6690 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6691 free_dominance_info (CDI_DOMINATORS);
6692
6693 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6694
6695 insn_locations_init ();
6696 if (!DECL_IS_UNDECLARED_BUILTIN (current_function_decl))
6697 {
6698 /* Eventually, all FEs should explicitly set function_start_locus. */
6699 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6700 set_curr_insn_location
6701 (DECL_SOURCE_LOCATION (current_function_decl));
6702 else
6703 set_curr_insn_location (fun->function_start_locus);
6704 }
6705 else
6706 set_curr_insn_location (UNKNOWN_LOCATION);
6707 prologue_location = curr_insn_location ();
6708
6709 #ifdef INSN_SCHEDULING
6710 init_sched_attrs ();
6711 #endif
6712
6713 /* Make sure first insn is a note even if we don't want linenums.
6714 This makes sure the first insn will never be deleted.
6715 Also, final expects a note to appear there. */
6716 emit_note (NOTE_INSN_DELETED);
6717
6718 targetm.expand_to_rtl_hook ();
6719 crtl->init_stack_alignment ();
6720 fun->cfg->max_jumptable_ents = 0;
6721
6722 /* Resovle the function section. Some targets, like ARM EABI rely on knowledge
6723 of the function section at exapnsion time to predict distance of calls. */
6724 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6725
6726 /* Expand the variables recorded during gimple lowering. */
6727 timevar_push (TV_VAR_EXPAND);
6728 start_sequence ();
6729
6730 var_ret_seq = expand_used_vars (forced_stack_vars);
6731
6732 var_seq = get_insns ();
6733 end_sequence ();
6734 timevar_pop (TV_VAR_EXPAND);
6735
6736 /* Honor stack protection warnings. */
6737 if (warn_stack_protect)
6738 {
6739 if (fun->calls_alloca)
6740 warning (OPT_Wstack_protector,
6741 "stack protector not protecting local variables: "
6742 "variable length buffer");
6743 if (has_short_buffer && !crtl->stack_protect_guard)
6744 warning (OPT_Wstack_protector,
6745 "stack protector not protecting function: "
6746 "all local arrays are less than %d bytes long",
6747 (int) param_ssp_buffer_size);
6748 }
6749
6750 /* Temporarily mark PARM_DECLs and RESULT_DECLs we need to expand to
6751 memory addressable so expand_function_start can emit the required
6752 copies. */
6753 auto_vec<tree, 16> marked_parms;
6754 for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
6755 parm = DECL_CHAIN (parm))
6756 if (!TREE_ADDRESSABLE (parm)
6757 && bitmap_bit_p (forced_stack_vars, DECL_UID (parm)))
6758 {
6759 TREE_ADDRESSABLE (parm) = 1;
6760 marked_parms.safe_push (parm);
6761 }
6762 if (DECL_RESULT (current_function_decl)
6763 && !TREE_ADDRESSABLE (DECL_RESULT (current_function_decl))
6764 && bitmap_bit_p (forced_stack_vars,
6765 DECL_UID (DECL_RESULT (current_function_decl))))
6766 {
6767 TREE_ADDRESSABLE (DECL_RESULT (current_function_decl)) = 1;
6768 marked_parms.safe_push (DECL_RESULT (current_function_decl));
6769 }
6770
6771 /* Set up parameters and prepare for return, for the function. */
6772 expand_function_start (current_function_decl);
6773
6774 /* Clear TREE_ADDRESSABLE again. */
6775 while (!marked_parms.is_empty ())
6776 TREE_ADDRESSABLE (marked_parms.pop ()) = 0;
6777
6778 /* If we emitted any instructions for setting up the variables,
6779 emit them before the FUNCTION_START note. */
6780 if (var_seq)
6781 {
6782 emit_insn_before (var_seq, parm_birth_insn);
6783
6784 /* In expand_function_end we'll insert the alloca save/restore
6785 before parm_birth_insn. We've just insertted an alloca call.
6786 Adjust the pointer to match. */
6787 parm_birth_insn = var_seq;
6788 }
6789
6790 /* Now propagate the RTL assignment of each partition to the
6791 underlying var of each SSA_NAME. */
6792 tree name;
6793
6794 FOR_EACH_SSA_NAME (i, name, cfun)
6795 {
6796 /* We might have generated new SSA names in
6797 update_alias_info_with_stack_vars. They will have a NULL
6798 defining statements, and won't be part of the partitioning,
6799 so ignore those. */
6800 if (!SSA_NAME_DEF_STMT (name))
6801 continue;
6802
6803 adjust_one_expanded_partition_var (name);
6804 }
6805
6806 /* Clean up RTL of variables that straddle across multiple
6807 partitions, and check that the rtl of any PARM_DECLs that are not
6808 cleaned up is that of their default defs. */
6809 FOR_EACH_SSA_NAME (i, name, cfun)
6810 {
6811 int part;
6812
6813 /* We might have generated new SSA names in
6814 update_alias_info_with_stack_vars. They will have a NULL
6815 defining statements, and won't be part of the partitioning,
6816 so ignore those. */
6817 if (!SSA_NAME_DEF_STMT (name))
6818 continue;
6819 part = var_to_partition (SA.map, name);
6820 if (part == NO_PARTITION)
6821 continue;
6822
6823 /* If this decl was marked as living in multiple places, reset
6824 this now to NULL. */
6825 tree var = SSA_NAME_VAR (name);
6826 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6827 SET_DECL_RTL (var, NULL);
6828 /* Check that the pseudos chosen by assign_parms are those of
6829 the corresponding default defs. */
6830 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6831 && (TREE_CODE (var) == PARM_DECL
6832 || TREE_CODE (var) == RESULT_DECL))
6833 {
6834 rtx in = DECL_RTL_IF_SET (var);
6835 gcc_assert (in);
6836 rtx out = SA.partition_to_pseudo[part];
6837 gcc_assert (in == out);
6838
6839 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6840 those expected by debug backends for each parm and for
6841 the result. This is particularly important for stabs,
6842 whose register elimination from parm's DECL_RTL may cause
6843 -fcompare-debug differences as SET_DECL_RTL changes reg's
6844 attrs. So, make sure the RTL already has the parm as the
6845 EXPR, so that it won't change. */
6846 SET_DECL_RTL (var, NULL_RTX);
6847 if (MEM_P (in))
6848 set_mem_attributes (in, var, true);
6849 SET_DECL_RTL (var, in);
6850 }
6851 }
6852
6853 /* If this function is `main', emit a call to `__main'
6854 to run global initializers, etc. */
6855 if (DECL_NAME (current_function_decl)
6856 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6857 && DECL_FILE_SCOPE_P (current_function_decl))
6858 expand_main_function ();
6859
6860 /* Initialize the stack_protect_guard field. This must happen after the
6861 call to __main (if any) so that the external decl is initialized. */
6862 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6863 stack_protect_prologue ();
6864
6865 expand_phi_nodes (&SA);
6866
6867 /* Release any stale SSA redirection data. */
6868 redirect_edge_var_map_empty ();
6869
6870 /* Register rtl specific functions for cfg. */
6871 rtl_register_cfg_hooks ();
6872
6873 init_block = construct_init_block ();
6874
6875 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6876 remaining edges later. */
6877 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6878 e->flags &= ~EDGE_EXECUTABLE;
6879
6880 /* If the function has too many markers, drop them while expanding. */
6881 if (cfun->debug_marker_count
6882 >= param_max_debug_marker_count)
6883 cfun->debug_nonbind_markers = false;
6884
6885 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6886 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6887 next_bb)
6888 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6889
6890 if (MAY_HAVE_DEBUG_BIND_INSNS)
6891 expand_debug_locations ();
6892
6893 if (deep_ter_debug_map)
6894 {
6895 delete deep_ter_debug_map;
6896 deep_ter_debug_map = NULL;
6897 }
6898
6899 /* Free stuff we no longer need after GIMPLE optimizations. */
6900 free_dominance_info (CDI_DOMINATORS);
6901 free_dominance_info (CDI_POST_DOMINATORS);
6902 delete_tree_cfg_annotations (fun);
6903
6904 timevar_push (TV_OUT_OF_SSA);
6905 finish_out_of_ssa (&SA);
6906 timevar_pop (TV_OUT_OF_SSA);
6907
6908 timevar_push (TV_POST_EXPAND);
6909 /* We are no longer in SSA form. */
6910 fun->gimple_df->in_ssa_p = false;
6911 loops_state_clear (LOOP_CLOSED_SSA);
6912
6913 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6914 conservatively to true until they are all profile aware. */
6915 delete lab_rtx_for_bb;
6916 free_histograms (fun);
6917
6918 construct_exit_block ();
6919 insn_locations_finalize ();
6920
6921 if (var_ret_seq)
6922 {
6923 rtx_insn *after = return_label;
6924 rtx_insn *next = NEXT_INSN (after);
6925 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6926 after = next;
6927 emit_insn_after (var_ret_seq, after);
6928 }
6929
6930 if (hwasan_sanitize_stack_p ())
6931 hwasan_maybe_emit_frame_base_init ();
6932
6933 /* Zap the tree EH table. */
6934 set_eh_throw_stmt_table (fun, NULL);
6935
6936 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
6937 split edges which edge insertions might do. */
6938 rebuild_jump_labels (get_insns ());
6939
6940 /* If we have a single successor to the entry block, put the pending insns
6941 after parm birth, but before NOTE_INSNS_FUNCTION_BEG. */
6942 if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6943 {
6944 edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
6945 if (e->insns.r)
6946 {
6947 rtx_insn *insns = e->insns.r;
6948 e->insns.r = NULL;
6949 rebuild_jump_labels_chain (insns);
6950 if (NOTE_P (parm_birth_insn)
6951 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6952 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6953 else
6954 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6955 }
6956 }
6957
6958 /* Otherwise, as well as for other edges, take the usual way. */
6959 commit_edge_insertions ();
6960
6961 /* We're done expanding trees to RTL. */
6962 currently_expanding_to_rtl = 0;
6963
6964 flush_mark_addressable_queue ();
6965
6966 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6967 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6968 {
6969 edge e;
6970 edge_iterator ei;
6971 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6972 {
6973 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6974 e->flags &= ~EDGE_EXECUTABLE;
6975
6976 /* At the moment not all abnormal edges match the RTL
6977 representation. It is safe to remove them here as
6978 find_many_sub_basic_blocks will rediscover them.
6979 In the future we should get this fixed properly. */
6980 if ((e->flags & EDGE_ABNORMAL)
6981 && !(e->flags & EDGE_SIBCALL))
6982 remove_edge (e);
6983 else
6984 ei_next (&ei);
6985 }
6986 }
6987
6988 auto_sbitmap blocks (last_basic_block_for_fn (fun));
6989 bitmap_ones (blocks);
6990 find_many_sub_basic_blocks (blocks);
6991 purge_all_dead_edges ();
6992
6993 /* After initial rtl generation, call back to finish generating
6994 exception support code. We need to do this before cleaning up
6995 the CFG as the code does not expect dead landing pads. */
6996 if (fun->eh->region_tree != NULL)
6997 finish_eh_generation ();
6998
6999 /* Call expand_stack_alignment after finishing all
7000 updates to crtl->preferred_stack_boundary. */
7001 expand_stack_alignment ();
7002
7003 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
7004 function. */
7005 if (crtl->tail_call_emit)
7006 fixup_tail_calls ();
7007
7008 HOST_WIDE_INT patch_area_size, patch_area_entry;
7009 parse_and_check_patch_area (flag_patchable_function_entry, false,
7010 &patch_area_size, &patch_area_entry);
7011
7012 tree patchable_function_entry_attr
7013 = lookup_attribute ("patchable_function_entry",
7014 DECL_ATTRIBUTES (cfun->decl));
7015 if (patchable_function_entry_attr)
7016 {
7017 tree pp_val = TREE_VALUE (patchable_function_entry_attr);
7018 tree patchable_function_entry_value1 = TREE_VALUE (pp_val);
7019
7020 patch_area_size = tree_to_uhwi (patchable_function_entry_value1);
7021 patch_area_entry = 0;
7022 if (TREE_CHAIN (pp_val) != NULL_TREE)
7023 {
7024 tree patchable_function_entry_value2
7025 = TREE_VALUE (TREE_CHAIN (pp_val));
7026 patch_area_entry = tree_to_uhwi (patchable_function_entry_value2);
7027 }
7028 }
7029
7030 if (patch_area_entry > patch_area_size)
7031 {
7032 if (patch_area_size > 0)
7033 warning (OPT_Wattributes,
7034 "patchable function entry %wu exceeds size %wu",
7035 patch_area_entry, patch_area_size);
7036 patch_area_entry = 0;
7037 }
7038
7039 crtl->patch_area_size = patch_area_size;
7040 crtl->patch_area_entry = patch_area_entry;
7041
7042 /* BB subdivision may have created basic blocks that are only reachable
7043 from unlikely bbs but not marked as such in the profile. */
7044 if (optimize)
7045 propagate_unlikely_bbs_forward ();
7046
7047 /* Remove unreachable blocks, otherwise we cannot compute dominators
7048 which are needed for loop state verification. As a side-effect
7049 this also compacts blocks.
7050 ??? We cannot remove trivially dead insns here as for example
7051 the DRAP reg on i?86 is not magically live at this point.
7052 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
7053 cleanup_cfg (CLEANUP_NO_INSN_DEL);
7054
7055 checking_verify_flow_info ();
7056
7057 /* Initialize pseudos allocated for hard registers. */
7058 emit_initial_value_sets ();
7059
7060 /* And finally unshare all RTL. */
7061 unshare_all_rtl ();
7062
7063 /* There's no need to defer outputting this function any more; we
7064 know we want to output it. */
7065 DECL_DEFER_OUTPUT (current_function_decl) = 0;
7066
7067 /* Now that we're done expanding trees to RTL, we shouldn't have any
7068 more CONCATs anywhere. */
7069 generating_concat_p = 0;
7070
7071 if (dump_file)
7072 {
7073 fprintf (dump_file,
7074 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
7075 /* And the pass manager will dump RTL for us. */
7076 }
7077
7078 /* If we're emitting a nested function, make sure its parent gets
7079 emitted as well. Doing otherwise confuses debug info. */
7080 {
7081 tree parent;
7082 for (parent = DECL_CONTEXT (current_function_decl);
7083 parent != NULL_TREE;
7084 parent = get_containing_scope (parent))
7085 if (TREE_CODE (parent) == FUNCTION_DECL)
7086 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
7087 }
7088
7089 TREE_ASM_WRITTEN (current_function_decl) = 1;
7090
7091 /* After expanding, the return labels are no longer needed. */
7092 return_label = NULL;
7093 naked_return_label = NULL;
7094
7095 /* After expanding, the tm_restart map is no longer needed. */
7096 if (fun->gimple_df->tm_restart)
7097 fun->gimple_df->tm_restart = NULL;
7098
7099 /* Tag the blocks with a depth number so that change_scope can find
7100 the common parent easily. */
7101 set_block_levels (DECL_INITIAL (fun->decl), 0);
7102 default_rtl_profile ();
7103
7104 /* For -dx discard loops now, otherwise IL verify in clean_state will
7105 ICE. */
7106 if (rtl_dump_and_exit)
7107 {
7108 cfun->curr_properties &= ~PROP_loops;
7109 loop_optimizer_finalize ();
7110 }
7111
7112 timevar_pop (TV_POST_EXPAND);
7113
7114 return 0;
7115 }
7116
7117 } // anon namespace
7118
7119 rtl_opt_pass *
7120 make_pass_expand (gcc::context *ctxt)
7121 {
7122 return new pass_expand (ctxt);
7123 }