gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "cfghooks.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "rtl.h"
28 #include "ssa.h"
29 #include "alias.h"
30 #include "fold-const.h"
31 #include "varasm.h"
32 #include "stor-layout.h"
33 #include "stmt.h"
34 #include "print-tree.h"
35 #include "tm_p.h"
36 #include "cfgrtl.h"
37 #include "cfganal.h"
38 #include "cfgbuild.h"
39 #include "cfgcleanup.h"
40 #include "insn-codes.h"
41 #include "optabs.h"
42 #include "flags.h"
43 #include "insn-config.h"
44 #include "expmed.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "emit-rtl.h"
49 #include "expr.h"
50 #include "langhooks.h"
51 #include "internal-fn.h"
52 #include "tree-eh.h"
53 #include "gimple-iterator.h"
54 #include "gimple-walk.h"
55 #include "cgraph.h"
56 #include "tree-cfg.h"
57 #include "tree-dfa.h"
58 #include "tree-ssa.h"
59 #include "tree-pass.h"
60 #include "except.h"
61 #include "diagnostic.h"
62 #include "gimple-pretty-print.h"
63 #include "toplev.h"
64 #include "debug.h"
65 #include "params.h"
66 #include "tree-inline.h"
67 #include "value-prof.h"
68 #include "target.h"
69 #include "tree-ssa-live.h"
70 #include "tree-outof-ssa.h"
71 #include "cfgloop.h"
72 #include "regs.h" /* For reg_renumber. */
73 #include "insn-attr.h" /* For INSN_SCHEDULING. */
74 #include "asan.h"
75 #include "tree-ssa-address.h"
76 #include "recog.h"
77 #include "output.h"
78 #include "builtins.h"
79 #include "tree-chkp.h"
80 #include "rtl-chkp.h"
81
82 /* Some systems use __main in a way incompatible with its use in gcc; in these
83 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
84 give the same symbol without quotes for an alternative entry point. You
85 must define both, or neither. */
86 #ifndef NAME__MAIN
87 #define NAME__MAIN "__main"
88 #endif
89
90 /* This variable holds information helping the rewriting of SSA trees
91 into RTL. */
92 struct ssaexpand SA;
93
94 /* This variable holds the currently expanded gimple statement for purposes
95 of communicating the profile info to the builtin expanders. */
96 gimple currently_expanding_gimple_stmt;
97
98 static rtx expand_debug_expr (tree);
99
100 /* Return an expression tree corresponding to the RHS of GIMPLE
101 statement STMT. */
102
103 tree
104 gimple_assign_rhs_to_tree (gimple stmt)
105 {
106 tree t;
107 enum gimple_rhs_class grhs_class;
108
109 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
110
111 if (grhs_class == GIMPLE_TERNARY_RHS)
112 t = build3 (gimple_assign_rhs_code (stmt),
113 TREE_TYPE (gimple_assign_lhs (stmt)),
114 gimple_assign_rhs1 (stmt),
115 gimple_assign_rhs2 (stmt),
116 gimple_assign_rhs3 (stmt));
117 else if (grhs_class == GIMPLE_BINARY_RHS)
118 t = build2 (gimple_assign_rhs_code (stmt),
119 TREE_TYPE (gimple_assign_lhs (stmt)),
120 gimple_assign_rhs1 (stmt),
121 gimple_assign_rhs2 (stmt));
122 else if (grhs_class == GIMPLE_UNARY_RHS)
123 t = build1 (gimple_assign_rhs_code (stmt),
124 TREE_TYPE (gimple_assign_lhs (stmt)),
125 gimple_assign_rhs1 (stmt));
126 else if (grhs_class == GIMPLE_SINGLE_RHS)
127 {
128 t = gimple_assign_rhs1 (stmt);
129 /* Avoid modifying this tree in place below. */
130 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
131 && gimple_location (stmt) != EXPR_LOCATION (t))
132 || (gimple_block (stmt)
133 && currently_expanding_to_rtl
134 && EXPR_P (t)))
135 t = copy_node (t);
136 }
137 else
138 gcc_unreachable ();
139
140 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
141 SET_EXPR_LOCATION (t, gimple_location (stmt));
142
143 return t;
144 }
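/* For example, for a GIMPLE assignment
     x = a + b;
   gimple_expr_code is PLUS_EXPR, GRHS_CLASS is GIMPLE_BINARY_RHS, and the
   result is build2 (PLUS_EXPR, TREE_TYPE (x), a, b) -- an illustration of
   the mapping implemented above. */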
145
146
147 #ifndef STACK_ALIGNMENT_NEEDED
148 #define STACK_ALIGNMENT_NEEDED 1
149 #endif
150
151 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
152
153 /* Associate declaration T with storage space X. If T is not an
154 SSA name, this is exactly SET_DECL_RTL; otherwise make the
155 partition of T associated with X. */
156 static inline void
157 set_rtl (tree t, rtx x)
158 {
159 if (TREE_CODE (t) == SSA_NAME)
160 {
161 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
162 if (x && !MEM_P (x))
163 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
164 /* For the benefit of debug information at -O0 (where vartracking
165 doesn't run) record the place also in the base DECL if it's
166 a normal variable (not a parameter). */
167 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
168 {
169 tree var = SSA_NAME_VAR (t);
170 /* If we don't yet have something recorded, just record it now. */
171 if (!DECL_RTL_SET_P (var))
172 SET_DECL_RTL (var, x);
173 /* If we have it set already to "multiple places" don't
174 change this. */
175 else if (DECL_RTL (var) == pc_rtx)
176 ;
177 /* If we have something recorded and it's not the same place
178 as we want to record now, we have multiple partitions for the
179 same base variable, with different places. We can't just
180 randomly choose one, hence we have to say that we don't know.
181 This only happens with optimization, and there var-tracking
182 will figure out the right thing. */
183 else if (DECL_RTL (var) != x)
184 SET_DECL_RTL (var, pc_rtx);
185 }
186 }
187 else
188 SET_DECL_RTL (t, x);
189 }
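/* As an illustration of the pc_rtx convention above: if two SSA partitions
   based on the same variable V are assigned different pseudos, the second
   set_rtl call finds DECL_RTL (V) already recording a different place and
   resets it to pc_rtx, i.e. "V lives in multiple places". This only arises
   with optimization, where var-tracking recovers the locations. */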
190
191 /* This structure holds data relevant to one variable that will be
192 placed in a stack slot. */
193 struct stack_var
194 {
195 /* The variable. */
196 tree decl;
197
198 /* Initially, the size of the variable. Later, the size of the partition,
199 if this variable becomes its partition's representative. */
200 HOST_WIDE_INT size;
201
202 /* The *byte* alignment required for this variable. Or, as with the
203 size, the alignment for this partition. */
204 unsigned int alignb;
205
206 /* The partition representative. */
207 size_t representative;
208
209 /* The next stack variable in the partition, or EOC. */
210 size_t next;
211
212 /* The indices of conflicting stack variables. */
213 bitmap conflicts;
214 };
215
216 #define EOC ((size_t)-1)
217
218 /* We have an array of such objects while deciding allocation. */
219 static struct stack_var *stack_vars;
220 static size_t stack_vars_alloc;
221 static size_t stack_vars_num;
222 static hash_map<tree, size_t> *decl_to_stack_part;
223
224 /* Conflict bitmaps go on this obstack. This allows us to destroy
225 all of them in one big sweep. */
226 static bitmap_obstack stack_var_bitmap_obstack;
227
228 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
229 is non-increasing. */
230 static size_t *stack_vars_sorted;
231
232 /* The phase of the stack frame. This is the known misalignment of
233 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
234 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
235 static int frame_phase;
236
237 /* Used during expand_used_vars to remember if we saw any decls for
238 which we'd like to enable stack smashing protection. */
239 static bool has_protected_decls;
240
241 /* Used during expand_used_vars. Remember if we saw a character buffer
242 smaller than our cutoff threshold. Used for -Wstack-protector. */
243 static bool has_short_buffer;
244
245 /* Compute the byte alignment to use for DECL. Ignore the alignment
246 we can't honor given the expected alignment of the stack boundary. */
247
248 static unsigned int
249 align_local_variable (tree decl)
250 {
251 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
252 DECL_ALIGN (decl) = align;
253 return align / BITS_PER_UNIT;
254 }
255
256 /* Align given offset BASE with ALIGN. Round up if ALIGN_UP is true,
257 down otherwise. Return the aligned BASE value. */
258
259 static inline unsigned HOST_WIDE_INT
260 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
261 {
262 return align_up ? (base + align - 1) & -align : base & -align;
263 }
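/* Worked examples, assuming ALIGN is a power of two:
     align_base (23, 8, true) == (23 + 7) & -8 == 24
     align_base (23, 8, false) == 23 & -8 == 16
   and, for a downward-growing frame with negative offsets:
     align_base (-23, 8, false) == -24. */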
264
265 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
266 Return the frame offset. */
267
268 static HOST_WIDE_INT
269 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
270 {
271 HOST_WIDE_INT offset, new_frame_offset;
272
273 if (FRAME_GROWS_DOWNWARD)
274 {
275 new_frame_offset
276 = align_base (frame_offset - frame_phase - size,
277 align, false) + frame_phase;
278 offset = new_frame_offset;
279 }
280 else
281 {
282 new_frame_offset
283 = align_base (frame_offset - frame_phase, align, true) + frame_phase;
284 offset = new_frame_offset;
285 new_frame_offset += size;
286 }
287 frame_offset = new_frame_offset;
288
289 if (frame_offset_overflow (frame_offset, cfun->decl))
290 frame_offset = offset = 0;
291
292 return offset;
293 }
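/* For example, if FRAME_GROWS_DOWNWARD, frame_phase == 0 and
   frame_offset == -12, allocating SIZE == 8 at ALIGN == 8 computes
   align_base (-12 - 8, 8, false) == -24: the new slot occupies
   [-24, -16) relative to virtual_stack_vars_rtx and frame_offset
   becomes -24. */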
294
295 /* Accumulate DECL into STACK_VARS. */
296
297 static void
298 add_stack_var (tree decl)
299 {
300 struct stack_var *v;
301
302 if (stack_vars_num >= stack_vars_alloc)
303 {
304 if (stack_vars_alloc)
305 stack_vars_alloc = stack_vars_alloc * 3 / 2;
306 else
307 stack_vars_alloc = 32;
308 stack_vars
309 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
310 }
311 if (!decl_to_stack_part)
312 decl_to_stack_part = new hash_map<tree, size_t>;
313
314 v = &stack_vars[stack_vars_num];
315 decl_to_stack_part->put (decl, stack_vars_num);
316
317 v->decl = decl;
318 v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
319 /* Ensure that all variables have size, so that &a != &b for any two
320 variables that are simultaneously live. */
321 if (v->size == 0)
322 v->size = 1;
323 v->alignb = align_local_variable (SSAVAR (decl));
324 /* An alignment of zero can mightily confuse us later. */
325 gcc_assert (v->alignb != 0);
326
327 /* All variables are initially in their own partition. */
328 v->representative = stack_vars_num;
329 v->next = EOC;
330
331 /* All variables initially conflict with no other. */
332 v->conflicts = NULL;
333
334 /* Ensure that this decl doesn't get put onto the list twice. */
335 set_rtl (decl, pc_rtx);
336
337 stack_vars_num++;
338 }
339
340 /* Make the decls associated with stack-var indices X and Y conflict. */
341
342 static void
343 add_stack_var_conflict (size_t x, size_t y)
344 {
345 struct stack_var *a = &stack_vars[x];
346 struct stack_var *b = &stack_vars[y];
347 if (!a->conflicts)
348 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
349 if (!b->conflicts)
350 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
351 bitmap_set_bit (a->conflicts, y);
352 bitmap_set_bit (b->conflicts, x);
353 }
354
355 /* Check whether the decls associated with stack-var indices X and Y conflict. */
356
357 static bool
358 stack_var_conflict_p (size_t x, size_t y)
359 {
360 struct stack_var *a = &stack_vars[x];
361 struct stack_var *b = &stack_vars[y];
362 if (x == y)
363 return false;
364 /* Partitions containing an SSA name result from gimple registers
365 with things like unsupported modes. They are top-level and
366 hence conflict with everything else. */
367 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
368 return true;
369
370 if (!a->conflicts || !b->conflicts)
371 return false;
372 return bitmap_bit_p (a->conflicts, y);
373 }
374
375 /* Callback for walk_stmt_load_store_addr_ops. If OP is a decl touched
376 by add_stack_var, enter its partition number into bitmap DATA. */
377
378 static bool
379 visit_op (gimple, tree op, tree, void *data)
380 {
381 bitmap active = (bitmap)data;
382 op = get_base_address (op);
383 if (op
384 && DECL_P (op)
385 && DECL_RTL_IF_SET (op) == pc_rtx)
386 {
387 size_t *v = decl_to_stack_part->get (op);
388 if (v)
389 bitmap_set_bit (active, *v);
390 }
391 return false;
392 }
393
394 /* Callback for walk_stmt_load_store_addr_ops. If OP is a decl touched
395 by add_stack_var, record conflicts between it and all currently active
396 other partitions from bitmap DATA. */
397
398 static bool
399 visit_conflict (gimple, tree op, tree, void *data)
400 {
401 bitmap active = (bitmap)data;
402 op = get_base_address (op);
403 if (op
404 && DECL_P (op)
405 && DECL_RTL_IF_SET (op) == pc_rtx)
406 {
407 size_t *v = decl_to_stack_part->get (op);
408 if (v && bitmap_set_bit (active, *v))
409 {
410 size_t num = *v;
411 bitmap_iterator bi;
412 unsigned i;
413 gcc_assert (num < stack_vars_num);
414 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
415 add_stack_var_conflict (num, i);
416 }
417 }
418 return false;
419 }
420
421 /* Helper routine for add_scope_conflicts, calculating the active partitions
422 at the end of BB, leaving the result in WORK. We're called to generate
423 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
424 liveness. */
425
426 static void
427 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
428 {
429 edge e;
430 edge_iterator ei;
431 gimple_stmt_iterator gsi;
432 walk_stmt_load_store_addr_fn visit;
433
434 bitmap_clear (work);
435 FOR_EACH_EDGE (e, ei, bb->preds)
436 bitmap_ior_into (work, (bitmap)e->src->aux);
437
438 visit = visit_op;
439
440 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
441 {
442 gimple stmt = gsi_stmt (gsi);
443 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
444 }
445 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
446 {
447 gimple stmt = gsi_stmt (gsi);
448
449 if (gimple_clobber_p (stmt))
450 {
451 tree lhs = gimple_assign_lhs (stmt);
452 size_t *v;
453 /* Nested function lowering might introduce LHSs
454 that are COMPONENT_REFs. */
455 if (TREE_CODE (lhs) != VAR_DECL)
456 continue;
457 if (DECL_RTL_IF_SET (lhs) == pc_rtx
458 && (v = decl_to_stack_part->get (lhs)))
459 bitmap_clear_bit (work, *v);
460 }
461 else if (!is_gimple_debug (stmt))
462 {
463 if (for_conflict
464 && visit == visit_op)
465 {
466 /* If this is the first real instruction in this BB we need
467 to add conflicts for everything live at this point now.
468 Unlike classical liveness for named objects we can't
469 rely on seeing a def/use of the names we're interested in.
470 There might merely be indirect loads/stores. We'd not add any
471 conflicts for such partitions. */
472 bitmap_iterator bi;
473 unsigned i;
474 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
475 {
476 struct stack_var *a = &stack_vars[i];
477 if (!a->conflicts)
478 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
479 bitmap_ior_into (a->conflicts, work);
480 }
481 visit = visit_conflict;
482 }
483 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
484 }
485 }
486 }
487
488 /* Generate stack partition conflicts between all partitions that are
489 simultaneously live. */
490
491 static void
492 add_scope_conflicts (void)
493 {
494 basic_block bb;
495 bool changed;
496 bitmap work = BITMAP_ALLOC (NULL);
497 int *rpo;
498 int n_bbs;
499
500 /* We approximate the live range of a stack variable by taking the first
501 mention of its name as starting point(s), and by the end-of-scope
502 death clobber added by gimplify as ending point(s) of the range.
503 This over-approximates if we, for instance, moved an address-taken
504 operation upward without also moving a dereference of it upward.
505 But it's conservatively correct, as a variable can never hold values
506 before its name is mentioned at least once.
507
508 We then do a mostly classical bitmap liveness algorithm. */
509
510 FOR_ALL_BB_FN (bb, cfun)
511 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
512
513 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
514 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
515
516 changed = true;
517 while (changed)
518 {
519 int i;
520 changed = false;
521 for (i = 0; i < n_bbs; i++)
522 {
523 bitmap active;
524 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
525 active = (bitmap)bb->aux;
526 add_scope_conflicts_1 (bb, work, false);
527 if (bitmap_ior_into (active, work))
528 changed = true;
529 }
530 }
531
532 FOR_EACH_BB_FN (bb, cfun)
533 add_scope_conflicts_1 (bb, work, true);
534
535 free (rpo);
536 BITMAP_FREE (work);
537 FOR_ALL_BB_FN (bb, cfun)
538 BITMAP_FREE (bb->aux);
539 }
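/* For example, given two disjoint scopes
     { char a[256]; f (a); }
     { char b[256]; g (b); }
   gimplify emits "a = {CLOBBER};" at the end of the first scope, which
   clears A's bit from WORK before B becomes live. No conflict is
   recorded, so partition_stack_vars below is free to fold A and B into
   a single 256-byte stack slot. */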
540
541 /* A subroutine of partition_stack_vars. A comparison function for qsort,
542 sorting an array of indices by the properties of the object. */
543
544 static int
545 stack_var_cmp (const void *a, const void *b)
546 {
547 size_t ia = *(const size_t *)a;
548 size_t ib = *(const size_t *)b;
549 unsigned int aligna = stack_vars[ia].alignb;
550 unsigned int alignb = stack_vars[ib].alignb;
551 HOST_WIDE_INT sizea = stack_vars[ia].size;
552 HOST_WIDE_INT sizeb = stack_vars[ib].size;
553 tree decla = stack_vars[ia].decl;
554 tree declb = stack_vars[ib].decl;
555 bool largea, largeb;
556 unsigned int uida, uidb;
557
558 /* Primary compare on "large" alignment. Large comes first. */
559 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
560 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
561 if (largea != largeb)
562 return (int)largeb - (int)largea;
563
564 /* Secondary compare on size, decreasing. */
565 if (sizea > sizeb)
566 return -1;
567 if (sizea < sizeb)
568 return 1;
569
570 /* Tertiary compare on true alignment, increasing. */
571 if (aligna < alignb)
572 return -1;
573 if (aligna > alignb)
574 return 1;
575
576 /* Final compare on ID for sort stability, decreasing.
577 Two SSA names are compared by their version, SSA names come before
578 non-SSA names, and two normal decls are compared by their DECL_UID. */
579 if (TREE_CODE (decla) == SSA_NAME)
580 {
581 if (TREE_CODE (declb) == SSA_NAME)
582 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
583 else
584 return -1;
585 }
586 else if (TREE_CODE (declb) == SSA_NAME)
587 return 1;
588 else
589 uida = DECL_UID (decla), uidb = DECL_UID (declb);
590 if (uida < uidb)
591 return 1;
592 if (uida > uidb)
593 return -1;
594 return 0;
595 }
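/* For example, among "small"-alignment variables A (size 16) and
   B (size 32), B sorts before A because size compares second and
   decreasing, while any variable whose alignment exceeds
   MAX_SUPPORTED_STACK_ALIGNMENT sorts before both. */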
596
597 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
598 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
599
600 /* If the points-to solution *PT points to variables that are in a partition
601 together with other variables, add all partition members to the pointed-to
602 variables bitmap. */
603
604 static void
605 add_partitioned_vars_to_ptset (struct pt_solution *pt,
606 part_hashmap *decls_to_partitions,
607 hash_set<bitmap> *visited, bitmap temp)
608 {
609 bitmap_iterator bi;
610 unsigned i;
611 bitmap *part;
612
613 if (pt->anything
614 || pt->vars == NULL
615 /* The pointed-to vars bitmap is shared; it is enough to
616 visit it once. */
617 || visited->add (pt->vars))
618 return;
619
620 bitmap_clear (temp);
621
622 /* By using a temporary bitmap to store all members of the partitions
623 we have to add, we make sure to visit each of the partitions only
624 once. */
625 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
626 if ((!temp
627 || !bitmap_bit_p (temp, i))
628 && (part = decls_to_partitions->get (i)))
629 bitmap_ior_into (temp, *part);
630 if (!bitmap_empty_p (temp))
631 bitmap_ior_into (pt->vars, temp);
632 }
633
634 /* Update points-to sets based on partition info, so we can use them on RTL.
635 The bitmaps representing stack partitions will be saved until expand,
636 where partitioned decls used as bases in memory expressions will be
637 rewritten. */
638
639 static void
640 update_alias_info_with_stack_vars (void)
641 {
642 part_hashmap *decls_to_partitions = NULL;
643 size_t i, j;
644 tree var = NULL_TREE;
645
646 for (i = 0; i < stack_vars_num; i++)
647 {
648 bitmap part = NULL;
649 tree name;
650 struct ptr_info_def *pi;
651
652 /* Not interested in partitions with a single variable. */
653 if (stack_vars[i].representative != i
654 || stack_vars[i].next == EOC)
655 continue;
656
657 if (!decls_to_partitions)
658 {
659 decls_to_partitions = new part_hashmap;
660 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
661 }
662
663 /* Create an SSA_NAME that points to the partition for use
664 as base during alias-oracle queries on RTL for bases that
665 have been partitioned. */
666 if (var == NULL_TREE)
667 var = create_tmp_var (ptr_type_node);
668 name = make_ssa_name (var);
669
670 /* Create bitmaps representing partitions. They will be used for
671 points-to sets later, so use GGC alloc. */
672 part = BITMAP_GGC_ALLOC ();
673 for (j = i; j != EOC; j = stack_vars[j].next)
674 {
675 tree decl = stack_vars[j].decl;
676 unsigned int uid = DECL_PT_UID (decl);
677 bitmap_set_bit (part, uid);
678 decls_to_partitions->put (uid, part);
679 cfun->gimple_df->decls_to_pointers->put (decl, name);
680 if (TREE_ADDRESSABLE (decl))
681 TREE_ADDRESSABLE (name) = 1;
682 }
683
684 /* Make the SSA name point to all partition members. */
685 pi = get_ptr_info (name);
686 pt_solution_set (&pi->pt, part, false);
687 }
688
689 /* Make all points-to sets that contain one member of a partition
690 contain all members of the partition. */
691 if (decls_to_partitions)
692 {
693 unsigned i;
694 hash_set<bitmap> visited;
695 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
696
697 for (i = 1; i < num_ssa_names; i++)
698 {
699 tree name = ssa_name (i);
700 struct ptr_info_def *pi;
701
702 if (name
703 && POINTER_TYPE_P (TREE_TYPE (name))
704 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
705 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
706 &visited, temp);
707 }
708
709 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
710 decls_to_partitions, &visited, temp);
711
712 delete decls_to_partitions;
713 BITMAP_FREE (temp);
714 }
715 }
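/* For example, if decls A and B were merged into one partition above, a
   fresh SSA pointer NAME is created whose points-to set is {A, B}; any
   points-to set (including the escaped solution) that mentioned only A
   is widened to also contain B, so RTL alias queries stay conservative
   once A and B share a slot. */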
716
717 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
718 partitioning algorithm. Partitions A and B are known to be non-conflicting.
719 Merge them into a single partition A. */
720
721 static void
722 union_stack_vars (size_t a, size_t b)
723 {
724 struct stack_var *vb = &stack_vars[b];
725 bitmap_iterator bi;
726 unsigned u;
727
728 gcc_assert (stack_vars[b].next == EOC);
729 /* Add B to A's partition. */
730 stack_vars[b].next = stack_vars[a].next;
731 stack_vars[b].representative = a;
732 stack_vars[a].next = b;
733
734 /* Update the required alignment of partition A to account for B. */
735 if (stack_vars[a].alignb < stack_vars[b].alignb)
736 stack_vars[a].alignb = stack_vars[b].alignb;
737
738 /* Update the interference graph and merge the conflicts. */
739 if (vb->conflicts)
740 {
741 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
742 add_stack_var_conflict (a, stack_vars[u].representative);
743 BITMAP_FREE (vb->conflicts);
744 }
745 }
746
747 /* A subroutine of expand_used_vars. Binpack the variables into
748 partitions constrained by the interference graph. The overall
749 algorithm used is as follows:
750
751 Sort the objects by size in descending order.
752 For each object A {
753 S = size(A)
754 O = 0
755 loop {
756 Look for the largest non-conflicting object B with size <= S.
757 UNION (A, B)
758 }
759 }
760 */
761
762 static void
763 partition_stack_vars (void)
764 {
765 size_t si, sj, n = stack_vars_num;
766
767 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
768 for (si = 0; si < n; ++si)
769 stack_vars_sorted[si] = si;
770
771 if (n == 1)
772 return;
773
774 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
775
776 for (si = 0; si < n; ++si)
777 {
778 size_t i = stack_vars_sorted[si];
779 unsigned int ialign = stack_vars[i].alignb;
780 HOST_WIDE_INT isize = stack_vars[i].size;
781
782 /* Ignore objects that aren't partition representatives. If we
783 see a var that is not a partition representative, it must
784 have been merged earlier. */
785 if (stack_vars[i].representative != i)
786 continue;
787
788 for (sj = si + 1; sj < n; ++sj)
789 {
790 size_t j = stack_vars_sorted[sj];
791 unsigned int jalign = stack_vars[j].alignb;
792 HOST_WIDE_INT jsize = stack_vars[j].size;
793
794 /* Ignore objects that aren't partition representatives. */
795 if (stack_vars[j].representative != j)
796 continue;
797
798 /* Do not mix objects of "small" (supported) alignment
799 and "large" (unsupported) alignment. */
800 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
801 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
802 break;
803
804 /* For Address Sanitizer do not mix objects with different
805 sizes, as the shorter vars wouldn't be adequately protected.
806 Don't do that for "large" (unsupported) alignment objects,
807 those aren't protected anyway. */
808 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
809 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
810 break;
811
812 /* Ignore conflicting objects. */
813 if (stack_var_conflict_p (i, j))
814 continue;
815
816 /* UNION the objects, placing J at OFFSET. */
817 union_stack_vars (i, j);
818 }
819 }
820
821 update_alias_info_with_stack_vars ();
822 }
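/* For example, with three representatives of sizes 32, 16 and 8 where
   only the 32- and 16-byte variables conflict, the sorted order is
   32, 16, 8; the 16-byte variable is skipped (conflict) and the 8-byte
   variable is unioned into the 32-byte partition, giving partitions
   {32, 8} and {16} and a frame of 48 rather than 56 bytes. */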
823
824 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
825
826 static void
827 dump_stack_var_partition (void)
828 {
829 size_t si, i, j, n = stack_vars_num;
830
831 for (si = 0; si < n; ++si)
832 {
833 i = stack_vars_sorted[si];
834
835 /* Skip variables that aren't partition representatives, for now. */
836 if (stack_vars[i].representative != i)
837 continue;
838
839 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
840 " align %u\n", (unsigned long) i, stack_vars[i].size,
841 stack_vars[i].alignb);
842
843 for (j = i; j != EOC; j = stack_vars[j].next)
844 {
845 fputc ('\t', dump_file);
846 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
847 }
848 fputc ('\n', dump_file);
849 }
850 }
851
852 /* Assign rtl to DECL at BASE + OFFSET. */
853
854 static void
855 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
856 HOST_WIDE_INT offset)
857 {
858 unsigned align;
859 rtx x;
860
861 /* If this fails, we've overflowed the stack frame. Error nicely? */
862 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
863
864 x = plus_constant (Pmode, base, offset);
865 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
866
867 if (TREE_CODE (decl) != SSA_NAME)
868 {
869 /* Set the alignment we actually gave this decl if it isn't an SSA name.
870 If it is, we generate stack slots only accidentally, so it isn't as
871 important; we'll simply use the alignment that is already set. */
872 if (base == virtual_stack_vars_rtx)
873 offset -= frame_phase;
874 align = offset & -offset;
875 align *= BITS_PER_UNIT;
876 if (align == 0 || align > base_align)
877 align = base_align;
878
879 /* One would think that we could assert that we're not decreasing
880 alignment here, but (at least) the i386 port does exactly this
881 via the MINIMUM_ALIGNMENT hook. */
882
883 DECL_ALIGN (decl) = align;
884 DECL_USER_ALIGN (decl) = 0;
885 }
886
887 set_mem_attributes (x, SSAVAR (decl), true);
888 set_rtl (decl, x);
889 }
890
891 struct stack_vars_data
892 {
893 /* Vector of offset pairs, each being the end of some padding followed
894 by the start of the padding that needs Address Sanitizer protection.
895 The vector is in reverse order; highest-offset pairs come first. */
896 vec<HOST_WIDE_INT> asan_vec;
897
898 /* Vector of partition representative decls in between the paddings. */
899 vec<tree> asan_decl_vec;
900
901 /* Base pseudo register for Address Sanitizer protected automatic vars. */
902 rtx asan_base;
903
904 /* Alignment needed for the Address Sanitizer protected automatic vars. */
905 unsigned int asan_alignb;
906 };
907
908 /* A subroutine of expand_used_vars. Give each partition representative
909 a unique location within the stack frame. Update each partition member
910 with that location. */
911
912 static void
913 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
914 {
915 size_t si, i, j, n = stack_vars_num;
916 HOST_WIDE_INT large_size = 0, large_alloc = 0;
917 rtx large_base = NULL;
918 unsigned large_align = 0;
919 tree decl;
920
921 /* Determine if there are any variables requiring "large" alignment.
922 Since these are dynamically allocated, we only process them if
923 no predicate is involved. */
924 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
925 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
926 {
927 /* Find the total size of these variables. */
928 for (si = 0; si < n; ++si)
929 {
930 unsigned alignb;
931
932 i = stack_vars_sorted[si];
933 alignb = stack_vars[i].alignb;
934
935 /* All "large" alignment decls come before all "small" alignment
936 decls, but "large" alignment decls are not sorted based on
937 their alignment. Increase large_align to track the largest
938 required alignment. */
939 if ((alignb * BITS_PER_UNIT) > large_align)
940 large_align = alignb * BITS_PER_UNIT;
941
942 /* Stop when we get to the first decl with "small" alignment. */
943 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
944 break;
945
946 /* Skip variables that aren't partition representatives. */
947 if (stack_vars[i].representative != i)
948 continue;
949
950 /* Skip variables that have already had rtl assigned. See also
951 add_stack_var where we perpetrate this pc_rtx hack. */
952 decl = stack_vars[i].decl;
953 if ((TREE_CODE (decl) == SSA_NAME
954 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
955 : DECL_RTL (decl)) != pc_rtx)
956 continue;
957
958 large_size += alignb - 1;
959 large_size &= -(HOST_WIDE_INT)alignb;
960 large_size += stack_vars[i].size;
961 }
962
963 /* If there were any, allocate space. */
964 if (large_size > 0)
965 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
966 large_align, true);
967 }
968
969 for (si = 0; si < n; ++si)
970 {
971 rtx base;
972 unsigned base_align, alignb;
973 HOST_WIDE_INT offset;
974
975 i = stack_vars_sorted[si];
976
977 /* Skip variables that aren't partition representatives, for now. */
978 if (stack_vars[i].representative != i)
979 continue;
980
981 /* Skip variables that have already had rtl assigned. See also
982 add_stack_var where we perpetrate this pc_rtx hack. */
983 decl = stack_vars[i].decl;
984 if ((TREE_CODE (decl) == SSA_NAME
985 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
986 : DECL_RTL (decl)) != pc_rtx)
987 continue;
988
989 /* Check the predicate to see whether this variable should be
990 allocated in this pass. */
991 if (pred && !pred (i))
992 continue;
993
994 alignb = stack_vars[i].alignb;
995 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
996 {
997 base = virtual_stack_vars_rtx;
998 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
999 {
1000 HOST_WIDE_INT prev_offset
1001 = align_base (frame_offset,
1002 MAX (alignb, ASAN_RED_ZONE_SIZE),
1003 FRAME_GROWS_DOWNWARD);
1004 tree repr_decl = NULL_TREE;
1005 offset
1006 = alloc_stack_frame_space (stack_vars[i].size
1007 + ASAN_RED_ZONE_SIZE,
1008 MAX (alignb, ASAN_RED_ZONE_SIZE));
1009
1010 data->asan_vec.safe_push (prev_offset);
1011 data->asan_vec.safe_push (offset + stack_vars[i].size);
1012 /* Find the best representative of the partition.
1013 Prefer those with DECL_NAME, better still those also
1014 satisfying the asan_protect_stack_decl predicate. */
1015 for (j = i; j != EOC; j = stack_vars[j].next)
1016 if (asan_protect_stack_decl (stack_vars[j].decl)
1017 && DECL_NAME (stack_vars[j].decl))
1018 {
1019 repr_decl = stack_vars[j].decl;
1020 break;
1021 }
1022 else if (repr_decl == NULL_TREE
1023 && DECL_P (stack_vars[j].decl)
1024 && DECL_NAME (stack_vars[j].decl))
1025 repr_decl = stack_vars[j].decl;
1026 if (repr_decl == NULL_TREE)
1027 repr_decl = stack_vars[i].decl;
1028 data->asan_decl_vec.safe_push (repr_decl);
1029 data->asan_alignb = MAX (data->asan_alignb, alignb);
1030 if (data->asan_base == NULL)
1031 data->asan_base = gen_reg_rtx (Pmode);
1032 base = data->asan_base;
1033
1034 if (!STRICT_ALIGNMENT)
1035 base_align = crtl->max_used_stack_slot_alignment;
1036 else
1037 base_align = MAX (crtl->max_used_stack_slot_alignment,
1038 GET_MODE_ALIGNMENT (SImode)
1039 << ASAN_SHADOW_SHIFT);
1040 }
1041 else
1042 {
1043 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1044 base_align = crtl->max_used_stack_slot_alignment;
1045 }
1046 }
1047 else
1048 {
1049 /* Large alignment is only processed in the last pass. */
1050 if (pred)
1051 continue;
1052 gcc_assert (large_base != NULL);
1053
1054 large_alloc += alignb - 1;
1055 large_alloc &= -(HOST_WIDE_INT)alignb;
1056 offset = large_alloc;
1057 large_alloc += stack_vars[i].size;
1058
1059 base = large_base;
1060 base_align = large_align;
1061 }
1062
1063 /* Create rtl for each variable based on their location within the
1064 partition. */
1065 for (j = i; j != EOC; j = stack_vars[j].next)
1066 {
1067 expand_one_stack_var_at (stack_vars[j].decl,
1068 base, base_align,
1069 offset);
1070 }
1071 }
1072
1073 gcc_assert (large_alloc == large_size);
1074 }
1075
1076 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1077 static HOST_WIDE_INT
1078 account_stack_vars (void)
1079 {
1080 size_t si, j, i, n = stack_vars_num;
1081 HOST_WIDE_INT size = 0;
1082
1083 for (si = 0; si < n; ++si)
1084 {
1085 i = stack_vars_sorted[si];
1086
1087 /* Skip variables that aren't partition representatives, for now. */
1088 if (stack_vars[i].representative != i)
1089 continue;
1090
1091 size += stack_vars[i].size;
1092 for (j = i; j != EOC; j = stack_vars[j].next)
1093 set_rtl (stack_vars[j].decl, NULL);
1094 }
1095 return size;
1096 }
1097
1098 /* A subroutine of expand_one_var. Called to immediately assign rtl
1099 to a variable to be allocated in the stack frame. */
1100
1101 static void
1102 expand_one_stack_var (tree var)
1103 {
1104 HOST_WIDE_INT size, offset;
1105 unsigned byte_align;
1106
1107 size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
1108 byte_align = align_local_variable (SSAVAR (var));
1109
1110 /* We handle highly aligned variables in expand_stack_vars. */
1111 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1112
1113 offset = alloc_stack_frame_space (size, byte_align);
1114
1115 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1116 crtl->max_used_stack_slot_alignment, offset);
1117 }
1118
1119 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1120 that will reside in a hard register. */
1121
1122 static void
1123 expand_one_hard_reg_var (tree var)
1124 {
1125 rest_of_decl_compilation (var, 0, 0);
1126 }
1127
1128 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1129 that will reside in a pseudo register. */
1130
1131 static void
1132 expand_one_register_var (tree var)
1133 {
1134 tree decl = SSAVAR (var);
1135 tree type = TREE_TYPE (decl);
1136 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1137 rtx x = gen_reg_rtx (reg_mode);
1138
1139 set_rtl (var, x);
1140
1141 /* Note if the object is a user variable. */
1142 if (!DECL_ARTIFICIAL (decl))
1143 mark_user_reg (x);
1144
1145 if (POINTER_TYPE_P (type))
1146 mark_reg_pointer (x, get_pointer_alignment (var));
1147 }
1148
1149 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1150 has some associated error, e.g. its type is error-mark. We just need
1151 to pick something that won't crash the rest of the compiler. */
1152
1153 static void
1154 expand_one_error_var (tree var)
1155 {
1156 machine_mode mode = DECL_MODE (var);
1157 rtx x;
1158
1159 if (mode == BLKmode)
1160 x = gen_rtx_MEM (BLKmode, const0_rtx);
1161 else if (mode == VOIDmode)
1162 x = const0_rtx;
1163 else
1164 x = gen_reg_rtx (mode);
1165
1166 SET_DECL_RTL (var, x);
1167 }
1168
1169 /* A subroutine of expand_one_var. VAR is a variable that will be
1170 allocated to the local stack frame. Return true if we wish to
1171 add VAR to STACK_VARS so that it will be coalesced with other
1172 variables. Return false to allocate VAR immediately.
1173
1174 This function is used to reduce the number of variables considered
1175 for coalescing, which reduces the size of the quadratic problem. */
1176
1177 static bool
1178 defer_stack_allocation (tree var, bool toplevel)
1179 {
1180 /* Whether the variable is small enough for immediate allocation not to be
1181 a problem with regard to the frame size. */
1182 bool smallish
1183 = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
1184 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1185
1186 /* If stack protection is enabled, *all* stack variables must be deferred,
1187 so that we can re-order the strings to the top of the frame.
1188 Similarly for Address Sanitizer. */
1189 if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
1190 return true;
1191
1192 /* We handle "large" alignment via dynamic allocation. We want to handle
1193 this extra complication in only one place, so defer them. */
1194 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1195 return true;
1196
1197 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1198 might be detached from their block and appear at toplevel when we reach
1199 here. We want to coalesce them with variables from other blocks when
1200 the immediate contribution to the frame size would be noticeable. */
1201 if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
1202 return true;
1203
1204 /* Variables declared in the outermost scope automatically conflict
1205 with every other variable. The only reason to want to defer them
1206 at all is that, after sorting, we can more efficiently pack
1207 small variables in the stack frame. Continue to defer at -O2. */
1208 if (toplevel && optimize < 2)
1209 return false;
1210
1211 /* Without optimization, *most* variables are allocated from the
1212 stack, which makes the quadratic problem large exactly when we
1213 want compilation to proceed as quickly as possible. On the
1214 other hand, we don't want the function's stack frame size to
1215 get completely out of hand. So we avoid adding scalars and
1216 "small" aggregates to the list at all. */
1217 if (optimize == 0 && smallish)
1218 return false;
1219
1220 return true;
1221 }
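/* For example, at -O0 a scalar smaller than
   PARAM_MIN_SIZE_FOR_STACK_SHARING is allocated immediately (false),
   keeping the quadratic packing problem small, whereas with
   -fstack-protector or ASan stack instrumentation every candidate is
   deferred (true) so the frame can be reordered safely. */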
1222
1223 /* A subroutine of expand_used_vars. Expand one variable according to
1224 its flavor. Variables to be placed on the stack are not actually
1225 expanded yet, merely recorded.
1226 When REALLY_EXPAND is false, only add stack variables to be allocated.
1227 Return the stack usage this variable is supposed to take. */
1229
1230 static HOST_WIDE_INT
1231 expand_one_var (tree var, bool toplevel, bool really_expand)
1232 {
1233 unsigned int align = BITS_PER_UNIT;
1234 tree origvar = var;
1235
1236 var = SSAVAR (var);
1237
1238 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1239 {
1240 /* Because we don't know if VAR will be in register or on stack,
1241 we conservatively assume it will be on stack even if VAR is
1242 eventually put into register after RA pass. For non-automatic
1243 variables, which won't be on stack, we collect alignment of
1244 type and ignore user specified alignment. Similarly for
1245 SSA_NAMEs for which use_register_for_decl returns true. */
1246 if (TREE_STATIC (var)
1247 || DECL_EXTERNAL (var)
1248 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1249 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1250 TYPE_MODE (TREE_TYPE (var)),
1251 TYPE_ALIGN (TREE_TYPE (var)));
1252 else if (DECL_HAS_VALUE_EXPR_P (var)
1253 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1254 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1255 or variables which were assigned a stack slot already by
1256 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1257 changed according to the offset chosen for it. */
1258 align = crtl->stack_alignment_estimated;
1259 else
1260 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1261
1262 /* If the variable alignment is very large we'll dynamically allocate
1263 it, which means that the in-frame portion is just a pointer. */
1264 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1265 align = POINTER_SIZE;
1266 }
1267
1268 if (SUPPORTS_STACK_ALIGNMENT
1269 && crtl->stack_alignment_estimated < align)
1270 {
1271 /* stack_alignment_estimated shouldn't change after the stack
1272 realign decision has been made. */
1273 gcc_assert (!crtl->stack_realign_processed);
1274 crtl->stack_alignment_estimated = align;
1275 }
1276
1277 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1278 So here we only make sure stack_alignment_needed >= align. */
1279 if (crtl->stack_alignment_needed < align)
1280 crtl->stack_alignment_needed = align;
1281 if (crtl->max_used_stack_slot_alignment < align)
1282 crtl->max_used_stack_slot_alignment = align;
1283
1284 if (TREE_CODE (origvar) == SSA_NAME)
1285 {
1286 gcc_assert (TREE_CODE (var) != VAR_DECL
1287 || (!DECL_EXTERNAL (var)
1288 && !DECL_HAS_VALUE_EXPR_P (var)
1289 && !TREE_STATIC (var)
1290 && TREE_TYPE (var) != error_mark_node
1291 && !DECL_HARD_REGISTER (var)
1292 && really_expand));
1293 }
1294 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1295 ;
1296 else if (DECL_EXTERNAL (var))
1297 ;
1298 else if (DECL_HAS_VALUE_EXPR_P (var))
1299 ;
1300 else if (TREE_STATIC (var))
1301 ;
1302 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1303 ;
1304 else if (TREE_TYPE (var) == error_mark_node)
1305 {
1306 if (really_expand)
1307 expand_one_error_var (var);
1308 }
1309 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1310 {
1311 if (really_expand)
1312 {
1313 expand_one_hard_reg_var (var);
1314 if (!DECL_HARD_REGISTER (var))
1315 /* Invalid register specification. */
1316 expand_one_error_var (var);
1317 }
1318 }
1319 else if (use_register_for_decl (var))
1320 {
1321 if (really_expand)
1322 expand_one_register_var (origvar);
1323 }
1324 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1325 {
1326 /* Reject variables which cover more than half of the address-space. */
1327 if (really_expand)
1328 {
1329 error ("size of variable %q+D is too large", var);
1330 expand_one_error_var (var);
1331 }
1332 }
1333 else if (defer_stack_allocation (var, toplevel))
1334 add_stack_var (origvar);
1335 else
1336 {
1337 if (really_expand)
1338 expand_one_stack_var (origvar);
1339 return tree_to_uhwi (DECL_SIZE_UNIT (var));
1340 }
1341 return 0;
1342 }
1343
1344 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1345 expanding variables. Those variables that can be put into registers
1346 are allocated pseudos; those that can't are put on the stack.
1347
1348 TOPLEVEL is true if this is the outermost BLOCK. */
1349
1350 static void
1351 expand_used_vars_for_block (tree block, bool toplevel)
1352 {
1353 tree t;
1354
1355 /* Expand all variables at this level. */
1356 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1357 if (TREE_USED (t)
1358 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1359 || !DECL_NONSHAREABLE (t)))
1360 expand_one_var (t, toplevel, true);
1361
1362 /* Expand all variables at contained levels. */
1363 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1364 expand_used_vars_for_block (t, false);
1365 }
1366
1367 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1368 and clear TREE_USED on all local variables. */
1369
1370 static void
1371 clear_tree_used (tree block)
1372 {
1373 tree t;
1374
1375 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1376 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1377 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1378 || !DECL_NONSHAREABLE (t))
1379 TREE_USED (t) = 0;
1380
1381 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1382 clear_tree_used (t);
1383 }
1384
1385 enum {
1386 SPCT_FLAG_DEFAULT = 1,
1387 SPCT_FLAG_ALL = 2,
1388 SPCT_FLAG_STRONG = 3,
1389 SPCT_FLAG_EXPLICIT = 4
1390 };
1391
1392 /* Examine TYPE and determine a bit mask of the following features. */
1393
1394 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1395 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1396 #define SPCT_HAS_ARRAY 4
1397 #define SPCT_HAS_AGGREGATE 8
1398
1399 static unsigned int
1400 stack_protect_classify_type (tree type)
1401 {
1402 unsigned int ret = 0;
1403 tree t;
1404
1405 switch (TREE_CODE (type))
1406 {
1407 case ARRAY_TYPE:
1408 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1409 if (t == char_type_node
1410 || t == signed_char_type_node
1411 || t == unsigned_char_type_node)
1412 {
1413 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1414 unsigned HOST_WIDE_INT len;
1415
1416 if (!TYPE_SIZE_UNIT (type)
1417 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1418 len = max;
1419 else
1420 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1421
1422 if (len < max)
1423 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1424 else
1425 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1426 }
1427 else
1428 ret = SPCT_HAS_ARRAY;
1429 break;
1430
1431 case UNION_TYPE:
1432 case QUAL_UNION_TYPE:
1433 case RECORD_TYPE:
1434 ret = SPCT_HAS_AGGREGATE;
1435 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1436 if (TREE_CODE (t) == FIELD_DECL)
1437 ret |= stack_protect_classify_type (TREE_TYPE (t));
1438 break;
1439
1440 default:
1441 break;
1442 }
1443
1444 return ret;
1445 }
1446
1447 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1448 part of the local stack frame. Remember if we ever return nonzero for
1449 any variable in this function. The return value is the phase number in
1450 which the variable should be allocated. */
1451
1452 static int
1453 stack_protect_decl_phase (tree decl)
1454 {
1455 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1456 int ret = 0;
1457
1458 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1459 has_short_buffer = true;
1460
1461 if (flag_stack_protect == SPCT_FLAG_ALL
1462 || flag_stack_protect == SPCT_FLAG_STRONG
1463 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1464 && lookup_attribute ("stack_protect",
1465 DECL_ATTRIBUTES (current_function_decl))))
1466 {
1467 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1468 && !(bits & SPCT_HAS_AGGREGATE))
1469 ret = 1;
1470 else if (bits & SPCT_HAS_ARRAY)
1471 ret = 2;
1472 }
1473 else
1474 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1475
1476 if (ret)
1477 has_protected_decls = true;
1478
1479 return ret;
1480 }
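/* For example, with the default ssp-buffer-size of 8, "char buf[8]"
   classifies as SPCT_HAS_LARGE_CHAR_ARRAY and gets phase 1 (nearest
   the guard); "char small[4]" gets phase 1 only under
   -fstack-protector-all or -fstack-protector-strong; and "int arr[4]"
   gets at most phase 2. */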
1481
1482 /* Two helper routines that check for phase 1 and phase 2. These are used
1483 as callbacks for expand_stack_vars. */
1484
1485 static bool
1486 stack_protect_decl_phase_1 (size_t i)
1487 {
1488 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1489 }
1490
1491 static bool
1492 stack_protect_decl_phase_2 (size_t i)
1493 {
1494 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1495 }
1496
1497 /* A helper function that checks for the asan phase (with stack protector
1498 it is phase 3). This is used as callback for expand_stack_vars.
1499 Returns true if any of the vars in the partition need to be protected. */
1500
1501 static bool
1502 asan_decl_phase_3 (size_t i)
1503 {
1504 while (i != EOC)
1505 {
1506 if (asan_protect_stack_decl (stack_vars[i].decl))
1507 return true;
1508 i = stack_vars[i].next;
1509 }
1510 return false;
1511 }
1512
1513 /* Ensure that variables in different stack protection phases conflict
1514 so that they are not merged and share the same stack slot. */
1515
1516 static void
1517 add_stack_protection_conflicts (void)
1518 {
1519 size_t i, j, n = stack_vars_num;
1520 unsigned char *phase;
1521
1522 phase = XNEWVEC (unsigned char, n);
1523 for (i = 0; i < n; ++i)
1524 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1525
1526 for (i = 0; i < n; ++i)
1527 {
1528 unsigned char ph_i = phase[i];
1529 for (j = i + 1; j < n; ++j)
1530 if (ph_i != phase[j])
1531 add_stack_var_conflict (i, j);
1532 }
1533
1534 XDELETEVEC (phase);
1535 }
1536
1537 /* Create a decl for the guard at the top of the stack frame. */
1538
1539 static void
1540 create_stack_guard (void)
1541 {
1542 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1543 VAR_DECL, NULL, ptr_type_node);
1544 TREE_THIS_VOLATILE (guard) = 1;
1545 TREE_USED (guard) = 1;
1546 expand_one_stack_var (guard);
1547 crtl->stack_protect_guard = guard;
1548 }
1549
1550 /* Prepare for expanding variables. */
1551 static void
1552 init_vars_expansion (void)
1553 {
1554 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1555 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1556
1557 /* A map from decl to stack partition. */
1558 decl_to_stack_part = new hash_map<tree, size_t>;
1559
1560 /* Initialize local stack smashing state. */
1561 has_protected_decls = false;
1562 has_short_buffer = false;
1563 }
1564
1565 /* Free up stack variable graph data. */
1566 static void
1567 fini_vars_expansion (void)
1568 {
1569 bitmap_obstack_release (&stack_var_bitmap_obstack);
1570 if (stack_vars)
1571 XDELETEVEC (stack_vars);
1572 if (stack_vars_sorted)
1573 XDELETEVEC (stack_vars_sorted);
1574 stack_vars = NULL;
1575 stack_vars_sorted = NULL;
1576 stack_vars_alloc = stack_vars_num = 0;
1577 delete decl_to_stack_part;
1578 decl_to_stack_part = NULL;
1579 }
1580
1581 /* Make a fair guess for the size of the stack frame of the function
1582 in NODE. This doesn't have to be exact; the result is only used in
1583 the inline heuristics. So we don't want to run the full stack var
1584 packing algorithm (which is quadratic in the number of stack vars).
1585 Instead, we calculate the total size of all stack vars. This turns
1586 out to be a pretty fair estimate -- packing of stack vars doesn't
1587 happen very often. */
1588
1589 HOST_WIDE_INT
1590 estimated_stack_frame_size (struct cgraph_node *node)
1591 {
1592 HOST_WIDE_INT size = 0;
1593 size_t i;
1594 tree var;
1595 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1596
1597 push_cfun (fn);
1598
1599 init_vars_expansion ();
1600
1601 FOR_EACH_LOCAL_DECL (fn, i, var)
1602 if (auto_var_in_fn_p (var, fn->decl))
1603 size += expand_one_var (var, true, false);
1604
1605 if (stack_vars_num > 0)
1606 {
1607 /* Fake sorting the stack vars for account_stack_vars (). */
1608 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1609 for (i = 0; i < stack_vars_num; ++i)
1610 stack_vars_sorted[i] = i;
1611 size += account_stack_vars ();
1612 }
1613
1614 fini_vars_expansion ();
1615 pop_cfun ();
1616 return size;
1617 }
1618
1619 /* Helper routine to check if a record or union contains an array field. */
1620
1621 static int
1622 record_or_union_type_has_array_p (const_tree tree_type)
1623 {
1624 tree fields = TYPE_FIELDS (tree_type);
1625 tree f;
1626
1627 for (f = fields; f; f = DECL_CHAIN (f))
1628 if (TREE_CODE (f) == FIELD_DECL)
1629 {
1630 tree field_type = TREE_TYPE (f);
1631 if (RECORD_OR_UNION_TYPE_P (field_type)
1632 && record_or_union_type_has_array_p (field_type))
1633 return 1;
1634 if (TREE_CODE (field_type) == ARRAY_TYPE)
1635 return 1;
1636 }
1637 return 0;
1638 }
1639
1640 /* Check if the current function has local referenced variables that
1641 have their addresses taken, contain an array, or are arrays. */
1642
1643 static bool
1644 stack_protect_decl_p ()
1645 {
1646 unsigned i;
1647 tree var;
1648
1649 FOR_EACH_LOCAL_DECL (cfun, i, var)
1650 if (!is_global_var (var))
1651 {
1652 tree var_type = TREE_TYPE (var);
1653 if (TREE_CODE (var) == VAR_DECL
1654 && (TREE_CODE (var_type) == ARRAY_TYPE
1655 || TREE_ADDRESSABLE (var)
1656 || (RECORD_OR_UNION_TYPE_P (var_type)
1657 && record_or_union_type_has_array_p (var_type))))
1658 return true;
1659 }
1660 return false;
1661 }
1662
1663 /* Check if the current function has calls that use a return slot. */
1664
1665 static bool
1666 stack_protect_return_slot_p ()
1667 {
1668 basic_block bb;
1669
1670 FOR_ALL_BB_FN (bb, cfun)
1671 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1672 !gsi_end_p (gsi); gsi_next (&gsi))
1673 {
1674 gimple stmt = gsi_stmt (gsi);
1675 /* This assumes that calls to internal-only functions never
1676 use a return slot. */
1677 if (is_gimple_call (stmt)
1678 && !gimple_call_internal_p (stmt)
1679 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
1680 gimple_call_fndecl (stmt)))
1681 return true;
1682 }
1683 return false;
1684 }
1685
1686 /* Expand all variables used in the function. */
1687
1688 static rtx_insn *
1689 expand_used_vars (void)
1690 {
1691 tree var, outer_block = DECL_INITIAL (current_function_decl);
1692 vec<tree> maybe_local_decls = vNULL;
1693 rtx_insn *var_end_seq = NULL;
1694 unsigned i;
1695 unsigned len;
1696 bool gen_stack_protect_signal = false;
1697
1698 /* Compute the phase of the stack frame for this function. */
1699 {
1700 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1701 int off = STARTING_FRAME_OFFSET % align;
1702 frame_phase = off ? align - off : 0;
1703 }
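/* For example, with PREFERRED_STACK_BOUNDARY == 128 (ALIGN == 16) and
   STARTING_FRAME_OFFSET == 8, OFF == 8 and frame_phase == 8, so
   (frame_offset + frame_phase) stays a multiple of 16, as required by
   the definition of frame_phase above. */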
1704
1705 /* Set TREE_USED on all variables in the local_decls. */
1706 FOR_EACH_LOCAL_DECL (cfun, i, var)
1707 TREE_USED (var) = 1;
1708 /* Clear TREE_USED on all variables associated with a block scope. */
1709 clear_tree_used (DECL_INITIAL (current_function_decl));
1710
1711 init_vars_expansion ();
1712
1713 if (targetm.use_pseudo_pic_reg ())
1714 pic_offset_table_rtx = gen_reg_rtx (Pmode);
1715
1716 hash_map<tree, tree> ssa_name_decls;
1717 for (i = 0; i < SA.map->num_partitions; i++)
1718 {
1719 tree var = partition_to_var (SA.map, i);
1720
1721 gcc_assert (!virtual_operand_p (var));
1722
1723 /* Assign decls to each SSA name partition, share decls for partitions
1724 we could have coalesced (those with the same type). */
1725 if (SSA_NAME_VAR (var) == NULL_TREE)
1726 {
1727 tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
1728 if (!*slot)
1729 *slot = create_tmp_reg (TREE_TYPE (var));
1730 replace_ssa_name_symbol (var, *slot);
1731 }
1732
1733 /* Always allocate space for partitions based on VAR_DECLs. But for
1734 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1735 debug info, there is no need to do so if optimization is disabled
1736 because all the SSA_NAMEs based on these DECLs have been coalesced
1737 into a single partition, which is thus assigned the canonical RTL
1738 location of the DECLs. If in_lto_p, we can't rely on optimize:
1739 a function could be compiled with -O1 -flto first and only the
1740 link performed at -O0. */
1741 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1742 expand_one_var (var, true, true);
1743 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
1744 {
1745 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1746 contain the default def (representing the parm or result itself)
1747 we don't do anything here. But those which don't contain the
1748 default def (representing a temporary based on the parm/result)
1749 we need to allocate space just like for normal VAR_DECLs. */
1750 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1751 {
1752 expand_one_var (var, true, true);
1753 gcc_assert (SA.partition_to_pseudo[i]);
1754 }
1755 }
1756 }
1757
1758 if (flag_stack_protect == SPCT_FLAG_STRONG)
1759 gen_stack_protect_signal
1760 = stack_protect_decl_p () || stack_protect_return_slot_p ();
1761
1762 /* At this point all variables on the local_decls with TREE_USED
1763 set are not associated with any block scope. Lay them out. */
1764
1765 len = vec_safe_length (cfun->local_decls);
1766 FOR_EACH_LOCAL_DECL (cfun, i, var)
1767 {
1768 bool expand_now = false;
1769
1770 /* Expanded above already. */
1771 if (is_gimple_reg (var))
1772 {
1773 TREE_USED (var) = 0;
1774 goto next;
1775 }
1776 /* We didn't set a block for static or extern because it's hard
1777 to tell the difference between a global variable (re)declared
1778 in a local scope, and one that's really declared there to
1779 begin with. And it doesn't really matter much, since we're
1780 not giving them stack space. Expand them now. */
1781 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1782 expand_now = true;
1783
1784 /* Expand variables not associated with any block now. Those created by
1785 the optimizers could be live anywhere in the function. Those that
1786 could possibly have been scoped originally and detached from their
1787 block will have their allocation deferred so we coalesce them with
1788 others when optimization is enabled. */
1789 else if (TREE_USED (var))
1790 expand_now = true;
1791
1792 /* Finally, mark all variables on the list as used. We'll use
1793 this in a moment when we expand those associated with scopes. */
1794 TREE_USED (var) = 1;
1795
1796 if (expand_now)
1797 expand_one_var (var, true, true);
1798
1799 next:
1800 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1801 {
1802 rtx rtl = DECL_RTL_IF_SET (var);
1803
1804 /* Keep artificial non-ignored vars in cfun->local_decls
1805 chain until instantiate_decls. */
1806 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1807 add_local_decl (cfun, var);
1808 else if (rtl == NULL_RTX)
1809 /* If rtl isn't set yet, which can happen e.g. with
1810 -fstack-protector, retry before returning from this
1811 function. */
1812 maybe_local_decls.safe_push (var);
1813 }
1814 }
1815
1816 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1817
1818 +-----------------+-----------------+
1819 | ...processed... | ...duplicates...|
1820 +-----------------+-----------------+
1821 ^
1822 +-- LEN points here.
1823
1824 We just want the duplicates, as those are the artificial
1825 non-ignored vars that we want to keep until instantiate_decls.
1826 Move them down and truncate the array. */
1827 if (!vec_safe_is_empty (cfun->local_decls))
1828 cfun->local_decls->block_remove (0, len);
1829
1830 /* At this point, all variables within the block tree with TREE_USED
1831 set are actually used by the optimized function. Lay them out. */
1832 expand_used_vars_for_block (outer_block, true);
1833
1834 if (stack_vars_num > 0)
1835 {
1836 add_scope_conflicts ();
1837
1838 /* If stack protection is enabled, we don't share space between
1839 vulnerable data and non-vulnerable data. */
1840 if (flag_stack_protect != 0
1841 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
1842 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1843 && lookup_attribute ("stack_protect",
1844 DECL_ATTRIBUTES (current_function_decl)))))
1845 add_stack_protection_conflicts ();
1846
1847 /* Now that we have collected all stack variables, and have computed a
1848 minimal interference graph, attempt to save some stack space. */
1849 partition_stack_vars ();
1850 if (dump_file)
1851 dump_stack_var_partition ();
1852 }
1853
1854 switch (flag_stack_protect)
1855 {
1856 case SPCT_FLAG_ALL:
1857 create_stack_guard ();
1858 break;
1859
1860 case SPCT_FLAG_STRONG:
1861 if (gen_stack_protect_signal
1862 || cfun->calls_alloca || has_protected_decls
1863 || lookup_attribute ("stack_protect",
1864 DECL_ATTRIBUTES (current_function_decl)))
1865 create_stack_guard ();
1866 break;
1867
1868 case SPCT_FLAG_DEFAULT:
1869 if (cfun->calls_alloca || has_protected_decls
1870 || lookup_attribute ("stack_protect",
1871 DECL_ATTRIBUTES (current_function_decl)))
1872 create_stack_guard ();
1873 break;
1874
1875 case SPCT_FLAG_EXPLICIT:
1876 if (lookup_attribute ("stack_protect",
1877 DECL_ATTRIBUTES (current_function_decl)))
1878 create_stack_guard ();
1879 break;
1880 default:
1881 ;
1882 }
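/* For instance, under -fstack-protector-strong (SPCT_FLAG_STRONG) an
   illustrative function such as

       void f (void) { char buf[64]; g (buf); }

   gets a guard because the local array BUF makes stack_protect_decl_p
   return true, whereas a function with only scalar locals and no
   alloca calls does not. */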
1883
1884 /* Assign rtl to each variable based on these partitions. */
1885 if (stack_vars_num > 0)
1886 {
1887 struct stack_vars_data data;
1888
1889 data.asan_vec = vNULL;
1890 data.asan_decl_vec = vNULL;
1891 data.asan_base = NULL_RTX;
1892 data.asan_alignb = 0;
1893
1894 /* Reorder decls to be protected by iterating over the variables
1895 array multiple times, and allocating out of each phase in turn. */
1896 /* ??? We could probably integrate this into the qsort we did
1897 earlier, such that we naturally see these variables first,
1898 and thus naturally allocate things in the right order. */
1899 if (has_protected_decls)
1900 {
1901 /* Phase 1 contains only character arrays. */
1902 expand_stack_vars (stack_protect_decl_phase_1, &data);
1903
1904 /* Phase 2 contains other kinds of arrays. */
1905 if (flag_stack_protect == SPCT_FLAG_ALL
1906 || flag_stack_protect == SPCT_FLAG_STRONG
1907 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1908 && lookup_attribute ("stack_protect",
1909 DECL_ATTRIBUTES (current_function_decl))))
1910 expand_stack_vars (stack_protect_decl_phase_2, &data);
1911 }
1912
1913 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
1914 /* Phase 3, any partitions that need asan protection
1915 in addition to phases 1 and 2. */
1916 expand_stack_vars (asan_decl_phase_3, &data);
1917
1918 if (!data.asan_vec.is_empty ())
1919 {
1920 HOST_WIDE_INT prev_offset = frame_offset;
1921 HOST_WIDE_INT offset, sz, redzonesz;
1922 redzonesz = ASAN_RED_ZONE_SIZE;
1923 sz = data.asan_vec[0] - prev_offset;
1924 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1925 && data.asan_alignb <= 4096
1926 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
1927 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1928 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
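/* Worked example, assuming ASAN_RED_ZONE_SIZE is 32: with sz = 40 and
   data.asan_alignb = 64 the computation above yields
   redzonesz = ((40 + 32 + 63) & ~63) - 40 = 128 - 40 = 88,
   so sz + redzonesz = 128 is a multiple of the 64-byte alignment. */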
1929 offset
1930 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1931 data.asan_vec.safe_push (prev_offset);
1932 data.asan_vec.safe_push (offset);
1933 /* Leave space for alignment if STRICT_ALIGNMENT. */
1934 if (STRICT_ALIGNMENT)
1935 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1936 << ASAN_SHADOW_SHIFT)
1937 / BITS_PER_UNIT, 1);
1938
1939 var_end_seq
1940 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1941 data.asan_base,
1942 data.asan_alignb,
1943 data.asan_vec.address (),
1944 data.asan_decl_vec.address (),
1945 data.asan_vec.length ());
1946 }
1947
1948 expand_stack_vars (NULL, &data);
1949
1950 data.asan_vec.release ();
1951 data.asan_decl_vec.release ();
1952 }
1953
1954 fini_vars_expansion ();
1955
1956 /* If there were any artificial non-ignored vars without rtl
1957 found earlier, see if deferred stack allocation hasn't assigned
1958 rtl to them. */
1959 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1960 {
1961 rtx rtl = DECL_RTL_IF_SET (var);
1962
1963 /* Keep artificial non-ignored vars in cfun->local_decls
1964 chain until instantiate_decls. */
1965 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1966 add_local_decl (cfun, var);
1967 }
1968 maybe_local_decls.release ();
1969
1970 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1971 if (STACK_ALIGNMENT_NEEDED)
1972 {
1973 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1974 if (!FRAME_GROWS_DOWNWARD)
1975 frame_offset += align - 1;
1976 frame_offset &= -align;
1977 }
1978
1979 return var_end_seq;
1980 }
1981
1982
1983 /* If we need to produce a detailed dump, print the tree representation
1984 for STMT to the dump file. SINCE is the last RTX after which the RTL
1985 generated for STMT should have been appended. */
1986
1987 static void
1988 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
1989 {
1990 if (dump_file && (dump_flags & TDF_DETAILS))
1991 {
1992 fprintf (dump_file, "\n;; ");
1993 print_gimple_stmt (dump_file, stmt, 0,
1994 TDF_SLIM | (dump_flags & TDF_LINENO));
1995 fprintf (dump_file, "\n");
1996
1997 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1998 }
1999 }
2000
2001 /* Maps the blocks that do not contain tree labels to rtx labels. */
2002
2003 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2004
2005 /* Returns the label_rtx expression for a label starting basic block BB. */
2006
2007 static rtx_code_label *
2008 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2009 {
2010 gimple_stmt_iterator gsi;
2011 tree lab;
2012
2013 if (bb->flags & BB_RTL)
2014 return block_label (bb);
2015
2016 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2017 if (elt)
2018 return *elt;
2019
2020 /* Find the tree label if it is present. */
2021
2022 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2023 {
2024 glabel *lab_stmt;
2025
2026 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2027 if (!lab_stmt)
2028 break;
2029
2030 lab = gimple_label_label (lab_stmt);
2031 if (DECL_NONLOCAL (lab))
2032 break;
2033
2034 return jump_target_rtx (lab);
2035 }
2036
2037 rtx_code_label *l = gen_label_rtx ();
2038 lab_rtx_for_bb->put (bb, l);
2039 return l;
2040 }
2041
2042
2043 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2044 of a basic block where we just expanded the conditional at the end,
2045 possibly clean up the CFG and instruction sequence. LAST is the
2046 last instruction before the just emitted jump sequence. */
2047
2048 static void
2049 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2050 {
2051 /* Special case: when jumpif decides that the condition is
2052 trivial it emits an unconditional jump (and the necessary
2053 barrier). But we still have two edges, the fallthru one is
2054 wrong. purge_dead_edges would clean this up later. Unfortunately
2055 we have to insert insns (and split edges) before
2056 find_many_sub_basic_blocks and hence before purge_dead_edges.
2057 But splitting edges might create new blocks which depend on the
2058 fact that if there are two edges there's no barrier. So the
2059 barrier would get lost and verify_flow_info would ICE. Instead
2060 of auditing all edge splitters to care for the barrier (which
2061 normally isn't there in a cleaned CFG), fix it here. */
2062 if (BARRIER_P (get_last_insn ()))
2063 {
2064 rtx_insn *insn;
2065 remove_edge (e);
2066 /* Now we have a single successor block. If we have insns to
2067 insert on the remaining edge, we potentially will insert
2068 them at the end of this block (if the dest block isn't feasible)
2069 in order to avoid splitting the edge. This insertion will take
2070 place in front of the last jump. But we might have emitted
2071 multiple jumps (conditional and one unconditional) to the
2072 same destination. Inserting in front of the last one then
2073 is a problem. See PR 40021. We fix this by deleting all
2074 jumps except the last unconditional one. */
2075 insn = PREV_INSN (get_last_insn ());
2076 /* Make sure we have an unconditional jump. Otherwise we're
2077 confused. */
2078 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2079 for (insn = PREV_INSN (insn); insn != last;)
2080 {
2081 insn = PREV_INSN (insn);
2082 if (JUMP_P (NEXT_INSN (insn)))
2083 {
2084 if (!any_condjump_p (NEXT_INSN (insn)))
2085 {
2086 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2087 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2088 }
2089 delete_insn (NEXT_INSN (insn));
2090 }
2091 }
2092 }
2093 }
2094
2095 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2096 Returns a new basic block if we've terminated the current basic
2097 block and created a new one. */
2098
2099 static basic_block
2100 expand_gimple_cond (basic_block bb, gcond *stmt)
2101 {
2102 basic_block new_bb, dest;
2103 edge new_edge;
2104 edge true_edge;
2105 edge false_edge;
2106 rtx_insn *last2, *last;
2107 enum tree_code code;
2108 tree op0, op1;
2109
2110 code = gimple_cond_code (stmt);
2111 op0 = gimple_cond_lhs (stmt);
2112 op1 = gimple_cond_rhs (stmt);
2113 /* We're sometimes presented with such code:
2114 D.123_1 = x < y;
2115 if (D.123_1 != 0)
2116 ...
2117 This would expand to two comparisons which then later might
2118 be cleaned up by combine. But some pattern matchers like if-conversion
2119 work better when there's only one compare, so make up for this
2120 here as a special exception if TER would have made the same change. */
2121 if (SA.values
2122 && TREE_CODE (op0) == SSA_NAME
2123 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2124 && TREE_CODE (op1) == INTEGER_CST
2125 && ((gimple_cond_code (stmt) == NE_EXPR
2126 && integer_zerop (op1))
2127 || (gimple_cond_code (stmt) == EQ_EXPR
2128 && integer_onep (op1)))
2129 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2130 {
2131 gimple second = SSA_NAME_DEF_STMT (op0);
2132 if (gimple_code (second) == GIMPLE_ASSIGN)
2133 {
2134 enum tree_code code2 = gimple_assign_rhs_code (second);
2135 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2136 {
2137 code = code2;
2138 op0 = gimple_assign_rhs1 (second);
2139 op1 = gimple_assign_rhs2 (second);
2140 }
2141 /* If jumps are cheap and the target does not support conditional
2142 compare, turn some more codes into jumpy sequences. */
2143 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2144 && targetm.gen_ccmp_first == NULL)
2145 {
2146 if ((code2 == BIT_AND_EXPR
2147 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2148 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2149 || code2 == TRUTH_AND_EXPR)
2150 {
2151 code = TRUTH_ANDIF_EXPR;
2152 op0 = gimple_assign_rhs1 (second);
2153 op1 = gimple_assign_rhs2 (second);
2154 }
2155 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2156 {
2157 code = TRUTH_ORIF_EXPR;
2158 op0 = gimple_assign_rhs1 (second);
2159 op1 = gimple_assign_rhs2 (second);
2160 }
2161 }
2162 }
2163 }
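/* For example, with _Bool operands a and b, a sequence like

       _1 = a & b;
       if (_1 != 0) goto <then>; else goto <else>;

   is treated as TRUTH_ANDIF_EXPR here and expands to two cheap
   conditional jumps instead of an AND followed by a compare and
   branch, assuming a low BRANCH_COST and no conditional-compare
   support in the target. */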
2164
2165 last2 = last = get_last_insn ();
2166
2167 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2168 set_curr_insn_location (gimple_location (stmt));
2169
2170 /* These flags have no purpose in RTL land. */
2171 true_edge->flags &= ~EDGE_TRUE_VALUE;
2172 false_edge->flags &= ~EDGE_FALSE_VALUE;
2173
2174 /* We can either have a pure conditional jump with one fallthru edge or a
2175 two-way jump that needs to be decomposed into two basic blocks. */
2176 if (false_edge->dest == bb->next_bb)
2177 {
2178 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2179 true_edge->probability);
2180 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2181 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2182 set_curr_insn_location (true_edge->goto_locus);
2183 false_edge->flags |= EDGE_FALLTHRU;
2184 maybe_cleanup_end_of_block (false_edge, last);
2185 return NULL;
2186 }
2187 if (true_edge->dest == bb->next_bb)
2188 {
2189 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2190 false_edge->probability);
2191 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2192 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2193 set_curr_insn_location (false_edge->goto_locus);
2194 true_edge->flags |= EDGE_FALLTHRU;
2195 maybe_cleanup_end_of_block (true_edge, last);
2196 return NULL;
2197 }
2198
2199 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2200 true_edge->probability);
2201 last = get_last_insn ();
2202 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2203 set_curr_insn_location (false_edge->goto_locus);
2204 emit_jump (label_rtx_for_bb (false_edge->dest));
2205
2206 BB_END (bb) = last;
2207 if (BARRIER_P (BB_END (bb)))
2208 BB_END (bb) = PREV_INSN (BB_END (bb));
2209 update_bb_for_insn (bb);
2210
2211 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2212 dest = false_edge->dest;
2213 redirect_edge_succ (false_edge, new_bb);
2214 false_edge->flags |= EDGE_FALLTHRU;
2215 new_bb->count = false_edge->count;
2216 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2217 add_bb_to_loop (new_bb, bb->loop_father);
2218 new_edge = make_edge (new_bb, dest, 0);
2219 new_edge->probability = REG_BR_PROB_BASE;
2220 new_edge->count = new_bb->count;
2221 if (BARRIER_P (BB_END (new_bb)))
2222 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2223 update_bb_for_insn (new_bb);
2224
2225 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2226
2227 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2228 {
2229 set_curr_insn_location (true_edge->goto_locus);
2230 true_edge->goto_locus = curr_insn_location ();
2231 }
2232
2233 return new_bb;
2234 }
2235
2236 /* Mark all calls that can have a transaction restart. */
2237
2238 static void
2239 mark_transaction_restart_calls (gimple stmt)
2240 {
2241 struct tm_restart_node dummy;
2242 tm_restart_node **slot;
2243
2244 if (!cfun->gimple_df->tm_restart)
2245 return;
2246
2247 dummy.stmt = stmt;
2248 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2249 if (slot)
2250 {
2251 struct tm_restart_node *n = *slot;
2252 tree list = n->label_or_list;
2253 rtx_insn *insn;
2254
2255 for (insn = next_real_insn (get_last_insn ());
2256 !CALL_P (insn);
2257 insn = next_real_insn (insn))
2258 continue;
2259
2260 if (TREE_CODE (list) == LABEL_DECL)
2261 add_reg_note (insn, REG_TM, label_rtx (list));
2262 else
2263 for (; list ; list = TREE_CHAIN (list))
2264 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2265 }
2266 }
2267
2268 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2269 statement STMT. */
2270
2271 static void
2272 expand_call_stmt (gcall *stmt)
2273 {
2274 tree exp, decl, lhs;
2275 bool builtin_p;
2276 size_t i;
2277
2278 if (gimple_call_internal_p (stmt))
2279 {
2280 expand_internal_call (stmt);
2281 return;
2282 }
2283
2284 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2285
2286 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2287 decl = gimple_call_fndecl (stmt);
2288 builtin_p = decl && DECL_BUILT_IN (decl);
2289
2290 /* If this is not a builtin function, the function type through which the
2291 call is made may be different from the type of the function. */
2292 if (!builtin_p)
2293 CALL_EXPR_FN (exp)
2294 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2295 CALL_EXPR_FN (exp));
2296
2297 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2298 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2299
2300 for (i = 0; i < gimple_call_num_args (stmt); i++)
2301 {
2302 tree arg = gimple_call_arg (stmt, i);
2303 gimple def;
2304 /* TER substitutes addresses into arguments of builtin functions so we
2305 have a chance to infer more correct alignment information. See PR39954. */
2306 if (builtin_p
2307 && TREE_CODE (arg) == SSA_NAME
2308 && (def = get_gimple_for_ssa_name (arg))
2309 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2310 arg = gimple_assign_rhs1 (def);
2311 CALL_EXPR_ARG (exp, i) = arg;
2312 }
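/* E.g. for an illustrative fragment like

       _1 = &s.buf;
       __builtin_memcpy (_1, src, 32);

   the argument is rewritten back to &s.buf above, so the expander
   can derive the alignment of S (see PR39954). */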
2313
2314 if (gimple_has_side_effects (stmt))
2315 TREE_SIDE_EFFECTS (exp) = 1;
2316
2317 if (gimple_call_nothrow_p (stmt))
2318 TREE_NOTHROW (exp) = 1;
2319
2320 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2321 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2322 if (decl
2323 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2324 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2325 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2326 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2327 else
2328 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2329 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2330 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2331 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2332
2333 /* Ensure RTL is created for debug args. */
2334 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2335 {
2336 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2337 unsigned int ix;
2338 tree dtemp;
2339
2340 if (debug_args)
2341 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2342 {
2343 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2344 expand_debug_expr (dtemp);
2345 }
2346 }
2347
2348 lhs = gimple_call_lhs (stmt);
2349 if (lhs)
2350 expand_assignment (lhs, exp, false);
2351 else
2352 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2353
2354 mark_transaction_restart_calls (stmt);
2355 }
2356
2357
2358 /* Generate RTL for an asm statement (explicit assembler code).
2359 STRING is a STRING_CST node containing the assembler code text,
2360 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2361 insn is volatile; don't optimize it. */
2362
2363 static void
2364 expand_asm_loc (tree string, int vol, location_t locus)
2365 {
2366 rtx body;
2367
2368 if (TREE_CODE (string) == ADDR_EXPR)
2369 string = TREE_OPERAND (string, 0);
2370
2371 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2372 ggc_strdup (TREE_STRING_POINTER (string)),
2373 locus);
2374
2375 MEM_VOLATILE_P (body) = vol;
2376
2377 emit_insn (body);
2378 }
2379
2380 /* Return the number of times character C occurs in string S. */
2381 static int
2382 n_occurrences (int c, const char *s)
2383 {
2384 int n = 0;
2385 while (*s)
2386 n += (*s++ == c);
2387 return n;
2388 }
2389
2390 /* A subroutine of expand_asm_operands. Check that all operands have
2391 the same number of alternatives. Return true if so. */
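/* E.g. the illustrative two-alternative statement

       asm ("..." : "=r,m" (x) : "r,r" (y));

   passes this check, while pairing "=r,m" with a single-alternative
   "r" input triggers the error below. */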
2392
2393 static bool
2394 check_operand_nalternatives (const vec<const char *> &constraints)
2395 {
2396 unsigned len = constraints.length();
2397 if (len > 0)
2398 {
2399 int nalternatives = n_occurrences (',', constraints[0]);
2400
2401 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2402 {
2403 error ("too many alternatives in %<asm%>");
2404 return false;
2405 }
2406
2407 for (unsigned i = 1; i < len; ++i)
2408 if (n_occurrences (',', constraints[i]) != nalternatives)
2409 {
2410 error ("operand constraints for %<asm%> differ "
2411 "in number of alternatives");
2412 return false;
2413 }
2414 }
2415 return true;
2416 }
2417
2418 /* Check for overlap between registers marked in CLOBBERED_REGS and
2419 anything inappropriate in T. Emit an error, reset DECL_REGISTER on the
2420 conflicting variable and return true for a conflict, false for ok. */
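/* For instance (illustrative, i386 register names):

       register int x asm ("eax");
       asm ("..." : "=r" (x) : : "eax");

   is diagnosed here because the register backing X also appears in
   the clobber list. */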
2421
2422 static bool
2423 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2424 {
2425 /* Conflicts between asm-declared register variables and the clobber
2426 list are not allowed. */
2427 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2428
2429 if (overlap)
2430 {
2431 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2432 DECL_NAME (overlap));
2433
2434 /* Reset registerness to stop multiple errors emitted for a single
2435 variable. */
2436 DECL_REGISTER (overlap) = 0;
2437 return true;
2438 }
2439
2440 return false;
2441 }
2442
2443 /* Generate RTL for an asm statement with arguments.
2444 STRING is the instruction template.
2445 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2446 Each output or input has an expression in the TREE_VALUE and
2447 a tree list in TREE_PURPOSE which in turn contains a constraint
2448 name in TREE_VALUE (or NULL_TREE) and a constraint string
2449 in TREE_PURPOSE.
2450 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2451 that is clobbered by this insn.
2452
2453 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2454 should be the fallthru basic block of the asm goto.
2455
2456 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2457 Some elements of OUTPUTS may be replaced with trees representing temporary
2458 values. The caller should copy those temporary values to the originally
2459 specified lvalues.
2460
2461 VOL nonzero means the insn is volatile; don't optimize it. */
2462
2463 static void
2464 expand_asm_stmt (gasm *stmt)
2465 {
2466 class save_input_location
2467 {
2468 location_t old;
2469
2470 public:
2471 explicit save_input_location(location_t where)
2472 {
2473 old = input_location;
2474 input_location = where;
2475 }
2476
2477 ~save_input_location()
2478 {
2479 input_location = old;
2480 }
2481 };
2482
2483 location_t locus = gimple_location (stmt);
2484
2485 if (gimple_asm_input_p (stmt))
2486 {
2487 const char *s = gimple_asm_string (stmt);
2488 tree string = build_string (strlen (s), s);
2489 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2490 return;
2491 }
2492
2493 /* There are some legacy diagnostics in here, and this also avoids a
2494 sixth parameter to targetm.md_asm_adjust. */
2495 save_input_location s_i_l(locus);
2496
2497 unsigned noutputs = gimple_asm_noutputs (stmt);
2498 unsigned ninputs = gimple_asm_ninputs (stmt);
2499 unsigned nlabels = gimple_asm_nlabels (stmt);
2500 unsigned i;
2501
2502 /* ??? Diagnose during gimplification? */
2503 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2504 {
2505 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2506 return;
2507 }
2508
2509 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2510 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2511 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2512
2513 /* Copy the gimple vectors into new vectors that we can manipulate. */
2514
2515 output_tvec.safe_grow (noutputs);
2516 input_tvec.safe_grow (ninputs);
2517 constraints.safe_grow (noutputs + ninputs);
2518
2519 for (i = 0; i < noutputs; ++i)
2520 {
2521 tree t = gimple_asm_output_op (stmt, i);
2522 output_tvec[i] = TREE_VALUE (t);
2523 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2524 }
2525 for (i = 0; i < ninputs; i++)
2526 {
2527 tree t = gimple_asm_input_op (stmt, i);
2528 input_tvec[i] = TREE_VALUE (t);
2529 constraints[i + noutputs]
2530 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2531 }
2532
2533 /* ??? Diagnose during gimplification? */
2534 if (! check_operand_nalternatives (constraints))
2535 return;
2536
2537 /* Count the number of meaningful clobbered registers, ignoring what
2538 we would ignore later. */
2539 auto_vec<rtx> clobber_rvec;
2540 HARD_REG_SET clobbered_regs;
2541 CLEAR_HARD_REG_SET (clobbered_regs);
2542
2543 if (unsigned n = gimple_asm_nclobbers (stmt))
2544 {
2545 clobber_rvec.reserve (n);
2546 for (i = 0; i < n; i++)
2547 {
2548 tree t = gimple_asm_clobber_op (stmt, i);
2549 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2550 int nregs, j;
2551
2552 j = decode_reg_name_and_count (regname, &nregs);
2553 if (j < 0)
2554 {
2555 if (j == -2)
2556 {
2557 /* ??? Diagnose during gimplification? */
2558 error ("unknown register name %qs in %<asm%>", regname);
2559 }
2560 else if (j == -4)
2561 {
2562 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2563 clobber_rvec.safe_push (x);
2564 }
2565 else
2566 {
2567 /* Otherwise we should have -1 == empty string
2568 or -3 == cc, which is not a register. */
2569 gcc_assert (j == -1 || j == -3);
2570 }
2571 }
2572 else
2573 for (int reg = j; reg < j + nregs; reg++)
2574 {
2575 /* Clobbering the PIC register is an error. */
2576 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2577 {
2578 /* ??? Diagnose during gimplification? */
2579 error ("PIC register clobbered by %qs in %<asm%>",
2580 regname);
2581 return;
2582 }
2583
2584 SET_HARD_REG_BIT (clobbered_regs, reg);
2585 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2586 clobber_rvec.safe_push (x);
2587 }
2588 }
2589 }
2590 unsigned nclobbers = clobber_rvec.length();
2591
2592 /* First pass over inputs and outputs checks validity and sets
2593 mark_addressable if needed. */
2594 /* ??? Diagnose during gimplification? */
2595
2596 for (i = 0; i < noutputs; ++i)
2597 {
2598 tree val = output_tvec[i];
2599 tree type = TREE_TYPE (val);
2600 const char *constraint;
2601 bool is_inout;
2602 bool allows_reg;
2603 bool allows_mem;
2604
2605 /* Try to parse the output constraint. If that fails, there's
2606 no point in going further. */
2607 constraint = constraints[i];
2608 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2609 &allows_mem, &allows_reg, &is_inout))
2610 return;
2611
2612 if (! allows_reg
2613 && (allows_mem
2614 || is_inout
2615 || (DECL_P (val)
2616 && REG_P (DECL_RTL (val))
2617 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2618 mark_addressable (val);
2619 }
2620
2621 for (i = 0; i < ninputs; ++i)
2622 {
2623 bool allows_reg, allows_mem;
2624 const char *constraint;
2625
2626 constraint = constraints[i + noutputs];
2627 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2628 constraints.address (),
2629 &allows_mem, &allows_reg))
2630 return;
2631
2632 if (! allows_reg && allows_mem)
2633 mark_addressable (input_tvec[i]);
2634 }
2635
2636 /* Second pass evaluates arguments. */
2637
2638 /* Make sure stack is consistent for asm goto. */
2639 if (nlabels > 0)
2640 do_pending_stack_adjust ();
2641 int old_generating_concat_p = generating_concat_p;
2642
2643 /* Vector of RTX's of evaluated output operands. */
2644 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
2645 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
2646 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
2647
2648 output_rvec.safe_grow (noutputs);
2649
2650 for (i = 0; i < noutputs; ++i)
2651 {
2652 tree val = output_tvec[i];
2653 tree type = TREE_TYPE (val);
2654 bool is_inout, allows_reg, allows_mem, ok;
2655 rtx op;
2656
2657 ok = parse_output_constraint (&constraints[i], i, ninputs,
2658 noutputs, &allows_mem, &allows_reg,
2659 &is_inout);
2660 gcc_assert (ok);
2661
2662 /* If an output operand is not a decl or indirect ref and our constraint
2663 allows a register, make a temporary to act as an intermediate.
2664 Make the asm insn write into that, then we will copy it to
2665 the real output operand. Likewise for promoted variables. */
2666
2667 generating_concat_p = 0;
2668
2669 if ((TREE_CODE (val) == INDIRECT_REF
2670 && allows_mem)
2671 || (DECL_P (val)
2672 && (allows_mem || REG_P (DECL_RTL (val)))
2673 && ! (REG_P (DECL_RTL (val))
2674 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2675 || ! allows_reg
2676 || is_inout)
2677 {
2678 op = expand_expr (val, NULL_RTX, VOIDmode,
2679 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2680 if (MEM_P (op))
2681 op = validize_mem (op);
2682
2683 if (! allows_reg && !MEM_P (op))
2684 error ("output number %d not directly addressable", i);
2685 if ((! allows_mem && MEM_P (op))
2686 || GET_CODE (op) == CONCAT)
2687 {
2688 rtx old_op = op;
2689 op = gen_reg_rtx (GET_MODE (op));
2690
2691 generating_concat_p = old_generating_concat_p;
2692
2693 if (is_inout)
2694 emit_move_insn (op, old_op);
2695
2696 push_to_sequence2 (after_rtl_seq, after_rtl_end);
2697 emit_move_insn (old_op, op);
2698 after_rtl_seq = get_insns ();
2699 after_rtl_end = get_last_insn ();
2700 end_sequence ();
2701 }
2702 }
2703 else
2704 {
2705 op = assign_temp (type, 0, 1);
2706 op = validize_mem (op);
2707 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
2708 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
2709
2710 generating_concat_p = old_generating_concat_p;
2711
2712 push_to_sequence2 (after_rtl_seq, after_rtl_end);
2713 expand_assignment (val, make_tree (type, op), false);
2714 after_rtl_seq = get_insns ();
2715 after_rtl_end = get_last_insn ();
2716 end_sequence ();
2717 }
2718 output_rvec[i] = op;
2719
2720 if (is_inout)
2721 inout_opnum.safe_push (i);
2722 }
2723
2724 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
2725 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
2726
2727 input_rvec.safe_grow (ninputs);
2728 input_mode.safe_grow (ninputs);
2729
2730 generating_concat_p = 0;
2731
2732 for (i = 0; i < ninputs; ++i)
2733 {
2734 tree val = input_tvec[i];
2735 tree type = TREE_TYPE (val);
2736 bool allows_reg, allows_mem, ok;
2737 const char *constraint;
2738 rtx op;
2739
2740 constraint = constraints[i + noutputs];
2741 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2742 constraints.address (),
2743 &allows_mem, &allows_reg);
2744 gcc_assert (ok);
2745
2746 /* EXPAND_INITIALIZER will not generate code for valid initializer
2747 constants, but will still generate code for other types of operand.
2748 This is the behavior we want for constant constraints. */
2749 op = expand_expr (val, NULL_RTX, VOIDmode,
2750 allows_reg ? EXPAND_NORMAL
2751 : allows_mem ? EXPAND_MEMORY
2752 : EXPAND_INITIALIZER);
2753
2754 /* Never pass a CONCAT to an ASM. */
2755 if (GET_CODE (op) == CONCAT)
2756 op = force_reg (GET_MODE (op), op);
2757 else if (MEM_P (op))
2758 op = validize_mem (op);
2759
2760 if (asm_operand_ok (op, constraint, NULL) <= 0)
2761 {
2762 if (allows_reg && TYPE_MODE (type) != BLKmode)
2763 op = force_reg (TYPE_MODE (type), op);
2764 else if (!allows_mem)
2765 warning (0, "asm operand %d probably doesn%'t match constraints",
2766 i + noutputs);
2767 else if (MEM_P (op))
2768 {
2769 /* We won't recognize either volatile memory or memory
2770 with a queued address as a valid memory_operand
2771 at this point. Ignore it: clearly this *is* a memory. */
2772 }
2773 else
2774 gcc_unreachable ();
2775 }
2776 input_rvec[i] = op;
2777 input_mode[i] = TYPE_MODE (type);
2778 }
2779
2780 /* For in-out operands, copy output rtx to input rtx. */
2781 unsigned ninout = inout_opnum.length();
2782 for (i = 0; i < ninout; i++)
2783 {
2784 int j = inout_opnum[i];
2785 rtx o = output_rvec[j];
2786
2787 input_rvec.safe_push (o);
2788 input_mode.safe_push (GET_MODE (o));
2789
2790 char buffer[16];
2791 sprintf (buffer, "%d", j);
2792 constraints.safe_push (ggc_strdup (buffer));
2793 }
2794 ninputs += ninout;
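/* E.g. an in-out operand "+r" (x) still represented here as output 0
   gains an extra input whose constraint is the digit string "0",
   tying that input to the output's location. */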
2795
2796 /* Sometimes we wish to automatically clobber registers across an asm.
2797 Case in point is when the i386 backend moved from cc0 to a hard reg --
2798 maintaining source-level compatibility means automatically clobbering
2799 the flags register. */
2800 rtx_insn *after_md_seq = NULL;
2801 if (targetm.md_asm_adjust)
2802 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
2803 constraints, clobber_rvec,
2804 clobbered_regs);
2805
2806 /* Do not allow the hook to change the output and input count,
2807 lest it mess up the operand numbering. */
2808 gcc_assert (output_rvec.length() == noutputs);
2809 gcc_assert (input_rvec.length() == ninputs);
2810 gcc_assert (constraints.length() == noutputs + ninputs);
2811
2812 /* But it certainly can adjust the clobbers. */
2813 nclobbers = clobber_rvec.length();
2814
2815 /* Third pass checks for easy conflicts. */
2816 /* ??? Why are we doing this on trees instead of rtx? */
2817
2818 bool clobber_conflict_found = 0;
2819 for (i = 0; i < noutputs; ++i)
2820 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
2821 clobber_conflict_found = 1;
2822 for (i = 0; i < ninputs - ninout; ++i)
2823 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
2824 clobber_conflict_found = 1;
2825
2826 /* Make vectors for the expression-rtx, constraint strings,
2827 and named operands. */
2828
2829 rtvec argvec = rtvec_alloc (ninputs);
2830 rtvec constraintvec = rtvec_alloc (ninputs);
2831 rtvec labelvec = rtvec_alloc (nlabels);
2832
2833 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2834 : GET_MODE (output_rvec[0])),
2835 ggc_strdup (gimple_asm_string (stmt)),
2836 empty_string, 0, argvec, constraintvec,
2837 labelvec, locus);
2838 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
2839
2840 for (i = 0; i < ninputs; ++i)
2841 {
2842 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
2843 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2844 = gen_rtx_ASM_INPUT_loc (input_mode[i],
2845 constraints[i + noutputs],
2846 locus);
2847 }
2848
2849 /* Copy labels to the vector. */
2850 rtx_code_label *fallthru_label = NULL;
2851 if (nlabels > 0)
2852 {
2853 basic_block fallthru_bb = NULL;
2854 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2855 if (fallthru)
2856 fallthru_bb = fallthru->dest;
2857
2858 for (i = 0; i < nlabels; ++i)
2859 {
2860 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
2861 rtx_insn *r;
2862 /* If asm goto has any labels in the fallthru basic block, use
2863 a label that we emit immediately after the asm goto. Expansion
2864 may insert further instructions into the same basic block after
2865 asm goto and if we don't do this, insertion of instructions on
2866 the fallthru edge might misbehave. See PR58670. */
2867 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
2868 {
2869 if (fallthru_label == NULL_RTX)
2870 fallthru_label = gen_label_rtx ();
2871 r = fallthru_label;
2872 }
2873 else
2874 r = label_rtx (label);
2875 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2876 }
2877 }
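/* E.g. for the illustrative

       asm goto ("..." : : : : out);
     out:;

   where OUT starts the fallthru block, the LABEL_REF points at
   FALLTHRU_LABEL, which is emitted right after the asm insn below. */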
2878
2879 /* Now, for each output, construct an rtx
2880 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2881 ARGVEC CONSTRAINTS OPNAMES))
2882 If there is more than one, put them inside a PARALLEL. */
2883
2884 if (nlabels > 0 && nclobbers == 0)
2885 {
2886 gcc_assert (noutputs == 0);
2887 emit_jump_insn (body);
2888 }
2889 else if (noutputs == 0 && nclobbers == 0)
2890 {
2891 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2892 emit_insn (body);
2893 }
2894 else if (noutputs == 1 && nclobbers == 0)
2895 {
2896 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
2897 emit_insn (gen_rtx_SET (output_rvec[0], body));
2898 }
2899 else
2900 {
2901 rtx obody = body;
2902 int num = noutputs;
2903
2904 if (num == 0)
2905 num = 1;
2906
2907 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2908
2909 /* For each output operand, store a SET. */
2910 for (i = 0; i < noutputs; ++i)
2911 {
2912 rtx src, o = output_rvec[i];
2913 if (i == 0)
2914 {
2915 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
2916 src = obody;
2917 }
2918 else
2919 {
2920 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
2921 ASM_OPERANDS_TEMPLATE (obody),
2922 constraints[i], i, argvec,
2923 constraintvec, labelvec, locus);
2924 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
2925 }
2926 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
2927 }
2928
2929 /* If there are no outputs (but there are some clobbers)
2930 store the bare ASM_OPERANDS into the PARALLEL. */
2931 if (i == 0)
2932 XVECEXP (body, 0, i++) = obody;
2933
2934 /* Store (clobber REG) for each clobbered register specified. */
2935 for (unsigned j = 0; j < nclobbers; ++j)
2936 {
2937 rtx clobbered_reg = clobber_rvec[j];
2938
2939 /* Do a sanity check for overlap between the clobbers and the
2940 inputs and outputs that hasn't been handled; any such overlap
2941 should have been detected and reported above. */
2942 if (!clobber_conflict_found && REG_P (clobbered_reg))
2943 {
2944 /* We test the old body (obody) contents to avoid
2945 tripping over the under-construction body. */
2946 for (unsigned k = 0; k < noutputs; ++k)
2947 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
2948 internal_error ("asm clobber conflict with output operand");
2949
2950 for (unsigned k = 0; k < ninputs - ninout; ++k)
2951 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
2952 internal_error ("asm clobber conflict with input operand");
2953 }
2954
2955 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2956 }
2957
2958 if (nlabels > 0)
2959 emit_jump_insn (body);
2960 else
2961 emit_insn (body);
2962 }
2963
2964 generating_concat_p = old_generating_concat_p;
2965
2966 if (fallthru_label)
2967 emit_label (fallthru_label);
2968
2969 if (after_md_seq)
2970 emit_insn (after_md_seq);
2971 if (after_rtl_seq)
2972 emit_insn (after_rtl_seq);
2973
2974 free_temp_slots ();
2975 crtl->has_asm_statement = 1;
2976 }
2977
2978 /* Emit code to jump to the address
2979 specified by the pointer expression EXP. */
2980
2981 static void
2982 expand_computed_goto (tree exp)
2983 {
2984 rtx x = expand_normal (exp);
2985
2986 do_pending_stack_adjust ();
2987 emit_indirect_jump (x);
2988 }
2989
2990 /* Generate RTL code for a `goto' statement with target label LABEL.
2991 LABEL should be a LABEL_DECL tree node that was or will later be
2992 defined with `expand_label'. */
2993
2994 static void
2995 expand_goto (tree label)
2996 {
2997 #ifdef ENABLE_CHECKING
2998 /* Check for a nonlocal goto to a containing function. Should have
2999 gotten translated to __builtin_nonlocal_goto. */
3000 tree context = decl_function_context (label);
3001 gcc_assert (!context || context == current_function_decl);
3002 #endif
3003
3004 emit_jump (jump_target_rtx (label));
3005 }
3006
3007 /* Output a return with no value. */
3008
3009 static void
3010 expand_null_return_1 (void)
3011 {
3012 clear_pending_stack_adjust ();
3013 do_pending_stack_adjust ();
3014 emit_jump (return_label);
3015 }
3016
3017 /* Generate RTL to return from the current function, with no value.
3018 (That is, we do not do anything about returning any value.) */
3019
3020 void
3021 expand_null_return (void)
3022 {
3023 /* If this function was declared to return a value, but we
3024 didn't, clobber the return registers so that they are not
3025 propagated live to the rest of the function. */
3026 clobber_return_register ();
3027
3028 expand_null_return_1 ();
3029 }
3030
3031 /* Generate RTL to return from the current function, with value VAL. */
3032
3033 static void
3034 expand_value_return (rtx val)
3035 {
3036 /* Copy the value to the return location unless it's already there. */
3037
3038 tree decl = DECL_RESULT (current_function_decl);
3039 rtx return_reg = DECL_RTL (decl);
3040 if (return_reg != val)
3041 {
3042 tree funtype = TREE_TYPE (current_function_decl);
3043 tree type = TREE_TYPE (decl);
3044 int unsignedp = TYPE_UNSIGNED (type);
3045 machine_mode old_mode = DECL_MODE (decl);
3046 machine_mode mode;
3047 if (DECL_BY_REFERENCE (decl))
3048 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3049 else
3050 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3051
3052 if (mode != old_mode)
3053 val = convert_modes (mode, old_mode, val, unsignedp);
3054
3055 if (GET_CODE (return_reg) == PARALLEL)
3056 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3057 else
3058 emit_move_insn (return_reg, val);
3059 }
3060
3061 expand_null_return_1 ();
3062 }
3063
3064 /* Generate RTL to evaluate the expression RETVAL and return it
3065 from the current function. */
3066
3067 static void
3068 expand_return (tree retval, tree bounds)
3069 {
3070 rtx result_rtl;
3071 rtx val = 0;
3072 tree retval_rhs;
3073 rtx bounds_rtl;
3074
3075 /* If function wants no value, give it none. */
3076 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3077 {
3078 expand_normal (retval);
3079 expand_null_return ();
3080 return;
3081 }
3082
3083 if (retval == error_mark_node)
3084 {
3085 /* Treat this like a return of no value from a function that
3086 returns a value. */
3087 expand_null_return ();
3088 return;
3089 }
3090 else if ((TREE_CODE (retval) == MODIFY_EXPR
3091 || TREE_CODE (retval) == INIT_EXPR)
3092 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3093 retval_rhs = TREE_OPERAND (retval, 1);
3094 else
3095 retval_rhs = retval;
3096
3097 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3098
3099 /* Put the returned bounds in the right place. */
3100 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3101 if (bounds_rtl)
3102 {
3103 rtx addr = NULL;
3104 rtx bnd = NULL;
3105
3106 if (bounds && bounds != error_mark_node)
3107 {
3108 bnd = expand_normal (bounds);
3109 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3110 }
3111 else if (REG_P (bounds_rtl))
3112 {
3113 if (bounds)
3114 bnd = chkp_expand_zero_bounds ();
3115 else
3116 {
3117 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3118 addr = gen_rtx_MEM (Pmode, addr);
3119 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3120 }
3121
3122 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3123 }
3124 else
3125 {
3126 int n;
3127
3128 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3129
3130 if (bounds)
3131 bnd = chkp_expand_zero_bounds ();
3132 else
3133 {
3134 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3135 addr = gen_rtx_MEM (Pmode, addr);
3136 }
3137
3138 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3139 {
3140 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3141 if (!bounds)
3142 {
3143 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3144 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3145 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3146 }
3147 targetm.calls.store_returned_bounds (slot, bnd);
3148 }
3149 }
3150 }
3151 else if (chkp_function_instrumented_p (current_function_decl)
3152 && !BOUNDED_P (retval_rhs)
3153 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3154 && TREE_CODE (retval_rhs) != RESULT_DECL)
3155 {
3156 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3157 addr = gen_rtx_MEM (Pmode, addr);
3158
3159 gcc_assert (MEM_P (result_rtl));
3160
3161 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3162 }
3163
3164 /* If we are returning the RESULT_DECL, then the value has already
3165 been stored into it, so we don't have to do anything special. */
3166 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3167 expand_value_return (result_rtl);
3168
3169 /* If the result is an aggregate that is being returned in one (or more)
3170 registers, load the registers here. */
3171
3172 else if (retval_rhs != 0
3173 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3174 && REG_P (result_rtl))
3175 {
3176 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3177 if (val)
3178 {
3179 /* Use the mode of the result value on the return register. */
3180 PUT_MODE (result_rtl, GET_MODE (val));
3181 expand_value_return (val);
3182 }
3183 else
3184 expand_null_return ();
3185 }
3186 else if (retval_rhs != 0
3187 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3188 && (REG_P (result_rtl)
3189 || (GET_CODE (result_rtl) == PARALLEL)))
3190 {
3191 /* Compute the return value into a temporary (usually a pseudo reg). */
3192 val
3193 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3194 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3195 val = force_not_mem (val);
3196 expand_value_return (val);
3197 }
3198 else
3199 {
3200 /* No hard reg used; calculate value into hard return reg. */
3201 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3202 expand_value_return (result_rtl);
3203 }
3204 }
3205
3206 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3207 STMT that doesn't require special handling for outgoing edges. That
3208 is no tailcalls and no GIMPLE_COND. */
3209
3210 static void
3211 expand_gimple_stmt_1 (gimple stmt)
3212 {
3213 tree op0;
3214
3215 set_curr_insn_location (gimple_location (stmt));
3216
3217 switch (gimple_code (stmt))
3218 {
3219 case GIMPLE_GOTO:
3220 op0 = gimple_goto_dest (stmt);
3221 if (TREE_CODE (op0) == LABEL_DECL)
3222 expand_goto (op0);
3223 else
3224 expand_computed_goto (op0);
3225 break;
3226 case GIMPLE_LABEL:
3227 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3228 break;
3229 case GIMPLE_NOP:
3230 case GIMPLE_PREDICT:
3231 break;
3232 case GIMPLE_SWITCH:
3233 expand_case (as_a <gswitch *> (stmt));
3234 break;
3235 case GIMPLE_ASM:
3236 expand_asm_stmt (as_a <gasm *> (stmt));
3237 break;
3238 case GIMPLE_CALL:
3239 expand_call_stmt (as_a <gcall *> (stmt));
3240 break;
3241
3242 case GIMPLE_RETURN:
3243 {
3244 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3245 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3246
3247 if (op0 && op0 != error_mark_node)
3248 {
3249 tree result = DECL_RESULT (current_function_decl);
3250
3251 /* If we are not returning the current function's RESULT_DECL,
3252 build an assignment to it. */
3253 if (op0 != result)
3254 {
3255 /* I believe that a function's RESULT_DECL is unique. */
3256 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3257
3258 /* ??? We'd like to use simply expand_assignment here,
3259 but this fails if the value is of BLKmode but the return
3260 decl is a register. expand_return has special handling
3261 for this combination, which eventually should move
3262 to common code. See comments there. Until then, let's
3263 build a modify expression :-/ */
3264 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3265 result, op0);
3266 }
3267 /* Mark that we have a return statement with missing bounds. */
3268 if (!bnd && chkp_function_instrumented_p (cfun->decl))
3269 bnd = error_mark_node;
3270 }
3271
3272 if (!op0)
3273 expand_null_return ();
3274 else
3275 expand_return (op0, bnd);
3276 }
3277 break;
3278
3279 case GIMPLE_ASSIGN:
3280 {
3281 gassign *assign_stmt = as_a <gassign *> (stmt);
3282 tree lhs = gimple_assign_lhs (assign_stmt);
3283
3284 /* Tree expand used to fiddle with |= and &= of two bitfield
3285 COMPONENT_REFs here. This can't happen with gimple; the LHS
3286 of binary assigns must be a gimple reg. */
3287
3288 if (TREE_CODE (lhs) != SSA_NAME
3289 || get_gimple_rhs_class (gimple_expr_code (stmt))
3290 == GIMPLE_SINGLE_RHS)
3291 {
3292 tree rhs = gimple_assign_rhs1 (assign_stmt);
3293 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3294 == GIMPLE_SINGLE_RHS);
3295 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3296 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3297 if (TREE_CLOBBER_P (rhs))
3298 /* This is a clobber to mark the going out of scope for
3299 this LHS. */
3300 ;
3301 else
3302 expand_assignment (lhs, rhs,
3303 gimple_assign_nontemporal_move_p (
3304 assign_stmt));
3305 }
3306 else
3307 {
3308 rtx target, temp;
3309 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3310 struct separate_ops ops;
3311 bool promoted = false;
3312
3313 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3314 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3315 promoted = true;
3316
3317 ops.code = gimple_assign_rhs_code (assign_stmt);
3318 ops.type = TREE_TYPE (lhs);
3319 switch (get_gimple_rhs_class (ops.code))
3320 {
3321 case GIMPLE_TERNARY_RHS:
3322 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3323 /* Fallthru */
3324 case GIMPLE_BINARY_RHS:
3325 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3326 /* Fallthru */
3327 case GIMPLE_UNARY_RHS:
3328 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3329 break;
3330 default:
3331 gcc_unreachable ();
3332 }
3333 ops.location = gimple_location (stmt);
3334
3335 /* If we want to use a nontemporal store, force the value into a
3336 register first. If we store into a promoted register,
3337 don't directly expand to target. */
3338 temp = nontemporal || promoted ? NULL_RTX : target;
3339 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3340 EXPAND_NORMAL);
3341
3342 if (temp == target)
3343 ;
3344 else if (promoted)
3345 {
3346 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3347 /* If TEMP is a VOIDmode constant, use convert_modes to make
3348 sure that we properly convert it. */
3349 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3350 {
3351 temp = convert_modes (GET_MODE (target),
3352 TYPE_MODE (ops.type),
3353 temp, unsignedp);
3354 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3355 GET_MODE (target), temp, unsignedp);
3356 }
3357
3358 convert_move (SUBREG_REG (target), temp, unsignedp);
3359 }
3360 else if (nontemporal && emit_storent_insn (target, temp))
3361 ;
3362 else
3363 {
3364 temp = force_operand (temp, target);
3365 if (temp != target)
3366 emit_move_insn (target, temp);
3367 }
3368 }
3369 }
3370 break;
3371
3372 default:
3373 gcc_unreachable ();
3374 }
3375 }
3376
3377 /* Expand one gimple statement STMT and return the last RTL instruction
3378 before any of the newly generated ones.
3379
3380 In addition to generating the necessary RTL instructions this also
3381 sets REG_EH_REGION notes if necessary and sets the current source
3382 location for diagnostics. */
3383
3384 static rtx_insn *
3385 expand_gimple_stmt (gimple stmt)
3386 {
3387 location_t saved_location = input_location;
3388 rtx_insn *last = get_last_insn ();
3389 int lp_nr;
3390
3391 gcc_assert (cfun);
3392
3393 /* We need to save and restore the current source location so that errors
3394 discovered during expansion are emitted with the right location. But
3395 it would be better if the diagnostic routines used the source location
3396 embedded in the tree nodes rather than globals. */
3397 if (gimple_has_location (stmt))
3398 input_location = gimple_location (stmt);
3399
3400 expand_gimple_stmt_1 (stmt);
3401
3402 /* Free any temporaries used to evaluate this statement. */
3403 free_temp_slots ();
3404
3405 input_location = saved_location;
3406
3407 /* Mark all insns that may trap. */
3408 lp_nr = lookup_stmt_eh_lp (stmt);
3409 if (lp_nr)
3410 {
3411 rtx_insn *insn;
3412 for (insn = next_real_insn (last); insn;
3413 insn = next_real_insn (insn))
3414 {
3415 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3416 /* If we want exceptions for non-call insns, any
3417 may_trap_p instruction may throw. */
3418 && GET_CODE (PATTERN (insn)) != CLOBBER
3419 && GET_CODE (PATTERN (insn)) != USE
3420 && insn_could_throw_p (insn))
3421 make_reg_eh_region_note (insn, 0, lp_nr);
3422 }
3423 }
3424
3425 return last;
3426 }
3427
3428 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3429 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3430 generated a tail call (something that might be denied by the ABI
3431 rules governing the call; see calls.c).
3432
3433 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3434 can still reach the rest of BB. The case here is __builtin_sqrt,
3435 where the NaN result goes through the external function (with a
3436 tailcall) and the normal result happens via a sqrt instruction. */
3437
3438 static basic_block
3439 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3440 {
3441 rtx_insn *last2, *last;
3442 edge e;
3443 edge_iterator ei;
3444 int probability;
3445 gcov_type count;
3446
3447 last2 = last = expand_gimple_stmt (stmt);
3448
3449 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3450 if (CALL_P (last) && SIBLING_CALL_P (last))
3451 goto found;
3452
3453 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3454
3455 *can_fallthru = true;
3456 return NULL;
3457
3458 found:
3459 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3460 Any instructions emitted here are about to be deleted. */
3461 do_pending_stack_adjust ();
3462
3463 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3464 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3465 EH or abnormal edges, we shouldn't have created a tail call in
3466 the first place. So it seems to me we should just be removing
3467 all edges here, or redirecting the existing fallthru edge to
3468 the exit block. */
3469
3470 probability = 0;
3471 count = 0;
3472
3473 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3474 {
3475 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3476 {
3477 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3478 {
3479 e->dest->count -= e->count;
3480 e->dest->frequency -= EDGE_FREQUENCY (e);
3481 if (e->dest->count < 0)
3482 e->dest->count = 0;
3483 if (e->dest->frequency < 0)
3484 e->dest->frequency = 0;
3485 }
3486 count += e->count;
3487 probability += e->probability;
3488 remove_edge (e);
3489 }
3490 else
3491 ei_next (&ei);
3492 }
3493
3494 /* This is somewhat ugly: the call_expr expander often emits instructions
3495 after the sibcall (to perform the function return). These confuse the
3496 find_many_sub_basic_blocks code, so we need to get rid of them. */
3497 last = NEXT_INSN (last);
3498 gcc_assert (BARRIER_P (last));
3499
3500 *can_fallthru = false;
3501 while (NEXT_INSN (last))
3502 {
3503 /* For instance a sqrt builtin expander may expand an if with a
3504 sibcall in the then-branch and a label for the else-branch. */
3505 if (LABEL_P (NEXT_INSN (last)))
3506 {
3507 *can_fallthru = true;
3508 break;
3509 }
3510 delete_insn (NEXT_INSN (last));
3511 }
3512
3513 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3514 | EDGE_SIBCALL);
3515 e->probability += probability;
3516 e->count += count;
3517 BB_END (bb) = last;
3518 update_bb_for_insn (bb);
3519
3520 if (NEXT_INSN (last))
3521 {
3522 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3523
3524 last = BB_END (bb);
3525 if (BARRIER_P (last))
3526 BB_END (bb) = PREV_INSN (last);
3527 }
3528
3529 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3530
3531 return bb;
3532 }
3533
3534 /* Return the difference between the floor and the truncated result of
3535 a signed division by OP1 with remainder MOD. */
3536 static rtx
3537 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3538 {
3539 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
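/* E.g. for -7 / 2 the truncated quotient is -3 with MOD = -1;
   OP1 / MOD = 2 / -1 is negative, so the adjustment is -1 and
   the floor result is -3 + -1 = -4. */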
3540 return gen_rtx_IF_THEN_ELSE
3541 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3542 gen_rtx_IF_THEN_ELSE
3543 (mode, gen_rtx_LT (BImode,
3544 gen_rtx_DIV (mode, op1, mod),
3545 const0_rtx),
3546 constm1_rtx, const0_rtx),
3547 const0_rtx);
3548 }
3549
3550 /* Return the difference between the ceil and the truncated result of
3551 a signed division by OP1 with remainder MOD. */
3552 static rtx
3553 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3554 {
3555 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
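/* E.g. for 7 / 2 the truncated quotient is 3 with MOD = 1;
   OP1 / MOD = 2 / 1 is positive, so the adjustment is 1 and
   the ceil result is 3 + 1 = 4. */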
3556 return gen_rtx_IF_THEN_ELSE
3557 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3558 gen_rtx_IF_THEN_ELSE
3559 (mode, gen_rtx_GT (BImode,
3560 gen_rtx_DIV (mode, op1, mod),
3561 const0_rtx),
3562 const1_rtx, const0_rtx),
3563 const0_rtx);
3564 }
3565
3566 /* Return the difference between the ceil and the truncated result of
3567 an unsigned division by OP1 with remainder MOD. */
3568 static rtx
3569 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3570 {
3571 /* (mod != 0 ? 1 : 0) */
3572 return gen_rtx_IF_THEN_ELSE
3573 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3574 const1_rtx, const0_rtx);
3575 }
3576
3577 /* Return the difference between the rounded and the truncated result
3578 of a signed division by OP1 with remainder MOD. Halfway cases are
3579 rounded away from zero, rather than to the nearest even number. */
3580 static rtx
3581 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3582 {
3583 /* (abs (mod) >= abs (op1) - abs (mod)
3584 ? (op1 / mod > 0 ? 1 : -1)
3585 : 0) */
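/* E.g. for 7 / 2: abs (MOD) = 1 equals abs (OP1) - abs (MOD) = 1,
   and OP1 / MOD = 2 is positive, so the adjustment is 1 and the
   rounded result is 4; for 7 / 3 the test fails (1 < 2) and the
   truncated quotient 2 stands. */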
3586 return gen_rtx_IF_THEN_ELSE
3587 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3588 gen_rtx_MINUS (mode,
3589 gen_rtx_ABS (mode, op1),
3590 gen_rtx_ABS (mode, mod))),
3591 gen_rtx_IF_THEN_ELSE
3592 (mode, gen_rtx_GT (BImode,
3593 gen_rtx_DIV (mode, op1, mod),
3594 const0_rtx),
3595 const1_rtx, constm1_rtx),
3596 const0_rtx);
3597 }
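/* E.g. for 7 / 4 in SImode: truncation gives 1 with mod = 3, and
   abs (mod) = 3 >= abs (op1) - abs (mod) = 1, so the adjustment is +1,
   matching round (1.75) = 2.  The halfway case 6 / 4 also satisfies
   the >= test (2 >= 2) and thus rounds away from zero, to 2.  */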
3598
3599 /* Return the difference between the rounded and the truncated result
3600 of an unsigned division by OP1 with remainder MOD. Halfway cases
3601 are rounded away from zero, rather than to the nearest even
3602 number. */
3603 static rtx
3604 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3605 {
3606 /* (mod >= op1 - mod ? 1 : 0) */
3607 return gen_rtx_IF_THEN_ELSE
3608 (mode, gen_rtx_GE (BImode, mod,
3609 gen_rtx_MINUS (mode, op1, mod)),
3610 const1_rtx, const0_rtx);
3611 }
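/* The unsigned analogue of the above: for 7 / 4, mod = 3 and
   op1 - mod = 1, so mod >= op1 - mod holds and the adjustment is +1,
   i.e. 7 / 4 rounds to 2 rather than truncating to 1.  */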
3612
3613 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3614 any rtl. */
3615
3616 static rtx
3617 convert_debug_memory_address (machine_mode mode, rtx x,
3618 addr_space_t as)
3619 {
3620 machine_mode xmode = GET_MODE (x);
3621
3622 #ifndef POINTERS_EXTEND_UNSIGNED
3623 gcc_assert (mode == Pmode
3624 || mode == targetm.addr_space.address_mode (as));
3625 gcc_assert (xmode == mode || xmode == VOIDmode);
3626 #else
3627 rtx temp;
3628
3629 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3630
3631 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3632 return x;
3633
3634 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3635 x = simplify_gen_subreg (mode, x, xmode,
3636 subreg_lowpart_offset
3637 (mode, xmode));
3638 else if (POINTERS_EXTEND_UNSIGNED > 0)
3639 x = gen_rtx_ZERO_EXTEND (mode, x);
3640 else if (!POINTERS_EXTEND_UNSIGNED)
3641 x = gen_rtx_SIGN_EXTEND (mode, x);
3642 else
3643 {
3644 switch (GET_CODE (x))
3645 {
3646 case SUBREG:
3647 if ((SUBREG_PROMOTED_VAR_P (x)
3648 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3649 || (GET_CODE (SUBREG_REG (x)) == PLUS
3650 && REG_P (XEXP (SUBREG_REG (x), 0))
3651 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3652 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3653 && GET_MODE (SUBREG_REG (x)) == mode)
3654 return SUBREG_REG (x);
3655 break;
3656 case LABEL_REF:
3657 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3658 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3659 return temp;
3660 case SYMBOL_REF:
3661 temp = shallow_copy_rtx (x);
3662 PUT_MODE (temp, mode);
3663 return temp;
3664 case CONST:
3665 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3666 if (temp)
3667 temp = gen_rtx_CONST (mode, temp);
3668 return temp;
3669 case PLUS:
3670 case MINUS:
3671 if (CONST_INT_P (XEXP (x, 1)))
3672 {
3673 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3674 if (temp)
3675 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3676 }
3677 break;
3678 default:
3679 break;
3680 }
3681 /* Don't know how to express ptr_extend as an operation in debug info. */
3682 return NULL;
3683 }
3684 #endif /* POINTERS_EXTEND_UNSIGNED */
3685
3686 return x;
3687 }
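/* As an illustration of the conversions above: when the target defines
   POINTERS_EXTEND_UNSIGNED as 1 and MODE is wider than XMODE, X is
   wrapped in a ZERO_EXTEND; narrowing takes the low part through a
   subreg; and in the opaque ptr_extend case (POINTERS_EXTEND_UNSIGNED
   negative) only a few address forms (SYMBOL_REF, LABEL_REF, CONST,
   and PLUS or MINUS with a constant offset) can be rewritten, while
   anything else returns NULL.  */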
3688
3689 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
3690 by avoid_deep_ter_for_debug. */
3691
3692 static hash_map<tree, tree> *deep_ter_debug_map;
3693
3694 /* Split overly deep TER chains for debug stmts by introducing debug temporaries. */
3695
3696 static void
3697 avoid_deep_ter_for_debug (gimple stmt, int depth)
3698 {
3699 use_operand_p use_p;
3700 ssa_op_iter iter;
3701 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
3702 {
3703 tree use = USE_FROM_PTR (use_p);
3704 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
3705 continue;
3706 gimple g = get_gimple_for_ssa_name (use);
3707 if (g == NULL)
3708 continue;
3709 if (depth > 6 && !stmt_ends_bb_p (g))
3710 {
3711 if (deep_ter_debug_map == NULL)
3712 deep_ter_debug_map = new hash_map<tree, tree>;
3713
3714 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
3715 if (vexpr != NULL)
3716 continue;
3717 vexpr = make_node (DEBUG_EXPR_DECL);
3718 gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
3719 DECL_ARTIFICIAL (vexpr) = 1;
3720 TREE_TYPE (vexpr) = TREE_TYPE (use);
3721 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
3722 gimple_stmt_iterator gsi = gsi_for_stmt (g);
3723 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
3724 avoid_deep_ter_for_debug (def_temp, 0);
3725 }
3726 else
3727 avoid_deep_ter_for_debug (g, depth + 1);
3728 }
3729 }
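/* As a sketch of the situation this guards against (the names below
   are made up): given a chain of single-use definitions

     t1_1 = x_1 + 1;
     t2_1 = t1_1 * y_1;
     ...
     t9_1 = t8_1 - z_1;

   each feeding only the next, TER would substitute the entire chain
   into any debug stmt mentioning t9_1.  Once the chain is deeper than
   6 statements, a DEBUG_EXPR_DECL is bound to the intermediate SSA
   name instead, so later expansion of debug binds stays shallow.  */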
3730
3731 /* Return an RTX equivalent to the value of the parameter DECL. */
3732
3733 static rtx
3734 expand_debug_parm_decl (tree decl)
3735 {
3736 rtx incoming = DECL_INCOMING_RTL (decl);
3737
3738 if (incoming
3739 && GET_MODE (incoming) != BLKmode
3740 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3741 || (MEM_P (incoming)
3742 && REG_P (XEXP (incoming, 0))
3743 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3744 {
3745 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3746
3747 #ifdef HAVE_window_save
3748 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3749 If the target machine has an explicit window save instruction, the
3750 actual entry value is the corresponding OUTGOING_REGNO instead. */
3751 if (REG_P (incoming)
3752 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3753 incoming
3754 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3755 OUTGOING_REGNO (REGNO (incoming)), 0);
3756 else if (MEM_P (incoming))
3757 {
3758 rtx reg = XEXP (incoming, 0);
3759 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3760 {
3761 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3762 incoming = replace_equiv_address_nv (incoming, reg);
3763 }
3764 else
3765 incoming = copy_rtx (incoming);
3766 }
3767 #endif
3768
3769 ENTRY_VALUE_EXP (rtl) = incoming;
3770 return rtl;
3771 }
3772
3773 if (incoming
3774 && GET_MODE (incoming) != BLKmode
3775 && !TREE_ADDRESSABLE (decl)
3776 && MEM_P (incoming)
3777 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3778 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3779 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3780 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3781 return copy_rtx (incoming);
3782
3783 return NULL_RTX;
3784 }
3785
3786 /* Return an RTX equivalent to the value of the tree expression EXP. */
3787
3788 static rtx
3789 expand_debug_expr (tree exp)
3790 {
3791 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3792 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3793 machine_mode inner_mode = VOIDmode;
3794 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3795 addr_space_t as;
3796
3797 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3798 {
3799 case tcc_expression:
3800 switch (TREE_CODE (exp))
3801 {
3802 case COND_EXPR:
3803 case DOT_PROD_EXPR:
3804 case SAD_EXPR:
3805 case WIDEN_MULT_PLUS_EXPR:
3806 case WIDEN_MULT_MINUS_EXPR:
3807 case FMA_EXPR:
3808 goto ternary;
3809
3810 case TRUTH_ANDIF_EXPR:
3811 case TRUTH_ORIF_EXPR:
3812 case TRUTH_AND_EXPR:
3813 case TRUTH_OR_EXPR:
3814 case TRUTH_XOR_EXPR:
3815 goto binary;
3816
3817 case TRUTH_NOT_EXPR:
3818 goto unary;
3819
3820 default:
3821 break;
3822 }
3823 break;
3824
3825 ternary:
3826 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3827 if (!op2)
3828 return NULL_RTX;
3829 /* Fall through. */
3830
3831 binary:
3832 case tcc_binary:
3833 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3834 if (!op1)
3835 return NULL_RTX;
3836 switch (TREE_CODE (exp))
3837 {
3838 case LSHIFT_EXPR:
3839 case RSHIFT_EXPR:
3840 case LROTATE_EXPR:
3841 case RROTATE_EXPR:
3842 case WIDEN_LSHIFT_EXPR:
3843 /* Ensure second operand isn't wider than the first one. */
3844 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
3845 if (SCALAR_INT_MODE_P (inner_mode))
3846 {
3847 machine_mode opmode = mode;
3848 if (VECTOR_MODE_P (mode))
3849 opmode = GET_MODE_INNER (mode);
3850 if (SCALAR_INT_MODE_P (opmode)
3851 && (GET_MODE_PRECISION (opmode)
3852 < GET_MODE_PRECISION (inner_mode)))
3853 op1 = simplify_gen_subreg (opmode, op1, inner_mode,
3854 subreg_lowpart_offset (opmode,
3855 inner_mode));
3856 }
3857 break;
3858 default:
3859 break;
3860 }
3861 /* Fall through. */
3862
3863 unary:
3864 case tcc_unary:
3865 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3866 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3867 if (!op0)
3868 return NULL_RTX;
3869 break;
3870
3871 case tcc_comparison:
3872 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
3873 goto binary;
3874
3875 case tcc_type:
3876 case tcc_statement:
3877 gcc_unreachable ();
3878
3879 case tcc_constant:
3880 case tcc_exceptional:
3881 case tcc_declaration:
3882 case tcc_reference:
3883 case tcc_vl_exp:
3884 break;
3885 }
3886
3887 switch (TREE_CODE (exp))
3888 {
3889 case STRING_CST:
3890 if (!lookup_constant_def (exp))
3891 {
3892 if (strlen (TREE_STRING_POINTER (exp)) + 1
3893 != (size_t) TREE_STRING_LENGTH (exp))
3894 return NULL_RTX;
3895 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3896 op0 = gen_rtx_MEM (BLKmode, op0);
3897 set_mem_attributes (op0, exp, 0);
3898 return op0;
3899 }
3900 /* Fall through... */
3901
3902 case INTEGER_CST:
3903 case REAL_CST:
3904 case FIXED_CST:
3905 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3906 return op0;
3907
3908 case COMPLEX_CST:
3909 gcc_assert (COMPLEX_MODE_P (mode));
3910 op0 = expand_debug_expr (TREE_REALPART (exp));
3911 op1 = expand_debug_expr (TREE_IMAGPART (exp));
3912 return gen_rtx_CONCAT (mode, op0, op1);
3913
3914 case DEBUG_EXPR_DECL:
3915 op0 = DECL_RTL_IF_SET (exp);
3916
3917 if (op0)
3918 return op0;
3919
3920 op0 = gen_rtx_DEBUG_EXPR (mode);
3921 DEBUG_EXPR_TREE_DECL (op0) = exp;
3922 SET_DECL_RTL (exp, op0);
3923
3924 return op0;
3925
3926 case VAR_DECL:
3927 case PARM_DECL:
3928 case FUNCTION_DECL:
3929 case LABEL_DECL:
3930 case CONST_DECL:
3931 case RESULT_DECL:
3932 op0 = DECL_RTL_IF_SET (exp);
3933
3934 /* This decl was probably optimized away. */
3935 if (!op0)
3936 {
3937 if (TREE_CODE (exp) != VAR_DECL
3938 || DECL_EXTERNAL (exp)
3939 || !TREE_STATIC (exp)
3940 || !DECL_NAME (exp)
3941 || DECL_HARD_REGISTER (exp)
3942 || DECL_IN_CONSTANT_POOL (exp)
3943 || mode == VOIDmode)
3944 return NULL;
3945
3946 op0 = make_decl_rtl_for_debug (exp);
3947 if (!MEM_P (op0)
3948 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3949 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3950 return NULL;
3951 }
3952 else
3953 op0 = copy_rtx (op0);
3954
3955 if (GET_MODE (op0) == BLKmode
3956 /* If op0 is not BLKmode, but mode is, adjust_mode
3957 below would ICE. While it is likely a FE bug,
3958 try to be robust here. See PR43166. */
3959 || mode == BLKmode
3960 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
3961 {
3962 gcc_assert (MEM_P (op0));
3963 op0 = adjust_address_nv (op0, mode, 0);
3964 return op0;
3965 }
3966
3967 /* Fall through. */
3968
3969 adjust_mode:
3970 case PAREN_EXPR:
3971 CASE_CONVERT:
3972 {
3973 inner_mode = GET_MODE (op0);
3974
3975 if (mode == inner_mode)
3976 return op0;
3977
3978 if (inner_mode == VOIDmode)
3979 {
3980 if (TREE_CODE (exp) == SSA_NAME)
3981 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3982 else
3983 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3984 if (mode == inner_mode)
3985 return op0;
3986 }
3987
3988 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3989 {
3990 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3991 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3992 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3993 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3994 else
3995 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3996 }
3997 else if (FLOAT_MODE_P (mode))
3998 {
3999 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4000 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4001 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4002 else
4003 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4004 }
4005 else if (FLOAT_MODE_P (inner_mode))
4006 {
4007 if (unsignedp)
4008 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4009 else
4010 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4011 }
4012 else if (CONSTANT_P (op0)
4013 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4014 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4015 subreg_lowpart_offset (mode,
4016 inner_mode));
4017 else if (UNARY_CLASS_P (exp)
4018 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4019 : unsignedp)
4020 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4021 else
4022 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4023
4024 return op0;
4025 }
4026
4027 case MEM_REF:
4028 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4029 {
4030 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4031 TREE_OPERAND (exp, 0),
4032 TREE_OPERAND (exp, 1));
4033 if (newexp)
4034 return expand_debug_expr (newexp);
4035 }
4036 /* FALLTHROUGH */
4037 case INDIRECT_REF:
4038 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4039 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4040 if (!op0)
4041 return NULL;
4042
4043 if (TREE_CODE (exp) == MEM_REF)
4044 {
4045 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4046 || (GET_CODE (op0) == PLUS
4047 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4048 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4049 Instead just use get_inner_reference. */
4050 goto component_ref;
4051
4052 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4053 if (!op1 || !CONST_INT_P (op1))
4054 return NULL;
4055
4056 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4057 }
4058
4059 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4060
4061 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4062 op0, as);
4063 if (op0 == NULL_RTX)
4064 return NULL;
4065
4066 op0 = gen_rtx_MEM (mode, op0);
4067 set_mem_attributes (op0, exp, 0);
4068 if (TREE_CODE (exp) == MEM_REF
4069 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4070 set_mem_expr (op0, NULL_TREE);
4071 set_mem_addr_space (op0, as);
4072
4073 return op0;
4074
4075 case TARGET_MEM_REF:
4076 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4077 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4078 return NULL;
4079
4080 op0 = expand_debug_expr
4081 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4082 if (!op0)
4083 return NULL;
4084
4085 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4086 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4087 op0, as);
4088 if (op0 == NULL_RTX)
4089 return NULL;
4090
4091 op0 = gen_rtx_MEM (mode, op0);
4092
4093 set_mem_attributes (op0, exp, 0);
4094 set_mem_addr_space (op0, as);
4095
4096 return op0;
4097
4098 component_ref:
4099 case ARRAY_REF:
4100 case ARRAY_RANGE_REF:
4101 case COMPONENT_REF:
4102 case BIT_FIELD_REF:
4103 case REALPART_EXPR:
4104 case IMAGPART_EXPR:
4105 case VIEW_CONVERT_EXPR:
4106 {
4107 machine_mode mode1;
4108 HOST_WIDE_INT bitsize, bitpos;
4109 tree offset;
4110 int volatilep = 0;
4111 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4112 &mode1, &unsignedp, &volatilep, false);
4113 rtx orig_op0;
4114
4115 if (bitsize == 0)
4116 return NULL;
4117
4118 orig_op0 = op0 = expand_debug_expr (tem);
4119
4120 if (!op0)
4121 return NULL;
4122
4123 if (offset)
4124 {
4125 machine_mode addrmode, offmode;
4126
4127 if (!MEM_P (op0))
4128 return NULL;
4129
4130 op0 = XEXP (op0, 0);
4131 addrmode = GET_MODE (op0);
4132 if (addrmode == VOIDmode)
4133 addrmode = Pmode;
4134
4135 op1 = expand_debug_expr (offset);
4136 if (!op1)
4137 return NULL;
4138
4139 offmode = GET_MODE (op1);
4140 if (offmode == VOIDmode)
4141 offmode = TYPE_MODE (TREE_TYPE (offset));
4142
4143 if (addrmode != offmode)
4144 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4145 subreg_lowpart_offset (addrmode,
4146 offmode));
4147
4148 /* Don't use offset_address here; we don't need a
4149 recognizable address, and we don't want to generate
4150 code. */
4151 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4152 op0, op1));
4153 }
4154
4155 if (MEM_P (op0))
4156 {
4157 if (mode1 == VOIDmode)
4158 /* Bitfield. */
4159 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4160 if (bitpos >= BITS_PER_UNIT)
4161 {
4162 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4163 bitpos %= BITS_PER_UNIT;
4164 }
4165 else if (bitpos < 0)
4166 {
4167 HOST_WIDE_INT units
4168 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4169 op0 = adjust_address_nv (op0, mode1, units);
4170 bitpos += units * BITS_PER_UNIT;
4171 }
4172 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4173 op0 = adjust_address_nv (op0, mode, 0);
4174 else if (GET_MODE (op0) != mode1)
4175 op0 = adjust_address_nv (op0, mode1, 0);
4176 else
4177 op0 = copy_rtx (op0);
4178 if (op0 == orig_op0)
4179 op0 = shallow_copy_rtx (op0);
4180 set_mem_attributes (op0, exp, 0);
4181 }
4182
4183 if (bitpos == 0 && mode == GET_MODE (op0))
4184 return op0;
4185
4186 if (bitpos < 0)
4187 return NULL;
4188
4189 if (GET_MODE (op0) == BLKmode)
4190 return NULL;
4191
4192 if ((bitpos % BITS_PER_UNIT) == 0
4193 && bitsize == GET_MODE_BITSIZE (mode1))
4194 {
4195 machine_mode opmode = GET_MODE (op0);
4196
4197 if (opmode == VOIDmode)
4198 opmode = TYPE_MODE (TREE_TYPE (tem));
4199
4200 /* This condition may hold if we're expanding the address
4201 right past the end of an array that turned out not to
4202 be addressable (i.e., the address was only computed in
4203 debug stmts). The gen_subreg below would rightfully
4204 crash, and the address doesn't really exist, so just
4205 drop it. */
4206 if (bitpos >= GET_MODE_BITSIZE (opmode))
4207 return NULL;
4208
4209 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4210 return simplify_gen_subreg (mode, op0, opmode,
4211 bitpos / BITS_PER_UNIT);
4212 }
4213
4214 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4215 && TYPE_UNSIGNED (TREE_TYPE (exp))
4216 ? SIGN_EXTRACT
4217 : ZERO_EXTRACT, mode,
4218 GET_MODE (op0) != VOIDmode
4219 ? GET_MODE (op0)
4220 : TYPE_MODE (TREE_TYPE (tem)),
4221 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4222 }
4223
4224 case ABS_EXPR:
4225 return simplify_gen_unary (ABS, mode, op0, mode);
4226
4227 case NEGATE_EXPR:
4228 return simplify_gen_unary (NEG, mode, op0, mode);
4229
4230 case BIT_NOT_EXPR:
4231 return simplify_gen_unary (NOT, mode, op0, mode);
4232
4233 case FLOAT_EXPR:
4234 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4235 0)))
4236 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4237 inner_mode);
4238
4239 case FIX_TRUNC_EXPR:
4240 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4241 inner_mode);
4242
4243 case POINTER_PLUS_EXPR:
4244 /* For the rare targets where pointers are not the same size as
4245 size_t, we need to check for mismatched modes and correct
4246 the addend. */
4247 if (op0 && op1
4248 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4249 && GET_MODE (op0) != GET_MODE (op1))
4250 {
4251 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4252 /* If OP0 is a partial mode, then we must truncate, even if it has
4253 the same bitsize as OP1, because GCC's representation of partial
4254 modes is opaque. */
4255 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4256 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4257 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4258 GET_MODE (op1));
4259 else
4260 /* We always sign-extend, regardless of the signedness of
4261 the operand, because the operand is always unsigned
4262 here even if the original C expression is signed. */
4263 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4264 GET_MODE (op1));
4265 }
4266 /* Fall through. */
4267 case PLUS_EXPR:
4268 return simplify_gen_binary (PLUS, mode, op0, op1);
4269
4270 case MINUS_EXPR:
4271 return simplify_gen_binary (MINUS, mode, op0, op1);
4272
4273 case MULT_EXPR:
4274 return simplify_gen_binary (MULT, mode, op0, op1);
4275
4276 case RDIV_EXPR:
4277 case TRUNC_DIV_EXPR:
4278 case EXACT_DIV_EXPR:
4279 if (unsignedp)
4280 return simplify_gen_binary (UDIV, mode, op0, op1);
4281 else
4282 return simplify_gen_binary (DIV, mode, op0, op1);
4283
4284 case TRUNC_MOD_EXPR:
4285 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4286
4287 case FLOOR_DIV_EXPR:
4288 if (unsignedp)
4289 return simplify_gen_binary (UDIV, mode, op0, op1);
4290 else
4291 {
4292 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4293 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4294 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4295 return simplify_gen_binary (PLUS, mode, div, adj);
4296 }
4297
4298 case FLOOR_MOD_EXPR:
4299 if (unsignedp)
4300 return simplify_gen_binary (UMOD, mode, op0, op1);
4301 else
4302 {
4303 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4304 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4305 adj = simplify_gen_unary (NEG, mode,
4306 simplify_gen_binary (MULT, mode, adj, op1),
4307 mode);
4308 return simplify_gen_binary (PLUS, mode, mod, adj);
4309 }
4310
4311 case CEIL_DIV_EXPR:
4312 if (unsignedp)
4313 {
4314 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4315 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4316 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4317 return simplify_gen_binary (PLUS, mode, div, adj);
4318 }
4319 else
4320 {
4321 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4322 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4323 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4324 return simplify_gen_binary (PLUS, mode, div, adj);
4325 }
4326
4327 case CEIL_MOD_EXPR:
4328 if (unsignedp)
4329 {
4330 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4331 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4332 adj = simplify_gen_unary (NEG, mode,
4333 simplify_gen_binary (MULT, mode, adj, op1),
4334 mode);
4335 return simplify_gen_binary (PLUS, mode, mod, adj);
4336 }
4337 else
4338 {
4339 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4340 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4341 adj = simplify_gen_unary (NEG, mode,
4342 simplify_gen_binary (MULT, mode, adj, op1),
4343 mode);
4344 return simplify_gen_binary (PLUS, mode, mod, adj);
4345 }
4346
4347 case ROUND_DIV_EXPR:
4348 if (unsignedp)
4349 {
4350 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4351 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4352 rtx adj = round_udiv_adjust (mode, mod, op1);
4353 return simplify_gen_binary (PLUS, mode, div, adj);
4354 }
4355 else
4356 {
4357 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4358 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4359 rtx adj = round_sdiv_adjust (mode, mod, op1);
4360 return simplify_gen_binary (PLUS, mode, div, adj);
4361 }
4362
4363 case ROUND_MOD_EXPR:
4364 if (unsignedp)
4365 {
4366 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4367 rtx adj = round_udiv_adjust (mode, mod, op1);
4368 adj = simplify_gen_unary (NEG, mode,
4369 simplify_gen_binary (MULT, mode, adj, op1),
4370 mode);
4371 return simplify_gen_binary (PLUS, mode, mod, adj);
4372 }
4373 else
4374 {
4375 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4376 rtx adj = round_sdiv_adjust (mode, mod, op1);
4377 adj = simplify_gen_unary (NEG, mode,
4378 simplify_gen_binary (MULT, mode, adj, op1),
4379 mode);
4380 return simplify_gen_binary (PLUS, mode, mod, adj);
4381 }
4382
4383 case LSHIFT_EXPR:
4384 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4385
4386 case RSHIFT_EXPR:
4387 if (unsignedp)
4388 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4389 else
4390 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4391
4392 case LROTATE_EXPR:
4393 return simplify_gen_binary (ROTATE, mode, op0, op1);
4394
4395 case RROTATE_EXPR:
4396 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4397
4398 case MIN_EXPR:
4399 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4400
4401 case MAX_EXPR:
4402 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4403
4404 case BIT_AND_EXPR:
4405 case TRUTH_AND_EXPR:
4406 return simplify_gen_binary (AND, mode, op0, op1);
4407
4408 case BIT_IOR_EXPR:
4409 case TRUTH_OR_EXPR:
4410 return simplify_gen_binary (IOR, mode, op0, op1);
4411
4412 case BIT_XOR_EXPR:
4413 case TRUTH_XOR_EXPR:
4414 return simplify_gen_binary (XOR, mode, op0, op1);
4415
4416 case TRUTH_ANDIF_EXPR:
4417 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4418
4419 case TRUTH_ORIF_EXPR:
4420 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4421
4422 case TRUTH_NOT_EXPR:
4423 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4424
4425 case LT_EXPR:
4426 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4427 op0, op1);
4428
4429 case LE_EXPR:
4430 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4431 op0, op1);
4432
4433 case GT_EXPR:
4434 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4435 op0, op1);
4436
4437 case GE_EXPR:
4438 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4439 op0, op1);
4440
4441 case EQ_EXPR:
4442 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4443
4444 case NE_EXPR:
4445 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4446
4447 case UNORDERED_EXPR:
4448 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4449
4450 case ORDERED_EXPR:
4451 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4452
4453 case UNLT_EXPR:
4454 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4455
4456 case UNLE_EXPR:
4457 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4458
4459 case UNGT_EXPR:
4460 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4461
4462 case UNGE_EXPR:
4463 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4464
4465 case UNEQ_EXPR:
4466 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4467
4468 case LTGT_EXPR:
4469 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4470
4471 case COND_EXPR:
4472 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4473
4474 case COMPLEX_EXPR:
4475 gcc_assert (COMPLEX_MODE_P (mode));
4476 if (GET_MODE (op0) == VOIDmode)
4477 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4478 if (GET_MODE (op1) == VOIDmode)
4479 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4480 return gen_rtx_CONCAT (mode, op0, op1);
4481
4482 case CONJ_EXPR:
4483 if (GET_CODE (op0) == CONCAT)
4484 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4485 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4486 XEXP (op0, 1),
4487 GET_MODE_INNER (mode)));
4488 else
4489 {
4490 machine_mode imode = GET_MODE_INNER (mode);
4491 rtx re, im;
4492
4493 if (MEM_P (op0))
4494 {
4495 re = adjust_address_nv (op0, imode, 0);
4496 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4497 }
4498 else
4499 {
4500 machine_mode ifmode = int_mode_for_mode (mode);
4501 machine_mode ihmode = int_mode_for_mode (imode);
4502 rtx halfsize;
4503 if (ifmode == BLKmode || ihmode == BLKmode)
4504 return NULL;
4505 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4506 re = op0;
4507 if (mode != ifmode)
4508 re = gen_rtx_SUBREG (ifmode, re, 0);
4509 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4510 if (imode != ihmode)
4511 re = gen_rtx_SUBREG (imode, re, 0);
4512 im = copy_rtx (op0);
4513 if (mode != ifmode)
4514 im = gen_rtx_SUBREG (ifmode, im, 0);
4515 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4516 if (imode != ihmode)
4517 im = gen_rtx_SUBREG (imode, im, 0);
4518 }
4519 im = gen_rtx_NEG (imode, im);
4520 return gen_rtx_CONCAT (mode, re, im);
4521 }
4522
4523 case ADDR_EXPR:
4524 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4525 if (!op0 || !MEM_P (op0))
4526 {
4527 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4528 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4529 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4530 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4531 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4532 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4533
4534 if (handled_component_p (TREE_OPERAND (exp, 0)))
4535 {
4536 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4537 tree decl
4538 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4539 &bitoffset, &bitsize, &maxsize);
4540 if ((TREE_CODE (decl) == VAR_DECL
4541 || TREE_CODE (decl) == PARM_DECL
4542 || TREE_CODE (decl) == RESULT_DECL)
4543 && (!TREE_ADDRESSABLE (decl)
4544 || target_for_debug_bind (decl))
4545 && (bitoffset % BITS_PER_UNIT) == 0
4546 && bitsize > 0
4547 && bitsize == maxsize)
4548 {
4549 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4550 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4551 }
4552 }
4553
4554 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4555 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4556 == ADDR_EXPR)
4557 {
4558 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4559 0));
4560 if (op0 != NULL
4561 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4562 || (GET_CODE (op0) == PLUS
4563 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4564 && CONST_INT_P (XEXP (op0, 1)))))
4565 {
4566 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4567 1));
4568 if (!op1 || !CONST_INT_P (op1))
4569 return NULL;
4570
4571 return plus_constant (mode, op0, INTVAL (op1));
4572 }
4573 }
4574
4575 return NULL;
4576 }
4577
4578 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4579 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4580
4581 return op0;
4582
4583 case VECTOR_CST:
4584 {
4585 unsigned i;
4586
4587 op0 = gen_rtx_CONCATN
4588 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4589
4590 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4591 {
4592 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4593 if (!op1)
4594 return NULL;
4595 XVECEXP (op0, 0, i) = op1;
4596 }
4597
4598 return op0;
4599 }
4600
4601 case CONSTRUCTOR:
4602 if (TREE_CLOBBER_P (exp))
4603 return NULL;
4604 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4605 {
4606 unsigned i;
4607 tree val;
4608
4609 op0 = gen_rtx_CONCATN
4610 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4611
4612 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4613 {
4614 op1 = expand_debug_expr (val);
4615 if (!op1)
4616 return NULL;
4617 XVECEXP (op0, 0, i) = op1;
4618 }
4619
4620 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4621 {
4622 op1 = expand_debug_expr
4623 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4624
4625 if (!op1)
4626 return NULL;
4627
4628 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4629 XVECEXP (op0, 0, i) = op1;
4630 }
4631
4632 return op0;
4633 }
4634 else
4635 goto flag_unsupported;
4636
4637 case CALL_EXPR:
4638 /* ??? Maybe handle some builtins? */
4639 return NULL;
4640
4641 case SSA_NAME:
4642 {
4643 gimple g = get_gimple_for_ssa_name (exp);
4644 if (g)
4645 {
4646 tree t = NULL_TREE;
4647 if (deep_ter_debug_map)
4648 {
4649 tree *slot = deep_ter_debug_map->get (exp);
4650 if (slot)
4651 t = *slot;
4652 }
4653 if (t == NULL_TREE)
4654 t = gimple_assign_rhs_to_tree (g);
4655 op0 = expand_debug_expr (t);
4656 if (!op0)
4657 return NULL;
4658 }
4659 else
4660 {
4661 int part = var_to_partition (SA.map, exp);
4662
4663 if (part == NO_PARTITION)
4664 {
4665 /* If this is a reference to the incoming value of a
4666 parameter that is never used in the code, or whose
4667 incoming value is never used, use the PARM_DECL's
4668 DECL_RTL if set. */
4669 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4670 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4671 {
4672 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4673 if (op0)
4674 goto adjust_mode;
4675 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4676 if (op0)
4677 goto adjust_mode;
4678 }
4679 return NULL;
4680 }
4681
4682 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4683
4684 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4685 }
4686 goto adjust_mode;
4687 }
4688
4689 case ERROR_MARK:
4690 return NULL;
4691
4692 /* Vector stuff. For most of these tree codes we don't have corresponding rtl codes. */
4693 case REALIGN_LOAD_EXPR:
4694 case REDUC_MAX_EXPR:
4695 case REDUC_MIN_EXPR:
4696 case REDUC_PLUS_EXPR:
4697 case VEC_COND_EXPR:
4698 case VEC_PACK_FIX_TRUNC_EXPR:
4699 case VEC_PACK_SAT_EXPR:
4700 case VEC_PACK_TRUNC_EXPR:
4701 case VEC_UNPACK_FLOAT_HI_EXPR:
4702 case VEC_UNPACK_FLOAT_LO_EXPR:
4703 case VEC_UNPACK_HI_EXPR:
4704 case VEC_UNPACK_LO_EXPR:
4705 case VEC_WIDEN_MULT_HI_EXPR:
4706 case VEC_WIDEN_MULT_LO_EXPR:
4707 case VEC_WIDEN_MULT_EVEN_EXPR:
4708 case VEC_WIDEN_MULT_ODD_EXPR:
4709 case VEC_WIDEN_LSHIFT_HI_EXPR:
4710 case VEC_WIDEN_LSHIFT_LO_EXPR:
4711 case VEC_PERM_EXPR:
4712 return NULL;
4713
4714 /* Misc codes. */
4715 case ADDR_SPACE_CONVERT_EXPR:
4716 case FIXED_CONVERT_EXPR:
4717 case OBJ_TYPE_REF:
4718 case WITH_SIZE_EXPR:
4719 return NULL;
4720
4721 case DOT_PROD_EXPR:
4722 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4723 && SCALAR_INT_MODE_P (mode))
4724 {
4725 op0
4726 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4727 0)))
4728 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4729 inner_mode);
4730 op1
4731 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4732 1)))
4733 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4734 inner_mode);
4735 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4736 return simplify_gen_binary (PLUS, mode, op0, op2);
4737 }
4738 return NULL;
4739
4740 case WIDEN_MULT_EXPR:
4741 case WIDEN_MULT_PLUS_EXPR:
4742 case WIDEN_MULT_MINUS_EXPR:
4743 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4744 && SCALAR_INT_MODE_P (mode))
4745 {
4746 inner_mode = GET_MODE (op0);
4747 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4748 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4749 else
4750 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4751 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4752 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4753 else
4754 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4755 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4756 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4757 return op0;
4758 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4759 return simplify_gen_binary (PLUS, mode, op0, op2);
4760 else
4761 return simplify_gen_binary (MINUS, mode, op2, op0);
4762 }
4763 return NULL;
4764
4765 case MULT_HIGHPART_EXPR:
4766 /* ??? Similar to the above. */
4767 return NULL;
4768
4769 case WIDEN_SUM_EXPR:
4770 case WIDEN_LSHIFT_EXPR:
4771 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4772 && SCALAR_INT_MODE_P (mode))
4773 {
4774 op0
4775 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4776 0)))
4777 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4778 inner_mode);
4779 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4780 ? ASHIFT : PLUS, mode, op0, op1);
4781 }
4782 return NULL;
4783
4784 case FMA_EXPR:
4785 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4786
4787 default:
4788 flag_unsupported:
4789 #ifdef ENABLE_CHECKING
4790 debug_tree (exp);
4791 gcc_unreachable ();
4792 #else
4793 return NULL;
4794 #endif
4795 }
4796 }
4797
4798 /* Return an RTX equivalent to the source bind value of the tree expression
4799 EXP. */
4800
4801 static rtx
4802 expand_debug_source_expr (tree exp)
4803 {
4804 rtx op0 = NULL_RTX;
4805 machine_mode mode = VOIDmode, inner_mode;
4806
4807 switch (TREE_CODE (exp))
4808 {
4809 case PARM_DECL:
4810 {
4811 mode = DECL_MODE (exp);
4812 op0 = expand_debug_parm_decl (exp);
4813 if (op0)
4814 break;
4815 /* See whether this is an argument that has been completely
4816 optimized out. */
4817 if (!DECL_RTL_SET_P (exp)
4818 && !DECL_INCOMING_RTL (exp)
4819 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4820 {
4821 tree aexp = DECL_ORIGIN (exp);
4822 if (DECL_CONTEXT (aexp)
4823 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4824 {
4825 vec<tree, va_gc> **debug_args;
4826 unsigned int ix;
4827 tree ddecl;
4828 debug_args = decl_debug_args_lookup (current_function_decl);
4829 if (debug_args != NULL)
4830 {
4831 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4832 ix += 2)
4833 if (ddecl == aexp)
4834 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4835 }
4836 }
4837 }
4838 break;
4839 }
4840 default:
4841 break;
4842 }
4843
4844 if (op0 == NULL_RTX)
4845 return NULL_RTX;
4846
4847 inner_mode = GET_MODE (op0);
4848 if (mode == inner_mode)
4849 return op0;
4850
4851 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4852 {
4853 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4854 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4855 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4856 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4857 else
4858 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4859 }
4860 else if (FLOAT_MODE_P (mode))
4861 gcc_unreachable ();
4862 else if (FLOAT_MODE_P (inner_mode))
4863 {
4864 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4865 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4866 else
4867 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4868 }
4869 else if (CONSTANT_P (op0)
4870 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4871 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4872 subreg_lowpart_offset (mode, inner_mode));
4873 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4874 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4875 else
4876 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4877
4878 return op0;
4879 }
4880
4881 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4882 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4883 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4884
4885 static void
4886 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
4887 {
4888 rtx exp = *exp_p;
4889
4890 if (exp == NULL_RTX)
4891 return;
4892
4893 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4894 return;
4895
4896 if (depth == 4)
4897 {
4898 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4899 rtx dval = make_debug_expr_from_rtl (exp);
4900
4901 /* Emit a debug bind insn before INSN. */
4902 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4903 DEBUG_EXPR_TREE_DECL (dval), exp,
4904 VAR_INIT_STATUS_INITIALIZED);
4905
4906 emit_debug_insn_before (bind, insn);
4907 *exp_p = dval;
4908 return;
4909 }
4910
4911 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4912 int i, j;
4913 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4914 switch (*format_ptr++)
4915 {
4916 case 'e':
4917 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4918 break;
4919
4920 case 'E':
4921 case 'V':
4922 for (j = 0; j < XVECLEN (exp, i); j++)
4923 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4924 break;
4925
4926 default:
4927 break;
4928 }
4929 }
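/* For example (an illustrative nesting, not a real location), with the
   depth limit of 4 a location such as

     (plus (mult (plus (mult (plus (reg) (reg)) (reg)) (reg)) (reg)) (reg))

   has its innermost (plus (reg) (reg)), sitting at depth 4, pulled
   out: a DEBUG_EXPR is substituted for it and a separate debug bind
   insn defining that DEBUG_EXPR is emitted just before INSN.  */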
4930
4931 /* Expand the _LOCs in debug insns. We run this after expanding all
4932 regular insns, so that any variables referenced in the function
4933 will have their DECL_RTLs set. */
4934
4935 static void
4936 expand_debug_locations (void)
4937 {
4938 rtx_insn *insn;
4939 rtx_insn *last = get_last_insn ();
4940 int save_strict_alias = flag_strict_aliasing;
4941
4942 /* Creating new alias sets while setting up memory attributes causes
4943 -fcompare-debug failures, even though it doesn't bring about any
4944 codegen changes. */
4945 flag_strict_aliasing = 0;
4946
4947 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4948 if (DEBUG_INSN_P (insn))
4949 {
4950 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
4951 rtx val;
4952 rtx_insn *prev_insn, *insn2;
4953 machine_mode mode;
4954
4955 if (value == NULL_TREE)
4956 val = NULL_RTX;
4957 else
4958 {
4959 if (INSN_VAR_LOCATION_STATUS (insn)
4960 == VAR_INIT_STATUS_UNINITIALIZED)
4961 val = expand_debug_source_expr (value);
4962 /* The avoid_deep_ter_for_debug function inserts
4963 debug bind stmts after SSA_NAME definition, with the
4964 SSA_NAME as the whole bind location. Temporarily disable
4965 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
4966 being defined in this DEBUG_INSN. */
4967 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
4968 {
4969 tree *slot = deep_ter_debug_map->get (value);
4970 if (slot)
4971 {
4972 if (*slot == INSN_VAR_LOCATION_DECL (insn))
4973 *slot = NULL_TREE;
4974 else
4975 slot = NULL;
4976 }
4977 val = expand_debug_expr (value);
4978 if (slot)
4979 *slot = INSN_VAR_LOCATION_DECL (insn);
4980 }
4981 else
4982 val = expand_debug_expr (value);
4983 gcc_assert (last == get_last_insn ());
4984 }
4985
4986 if (!val)
4987 val = gen_rtx_UNKNOWN_VAR_LOC ();
4988 else
4989 {
4990 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4991
4992 gcc_assert (mode == GET_MODE (val)
4993 || (GET_MODE (val) == VOIDmode
4994 && (CONST_SCALAR_INT_P (val)
4995 || GET_CODE (val) == CONST_FIXED
4996 || GET_CODE (val) == LABEL_REF)));
4997 }
4998
4999 INSN_VAR_LOCATION_LOC (insn) = val;
5000 prev_insn = PREV_INSN (insn);
5001 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5002 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5003 }
5004
5005 flag_strict_aliasing = save_strict_alias;
5006 }
5007
5008 /* Swap the operands of commutative operations so that the more
5009 expensive operand is expanded first. */
5010
5011 static void
5012 reorder_operands (basic_block bb)
5013 {
5014 unsigned int *lattice; /* Holds the cost of each statement. */
5015 unsigned int i = 0, n = 0;
5016 gimple_stmt_iterator gsi;
5017 gimple_seq stmts;
5018 gimple stmt;
5019 bool swap;
5020 tree op0, op1;
5021 ssa_op_iter iter;
5022 use_operand_p use_p;
5023 gimple def0, def1;
5024
5025 /* Compute cost of each statement using estimate_num_insns. */
5026 stmts = bb_seq (bb);
5027 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5028 {
5029 stmt = gsi_stmt (gsi);
5030 if (!is_gimple_debug (stmt))
5031 gimple_set_uid (stmt, n++);
5032 }
5033 lattice = XNEWVEC (unsigned int, n);
5034 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5035 {
5036 unsigned cost;
5037 stmt = gsi_stmt (gsi);
5038 if (is_gimple_debug (stmt))
5039 continue;
5040 cost = estimate_num_insns (stmt, &eni_size_weights);
5041 lattice[i] = cost;
5042 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5043 {
5044 tree use = USE_FROM_PTR (use_p);
5045 gimple def_stmt;
5046 if (TREE_CODE (use) != SSA_NAME)
5047 continue;
5048 def_stmt = get_gimple_for_ssa_name (use);
5049 if (!def_stmt)
5050 continue;
5051 lattice[i] += lattice[gimple_uid (def_stmt)];
5052 }
5053 i++;
5054 if (!is_gimple_assign (stmt)
5055 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5056 continue;
5057 op0 = gimple_op (stmt, 1);
5058 op1 = gimple_op (stmt, 2);
5059 if (TREE_CODE (op0) != SSA_NAME
5060 || TREE_CODE (op1) != SSA_NAME)
5061 continue;
5062 /* Swap operands if the second one is more expensive. */
5063 def0 = get_gimple_for_ssa_name (op0);
5064 def1 = get_gimple_for_ssa_name (op1);
5065 if (!def1)
5066 continue;
5067 swap = false;
5068 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5069 swap = true;
5070 if (swap)
5071 {
5072 if (dump_file && (dump_flags & TDF_DETAILS))
5073 {
5074 fprintf (dump_file, "Swap operands in stmt:\n");
5075 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5076 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5077 def0 ? lattice[gimple_uid (def0)] : 0,
5078 lattice[gimple_uid (def1)]);
5079 }
5080 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5081 gimple_assign_rhs2_ptr (stmt));
5082 }
5083 }
5084 XDELETE (lattice);
5085 }
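/* For instance (hypothetical GIMPLE), in

     c_1 = a_1 + b_1;

   where a_1 comes from a single cheap statement but b_1 roots a long
   chain of single-use statements, lattice[uid of b_1's def] exceeds
   lattice[uid of a_1's def], so the operands are swapped and the more
   expensive b_1 is expanded first.  */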
5086
5087 /* Expand basic block BB from GIMPLE trees to RTL. */
5088
5089 static basic_block
5090 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5091 {
5092 gimple_stmt_iterator gsi;
5093 gimple_seq stmts;
5094 gimple stmt = NULL;
5095 rtx_note *note;
5096 rtx_insn *last;
5097 edge e;
5098 edge_iterator ei;
5099
5100 if (dump_file)
5101 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5102 bb->index);
5103
5104 /* Note that since we are now transitioning from GIMPLE to RTL, we
5105 cannot use the gsi_*_bb() routines because they expect the basic
5106 block to be in GIMPLE, instead of RTL. Therefore, we need to
5107 access the BB sequence directly. */
5108 if (optimize)
5109 reorder_operands (bb);
5110 stmts = bb_seq (bb);
5111 bb->il.gimple.seq = NULL;
5112 bb->il.gimple.phi_nodes = NULL;
5113 rtl_profile_for_bb (bb);
5114 init_rtl_bb_info (bb);
5115 bb->flags |= BB_RTL;
5116
5117 /* Remove the RETURN_EXPR if we may fall through to the exit
5118 instead. */
5119 gsi = gsi_last (stmts);
5120 if (!gsi_end_p (gsi)
5121 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5122 {
5123 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5124
5125 gcc_assert (single_succ_p (bb));
5126 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5127
5128 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5129 && !gimple_return_retval (ret_stmt))
5130 {
5131 gsi_remove (&gsi, false);
5132 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5133 }
5134 }
5135
5136 gsi = gsi_start (stmts);
5137 if (!gsi_end_p (gsi))
5138 {
5139 stmt = gsi_stmt (gsi);
5140 if (gimple_code (stmt) != GIMPLE_LABEL)
5141 stmt = NULL;
5142 }
5143
5144 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5145
5146 if (stmt || elt)
5147 {
5148 last = get_last_insn ();
5149
5150 if (stmt)
5151 {
5152 expand_gimple_stmt (stmt);
5153 gsi_next (&gsi);
5154 }
5155
5156 if (elt)
5157 emit_label (*elt);
5158
5159 /* Java emits line number notes at the top of labels.
5160 ??? Make this go away once line number notes are obsoleted. */
5161 BB_HEAD (bb) = NEXT_INSN (last);
5162 if (NOTE_P (BB_HEAD (bb)))
5163 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5164 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5165
5166 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5167 }
5168 else
5169 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5170
5171 NOTE_BASIC_BLOCK (note) = bb;
5172
5173 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5174 {
5175 basic_block new_bb;
5176
5177 stmt = gsi_stmt (gsi);
5178
5179 /* If this statement is a non-debug one, and we generate debug
5180 insns, then this one might be the last real use of a TERed
5181 SSA_NAME, while there are still some debug uses of it further
5182 down. Expanding the current SSA name in such further debug
5183 uses by their RHS might lead to wrong debug info, as coalescing
5184 might make the operands of such RHS be placed into the same
5185 pseudo as something else. Like so:
5186 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5187 use(a_1);
5188 a_2 = ...
5189 #DEBUG ... => a_1
5190 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5191 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5192 the write to a_2 would actually have clobbered the place which
5193 formerly held a_0.
5194
5195 So, instead of that, we recognize the situation, and generate
5196 debug temporaries at the last real use of TERed SSA names:
5197 a_1 = a_0 + 1;
5198 #DEBUG #D1 => a_1
5199 use(a_1);
5200 a_2 = ...
5201 #DEBUG ... => #D1
5202 */
5203 if (MAY_HAVE_DEBUG_INSNS
5204 && SA.values
5205 && !is_gimple_debug (stmt))
5206 {
5207 ssa_op_iter iter;
5208 tree op;
5209 gimple def;
5210
5211 location_t sloc = curr_insn_location ();
5212
5213 /* Look for SSA names that have their last use here (TERed
5214 names always have only one real use). */
5215 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5216 if ((def = get_gimple_for_ssa_name (op)))
5217 {
5218 imm_use_iterator imm_iter;
5219 use_operand_p use_p;
5220 bool have_debug_uses = false;
5221
5222 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5223 {
5224 if (gimple_debug_bind_p (USE_STMT (use_p)))
5225 {
5226 have_debug_uses = true;
5227 break;
5228 }
5229 }
5230
5231 if (have_debug_uses)
5232 {
5233 /* OP is a TERed SSA name, with DEF its defining
5234 statement, and where OP is used in further debug
5235 instructions. Generate a debug temporary, and
5236 replace all uses of OP in debug insns with that
5237 temporary. */
5238 gimple debugstmt;
5239 tree value = gimple_assign_rhs_to_tree (def);
5240 tree vexpr = make_node (DEBUG_EXPR_DECL);
5241 rtx val;
5242 machine_mode mode;
5243
5244 set_curr_insn_location (gimple_location (def));
5245
5246 DECL_ARTIFICIAL (vexpr) = 1;
5247 TREE_TYPE (vexpr) = TREE_TYPE (value);
5248 if (DECL_P (value))
5249 mode = DECL_MODE (value);
5250 else
5251 mode = TYPE_MODE (TREE_TYPE (value));
5252 DECL_MODE (vexpr) = mode;
5253
5254 val = gen_rtx_VAR_LOCATION
5255 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5256
5257 emit_debug_insn (val);
5258
5259 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5260 {
5261 if (!gimple_debug_bind_p (debugstmt))
5262 continue;
5263
5264 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5265 SET_USE (use_p, vexpr);
5266
5267 update_stmt (debugstmt);
5268 }
5269 }
5270 }
5271 set_curr_insn_location (sloc);
5272 }
5273
5274 currently_expanding_gimple_stmt = stmt;
5275
5276 /* Expand this statement, then evaluate the resulting RTL and
5277 fixup the CFG accordingly. */
5278 if (gimple_code (stmt) == GIMPLE_COND)
5279 {
5280 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5281 if (new_bb)
5282 return new_bb;
5283 }
5284 else if (gimple_debug_bind_p (stmt))
5285 {
5286 location_t sloc = curr_insn_location ();
5287 gimple_stmt_iterator nsi = gsi;
5288
5289 for (;;)
5290 {
5291 tree var = gimple_debug_bind_get_var (stmt);
5292 tree value;
5293 rtx val;
5294 machine_mode mode;
5295
5296 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5297 && TREE_CODE (var) != LABEL_DECL
5298 && !target_for_debug_bind (var))
5299 goto delink_debug_stmt;
5300
5301 if (gimple_debug_bind_has_value_p (stmt))
5302 value = gimple_debug_bind_get_value (stmt);
5303 else
5304 value = NULL_TREE;
5305
5306 last = get_last_insn ();
5307
5308 set_curr_insn_location (gimple_location (stmt));
5309
5310 if (DECL_P (var))
5311 mode = DECL_MODE (var);
5312 else
5313 mode = TYPE_MODE (TREE_TYPE (var));
5314
5315 val = gen_rtx_VAR_LOCATION
5316 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5317
5318 emit_debug_insn (val);
5319
5320 if (dump_file && (dump_flags & TDF_DETAILS))
5321 {
5322 /* We can't dump the insn with a TREE where an RTX
5323 is expected. */
5324 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5325 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5326 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5327 }
5328
5329 delink_debug_stmt:
5330 /* In order not to generate too many debug temporaries,
5331 we delink all uses of debug statements we already expanded.
5332 Therefore debug statements between the definition and the real
5333 use of TERed SSA names will continue to use the SSA name,
5334 and not be replaced with debug temps. */
5335 delink_stmt_imm_use (stmt);
5336
5337 gsi = nsi;
5338 gsi_next (&nsi);
5339 if (gsi_end_p (nsi))
5340 break;
5341 stmt = gsi_stmt (nsi);
5342 if (!gimple_debug_bind_p (stmt))
5343 break;
5344 }
5345
5346 set_curr_insn_location (sloc);
5347 }
5348 else if (gimple_debug_source_bind_p (stmt))
5349 {
5350 location_t sloc = curr_insn_location ();
5351 tree var = gimple_debug_source_bind_get_var (stmt);
5352 tree value = gimple_debug_source_bind_get_value (stmt);
5353 rtx val;
5354 machine_mode mode;
5355
5356 last = get_last_insn ();
5357
5358 set_curr_insn_location (gimple_location (stmt));
5359
5360 mode = DECL_MODE (var);
5361
5362 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5363 VAR_INIT_STATUS_UNINITIALIZED);
5364
5365 emit_debug_insn (val);
5366
5367 if (dump_file && (dump_flags & TDF_DETAILS))
5368 {
5369 /* We can't dump the insn with a TREE where an RTX
5370 is expected. */
5371 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5372 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5373 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5374 }
5375
5376 set_curr_insn_location (sloc);
5377 }
5378 else
5379 {
5380 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5381 if (call_stmt
5382 && gimple_call_tail_p (call_stmt)
5383 && disable_tail_calls)
5384 gimple_call_set_tail (call_stmt, false);
5385
5386 if (call_stmt && gimple_call_tail_p (call_stmt))
5387 {
5388 bool can_fallthru;
5389 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5390 if (new_bb)
5391 {
5392 if (can_fallthru)
5393 bb = new_bb;
5394 else
5395 return new_bb;
5396 }
5397 }
5398 else
5399 {
5400 def_operand_p def_p;
5401 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5402
5403 if (def_p != NULL)
5404 {
5405 /* Ignore this stmt if it is in the list of
5406 replaceable expressions. */
5407 if (SA.values
5408 && bitmap_bit_p (SA.values,
5409 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5410 continue;
5411 }
5412 last = expand_gimple_stmt (stmt);
5413 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5414 }
5415 }
5416 }
5417
5418 currently_expanding_gimple_stmt = NULL;
5419
5420 /* Expand implicit goto and convert goto_locus. */
5421 FOR_EACH_EDGE (e, ei, bb->succs)
5422 {
5423 if (e->goto_locus != UNKNOWN_LOCATION)
5424 set_curr_insn_location (e->goto_locus);
5425 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5426 {
5427 emit_jump (label_rtx_for_bb (e->dest));
5428 e->flags &= ~EDGE_FALLTHRU;
5429 }
5430 }
5431
5432 /* Expanded RTL can create a jump in the last instruction of a block.
5433 This might later be assumed to be a jump to the successor and break edge insertion.
5434 We need to insert a dummy move to prevent this. PR41440. */
5435 if (single_succ_p (bb)
5436 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5437 && (last = get_last_insn ())
5438 && JUMP_P (last))
5439 {
5440 rtx dummy = gen_reg_rtx (SImode);
5441 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5442 }
5443
5444 do_pending_stack_adjust ();
5445
5446 /* Find the block tail. The last insn in the block is the insn
5447 before a barrier and/or table jump insn. */
5448 last = get_last_insn ();
5449 if (BARRIER_P (last))
5450 last = PREV_INSN (last);
5451 if (JUMP_TABLE_DATA_P (last))
5452 last = PREV_INSN (PREV_INSN (last));
5453 BB_END (bb) = last;
5454
5455 update_bb_for_insn (bb);
5456
5457 return bb;
5458 }
5459
5460
5461 /* Create a basic block for initialization code. */
5462
5463 static basic_block
5464 construct_init_block (void)
5465 {
5466 basic_block init_block, first_block;
5467 edge e = NULL;
5468 int flags;
5469
5470 /* Multiple entry points not supported yet. */
5471 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5472 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5473 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5474 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5475 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5476
5477 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5478
5479 /* When the entry edge points to the first basic block, we don't need
5480 a jump; otherwise we have to jump to the proper target. */
5481 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5482 {
5483 tree label = gimple_block_label (e->dest);
5484
5485 emit_jump (jump_target_rtx (label));
5486 flags = 0;
5487 }
5488 else
5489 flags = EDGE_FALLTHRU;
5490
5491 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5492 get_last_insn (),
5493 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5494 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5495 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5496 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5497 if (e)
5498 {
5499 first_block = e->dest;
5500 redirect_edge_succ (e, init_block);
5501 e = make_edge (init_block, first_block, flags);
5502 }
5503 else
5504 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5505 e->probability = REG_BR_PROB_BASE;
5506 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5507
5508 update_bb_for_insn (init_block);
5509 return init_block;
5510 }
5511
5512 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5513 found in the block tree. */
5514
5515 static void
5516 set_block_levels (tree block, int level)
5517 {
5518 while (block)
5519 {
5520 BLOCK_NUMBER (block) = level;
5521 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5522 block = BLOCK_CHAIN (block);
5523 }
5524 }
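/* E.g. the outermost BLOCK passed in gets BLOCK_NUMBER 0, each of its
   subblocks 1, their subblocks 2, and so on; sibling blocks reached
   through BLOCK_CHAIN share a level.  */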
5525
5526 /* Create a block containing landing pads and similar stuff. */
5527
5528 static void
5529 construct_exit_block (void)
5530 {
5531 rtx_insn *head = get_last_insn ();
5532 rtx_insn *end;
5533 basic_block exit_block;
5534 edge e, e2;
5535 unsigned ix;
5536 edge_iterator ei;
5537 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5538 rtx_insn *orig_end = BB_END (prev_bb);
5539
5540 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5541
5542 /* Make sure the locus is set to the end of the function, so that
5543 epilogue line numbers and warnings are set properly. */
5544 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5545 input_location = cfun->function_end_locus;
5546
5547 /* Generate rtl for function exit. */
5548 expand_function_end ();
5549
5550 end = get_last_insn ();
5551 if (head == end)
5552 return;
5553 /* While emitting the function end we could have moved the end of the
5554 last basic block. */
5555 BB_END (prev_bb) = orig_end;
5556 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5557 head = NEXT_INSN (head);
5558 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5559 bb frequency counting will be confused. Any instructions before that
5560 label are emitted for the case where PREV_BB falls through into the
5561 exit block, so append those instructions to prev_bb in that case. */
5562 if (NEXT_INSN (head) != return_label)
5563 {
5564 while (NEXT_INSN (head) != return_label)
5565 {
5566 if (!NOTE_P (NEXT_INSN (head)))
5567 BB_END (prev_bb) = NEXT_INSN (head);
5568 head = NEXT_INSN (head);
5569 }
5570 }
5571 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5572 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5573 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5574 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5575
5576 ix = 0;
5577 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5578 {
5579 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5580 if (!(e->flags & EDGE_ABNORMAL))
5581 redirect_edge_succ (e, exit_block);
5582 else
5583 ix++;
5584 }
5585
5586 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5587 e->probability = REG_BR_PROB_BASE;
5588 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5589 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5590 if (e2 != e)
5591 {
5592 e->count -= e2->count;
5593 exit_block->count -= e2->count;
5594 exit_block->frequency -= EDGE_FREQUENCY (e2);
5595 }
5596 if (e->count < 0)
5597 e->count = 0;
5598 if (exit_block->count < 0)
5599 exit_block->count = 0;
5600 if (exit_block->frequency < 0)
5601 exit_block->frequency = 0;
5602 update_bb_for_insn (exit_block);
5603 }
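/* A worked example of the count bookkeeping above, with illustrative
   numbers: if the exit block starts with count 100 and one abnormal
   predecessor edge with count 30 remains on EXIT, the new fallthru
   edge and exit_block are left with count 100 - 30 = 70, clamped at
   zero when the profile data is inconsistent.  */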
5604
5605 /* Helper function for discover_nonconstant_array_refs.
5606 Look for ARRAY_REF nodes with non-constant indexes and mark the
5607 underlying decls addressable. */
5608
5609 static tree
5610 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5611 void *data ATTRIBUTE_UNUSED)
5612 {
5613 tree t = *tp;
5614
5615 if (IS_TYPE_OR_DECL_P (t))
5616 *walk_subtrees = 0;
5617 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5618 {
5619 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5620 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5621 && (!TREE_OPERAND (t, 2)
5622 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5623 || (TREE_CODE (t) == COMPONENT_REF
5624 && (!TREE_OPERAND (t, 2)
5625 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5626 || TREE_CODE (t) == BIT_FIELD_REF
5627 || TREE_CODE (t) == REALPART_EXPR
5628 || TREE_CODE (t) == IMAGPART_EXPR
5629 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5630 || CONVERT_EXPR_P (t))
5631 t = TREE_OPERAND (t, 0);
5632
5633 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5634 {
5635 t = get_base_address (t);
5636 if (t && DECL_P (t)
5637 && DECL_MODE (t) != BLKmode)
5638 TREE_ADDRESSABLE (t) = 1;
5639 }
5640
5641 *walk_subtrees = 0;
5642 }
5643
5644 return NULL_TREE;
5645 }
5646
5647 /* RTL expansion cannot compile array references with variable
5648 offsets for arrays stored in a single register. Discover such
5649 expressions and mark the variables as addressable to avoid this
5650 scenario. */
5651
5652 static void
5653 discover_nonconstant_array_refs (void)
5654 {
5655 basic_block bb;
5656 gimple_stmt_iterator gsi;
5657
5658 FOR_EACH_BB_FN (bb, cfun)
5659 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5660 {
5661 gimple stmt = gsi_stmt (gsi);
5662 if (!is_gimple_debug (stmt))
5663 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5664 }
5665 }
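/* For example (an illustrative sketch): in

     int f (int i) { int a[2] = { 1, 2 }; return a[i]; }

   the index `i' is not a gimple invariant, so if `a' was laid out
   with a scalar (non-BLK) mode it is marked TREE_ADDRESSABLE here,
   forcing it into memory where a variable offset can be applied.  */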
5666
5667 /* This function sets crtl->args.internal_arg_pointer to a virtual
5668 register if DRAP is needed. The local register allocator will replace
5669 virtual_incoming_args_rtx with the virtual register. */
5670
5671 static void
5672 expand_stack_alignment (void)
5673 {
5674 rtx drap_rtx;
5675 unsigned int preferred_stack_boundary;
5676
5677 if (! SUPPORTS_STACK_ALIGNMENT)
5678 return;
5679
5680 if (cfun->calls_alloca
5681 || cfun->has_nonlocal_label
5682 || crtl->has_nonlocal_goto)
5683 crtl->need_drap = true;
5684
5685 /* Call update_stack_boundary here again to update the incoming stack
5686 boundary. It may set the incoming stack alignment to a different
5687 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5688 use the minimum incoming stack alignment to check whether it is OK
5689 to perform sibcall optimization, since sibcall optimization will
5690 only align the outgoing stack to the incoming stack boundary. */
5691 if (targetm.calls.update_stack_boundary)
5692 targetm.calls.update_stack_boundary ();
5693
5694 /* The incoming stack frame has to be aligned at least at
5695 parm_stack_boundary. */
5696 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5697
5698 /* Update crtl->stack_alignment_estimated and use it later to align
5699 the stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5700 exceptions, since the callgraph doesn't collect the incoming stack
5701 alignment in this case. */
5702 if (cfun->can_throw_non_call_exceptions
5703 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5704 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5705 else
5706 preferred_stack_boundary = crtl->preferred_stack_boundary;
5707 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5708 crtl->stack_alignment_estimated = preferred_stack_boundary;
5709 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5710 crtl->stack_alignment_needed = preferred_stack_boundary;
5711
5712 gcc_assert (crtl->stack_alignment_needed
5713 <= crtl->stack_alignment_estimated);
5714
5715 crtl->stack_realign_needed
5716 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5717 crtl->stack_realign_tried = crtl->stack_realign_needed;
5718
5719 crtl->stack_realign_processed = true;
5720
5721 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5722 alignment. */
5723 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5724 drap_rtx = targetm.calls.get_drap_rtx ();
5725
5726 /* stack_realign_drap and drap_rtx must match. */
5727 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5728
5729 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5730 if (drap_rtx != NULL)
5731 {
5732 crtl->args.internal_arg_pointer = drap_rtx;
5733
5734 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5735 needed. */
5736 fixup_tail_calls ();
5737 }
5738 }
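/* As a hypothetical illustration: on a target such as i386, where the
   incoming stack may be guaranteed only 4-byte alignment, a function
   like

     void g (int *);
     void f (void) { int x __attribute__ ((aligned (32))); g (&x); }

   must realign its stack.  If the incoming argument area can then no
   longer be addressed from the frame pointer (e.g. because the
   function also calls alloca), a DRAP (dynamic realign argument
   pointer) register is obtained via targetm.calls.get_drap_rtx and
   installed as crtl->args.internal_arg_pointer above.  */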
5739 \f
5740
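/* Emit a call to __main at the start of `main', on targets where
   global constructors are run via __main rather than through an init
   section.  */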
5741 static void
5742 expand_main_function (void)
5743 {
5744 #if (defined(INVOKE__main) \
5745 || (!defined(HAS_INIT_SECTION) \
5746 && !defined(INIT_SECTION_ASM_OP) \
5747 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5748 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5749 #endif
5750 }
5751 \f
5752
5753 /* Expand code to initialize the stack_protect_guard. This is invoked at
5754 the beginning of a function to be protected. */
5755
5756 static void
5757 stack_protect_prologue (void)
5758 {
5759 tree guard_decl = targetm.stack_protect_guard ();
5760 rtx x, y;
5761
5762 x = expand_normal (crtl->stack_protect_guard);
5763 y = expand_normal (guard_decl);
5764
5765 /* Allow the target to copy from Y to X without leaking Y into a
5766 register. */
5767 if (targetm.have_stack_protect_set ())
5768 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
5769 {
5770 emit_insn (insn);
5771 return;
5772 }
5773
5774 /* Otherwise do a straight move. */
5775 emit_move_insn (x, y);
5776 }
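/* Conceptually the emitted sequence is equivalent to (a sketch,
   assuming the usual glibc guard symbol):

     frame_guard_slot = __stack_chk_guard;

   where the optional stack_protect_set pattern lets the target copy
   the guard without leaving its value live in a scratch register
   afterwards.  */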
5777
5778 /* Translate the intermediate representation contained in the CFG
5779 from GIMPLE trees to RTL.
5780
5781 We do the conversion per basic block and preserve/update the tree CFG.
5782 This implies we have to do some magic as the CFG can simultaneously
5783 consist of basic blocks containing RTL and GIMPLE trees. This can
5784 confuse the CFG hooks, so be careful not to manipulate the CFG during
5785 the expansion. */
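/* As a rough illustration of the translation (exact RTL varies by
   target), a GIMPLE assignment

     a_2 = b_1 + c_3;

   becomes an insn along the lines of

     (insn (set (reg:SI 60 [ a ])
                (plus:SI (reg:SI 58 [ b ]) (reg:SI 59 [ c ]))))

   after the SSA names have been coalesced into partitions and each
   partition mapped to a pseudo register in SA.partition_to_pseudo.  */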
5786
5787 namespace {
5788
5789 const pass_data pass_data_expand =
5790 {
5791 RTL_PASS, /* type */
5792 "expand", /* name */
5793 OPTGROUP_NONE, /* optinfo_flags */
5794 TV_EXPAND, /* tv_id */
5795 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5796 | PROP_gimple_lcx
5797 | PROP_gimple_lvec
5798 | PROP_gimple_lva), /* properties_required */
5799 PROP_rtl, /* properties_provided */
5800 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5801 0, /* todo_flags_start */
5802 0, /* todo_flags_finish */
5803 };
5804
5805 class pass_expand : public rtl_opt_pass
5806 {
5807 public:
5808 pass_expand (gcc::context *ctxt)
5809 : rtl_opt_pass (pass_data_expand, ctxt)
5810 {}
5811
5812 /* opt_pass methods: */
5813 virtual unsigned int execute (function *);
5814
5815 }; // class pass_expand
5816
5817 unsigned int
5818 pass_expand::execute (function *fun)
5819 {
5820 basic_block bb, init_block;
5821 sbitmap blocks;
5822 edge_iterator ei;
5823 edge e;
5824 rtx_insn *var_seq, *var_ret_seq;
5825 unsigned i;
5826
5827 timevar_push (TV_OUT_OF_SSA);
5828 rewrite_out_of_ssa (&SA);
5829 timevar_pop (TV_OUT_OF_SSA);
5830 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5831
5832 if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
5833 {
5834 gimple_stmt_iterator gsi;
5835 FOR_EACH_BB_FN (bb, cfun)
5836 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5837 if (gimple_debug_bind_p (gsi_stmt (gsi)))
5838 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
5839 }
5840
5841 /* Make sure all values used by the optimization passes have sane
5842 defaults. */
5843 reg_renumber = 0;
5844
5845 /* Some backends want to know that we are expanding to RTL. */
5846 currently_expanding_to_rtl = 1;
5847 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5848 free_dominance_info (CDI_DOMINATORS);
5849
5850 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5851
5852 if (chkp_function_instrumented_p (current_function_decl))
5853 chkp_reset_rtl_bounds ();
5854
5855 insn_locations_init ();
5856 if (!DECL_IS_BUILTIN (current_function_decl))
5857 {
5858 /* Eventually, all FEs should explicitly set function_start_locus. */
5859 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5860 set_curr_insn_location
5861 (DECL_SOURCE_LOCATION (current_function_decl));
5862 else
5863 set_curr_insn_location (fun->function_start_locus);
5864 }
5865 else
5866 set_curr_insn_location (UNKNOWN_LOCATION);
5867 prologue_location = curr_insn_location ();
5868
5869 #ifdef INSN_SCHEDULING
5870 init_sched_attrs ();
5871 #endif
5872
5873 /* Make sure the first insn is a note, even if we don't want line
5874 numbers. This ensures the first insn will never be deleted.
5875 Also, final expects a note to appear there. */
5876 emit_note (NOTE_INSN_DELETED);
5877
5878 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5879 discover_nonconstant_array_refs ();
5880
5881 targetm.expand_to_rtl_hook ();
5882 crtl->stack_alignment_needed = STACK_BOUNDARY;
5883 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5884 crtl->stack_alignment_estimated = 0;
5885 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5886 fun->cfg->max_jumptable_ents = 0;
5887
5888 /* Resolve the function section. Some targets, like ARM EABI, rely on
5889 knowledge of the function section at expansion time to predict call distances. */
5890 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5891
5892 /* Expand the variables recorded during gimple lowering. */
5893 timevar_push (TV_VAR_EXPAND);
5894 start_sequence ();
5895
5896 var_ret_seq = expand_used_vars ();
5897
5898 var_seq = get_insns ();
5899 end_sequence ();
5900 timevar_pop (TV_VAR_EXPAND);
5901
5902 /* Honor stack protection warnings. */
5903 if (warn_stack_protect)
5904 {
5905 if (fun->calls_alloca)
5906 warning (OPT_Wstack_protector,
5907 "stack protector not protecting local variables: "
5908 "variable length buffer");
5909 if (has_short_buffer && !crtl->stack_protect_guard)
5910 warning (OPT_Wstack_protector,
5911 "stack protector not protecting function: "
5912 "all local arrays are less than %d bytes long",
5913 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5914 }
5915
5916 /* Set up parameters and prepare for return, for the function. */
5917 expand_function_start (current_function_decl);
5918
5919 /* If we emitted any instructions for setting up the variables,
5920 emit them before the FUNCTION_START note. */
5921 if (var_seq)
5922 {
5923 emit_insn_before (var_seq, parm_birth_insn);
5924
5925 /* In expand_function_end we'll insert the alloca save/restore
5926 before parm_birth_insn. We've just inserted an alloca call.
5927 Adjust the pointer to match. */
5928 parm_birth_insn = var_seq;
5929 }
5930
5931 /* Now that we also have the parameter RTXs, copy them over to our
5932 partitions. */
5933 for (i = 0; i < SA.map->num_partitions; i++)
5934 {
5935 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5936
5937 if (TREE_CODE (var) != VAR_DECL
5938 && !SA.partition_to_pseudo[i])
5939 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5940 gcc_assert (SA.partition_to_pseudo[i]);
5941
5942 /* If this decl was marked as living in multiple places, reset
5943 this now to NULL. */
5944 if (DECL_RTL_IF_SET (var) == pc_rtx)
5945 SET_DECL_RTL (var, NULL);
5946
5947 /* Some RTL parts really want to look at DECL_RTL(x) when x
5948 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5949 SET_DECL_RTL here to make this available, but that would mean
5950 selecting one of the potentially many RTLs for one DECL. Instead
5951 we simply reset the MEM_EXPR of the RTL in question, so that
5952 nobody can get at it and hence nobody can call DECL_RTL on it. */
5953 if (!DECL_RTL_SET_P (var))
5954 {
5955 if (MEM_P (SA.partition_to_pseudo[i]))
5956 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5957 }
5958 }
5959
5960 /* If we have a class containing differently aligned pointers
5961 we need to merge those into the corresponding RTL pointer
5962 alignment. */
5963 for (i = 1; i < num_ssa_names; i++)
5964 {
5965 tree name = ssa_name (i);
5966 int part;
5967 rtx r;
5968
5969 if (!name
5970 /* We might have generated new SSA names in
5971 update_alias_info_with_stack_vars. They will have NULL
5972 defining statements and won't be part of the partitioning,
5973 so ignore those. */
5974 || !SSA_NAME_DEF_STMT (name))
5975 continue;
5976 part = var_to_partition (SA.map, name);
5977 if (part == NO_PARTITION)
5978 continue;
5979
5980 /* Adjust all partition members to get the underlying decl of
5981 the representative which we might have created in expand_one_var. */
5982 if (SSA_NAME_VAR (name) == NULL_TREE)
5983 {
5984 tree leader = partition_to_var (SA.map, part);
5985 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5986 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5987 }
5988 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5989 continue;
5990
5991 r = SA.partition_to_pseudo[part];
5992 if (REG_P (r))
5993 mark_reg_pointer (r, get_pointer_alignment (name));
5994 }
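/* E.g. if two pointer SSA names with different known alignments were
   coalesced into one partition, mark_reg_pointer is called on the
   shared pseudo once per name, and (a sketch of its behavior, see
   emit-rtl.c) only the weaker alignment remains recorded for the
   register.  */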
5995
5996 /* If this function is `main', emit a call to `__main'
5997 to run global initializers, etc. */
5998 if (DECL_NAME (current_function_decl)
5999 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6000 && DECL_FILE_SCOPE_P (current_function_decl))
6001 expand_main_function ();
6002
6003 /* Initialize the stack_protect_guard field. This must happen after the
6004 call to __main (if any) so that the external decl is initialized. */
6005 if (crtl->stack_protect_guard)
6006 stack_protect_prologue ();
6007
6008 expand_phi_nodes (&SA);
6009
6010 /* Register rtl specific functions for cfg. */
6011 rtl_register_cfg_hooks ();
6012
6013 init_block = construct_init_block ();
6014
6015 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
6016 remaining edges later. */
6017 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6018 e->flags &= ~EDGE_EXECUTABLE;
6019
6020 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6021 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6022 next_bb)
6023 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6024
6025 if (MAY_HAVE_DEBUG_INSNS)
6026 expand_debug_locations ();
6027
6028 if (deep_ter_debug_map)
6029 {
6030 delete deep_ter_debug_map;
6031 deep_ter_debug_map = NULL;
6032 }
6033
6034 /* Free stuff we no longer need after GIMPLE optimizations. */
6035 free_dominance_info (CDI_DOMINATORS);
6036 free_dominance_info (CDI_POST_DOMINATORS);
6037 delete_tree_cfg_annotations ();
6038
6039 timevar_push (TV_OUT_OF_SSA);
6040 finish_out_of_ssa (&SA);
6041 timevar_pop (TV_OUT_OF_SSA);
6042
6043 timevar_push (TV_POST_EXPAND);
6044 /* We are no longer in SSA form. */
6045 fun->gimple_df->in_ssa_p = false;
6046 loops_state_clear (LOOP_CLOSED_SSA);
6047
6048 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6049 conservatively to true until they are all profile-aware. */
6050 delete lab_rtx_for_bb;
6051 free_histograms ();
6052
6053 construct_exit_block ();
6054 insn_locations_finalize ();
6055
6056 if (var_ret_seq)
6057 {
6058 rtx_insn *after = return_label;
6059 rtx_insn *next = NEXT_INSN (after);
6060 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6061 after = next;
6062 emit_insn_after (var_ret_seq, after);
6063 }
6064
6065 /* Zap the tree EH table. */
6066 set_eh_throw_stmt_table (fun, NULL);
6067
6068 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6069 to split edges, which edge insertion might need to do. */
6070 rebuild_jump_labels (get_insns ());
6071
6072 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6073 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6074 {
6075 edge e;
6076 edge_iterator ei;
6077 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6078 {
6079 if (e->insns.r)
6080 {
6081 rebuild_jump_labels_chain (e->insns.r);
6082 /* Put the insns after the parm birth, but before
6083 NOTE_INSN_FUNCTION_BEG. */
6084 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6085 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6086 {
6087 rtx_insn *insns = e->insns.r;
6088 e->insns.r = NULL;
6089 if (NOTE_P (parm_birth_insn)
6090 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6091 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6092 else
6093 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6094 }
6095 else
6096 commit_one_edge_insertion (e);
6097 }
6098 else
6099 ei_next (&ei);
6100 }
6101 }
6102
6103 /* We're done expanding trees to RTL. */
6104 currently_expanding_to_rtl = 0;
6105
6106 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6107 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6108 {
6109 edge e;
6110 edge_iterator ei;
6111 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6112 {
6113 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6114 e->flags &= ~EDGE_EXECUTABLE;
6115
6116 /* At the moment not all abnormal edges match the RTL
6117 representation. It is safe to remove them here as
6118 find_many_sub_basic_blocks will rediscover them.
6119 In the future we should get this fixed properly. */
6120 if ((e->flags & EDGE_ABNORMAL)
6121 && !(e->flags & EDGE_SIBCALL))
6122 remove_edge (e);
6123 else
6124 ei_next (&ei);
6125 }
6126 }
6127
6128 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6129 bitmap_ones (blocks);
6130 find_many_sub_basic_blocks (blocks);
6131 sbitmap_free (blocks);
6132 purge_all_dead_edges ();
6133
6134 expand_stack_alignment ();
6135
6136 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6137 function. */
6138 if (crtl->tail_call_emit)
6139 fixup_tail_calls ();
6140
6141 /* After initial rtl generation, call back to finish generating
6142 exception support code. We need to do this before cleaning up
6143 the CFG as the code does not expect dead landing pads. */
6144 if (fun->eh->region_tree != NULL)
6145 finish_eh_generation ();
6146
6147 /* Remove unreachable blocks, otherwise we cannot compute dominators
6148 which are needed for loop state verification. As a side-effect
6149 this also compacts blocks.
6150 ??? We cannot remove trivially dead insns here as for example
6151 the DRAP reg on i?86 is not magically live at this point.
6152 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6153 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6154
6155 #ifdef ENABLE_CHECKING
6156 verify_flow_info ();
6157 #endif
6158
6159 /* Initialize pseudos allocated for hard registers. */
6160 emit_initial_value_sets ();
6161
6162 /* And finally unshare all RTL. */
6163 unshare_all_rtl ();
6164
6165 /* There's no need to defer outputting this function any more; we
6166 know we want to output it. */
6167 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6168
6169 /* Now that we're done expanding trees to RTL, we shouldn't have any
6170 more CONCATs anywhere. */
6171 generating_concat_p = 0;
6172
6173 if (dump_file)
6174 {
6175 fprintf (dump_file,
6176 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6177 /* And the pass manager will dump RTL for us. */
6178 }
6179
6180 /* If we're emitting a nested function, make sure its parent gets
6181 emitted as well. Doing otherwise confuses debug info. */
6182 {
6183 tree parent;
6184 for (parent = DECL_CONTEXT (current_function_decl);
6185 parent != NULL_TREE;
6186 parent = get_containing_scope (parent))
6187 if (TREE_CODE (parent) == FUNCTION_DECL)
6188 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6189 }
6190
6191 /* We are now committed to emitting code for this function. Do any
6192 preparation, such as emitting abstract debug info for the inline
6193 function before it gets mangled by optimization. */
6194 if (cgraph_function_possibly_inlined_p (current_function_decl))
6195 (*debug_hooks->outlining_inline_function) (current_function_decl);
6196
6197 TREE_ASM_WRITTEN (current_function_decl) = 1;
6198
6199 /* After expanding, the return labels are no longer needed. */
6200 return_label = NULL;
6201 naked_return_label = NULL;
6202
6203 /* After expanding, the tm_restart map is no longer needed. */
6204 if (fun->gimple_df->tm_restart)
6205 fun->gimple_df->tm_restart = NULL;
6206
6207 /* Tag the blocks with a depth number so that change_scope can find
6208 the common parent easily. */
6209 set_block_levels (DECL_INITIAL (fun->decl), 0);
6210 default_rtl_profile ();
6211
6212 timevar_pop (TV_POST_EXPAND);
6213
6214 return 0;
6215 }
6216
6217 } // anon namespace
6218
6219 rtl_opt_pass *
6220 make_pass_expand (gcc::context *ctxt)
6221 {
6222 return new pass_expand (ctxt);
6223 }