]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-dfa.c
Correct a function pre/postcondition [PR102403].
[thirdparty/gcc.git] / gcc / tree-dfa.c
1 /* Data flow functions for trees.
2 Copyright (C) 2001-2021 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-pretty-print.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "langhooks.h"
34 #include "gimple-iterator.h"
35 #include "gimple-walk.h"
36 #include "tree-dfa.h"
37 #include "gimple-range.h"
38
39 /* Build and maintain data flow information for trees. */
40
41 /* Counters used to display DFA and SSA statistics. */
struct dfa_stats_d
{
  long num_defs;		/* Number of real DEF operands seen.  */
  long num_uses;		/* Number of real USE operands seen.  */
  long num_phis;		/* Number of PHI nodes.  */
  long num_phi_args;		/* Total number of PHI arguments.  */
  size_t max_num_phi_args;	/* Largest argument count on any single PHI.  */
  long num_vdefs;		/* Number of virtual definitions (VDEFs).  */
  long num_vuses;		/* Number of virtual uses (VUSEs).  */
};
52
53
54 /* Local functions. */
55 static void collect_dfa_stats (struct dfa_stats_d *);
56
57
58 /*---------------------------------------------------------------------------
59 Dataflow analysis (DFA) routines
60 ---------------------------------------------------------------------------*/
61
62 /* Renumber all of the gimple stmt uids. */
63
64 void
65 renumber_gimple_stmt_uids (struct function *fun)
66 {
67 basic_block bb;
68
69 set_gimple_stmt_max_uid (fun, 0);
70 FOR_ALL_BB_FN (bb, fun)
71 {
72 gimple_stmt_iterator bsi;
73 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
74 {
75 gimple *stmt = gsi_stmt (bsi);
76 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
77 }
78 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
79 {
80 gimple *stmt = gsi_stmt (bsi);
81 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
82 }
83 }
84 }
85
86 /* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
87 in BLOCKS, of which there are N_BLOCKS. Also renumbers PHIs. */
88
89 void
90 renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
91 {
92 int i;
93
94 set_gimple_stmt_max_uid (cfun, 0);
95 for (i = 0; i < n_blocks; i++)
96 {
97 basic_block bb = blocks[i];
98 gimple_stmt_iterator bsi;
99 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
100 {
101 gimple *stmt = gsi_stmt (bsi);
102 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
103 }
104 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
105 {
106 gimple *stmt = gsi_stmt (bsi);
107 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
108 }
109 }
110 }
111
112
113
114 /*---------------------------------------------------------------------------
115 Debugging functions
116 ---------------------------------------------------------------------------*/
117
118 /* Dump variable VAR and its may-aliases to FILE. */
119
120 void
121 dump_variable (FILE *file, tree var)
122 {
123 if (TREE_CODE (var) == SSA_NAME)
124 {
125 if (POINTER_TYPE_P (TREE_TYPE (var)))
126 dump_points_to_info_for (file, var);
127 var = SSA_NAME_VAR (var);
128 }
129
130 if (var == NULL_TREE)
131 {
132 fprintf (file, "<nil>");
133 return;
134 }
135
136 print_generic_expr (file, var, dump_flags);
137
138 fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
139 if (DECL_PT_UID (var) != DECL_UID (var))
140 fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));
141
142 fprintf (file, ", ");
143 print_generic_expr (file, TREE_TYPE (var), dump_flags);
144
145 if (TREE_ADDRESSABLE (var))
146 fprintf (file, ", is addressable");
147
148 if (is_global_var (var))
149 fprintf (file, ", is global");
150
151 if (TREE_THIS_VOLATILE (var))
152 fprintf (file, ", is volatile");
153
154 if (cfun && ssa_default_def (cfun, var))
155 {
156 fprintf (file, ", default def: ");
157 print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
158 }
159
160 if (DECL_INITIAL (var))
161 {
162 fprintf (file, ", initial: ");
163 print_generic_expr (file, DECL_INITIAL (var), dump_flags);
164 }
165
166 fprintf (file, "\n");
167 }
168
169
170 /* Dump variable VAR and its may-aliases to stderr. */
171
DEBUG_FUNCTION void
debug_variable (tree var)
{
  /* Convenience wrapper for calling dump_variable from the debugger.  */
  dump_variable (stderr, var);
}
177
178
179 /* Dump various DFA statistics to FILE. */
180
void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  /* Column formats: label, instance count, memory used.  */
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu" PRsa (11) "\n";
  const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Gather the counts by walking the current function.  */
  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* One row per operand/statement kind: estimated bytes are
     count * sizeof the representative structure.  */
  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phis * sizeof (struct gphi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SIZE_AMOUNT (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data",
	   SIZE_AMOUNT (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  /* Guard against division by zero when the function has no PHIs.  */
  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}
245
246
247 /* Dump DFA statistics on stderr. */
248
DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  /* Convenience wrapper for calling dump_dfa_stats from the debugger.  */
  dump_dfa_stats (stderr);
}
254
255
256 /* Collect DFA statistics and store them in the structure pointed to by
257 DFA_STATS_P. */
258
259 static void
260 collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
261 {
262 basic_block bb;
263
264 gcc_assert (dfa_stats_p);
265
266 memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));
267
268 /* Walk all the statements in the function counting references. */
269 FOR_EACH_BB_FN (bb, cfun)
270 {
271 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
272 gsi_next (&si))
273 {
274 gphi *phi = si.phi ();
275 dfa_stats_p->num_phis++;
276 dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
277 if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
278 dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
279 }
280
281 for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
282 gsi_next (&si))
283 {
284 gimple *stmt = gsi_stmt (si);
285 dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
286 dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
287 dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
288 dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
289 }
290 }
291 }
292
293
294 /*---------------------------------------------------------------------------
295 Miscellaneous helpers
296 ---------------------------------------------------------------------------*/
297
298 /* Lookup VAR UID in the default_defs hashtable and return the associated
299 variable. */
300
tree
ssa_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);

  /* Always NULL_TREE for rtl function dumps.  */
  if (!fn->gimple_df)
    return NULL_TREE;

  /* Build a scratch lookup key on the stack: a fake SSA_NAME whose
     SSA_NAME_VAR points at a fake decl carrying VAR's UID.  The
     default-defs hash table hashes and compares entries by the UID of
     the underlying variable, so this is sufficient for the lookup.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
}
318
319 /* Insert the pair VAR's UID, DEF into the default_defs hashtable
320 of function FN. */
321
void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  /* Scratch lookup key keyed on VAR's UID; see ssa_default_def.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  /* A NULL DEF removes any existing default definition for VAR.  */
  if (!def)
    {
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
							  DECL_UID (var),
							  NO_INSERT);
      if (loc)
	{
	  /* Clear the flag on the old name before dropping the entry.  */
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (loc);
	}
      return;
    }
  /* DEF must be an SSA name for VAR itself.  */
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
						      DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}
357
358 /* Retrieve or create a default definition for VAR. */
359
360 tree
361 get_or_create_ssa_default_def (struct function *fn, tree var)
362 {
363 tree ddef = ssa_default_def (fn, var);
364 if (ddef == NULL_TREE)
365 {
366 ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
367 set_ssa_default_def (fn, var, ddef);
368 }
369 return ddef;
370 }
371
372
373 /* If EXP is a handled component reference for a structure, return the
374 base variable. The access range is delimited by bit positions *POFFSET and
375 *POFFSET + *PMAX_SIZE. The access size is *PSIZE bits. If either
376 *PSIZE or *PMAX_SIZE is -1, they could not be determined. If *PSIZE
377 and *PMAX_SIZE are equal, the access is non-variable. If *PREVERSE is
378 true, the storage order of the reference is reversed. */
379
tree
get_ref_base_and_extent (tree exp, poly_int64_pod *poffset,
			 poly_int64_pod *psize,
			 poly_int64_pod *pmax_size,
			 bool *preverse)
{
  /* Working values are kept in the wider poly_offset_int form and only
     narrowed to poly_int64 at the end; -1 encodes "unknown".  */
  poly_offset_int bitsize = -1;
  poly_offset_int maxsize;
  tree size_tree = NULL_TREE;
  poly_offset_int bit_offset = 0;
  bool seen_variable_array_ref = false;

  /* First get the final access size and the storage order from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    {
      size_tree = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
    }
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    bitsize = wi::to_poly_offset (size_tree);

  *preverse = reverse_storage_order_for_component_p (exp);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  /* Operand 2 is the bit position of the field.  */
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && poly_int_tree_p (this_offset))
	      {
		/* Combine the byte offset with the sub-byte bit offset of
		   the field.  */
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE_UNIT (field);
			tree ssize = TYPE_SIZE_UNIT (stype);
			if (fsize == NULL
			    || !poly_int_tree_p (fsize)
			    || ssize == NULL
			    || !poly_int_tree_p (ssize))
			  maxsize = -1;
			else if (known_size_p (maxsize))
			  {
			    /* Grow maxsize by the trailing padding:
			       container size minus field end.  */
			    poly_offset_int tem
			      = (wi::to_poly_offset (ssize)
				 - wi::to_poly_offset (fsize));
			    tem <<= LOG2_BITS_PER_UNIT;
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		    /* An component ref with an adjacent field up in the
		       structure hierarchy constrains the size of any variable
		       array ref lower in the access hierarchy.  */
		    else
		      seen_variable_array_ref = false;
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (known_size_p (maxsize)
		    && csize
		    && poly_int_tree_p (csize))
		  maxsize = wi::to_poly_offset (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (poly_int_tree_p (index)
		&& (low_bound = array_ref_low_bound (exp),
		    poly_int_tree_p (low_bound))
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		/* (index - low_bound) * element_size, in bits.  */
		poly_offset_int woffset
		  = wi::sext (wi::to_poly_offset (index)
			      - wi::to_poly_offset (low_bound),
			      TYPE_PRECISION (sizetype));
		woffset *= wi::to_offset (unit_size);
		woffset <<= LOG2_BITS_PER_UNIT;
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (known_size_p (maxsize)
		    && asize
		    && poly_int_tree_p (asize))
		  maxsize = wi::to_poly_offset (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;

		/* Query SSA range information for the index to try to
		   narrow the bounds further.  */
		value_range vr;
		range_query *query;
		if (cfun)
		  query = get_range_query (cfun);
		else
		  query = get_global_range_query ();

		if (TREE_CODE (index) == SSA_NAME
		    && (low_bound = array_ref_low_bound (exp),
			poly_int_tree_p (low_bound))
		    && (unit_size = array_ref_element_size (exp),
			TREE_CODE (unit_size) == INTEGER_CST)
		    && query->range_of_expr (vr, index)
		    && vr.kind () == VR_RANGE)
		  {
		    wide_int min = vr.lower_bound ();
		    wide_int max = vr.upper_bound ();
		    poly_offset_int lbound = wi::to_poly_offset (low_bound);
		    /* Try to constrain maxsize with range information.  */
		    offset_int omax
		      = offset_int::from (max, TYPE_SIGN (TREE_TYPE (index)));
		    if (known_lt (lbound, omax))
		      {
			poly_offset_int rmaxsize;
			rmaxsize = (omax - lbound + 1)
			    * wi::to_offset (unit_size) << LOG2_BITS_PER_UNIT;
			if (!known_size_p (maxsize)
			    || known_lt (rmaxsize, maxsize))
			  {
			    /* If we know an upper bound below the declared
			       one this is no longer variable.  */
			    if (known_size_p (maxsize))
			      seen_variable_array_ref = false;
			    maxsize = rmaxsize;
			  }
		      }
		    /* Try to adjust bit_offset with range information.  */
		    offset_int omin
		      = offset_int::from (min, TYPE_SIGN (TREE_TYPE (index)));
		    if (known_le (lbound, omin))
		      {
			poly_offset_int woffset
			  = wi::sext (omin - lbound,
				      TYPE_PRECISION (sizetype));
			woffset *= wi::to_offset (unit_size);
			woffset <<= LOG2_BITS_PER_UNIT;
			bit_offset += woffset;
			if (known_size_p (maxsize))
			  maxsize -= woffset;
		      }
		  }
	      }
	  }
	  break;

	case REALPART_EXPR:
	  /* Real part is at offset zero of the complex value.  */
	  break;

	case IMAGPART_EXPR:
	  /* Imaginary part follows the real part of the same size.  */
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.  Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	     struct { int length; int a[1]; } x;           x.a[d]
	     struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	     struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	     struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && known_size_p (maxsize)
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
		  || (maybe_eq
		      (bit_offset + maxsize,
		       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  poly_offset_int off = mem_ref_offset (exp);
		  off <<= LOG2_BITS_PER_UNIT;
		  off += bit_offset;
		  poly_int64 off_hwi;
		  if (off.to_shwi (&off_hwi))
		    {
		      bit_offset = off_hwi;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  /* Narrow the wide working values into the poly_int64 outputs, punting
     to -1 on overflow or negative size.  */
  if (!bitsize.to_shwi (psize) || maybe_lt (*psize, 0))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  /* ??? Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  if (!bit_offset.to_shwi (poffset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      if (VAR_P (exp)
	  && ((flag_unconstrained_commons && DECL_COMMON (exp))
	      || (DECL_EXTERNAL (exp) && seen_variable_array_ref)))
	{
	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
	  /* If size is unknown, or we have read to the end, assume there
	     may be more to the structure than we are told.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
	      || (seen_variable_array_ref
		  && (sz_tree == NULL_TREE
		      || !poly_int_tree_p (sz_tree)
		      || maybe_eq (bit_offset + maxsize,
				   wi::to_poly_offset (sz_tree)))))
	    maxsize = -1;
	}
      /* If maxsize is unknown adjust it according to the size of the
         base decl.  */
      else if (!known_size_p (maxsize)
	       && DECL_SIZE (exp)
	       && poly_int_tree_p (DECL_SIZE (exp)))
	maxsize = wi::to_poly_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
         base type constant.  */
      if (!known_size_p (maxsize)
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))))
	maxsize = (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  if (!maxsize.to_shwi (pmax_size)
      || maybe_lt (*pmax_size, 0)
      || !endpoint_representable_p (*poffset, *pmax_size))
    *pmax_size = -1;

  /* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT, the callers don't
     check for such overflows individually and assume it works.  */
  if (!endpoint_representable_p (*poffset, *psize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  return exp;
}
739
740 /* Like get_ref_base_and_extent, but for cases in which we only care
741 about constant-width accesses at constant offsets. Return null
742 if the access is anything else. */
743
744 tree
745 get_ref_base_and_extent_hwi (tree exp, HOST_WIDE_INT *poffset,
746 HOST_WIDE_INT *psize, bool *preverse)
747 {
748 poly_int64 offset, size, max_size;
749 HOST_WIDE_INT const_offset, const_size;
750 bool reverse;
751 tree decl = get_ref_base_and_extent (exp, &offset, &size, &max_size,
752 &reverse);
753 if (!offset.is_constant (&const_offset)
754 || !size.is_constant (&const_size)
755 || const_offset < 0
756 || !known_size_p (max_size)
757 || maybe_ne (max_size, const_size))
758 return NULL_TREE;
759
760 *poffset = const_offset;
761 *psize = const_size;
762 *preverse = reverse;
763 return decl;
764 }
765
766 /* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
767 denotes the starting address of the memory access EXP.
768 Returns NULL_TREE if the offset is not constant or any component
769 is not BITS_PER_UNIT-aligned.
770 VALUEIZE if non-NULL is used to valueize SSA names. It should return
771 its argument or a constant if the argument is known to be constant. */
772
tree
get_addr_base_and_unit_offset_1 (tree exp, poly_int64_pod *poffset,
				 tree (*valueize) (tree))
{
  poly_int64 byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    /* Only byte-aligned bit-field references are handled.  */
	    poly_int64 this_byte_offset;
	    poly_uint64 this_bit_offset;
	    if (!poly_int_tree_p (TREE_OPERAND (exp, 2), &this_bit_offset)
		|| !multiple_p (this_bit_offset, BITS_PER_UNIT,
				&this_byte_offset))
	      return NULL_TREE;
	    byte_offset += this_byte_offset;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    poly_int64 hthis_offset;

	    /* Punt on variable offsets and on fields not starting on a
	       byte boundary.  */
	    if (!this_offset
		|| !poly_int_tree_p (this_offset, &hthis_offset)
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* Valueization may turn an SSA index into a constant.  */
	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);
	    if (!poly_int_tree_p (index))
	      return NULL_TREE;
	    low_bound = array_ref_low_bound (exp);
	    if (valueize
		&& TREE_CODE (low_bound) == SSA_NAME)
	      low_bound = (*valueize) (low_bound);
	    if (!poly_int_tree_p (low_bound))
	      return NULL_TREE;
	    unit_size = array_ref_element_size (exp);
	    if (TREE_CODE (unit_size) != INTEGER_CST)
	      return NULL_TREE;

	    /* If the resulting bit-offset is constant, track it.  */
	    poly_offset_int woffset
	      = wi::sext (wi::to_poly_offset (index)
			  - wi::to_poly_offset (low_bound),
			  TYPE_PRECISION (sizetype));
	    woffset *= wi::to_offset (unit_size);
	    byte_offset += woffset.force_shwi ();
	  }
	  break;

	case REALPART_EXPR:
	  /* Real part is at offset zero.  */
	  break;

	case IMAGPART_EXPR:
	  /* Imaginary part follows the real part of the same size.  */
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		/* A variable index makes the offset non-constant.  */
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
909
910 /* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
911 denotes the starting address of the memory access EXP.
912 Returns NULL_TREE if the offset is not constant or any component
913 is not BITS_PER_UNIT-aligned. */
914
tree
get_addr_base_and_unit_offset (tree exp, poly_int64_pod *poffset)
{
  /* Same as the _1 variant but without SSA-name valueization.  */
  return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
}
920
921 /* Returns true if STMT references an SSA_NAME that has
922 SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
923
924 bool
925 stmt_references_abnormal_ssa_name (gimple *stmt)
926 {
927 ssa_op_iter oi;
928 use_operand_p use_p;
929
930 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
931 {
932 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
933 return true;
934 }
935
936 return false;
937 }
938
939 /* If STMT takes any abnormal PHI values as input, replace them with
940 local copies. */
941
942 void
943 replace_abnormal_ssa_names (gimple *stmt)
944 {
945 ssa_op_iter oi;
946 use_operand_p use_p;
947
948 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
949 {
950 tree op = USE_FROM_PTR (use_p);
951 if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
952 {
953 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
954 tree new_name = make_ssa_name (TREE_TYPE (op));
955 gassign *assign = gimple_build_assign (new_name, op);
956 gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
957 SET_USE (use_p, new_name);
958 }
959 }
960 }
961
962 /* Pair of tree and a sorting index, for dump_enumerated_decls. */
struct GTY(()) numbered_tree
{
  tree t;	/* The declaration.  */
  int num;	/* Sequence number: order of first encounter during the
		   statement walk; used as a tie-breaker when sorting.  */
};
968
969
970 /* Compare two declarations references by their DECL_UID / sequence number.
971 Called via qsort. */
972
973 static int
974 compare_decls_by_uid (const void *pa, const void *pb)
975 {
976 const numbered_tree *nt_a = ((const numbered_tree *)pa);
977 const numbered_tree *nt_b = ((const numbered_tree *)pb);
978
979 if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
980 return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
981 return nt_a->num - nt_b->num;
982 }
983
984 /* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls. */
985 static tree
986 dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
987 {
988 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
989 vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
990 numbered_tree nt;
991
992 if (!DECL_P (*tp))
993 return NULL_TREE;
994 nt.t = *tp;
995 nt.num = list->length ();
996 list->safe_push (nt);
997 *walk_subtrees = 0;
998 return NULL_TREE;
999 }
1000
1001 /* Find all the declarations used by the current function, sort them by uid,
1002 and emit the sorted list. Each declaration is tagged with a sequence
1003 number indicating when it was found during statement / tree walking,
1004 so that TDF_NOUID comparisons of anonymous declarations are still
1005 meaningful. Where a declaration was encountered more than once, we
1006 emit only the sequence number of the first encounter.
1007 FILE is the dump file where to output the list and FLAGS is as in
1008 print_generic_expr. */
1009 void
1010 dump_enumerated_decls (FILE *file, dump_flags_t flags)
1011 {
1012 if (!cfun->cfg)
1013 return;
1014
1015 basic_block bb;
1016 struct walk_stmt_info wi;
1017 auto_vec<numbered_tree, 40> decl_list;
1018
1019 memset (&wi, '\0', sizeof (wi));
1020 wi.info = (void *) &decl_list;
1021 FOR_EACH_BB_FN (bb, cfun)
1022 {
1023 gimple_stmt_iterator gsi;
1024
1025 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1026 if (!is_gimple_debug (gsi_stmt (gsi)))
1027 walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
1028 }
1029 decl_list.qsort (compare_decls_by_uid);
1030 if (decl_list.length ())
1031 {
1032 unsigned ix;
1033 numbered_tree *ntp;
1034 tree last = NULL_TREE;
1035
1036 fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
1037 current_function_name ());
1038 FOR_EACH_VEC_ELT (decl_list, ix, ntp)
1039 {
1040 if (ntp->t == last)
1041 continue;
1042 fprintf (file, "%d: ", ntp->num);
1043 print_generic_decl (file, ntp->t, flags);
1044 fprintf (file, "\n");
1045 last = ntp->t;
1046 }
1047 }
1048 }