/* Data flow functions for trees.
   Copyright (C) 2001-2022 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-dfa.h"
#include "gimple-range.h"

/* Build and maintain data flow information for trees.  */

/* Counters used to display DFA and SSA statistics.  */
struct dfa_stats_d
{
  long num_defs;
  long num_uses;
  long num_phis;
  long num_phi_args;
  size_t max_num_phi_args;
  long num_vdefs;
  long num_vuses;
};


/* Local functions.  */
static void collect_dfa_stats (struct dfa_stats_d *);


/*---------------------------------------------------------------------------
			Dataflow analysis (DFA) routines
---------------------------------------------------------------------------*/

/* Renumber all of the gimple stmt uids.  */

void
renumber_gimple_stmt_uids (struct function *fun)
{
  basic_block bb;

  set_gimple_stmt_max_uid (fun, 0);
  FOR_ALL_BB_FN (bb, fun)
    {
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
	}
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
	}
    }
}
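
/* Example: a pass that indexes a flat array by statement UID.  A minimal
   sketch; my_pass_record_stmt is a hypothetical helper, everything else
   is real API.  After the call, UIDs are dense, running from 0 to
   gimple_stmt_max_uid (fun) - 1 across PHIs and ordinary statements:

     renumber_gimple_stmt_uids (fun);
     basic_block bb;
     FOR_EACH_BB_FN (bb, fun)
       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	    gsi_next (&gsi))
	 my_pass_record_stmt (gimple_uid (gsi_stmt (gsi)));  */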

/* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
   in BLOCKS, of which there are N_BLOCKS.  Also renumbers PHIs.  */

void
renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
{
  int i;

  set_gimple_stmt_max_uid (cfun, 0);
  for (i = 0; i < n_blocks; i++)
    {
      basic_block bb = blocks[i];
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
}



/*---------------------------------------------------------------------------
			     Debugging functions
---------------------------------------------------------------------------*/

/* Dump variable VAR and its may-aliases to FILE.  */

void
dump_variable (FILE *file, tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  if (cfun && ssa_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}
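
/* The output for a typical addressable local variable looks like this
   (purely illustrative; actual UIDs, type and flags depend on the
   translation unit):

     x, UID D.1234, int, is addressable, default def: x_1(D)  */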


/* Dump variable VAR and its may-aliases to stderr.  */

DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}


/* Dump various DFA statistics to FILE.  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu" PRsa (11) "\n";
  const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phis * sizeof (struct gphi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SIZE_AMOUNT (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data",
	   SIZE_AMOUNT (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}
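
/* Each table row above pairs a count with "count * sizeof (entry)" bytes,
   and the footer accumulates TOTAL across the rows.  Illustrative
   arithmetic, structure sizes being build- and target-dependent: a
   function with 3 PHI nodes of 2 arguments each contributes
   3 * sizeof (struct gphi) + 6 * sizeof (struct phi_arg_d) to TOTAL.  */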


/* Dump DFA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}


/* Collect DFA statistics and store them in the structure pointed to by
   DFA_STATS_P.  */

static void
collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
{
  basic_block bb;

  gcc_assert (dfa_stats_p);

  memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));

  /* Walk all the statements in the function counting references.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  dfa_stats_p->num_phis++;
	  dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
	  if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
	    dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
	}

      for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
	  dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
	  dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
	  dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
	}
    }
}


/*---------------------------------------------------------------------------
			     Miscellaneous helpers
---------------------------------------------------------------------------*/

/* Look up VAR's UID in the default_defs hashtable of function FN and
   return the associated default definition (an SSA_NAME), or NULL_TREE
   if there is none.  */

tree
ssa_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);

  /* Always NULL_TREE for rtl function dumps.  */
  if (!fn->gimple_df)
    return NULL_TREE;

  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
}
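
/* Note the allocation-free lookup trick above: IN is a stack-allocated
   SSA_NAME shell whose var field points at IND, a decl shell carrying
   only VAR's DECL_UID.  That is the only state the default-defs hasher
   inspects, so the pair can impersonate a real default-def SSA_NAME as a
   lookup key.  Typical use (sketch):

     tree def = ssa_default_def (cfun, var);
     if (def)
       ...  DEF is the SSA_NAME holding VAR's value on function entry ...  */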

/* Insert the pair VAR's UID, DEF into the default_defs hashtable
   of function FN.  */

void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
							  DECL_UID (var),
							  NO_INSERT);
      if (loc)
	{
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (loc);
	}
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
						      DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}

/* Retrieve or create a default definition for VAR.  */

tree
get_or_create_ssa_default_def (struct function *fn, tree var)
{
  tree ddef = ssa_default_def (fn, var);
  if (ddef == NULL_TREE)
    {
      ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
      set_ssa_default_def (fn, var, ddef);
    }
  return ddef;
}
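
/* Usage sketch (PARM is a hypothetical PARM_DECL of FN): unlike
   ssa_default_def, the result is never NULL; a missing default
   definition is created on the spot with an empty GIMPLE_NOP as its
   defining statement:

     tree val = get_or_create_ssa_default_def (fn, parm);
     gcc_checking_assert (SSA_NAME_IS_DEFAULT_DEF (val));  */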


/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  If *PREVERSE is
   true, the storage order of the reference is reversed.  */
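
/* A worked example (illustrative, assuming a 32-bit int):

     struct S { int a; int b[10]; } s;

   For s.b[3] the function returns the decl 's' with *POFFSET = 128
   (32 bits for 'a' plus 3 * 32 bits of index), *PSIZE = 32 and
   *PMAX_SIZE = 32.  For s.b[i] with 'i' unknown and no usable range
   information, only the constant part of the offset remains:
   *POFFSET = 32, *PSIZE = 32 and *PMAX_SIZE = 320, i.e. the access is
   only known to lie somewhere within the bits covered by the whole
   array 'b'.  */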

tree
get_ref_base_and_extent (tree exp, poly_int64_pod *poffset,
			 poly_int64_pod *psize,
			 poly_int64_pod *pmax_size,
			 bool *preverse)
{
  poly_offset_int bitsize = -1;
  poly_offset_int maxsize;
  tree size_tree = NULL_TREE;
  poly_offset_int bit_offset = 0;
  bool seen_variable_array_ref = false;

  /* First get the final access size and the storage order from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    {
      size_tree = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
    }
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    bitsize = wi::to_poly_offset (size_tree);

  *preverse = reverse_storage_order_for_component_p (exp);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && poly_int_tree_p (this_offset))
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE (field);
			tree ssize = TYPE_SIZE (stype);
			if (fsize == NULL
			    || !poly_int_tree_p (fsize)
			    || ssize == NULL
			    || !poly_int_tree_p (ssize))
			  maxsize = -1;
			else if (known_size_p (maxsize))
			  {
			    poly_offset_int tem
			      = (wi::to_poly_offset (ssize)
				 - wi::to_poly_offset (fsize));
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		    /* A component ref with an adjacent field up in the
		       structure hierarchy constrains the size of any variable
		       array ref lower in the access hierarchy.  */
		    else
		      seen_variable_array_ref = false;
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (known_size_p (maxsize)
		    && csize
		    && poly_int_tree_p (csize))
		  maxsize = wi::to_poly_offset (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (poly_int_tree_p (index)
		&& (low_bound = array_ref_low_bound (exp),
		    poly_int_tree_p (low_bound))
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		poly_offset_int woffset
		  = wi::sext (wi::to_poly_offset (index)
			      - wi::to_poly_offset (low_bound),
			      TYPE_PRECISION (sizetype));
		woffset *= wi::to_offset (unit_size);
		woffset <<= LOG2_BITS_PER_UNIT;
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (known_size_p (maxsize)
		    && asize
		    && poly_int_tree_p (asize))
		  maxsize = wi::to_poly_offset (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;

		value_range vr;
		range_query *query;
		if (cfun)
		  query = get_range_query (cfun);
		else
		  query = get_global_range_query ();

		if (TREE_CODE (index) == SSA_NAME
		    && (low_bound = array_ref_low_bound (exp),
			poly_int_tree_p (low_bound))
		    && (unit_size = array_ref_element_size (exp),
			TREE_CODE (unit_size) == INTEGER_CST)
		    && query->range_of_expr (vr, index)
		    && vr.kind () == VR_RANGE)
		  {
		    wide_int min = vr.lower_bound ();
		    wide_int max = vr.upper_bound ();
		    poly_offset_int lbound = wi::to_poly_offset (low_bound);
		    /* Try to constrain maxsize with range information.  */
		    offset_int omax
		      = offset_int::from (max, TYPE_SIGN (TREE_TYPE (index)));
		    if (known_lt (lbound, omax))
		      {
			poly_offset_int rmaxsize;
			rmaxsize = (omax - lbound + 1)
			    * wi::to_offset (unit_size) << LOG2_BITS_PER_UNIT;
			if (!known_size_p (maxsize)
			    || known_lt (rmaxsize, maxsize))
			  {
			    /* If we know an upper bound below the declared
			       one this is no longer variable.  */
			    if (known_size_p (maxsize))
			      seen_variable_array_ref = false;
			    maxsize = rmaxsize;
			  }
		      }
		    /* Try to adjust bit_offset with range information.  */
		    offset_int omin
		      = offset_int::from (min, TYPE_SIGN (TREE_TYPE (index)));
		    if (known_le (lbound, omin))
		      {
			poly_offset_int woffset
			  = wi::sext (omin - lbound,
				      TYPE_PRECISION (sizetype));
			woffset *= wi::to_offset (unit_size);
			woffset <<= LOG2_BITS_PER_UNIT;
			bit_offset += woffset;
			if (known_size_p (maxsize))
			  maxsize -= woffset;
		      }
		  }
	      }
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.  Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	       struct { int length; int a[1]; } x;           x.a[d]
	       struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	       struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	       struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && known_size_p (maxsize)
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
		  || (maybe_eq
		      (bit_offset + maxsize,
		       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  poly_offset_int off = mem_ref_offset (exp);
		  off <<= LOG2_BITS_PER_UNIT;
		  off += bit_offset;
		  poly_int64 off_hwi;
		  if (off.to_shwi (&off_hwi))
		    {
		      bit_offset = off_hwi;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  if (!bitsize.to_shwi (psize) || maybe_lt (*psize, 0))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  /* ??? Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  if (!bit_offset.to_shwi (poffset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      if (VAR_P (exp)
	  && ((flag_unconstrained_commons && DECL_COMMON (exp))
	      || (DECL_EXTERNAL (exp) && seen_variable_array_ref)))
	{
	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
	  /* If size is unknown, or we have read to the end, assume there
	     may be more to the structure than we are told.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
	      || (seen_variable_array_ref
		  && (sz_tree == NULL_TREE
		      || !poly_int_tree_p (sz_tree)
		      || maybe_eq (bit_offset + maxsize,
				   wi::to_poly_offset (sz_tree)))))
	    maxsize = -1;
	}
      /* If maxsize is unknown adjust it according to the size of the
	 base decl.  */
      else if (!known_size_p (maxsize)
	       && DECL_SIZE (exp)
	       && poly_int_tree_p (DECL_SIZE (exp)))
	maxsize = wi::to_poly_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
	 base type constant.  */
      if (!known_size_p (maxsize)
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))))
	maxsize = (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  if (!maxsize.to_shwi (pmax_size)
      || maybe_lt (*pmax_size, 0)
      || !endpoint_representable_p (*poffset, *pmax_size))
    *pmax_size = -1;

  /* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT, the callers don't
     check for such overflows individually and assume it works.  */
  if (!endpoint_representable_p (*poffset, *psize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  return exp;
}

/* Like get_ref_base_and_extent, but for cases in which we only care
   about constant-width accesses at constant offsets.  Return null
   if the access is anything else.  */

tree
get_ref_base_and_extent_hwi (tree exp, HOST_WIDE_INT *poffset,
			     HOST_WIDE_INT *psize, bool *preverse)
{
  poly_int64 offset, size, max_size;
  HOST_WIDE_INT const_offset, const_size;
  bool reverse;
  tree decl = get_ref_base_and_extent (exp, &offset, &size, &max_size,
				       &reverse);
  if (!offset.is_constant (&const_offset)
      || !size.is_constant (&const_size)
      || const_offset < 0
      || !known_size_p (max_size)
      || maybe_ne (max_size, const_size))
    return NULL_TREE;

  *poffset = const_offset;
  *psize = const_size;
  *preverse = reverse;
  return decl;
}
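
/* Callers that can only handle fixed-width accesses at constant,
   non-negative offsets use this wrapper and treat NULL as "too
   complicated".  Sketch, where REF is some reference tree supplied by
   the caller:

     HOST_WIDE_INT off, size;
     bool reverse;
     tree base = get_ref_base_and_extent_hwi (ref, &off, &size, &reverse);
     if (base)
       ...  exactly SIZE bits at bit offset OFF from BASE ...  */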

/* Returns the base object of the memory access EXP and stores in *POFFSET
   the access's constant offset from it, measured in BITS_PER_UNIT-sized
   units (i.e. bytes).
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */
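
/* Reusing the earlier example (byte counts illustrative, 4-byte int):

     struct S { int a; int b[10]; } s;

   s.b[3] yields base 's' with *POFFSET = 16 (4 bytes for 'a' plus
   3 * 4 bytes of index).  Unlike get_ref_base_and_extent, a variable
   index does not degrade to a conservative answer: the walk fails with
   NULL_TREE unless VALUEIZE folds the index to a constant.  */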

tree
get_addr_base_and_unit_offset_1 (tree exp, poly_int64_pod *poffset,
				 tree (*valueize) (tree))
{
  poly_int64 byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    poly_int64 this_byte_offset;
	    poly_uint64 this_bit_offset;
	    if (!poly_int_tree_p (TREE_OPERAND (exp, 2), &this_bit_offset)
		|| !multiple_p (this_bit_offset, BITS_PER_UNIT,
				&this_byte_offset))
	      return NULL_TREE;
	    byte_offset += this_byte_offset;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    poly_int64 hthis_offset;

	    if (!this_offset
		|| !poly_int_tree_p (this_offset, &hthis_offset)
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);
	    if (!poly_int_tree_p (index))
	      return NULL_TREE;
	    low_bound = array_ref_low_bound (exp);
	    if (valueize
		&& TREE_CODE (low_bound) == SSA_NAME)
	      low_bound = (*valueize) (low_bound);
	    if (!poly_int_tree_p (low_bound))
	      return NULL_TREE;
	    unit_size = array_ref_element_size (exp);
	    if (TREE_CODE (unit_size) != INTEGER_CST)
	      return NULL_TREE;

	    /* If the resulting bit-offset is constant, track it.  */
	    poly_offset_int woffset
	      = wi::sext (wi::to_poly_offset (index)
			  - wi::to_poly_offset (low_bound),
			  TYPE_PRECISION (sizetype));
	    woffset *= wi::to_offset (unit_size);
	    byte_offset += woffset.force_shwi ();
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  *poffset = byte_offset;
  return exp;
}

/* Returns the base object of the memory access EXP and stores in *POFFSET
   the access's constant offset from it, measured in BITS_PER_UNIT-sized
   units (i.e. bytes).
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.  */

tree
get_addr_base_and_unit_offset (tree exp, poly_int64_pod *poffset)
{
  return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
}
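
/* Typical use: deciding whether two references start a known constant
   number of bytes apart (sketch; REF1 and REF2 are caller-provided
   reference trees):

     poly_int64 off1, off2;
     tree base1 = get_addr_base_and_unit_offset (ref1, &off1);
     tree base2 = get_addr_base_and_unit_offset (ref2, &off2);
     if (base1 && base2 && operand_equal_p (base1, base2, 0))
       ...  the start addresses differ by off2 - off1 bytes ...  */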

/* Returns true if STMT references an SSA_NAME that has
   SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */

bool
stmt_references_abnormal_ssa_name (gimple *stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
	return true;
    }

  return false;
}

/* If STMT takes any abnormal PHI values as input, replace them with
   local copies.  */

void
replace_abnormal_ssa_names (gimple *stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      tree op = USE_FROM_PTR (use_p);
      if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  tree new_name = make_ssa_name (TREE_TYPE (op));
	  gassign *assign = gimple_build_assign (new_name, op);
	  gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
	  SET_USE (use_p, new_name);
	}
    }
}
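
/* As an illustration (SSA version numbers made up), given a use of an
   abnormal PHI result

     x_3 = y_1(ab) + 1;

   the loop above rewrites the statement to

     y_5 = y_1(ab);
     x_3 = y_5 + 1;

   so that the coalescing restrictions attached to y_1(ab) no longer
   constrain the arithmetic statement itself.  */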

/* Pair of tree and a sorting index, for dump_enumerated_decls.  */
struct GTY(()) numbered_tree
{
  tree t;
  int num;
};


/* Compare two declaration references by their DECL_UID / sequence number.
   Called via qsort.  */

static int
compare_decls_by_uid (const void *pa, const void *pb)
{
  const numbered_tree *nt_a = ((const numbered_tree *)pa);
  const numbered_tree *nt_b = ((const numbered_tree *)pb);

  if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
    return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
  return nt_a->num - nt_b->num;
}

/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls.  */
static tree
dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
  numbered_tree nt;

  if (!DECL_P (*tp))
    return NULL_TREE;
  nt.t = *tp;
  nt.num = list->length ();
  list->safe_push (nt);
  *walk_subtrees = 0;
  return NULL_TREE;
}

/* Find all the declarations used by the current function, sort them by uid,
   and emit the sorted list.  Each declaration is tagged with a sequence
   number indicating when it was found during statement / tree walking,
   so that TDF_NOUID comparisons of anonymous declarations are still
   meaningful.  Where a declaration was encountered more than once, we
   emit only the sequence number of the first encounter.
   FILE is the dump file to which the list is output and FLAGS is as in
   print_generic_expr.  */
void
dump_enumerated_decls (FILE *file, dump_flags_t flags)
{
  if (!cfun->cfg)
    return;

  basic_block bb;
  struct walk_stmt_info wi;
  auto_vec<numbered_tree, 40> decl_list;

  memset (&wi, '\0', sizeof (wi));
  wi.info = (void *) &decl_list;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	if (!is_gimple_debug (gsi_stmt (gsi)))
	  walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
    }
  decl_list.qsort (compare_decls_by_uid);
  if (decl_list.length ())
    {
      unsigned ix;
      numbered_tree *ntp;
      tree last = NULL_TREE;

      fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
	       current_function_name ());
      FOR_EACH_VEC_ELT (decl_list, ix, ntp)
	{
	  if (ntp->t == last)
	    continue;
	  fprintf (file, "%d: ", ntp->num);
	  print_generic_decl (file, ntp->t, flags);
	  fprintf (file, "\n");
	  last = ntp->t;
	}
    }
}
1047 }