]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/gimple-range-path.cc
d: Import dmd b8384668f, druntime e6caaab9, phobos 5ab9ad256 (v2.098.0-beta.1)
[thirdparty/gcc.git] / gcc / gimple-range-path.cc
1 /* Basic block path solver.
2 Copyright (C) 2021 Free Software Foundation, Inc.
3 Contributed by Aldy Hernandez <aldyh@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfganal.h"
28 #include "value-range.h"
29 #include "gimple-range.h"
30 #include "tree-pretty-print.h"
31 #include "gimple-range-path.h"
32 #include "ssa.h"
33 #include "tree-cfg.h"
34 #include "gimple-iterator.h"
35
36 // Internal construct to help facilitate debugging of solver.
37 #define DEBUG_SOLVER (dump_file && (param_threader_debug == THREADER_DEBUG_ALL))
38
39 path_range_query::path_range_query (bool resolve, gimple_ranger *ranger)
40 : m_cache (new ssa_global_cache),
41 m_has_cache_entry (BITMAP_ALLOC (NULL)),
42 m_resolve (resolve),
43 m_alloced_ranger (!ranger)
44 {
45 if (m_alloced_ranger)
46 m_ranger = new gimple_ranger;
47 else
48 m_ranger = ranger;
49
50 m_oracle = new path_oracle (m_ranger->oracle ());
51 }
52
// Destructor.  The path oracle is deleted first since it wraps the
// ranger's oracle; the ranger itself is only freed when it was
// allocated by the constructor.

path_range_query::~path_range_query ()
{
  delete m_oracle;
  if (m_alloced_ranger)
    delete m_ranger;
  BITMAP_FREE (m_has_cache_entry);
  delete m_cache;
}
61
62 // Return TRUE if NAME is in the import bitmap.
63
64 bool
65 path_range_query::import_p (tree name)
66 {
67 return (TREE_CODE (name) == SSA_NAME
68 && bitmap_bit_p (m_imports, SSA_NAME_VERSION (name)));
69 }
70
71 // Mark cache entry for NAME as unused.
72
73 void
74 path_range_query::clear_cache (tree name)
75 {
76 unsigned v = SSA_NAME_VERSION (name);
77 bitmap_clear_bit (m_has_cache_entry, v);
78 }
79
80 // If NAME has a cache entry, return it in R, and return TRUE.
81
82 inline bool
83 path_range_query::get_cache (irange &r, tree name)
84 {
85 if (!gimple_range_ssa_p (name))
86 return get_global_range_query ()->range_of_expr (r, name);
87
88 unsigned v = SSA_NAME_VERSION (name);
89 if (bitmap_bit_p (m_has_cache_entry, v))
90 return m_cache->get_global_range (r, name);
91
92 return false;
93 }
94
95 // Set the cache entry for NAME to R.
96
97 void
98 path_range_query::set_cache (const irange &r, tree name)
99 {
100 unsigned v = SSA_NAME_VERSION (name);
101 bitmap_set_bit (m_has_cache_entry, v);
102 m_cache->set_global_range (name, r);
103 }
104
105 void
106 path_range_query::dump (FILE *dump_file)
107 {
108 push_dump_file save (dump_file, dump_flags & ~TDF_DETAILS);
109
110 if (m_path.is_empty ())
111 return;
112
113 unsigned i;
114 bitmap_iterator bi;
115
116 dump_ranger (dump_file, m_path);
117
118 fprintf (dump_file, "Imports:\n");
119 EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
120 {
121 tree name = ssa_name (i);
122 print_generic_expr (dump_file, name, TDF_SLIM);
123 fprintf (dump_file, "\n");
124 }
125
126 m_cache->dump (dump_file);
127 }
128
129 void
130 path_range_query::debug ()
131 {
132 dump (stderr);
133 }
134
135 // Return TRUE if NAME is defined outside the current path.
136
137 bool
138 path_range_query::defined_outside_path (tree name)
139 {
140 gimple *def = SSA_NAME_DEF_STMT (name);
141 basic_block bb = gimple_bb (def);
142
143 return !bb || !m_path.contains (bb);
144 }
145
// Return in R the range of NAME on entry to the path.
//
// NAME must be defined outside the path; the range is obtained from
// the ranger's view of the CFG at the path's entry block.

void
path_range_query::range_on_path_entry (irange &r, tree name)
{
  gcc_checking_assert (defined_outside_path (name));
  basic_block entry = entry_bb ();

  // Prefer to use range_of_expr if we have a statement to look at,
  // since it has better caching than range_on_edge.
  gimple *last = last_stmt (entry);
  if (last)
    {
      if (m_ranger->range_of_expr (r, name, last))
	return;
      // range_of_expr is expected to always succeed here.
      gcc_unreachable ();
    }

  // If we have no statement, look at all the incoming ranges to the
  // block.  This can happen when we're querying a block with only an
  // outgoing edge (no statement but the fall through edge), but for
  // which we can determine a range on entry to the block.
  int_range_max tmp;
  bool changed = false;
  r.set_undefined ();
  for (unsigned i = 0; i < EDGE_COUNT (entry->preds); ++i)
    {
      edge e = EDGE_PRED (entry, i);
      // Skip the edge coming from the CFG's dummy entry block.
      if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && m_ranger->range_on_edge (tmp, e, name))
	{
	  r.union_ (tmp);
	  changed = true;
	}
    }

  // Make sure we don't return UNDEFINED by mistake.
  if (!changed)
    r.set_varying (TREE_TYPE (name));
}
186
// Return the range of NAME at the end of the path being analyzed.
// Worker for range_of_expr; returns FALSE only for unsupported types.

bool
path_range_query::internal_range_of_expr (irange &r, tree name, gimple *stmt)
{
  if (!irange::supports_type_p (TREE_TYPE (name)))
    return false;

  // A cached entry already reflects everything known along the path.
  if (get_cache (r, name))
    return true;

  // Names defined before the path can be resolved with the ranger at
  // the path entry (only when resolving unknowns is enabled).
  if (m_resolve && defined_outside_path (name))
    {
      range_on_path_entry (r, name);
      set_cache (r, name);
      return true;
    }

  // If NAME is defined in STMT's block, solve it there.
  if (stmt
      && range_defined_in_block (r, name, gimple_bb (stmt)))
    {
      // Refine the computed range with globally known information.
      if (TREE_CODE (name) == SSA_NAME)
	r.intersect (gimple_range_global (name));

      set_cache (r, name);
      return true;
    }

  // Fall back to the global range.  Note this result is deliberately
  // not cached.
  r = gimple_range_global (name);
  return true;
}
218
219 bool
220 path_range_query::range_of_expr (irange &r, tree name, gimple *stmt)
221 {
222 if (internal_range_of_expr (r, name, stmt))
223 {
224 if (r.undefined_p ())
225 m_undefined_path = true;
226
227 return true;
228 }
229 return false;
230 }
231
232 bool
233 path_range_query::unreachable_path_p ()
234 {
235 return m_undefined_path;
236 }
237
238 // Initialize the current path to PATH. The current block is set to
239 // the entry block to the path.
240 //
241 // Note that the blocks are in reverse order, so the exit block is
242 // path[0].
243
244 void
245 path_range_query::set_path (const vec<basic_block> &path)
246 {
247 gcc_checking_assert (path.length () > 1);
248 m_path = path.copy ();
249 m_pos = m_path.length () - 1;
250 bitmap_clear (m_has_cache_entry);
251 }
252
253 bool
254 path_range_query::ssa_defined_in_bb (tree name, basic_block bb)
255 {
256 return (TREE_CODE (name) == SSA_NAME
257 && SSA_NAME_DEF_STMT (name)
258 && gimple_bb (SSA_NAME_DEF_STMT (name)) == bb);
259 }
260
// Return the range of the result of PHI in R.
//
// Since PHIs are calculated in parallel at the beginning of the
// block, we must be careful to never save anything to the cache here.
// It is the caller's responsibility to adjust the cache.  Also,
// calculating the PHI's range must not trigger additional lookups.

void
path_range_query::ssa_range_in_phi (irange &r, gphi *phi)
{
  tree name = gimple_phi_result (phi);
  basic_block bb = gimple_bb (phi);
  unsigned nargs = gimple_phi_num_args (phi);

  if (at_entry ())
    {
      // At the path entry, the ranger can see the full CFG context.
      if (m_resolve && m_ranger->range_of_expr (r, name, phi))
	return;

      // Try to fold the phi exclusively with global or cached values.
      // This will get things like PHI <5(99), 6(88)>.  We do this by
      // calling range_of_expr with no context.
      int_range_max arg_range;
      r.set_undefined ();
      for (size_t i = 0; i < nargs; ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  if (range_of_expr (arg_range, arg, /*stmt=*/NULL))
	    r.union_ (arg_range);
	  else
	    {
	      // One unresolvable argument makes the result unknown.
	      r.set_varying (TREE_TYPE (name));
	      return;
	    }
	}
      return;
    }

  // Inside the path, only the argument flowing in through the edge
  // from the previous block on the path is relevant.
  basic_block prev = prev_bb ();
  edge e_in = find_edge (prev, bb);

  for (size_t i = 0; i < nargs; ++i)
    if (e_in == gimple_phi_arg_edge (phi, i))
      {
	tree arg = gimple_phi_arg_def (phi, i);
	// Avoid using the cache for ARGs defined in this block, as
	// that could create an ordering problem.
	if (ssa_defined_in_bb (arg, bb) || !get_cache (r, arg))
	  {
	    if (m_resolve)
	      {
		int_range_max tmp;
		// Using both the range on entry to the path, and the
		// range on this edge yields significantly better
		// results.
		if (defined_outside_path (arg))
		  range_on_path_entry (r, arg);
		else
		  r.set_varying (TREE_TYPE (name));
		m_ranger->range_on_edge (tmp, e_in, arg);
		r.intersect (tmp);
		return;
	      }
	    r.set_varying (TREE_TYPE (name));
	  }
	return;
      }
  // The incoming edge must match one of the PHI's argument edges.
  gcc_unreachable ();
}
330
// If NAME is defined in BB, set R to the range of NAME, and return
// TRUE.  Otherwise, return FALSE.

bool
path_range_query::range_defined_in_block (irange &r, tree name, basic_block bb)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (name);
  basic_block def_bb = gimple_bb (def_stmt);

  if (def_bb != bb)
    return false;

  // A cached range was computed with everything known so far.
  if (get_cache (r, name))
    return true;

  if (gimple_code (def_stmt) == GIMPLE_PHI)
    ssa_range_in_phi (r, as_a<gphi *> (def_stmt));
  else
    {
      // A new definition invalidates relations recorded for NAME.
      if (name)
	get_path_oracle ()->killing_def (name);

      if (!range_of_stmt (r, def_stmt, name))
	r.set_varying (TREE_TYPE (name));
    }

  // Refine the range with any non-null information in BB.
  if (bb)
    m_non_null.adjust_range (r, name, bb);

  if (DEBUG_SOLVER && (bb || !r.varying_p ()))
    {
      fprintf (dump_file, "range_defined_in_block (BB%d) for ", bb ? bb->index : -1);
      print_generic_expr (dump_file, name, TDF_SLIM);
      fprintf (dump_file, " is ");
      r.dump (dump_file);
      fprintf (dump_file, "\n");
    }

  return true;
}
371
372 // Compute ranges defined in the PHIs in this block.
373
374 void
375 path_range_query::compute_ranges_in_phis (basic_block bb)
376 {
377 int_range_max r;
378 gphi_iterator iter;
379
380 // PHIs must be resolved simultaneously on entry to the block
381 // because any dependencies must be satistifed with values on entry.
382 // Thus, we calculate all PHIs first, and then update the cache at
383 // the end.
384
385 m_tmp_phi_cache.clear ();
386 for (iter = gsi_start_phis (bb); !gsi_end_p (iter); gsi_next (&iter))
387 {
388 gphi *phi = iter.phi ();
389 tree name = gimple_phi_result (phi);
390
391 if (import_p (name) && range_defined_in_block (r, name, bb))
392 m_tmp_phi_cache.set_global_range (name, r);
393 }
394 for (iter = gsi_start_phis (bb); !gsi_end_p (iter); gsi_next (&iter))
395 {
396 gphi *phi = iter.phi ();
397 tree name = gimple_phi_result (phi);
398
399 if (m_tmp_phi_cache.get_global_range (r, name))
400 set_cache (r, name);
401 }
402 }
403
404 // Compute ranges defined in the current block, or exported to the
405 // next block.
406
407 void
408 path_range_query::compute_ranges_in_block (basic_block bb)
409 {
410 bitmap_iterator bi;
411 int_range_max r, cached_range;
412 unsigned i;
413
414 if (m_resolve && !at_entry ())
415 compute_phi_relations (bb, prev_bb ());
416
417 // Force recalculation of any names in the cache that are defined in
418 // this block. This can happen on interdependent SSA/phis in loops.
419 EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
420 {
421 tree name = ssa_name (i);
422 if (ssa_defined_in_bb (name, bb))
423 clear_cache (name);
424 }
425
426 // Solve imports defined in this block, starting with the PHIs...
427 compute_ranges_in_phis (bb);
428 // ...and then the rest of the imports.
429 EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
430 {
431 tree name = ssa_name (i);
432
433 if (gimple_code (SSA_NAME_DEF_STMT (name)) != GIMPLE_PHI
434 && range_defined_in_block (r, name, bb))
435 set_cache (r, name);
436 }
437
438 if (at_exit ())
439 return;
440
441 // Solve imports that are exported to the next block.
442 basic_block next = next_bb ();
443 edge e = find_edge (bb, next);
444 EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
445 {
446 tree name = ssa_name (i);
447 gori_compute &g = m_ranger->gori ();
448 bitmap exports = g.exports (bb);
449
450 if (bitmap_bit_p (exports, i))
451 {
452 if (g.outgoing_edge_range_p (r, e, name, *this))
453 {
454 if (get_cache (cached_range, name))
455 r.intersect (cached_range);
456
457 set_cache (r, name);
458 if (DEBUG_SOLVER)
459 {
460 fprintf (dump_file, "outgoing_edge_range_p for ");
461 print_generic_expr (dump_file, name, TDF_SLIM);
462 fprintf (dump_file, " on edge %d->%d ",
463 e->src->index, e->dest->index);
464 fprintf (dump_file, "is ");
465 r.dump (dump_file);
466 fprintf (dump_file, "\n");
467 }
468 }
469 }
470 }
471
472 if (m_resolve)
473 compute_outgoing_relations (bb, next);
474 }
475
476 // Adjust all pointer imports in BB with non-null information.
477
478 void
479 path_range_query::adjust_for_non_null_uses (basic_block bb)
480 {
481 int_range_max r;
482 bitmap_iterator bi;
483 unsigned i;
484
485 EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
486 {
487 tree name = ssa_name (i);
488
489 if (!POINTER_TYPE_P (TREE_TYPE (name)))
490 continue;
491
492 if (get_cache (r, name))
493 {
494 if (r.nonzero_p ())
495 continue;
496 }
497 else
498 r.set_varying (TREE_TYPE (name));
499
500 if (m_non_null.adjust_range (r, name, bb))
501 set_cache (r, name);
502 }
503 }
504
505 // If NAME is a supported SSA_NAME, add it the bitmap in IMPORTS.
506
507 bool
508 path_range_query::add_to_imports (tree name, bitmap imports)
509 {
510 if (TREE_CODE (name) == SSA_NAME
511 && irange::supports_type_p (TREE_TYPE (name)))
512 return bitmap_set_bit (imports, SSA_NAME_VERSION (name));
513 return false;
514 }
515
// Compute the imports to the path ending in EXIT.  These are
// essentially the SSA names used to calculate the final conditional
// along the path.
//
// They are hints for the solver.  Adding more elements doesn't slow
// us down, because we don't solve anything that doesn't appear in the
// path.  On the other hand, not having enough imports will limit what
// we can solve.

void
path_range_query::compute_imports (bitmap imports, basic_block exit)
{
  // Start with the imports from the exit block...
  gori_compute &gori = m_ranger->gori ();
  bitmap r_imports = gori.imports (exit);
  bitmap_copy (imports, r_imports);

  // Seed a worklist with every name currently in IMPORTS.
  auto_vec<tree> worklist (bitmap_count_bits (imports));
  bitmap_iterator bi;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (imports, 0, i, bi)
    {
      tree name = ssa_name (i);
      worklist.quick_push (name);
    }

  // ...and add any operands used to define these imports, following
  // def chains transitively through the worklist.
  while (!worklist.is_empty ())
    {
      tree name = worklist.pop ();
      gimple *def_stmt = SSA_NAME_DEF_STMT (name);

      if (is_gimple_assign (def_stmt))
	{
	  // NOTE(review): rhs1 is added to IMPORTS but not pushed on
	  // the worklist, unlike rhs2/rhs3 — so rhs1's own operands
	  // are not followed; presumably intentional, confirm.
	  add_to_imports (gimple_assign_rhs1 (def_stmt), imports);
	  tree rhs = gimple_assign_rhs2 (def_stmt);
	  if (rhs && add_to_imports (rhs, imports))
	    worklist.safe_push (rhs);
	  rhs = gimple_assign_rhs3 (def_stmt);
	  if (rhs && add_to_imports (rhs, imports))
	    worklist.safe_push (rhs);
	}
      else if (gphi *phi = dyn_cast <gphi *> (def_stmt))
	{
	  // For PHIs, only follow arguments flowing in from blocks
	  // that are actually on the path.
	  for (size_t i = 0; i < gimple_phi_num_args (phi); ++i)
	    {
	      edge e = gimple_phi_arg_edge (phi, i);
	      tree arg = gimple_phi_arg (phi, i)->def;

	      if (TREE_CODE (arg) == SSA_NAME
		  && m_path.contains (e->src)
		  && bitmap_set_bit (imports, SSA_NAME_VERSION (arg)))
		worklist.safe_push (arg);
	    }
	}
    }
  // Exported booleans along the path, may help conditionals.
  if (m_resolve)
    for (i = 0; i < m_path.length (); ++i)
      {
	basic_block bb = m_path[i];
	tree name;
	FOR_EACH_GORI_EXPORT_NAME (gori, bb, name)
	  if (TREE_CODE (TREE_TYPE (name)) == BOOLEAN_TYPE)
	    bitmap_set_bit (imports, SSA_NAME_VERSION (name));
      }
}
583
// Compute the ranges for IMPORTS along PATH.
//
// IMPORTS are the set of SSA names, any of which could potentially
// change the value of the final conditional in PATH.  Default to the
// imports of the last block in the path if none is given.

void
path_range_query::compute_ranges (const vec<basic_block> &path,
				  const bitmap_head *imports)
{
  if (DEBUG_SOLVER)
    fprintf (dump_file, "\n==============================================\n");

  set_path (path);
  m_undefined_path = false;

  if (imports)
    bitmap_copy (m_imports, imports);
  else
    compute_imports (m_imports, exit_bb ());

  // Start each solve with a fresh set of path relations.
  if (m_resolve)
    get_path_oracle ()->reset_path ();

  if (DEBUG_SOLVER)
    {
      fprintf (dump_file, "path_range_query: compute_ranges for path: ");
      // The path is stored in reverse, so print it back to front.
      for (unsigned i = path.length (); i > 0; --i)
	{
	  basic_block bb = path[i - 1];
	  fprintf (dump_file, "%d", bb->index);
	  if (i > 1)
	    fprintf (dump_file, "->");
	}
      fprintf (dump_file, "\n");
    }

  // Walk the path from entry to exit, solving each block in turn.
  while (1)
    {
      basic_block bb = curr_bb ();

      compute_ranges_in_block (bb);
      adjust_for_non_null_uses (bb);

      if (at_exit ())
	break;

      move_next ();
    }

  if (DEBUG_SOLVER)
    {
      get_path_oracle ()->dump (dump_file);
      dump (dump_file);
    }
}
640
641 // Convenience function to compute ranges along a path consisting of
642 // E->SRC and E->DEST.
643
644 void
645 path_range_query::compute_ranges (edge e)
646 {
647 auto_vec<basic_block> bbs (2);
648 bbs.quick_push (e->dest);
649 bbs.quick_push (e->src);
650 compute_ranges (bbs);
651 }
652
// A folding aid used to register and query relations along a path.
// When queried, it returns relations as they would appear on exit to
// the path.
//
// Relations are registered on entry so the path_oracle knows which
// block to query the root oracle at when a relation lies outside the
// path.  However, when queried we return the relation on exit to the
// path, since the root_oracle ignores the registered relations.

class jt_fur_source : public fur_depend
{
public:
  jt_fur_source (gimple *s, path_range_query *, gori_compute *,
		 const vec<basic_block> &);
  relation_kind query_relation (tree op1, tree op2) override;
  void register_relation (gimple *, relation_kind, tree op1, tree op2) override;
  void register_relation (edge, relation_kind, tree op1, tree op2) override;
private:
  // Entry block of the path; all relations are anchored here.
  basic_block m_entry;
};
673
674 jt_fur_source::jt_fur_source (gimple *s,
675 path_range_query *query,
676 gori_compute *gori,
677 const vec<basic_block> &path)
678 : fur_depend (s, gori, query)
679 {
680 gcc_checking_assert (!path.is_empty ());
681
682 m_entry = path[path.length () - 1];
683
684 if (dom_info_available_p (CDI_DOMINATORS))
685 m_oracle = query->oracle ();
686 else
687 m_oracle = NULL;
688 }
689
690 // Ignore statement and register relation on entry to path.
691
692 void
693 jt_fur_source::register_relation (gimple *, relation_kind k, tree op1, tree op2)
694 {
695 if (m_oracle)
696 m_oracle->register_relation (m_entry, k, op1, op2);
697 }
698
699 // Ignore edge and register relation on entry to path.
700
701 void
702 jt_fur_source::register_relation (edge, relation_kind k, tree op1, tree op2)
703 {
704 if (m_oracle)
705 m_oracle->register_relation (m_entry, k, op1, op2);
706 }
707
708 relation_kind
709 jt_fur_source::query_relation (tree op1, tree op2)
710 {
711 if (!m_oracle)
712 return VREL_NONE;
713
714 if (TREE_CODE (op1) != SSA_NAME || TREE_CODE (op2) != SSA_NAME)
715 return VREL_NONE;
716
717 return m_oracle->query_relation (m_entry, op1, op2);
718 }
719
720 // Return the range of STMT at the end of the path being analyzed.
721
722 bool
723 path_range_query::range_of_stmt (irange &r, gimple *stmt, tree)
724 {
725 tree type = gimple_range_type (stmt);
726
727 if (!irange::supports_type_p (type))
728 return false;
729
730 // If resolving unknowns, fold the statement making use of any
731 // relations along the path.
732 if (m_resolve)
733 {
734 fold_using_range f;
735 jt_fur_source src (stmt, this, &m_ranger->gori (), m_path);
736 if (!f.fold_stmt (r, stmt, src))
737 r.set_varying (type);
738 }
739 // Otherwise, fold without relations.
740 else if (!fold_range (r, stmt, this))
741 r.set_varying (type);
742
743 return true;
744 }
745
746 void
747 path_range_query::maybe_register_phi_relation (gphi *phi, tree arg)
748 {
749 basic_block bb = gimple_bb (phi);
750 tree result = gimple_phi_result (phi);
751
752 // Avoid recording the equivalence if the arg is defined in this
753 // block, as that could create an ordering problem.
754 if (ssa_defined_in_bb (arg, bb))
755 return;
756
757 if (dump_file && (dump_flags & TDF_DETAILS))
758 fprintf (dump_file, " from bb%d:", bb->index);
759
760 get_path_oracle ()->killing_def (result);
761 m_oracle->register_relation (entry_bb (), EQ_EXPR, arg, result);
762 }
763
764 // Compute relations for each PHI in BB. For example:
765 //
766 // x_5 = PHI<y_9(5),...>
767 //
768 // If the path flows through BB5, we can register that x_5 == y_9.
769
770 void
771 path_range_query::compute_phi_relations (basic_block bb, basic_block prev)
772 {
773 if (prev == NULL)
774 return;
775
776 edge e_in = find_edge (prev, bb);
777
778 for (gphi_iterator iter = gsi_start_phis (bb); !gsi_end_p (iter);
779 gsi_next (&iter))
780 {
781 gphi *phi = iter.phi ();
782 tree result = gimple_phi_result (phi);
783 unsigned nargs = gimple_phi_num_args (phi);
784
785 if (!import_p (result))
786 continue;
787
788 for (size_t i = 0; i < nargs; ++i)
789 if (e_in == gimple_phi_arg_edge (phi, i))
790 {
791 tree arg = gimple_phi_arg_def (phi, i);
792
793 if (gimple_range_ssa_p (arg))
794 maybe_register_phi_relation (phi, arg);
795 break;
796 }
797 }
798 }
799
800 // Compute outgoing relations from BB to NEXT.
801
802 void
803 path_range_query::compute_outgoing_relations (basic_block bb, basic_block next)
804 {
805 gimple *stmt = last_stmt (bb);
806
807 if (stmt
808 && gimple_code (stmt) == GIMPLE_COND
809 && (import_p (gimple_cond_lhs (stmt))
810 || import_p (gimple_cond_rhs (stmt))))
811 {
812 int_range<2> r;
813 gcond *cond = as_a<gcond *> (stmt);
814 edge e0 = EDGE_SUCC (bb, 0);
815 edge e1 = EDGE_SUCC (bb, 1);
816
817 if (e0->dest == next)
818 gcond_edge_range (r, e0);
819 else if (e1->dest == next)
820 gcond_edge_range (r, e1);
821 else
822 gcc_unreachable ();
823
824 jt_fur_source src (NULL, this, &m_ranger->gori (), m_path);
825 src.register_outgoing_edges (cond, r, e0, e1);
826 }
827 }