/* Gimple range inference implementation.
   Copyright (C) 2022 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "insn-codes.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "gimple-range.h"
#include "tree-cfg.h"
#include "target.h"
#include "attribs.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "cfganal.h"

// Adapted from infer_nonnull_range_by_dereference and check_loadstore
// to process nonnull ssa_name OP in S.  DATA contains a pointer to a
// stmt range inference instance.

static bool
non_null_loadstore (gimple *, tree op, tree, void *data)
{
  if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
    {
      /* Some address spaces may legitimately dereference zero.  */
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op));
      if (!targetm.addr_space.zero_address_valid (as))
        {
          tree ssa = TREE_OPERAND (op, 0);
          ((gimple_infer_range *)data)->add_nonzero (ssa);
        }
    }
  return false;
}

// Add NAME and RANGE to the range inference summary.

void
gimple_infer_range::add_range (tree name, irange &range)
{
  m_names[num_args] = name;
  m_ranges[num_args] = range;
  if (num_args < size_limit - 1)
    num_args++;
}

// Add a nonzero range for NAME to the range inference summary.

void
gimple_infer_range::add_nonzero (tree name)
{
  if (!gimple_range_ssa_p (name))
    return;
  int_range<2> nz;
  nz.set_nonzero (TREE_TYPE (name));
  add_range (name, nz);
}

// Process S for range inference and fill in the summary list.
// This is the routine where new inferred ranges should be added.

gimple_infer_range::gimple_infer_range (gimple *s)
{
  num_args = 0;

  if (is_a<gphi *> (s))
    return;

  if (is_a<gcall *> (s) && flag_delete_null_pointer_checks)
    {
      tree fntype = gimple_call_fntype (s);
      bitmap nonnullargs = get_nonnull_args (fntype);
      // Process any non-null arguments.
      if (nonnullargs)
        {
          for (unsigned i = 0; i < gimple_call_num_args (s); i++)
            {
              if (bitmap_empty_p (nonnullargs)
                  || bitmap_bit_p (nonnullargs, i))
                {
                  tree op = gimple_call_arg (s, i);
                  if (POINTER_TYPE_P (TREE_TYPE (op)))
                    add_nonzero (op);
                }
            }
          BITMAP_FREE (nonnullargs);
        }
      // Fallthru and walk load/store ops now.
    }

  // Look for possible non-null values.
  if (flag_delete_null_pointer_checks && gimple_code (s) != GIMPLE_ASM
      && !gimple_clobber_p (s))
    walk_stmt_load_store_ops (s, (void *)this, non_null_loadstore,
                              non_null_loadstore);
}
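
// Example (sketch): a client that has just processed statement STMT can pick
// up any ranges the statement implies about its operands like this.  STMT is
// a hypothetical gimple statement; register_all_uses below is the real
// on-demand user of this pattern.
//
//   gimple_infer_range infer (stmt);
//   for (unsigned x = 0; x < infer.num (); x++)
//     {
//       tree name = infer.name (x);
//       const irange &r = infer.range (x);
//       // ... NAME is known to be within R once STMT has executed ...
//     }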

// -------------------------------------------------------------------------

// This class is an element in the list of inferred ranges.

class exit_range
{
public:
  tree name;
  irange *range;
  exit_range *next;
};

// If there is an element which matches SSA, return a pointer to the element.
// Otherwise return NULL.

exit_range *
infer_range_manager::exit_range_head::find_ptr (tree ssa)
{
  // Return NULL if SSA is not in this list.
  if (!m_names || !bitmap_bit_p (m_names, SSA_NAME_VERSION (ssa)))
    return NULL;
  for (exit_range *ptr = head; ptr != NULL; ptr = ptr->next)
    if (ptr->name == ssa)
      return ptr;
  // Should be unreachable.
  gcc_unreachable ();
  return NULL;
}

// Construct a range infer manager.  DO_SEARCH indicates whether an immediate
// use scan should be made the first time a name is processed.  This is for
// on-demand clients who may not visit every statement and may miss uses.

infer_range_manager::infer_range_manager (bool do_search)
{
  bitmap_obstack_initialize (&m_bitmaps);
  m_on_exit.create (0);
  m_on_exit.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);
  // m_seen == NULL indicates no scanning.  Otherwise the bit indicates a
  // scan has been performed on NAME.
  if (do_search)
    m_seen = BITMAP_ALLOC (&m_bitmaps);
  else
    m_seen = NULL;
  obstack_init (&m_list_obstack);
  // Non-zero elements are very common, so cache them for each ssa-name.
  m_nonzero.create (0);
  m_nonzero.safe_grow_cleared (num_ssa_names + 1);
}
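
// Example (sketch): the two usage models DO_SEARCH selects between.  An
// on-demand client passes true so that the first query of a name lazily
// scans its immediate uses via register_all_uses; a client that walks every
// statement itself passes false and records ranges with add_range directly.
// MGR, NAME and BB below are hypothetical:
//
//   infer_range_manager mgr (true);    // on-demand immediate-use scanning
//   if (mgr.has_range_p (name, bb))    // may trigger register_all_uses (name)
//     ...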

// Destruct a range infer manager.

infer_range_manager::~infer_range_manager ()
{
  m_nonzero.release ();
  obstack_free (&m_list_obstack, NULL);
  m_on_exit.release ();
  bitmap_obstack_release (&m_bitmaps);
}

// Return a non-zero range value of the appropriate type for NAME from
// the cache, creating it if necessary.

const irange &
infer_range_manager::get_nonzero (tree name)
{
  unsigned v = SSA_NAME_VERSION (name);
  if (v >= m_nonzero.length ())
    m_nonzero.safe_grow_cleared (num_ssa_names + 20);
  if (!m_nonzero[v])
    {
      tree type = TREE_TYPE (name);
      m_nonzero[v]
        = static_cast <irange *> (m_range_allocator.alloc_vrange (type));
      m_nonzero[v]->set_nonzero (type);
    }
  return *(m_nonzero[v]);
}

// Return TRUE if NAME has a range inference in block BB.

bool
infer_range_manager::has_range_p (tree name, basic_block bb)
{
  // Check if this is an immediate use search model.
  if (m_seen && !bitmap_bit_p (m_seen, SSA_NAME_VERSION (name)))
    register_all_uses (name);

  if (bb->index >= (int)m_on_exit.length ())
    return false;
  if (!m_on_exit[bb->index].m_names)
    return false;
  if (!bitmap_bit_p (m_on_exit[bb->index].m_names, SSA_NAME_VERSION (name)))
    return false;
  return true;
}

// Return TRUE if NAME has a range inference in block BB, and adjust range R
// to include it.

bool
infer_range_manager::maybe_adjust_range (irange &r, tree name, basic_block bb)
{
  if (!has_range_p (name, bb))
    return false;
  exit_range *ptr = m_on_exit[bb->index].find_ptr (name);
  gcc_checking_assert (ptr);
  // Return true if this exit range changes R, otherwise false.
  return r.intersect (*(ptr->range));
}
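
// Example (sketch): refine a previously computed range with whatever was
// inferred on exit from a block.  MGR, NAME and BB are hypothetical client
// values:
//
//   int_range_max r;
//   r.set_varying (TREE_TYPE (name));
//   if (mgr.maybe_adjust_range (r, name, bb))
//     // R was narrowed by an inferred range recorded for NAME in BB.
//     ...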

// Add range R as an inferred range for NAME in block BB.

void
infer_range_manager::add_range (tree name, basic_block bb, const irange &r)
{
  if (bb->index >= (int)m_on_exit.length ())
    m_on_exit.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);

  // Create the summary list bitmap if it doesn't exist.
  if (!m_on_exit[bb->index].m_names)
    m_on_exit[bb->index].m_names = BITMAP_ALLOC (&m_bitmaps);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " on-exit update ");
      print_generic_expr (dump_file, name, TDF_SLIM);
      fprintf (dump_file, " in BB%d : ", bb->index);
      r.dump (dump_file);
      fprintf (dump_file, "\n");
    }

  // If NAME already has a range, intersect them and done.
  exit_range *ptr = m_on_exit[bb->index].find_ptr (name);
  if (ptr)
    {
      int_range_max cur = r;
      // If no new info is added, just return.
      if (!cur.intersect (*(ptr->range)))
        return;
      if (ptr->range->fits_p (cur))
        *(ptr->range) = cur;
      else
        {
          vrange &v = cur;
          ptr->range = static_cast <irange *> (m_range_allocator.clone (v));
        }
      return;
    }

  // Otherwise create a record.
  bitmap_set_bit (m_on_exit[bb->index].m_names, SSA_NAME_VERSION (name));
  ptr = (exit_range *)obstack_alloc (&m_list_obstack, sizeof (exit_range));
  ptr->range = m_range_allocator.clone (r);
  ptr->name = name;
  ptr->next = m_on_exit[bb->index].head;
  m_on_exit[bb->index].head = ptr;
}
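
// Example (sketch): recording two inferred ranges for the same name in the
// same block keeps their intersection.  MGR, NAME and BB are hypothetical;
// the ranges are over integer_type_node:
//
//   int_range<1> r1 (build_int_cst (integer_type_node, 0),
//                    build_int_cst (integer_type_node, 100));
//   int_range<1> r2 (build_int_cst (integer_type_node, 50),
//                    build_int_cst (integer_type_node, 200));
//   mgr.add_range (name, bb, r1);
//   mgr.add_range (name, bb, r2);   // NAME is now [50, 100] in BB.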

// Add a non-zero inferred range for NAME in block BB.

void
infer_range_manager::add_nonzero (tree name, basic_block bb)
{
  add_range (name, bb, get_nonzero (name));
}

// Follow immediate use chains and find all inferred ranges for NAME.

void
infer_range_manager::register_all_uses (tree name)
{
  gcc_checking_assert (m_seen);

  // Check if we've already processed this name.
  unsigned v = SSA_NAME_VERSION (name);
  if (bitmap_bit_p (m_seen, v))
    return;
  bitmap_set_bit (m_seen, v);

  use_operand_p use_p;
  imm_use_iterator iter;

  // Loop over each immediate use and see if it has an inferred range.
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    {
      gimple *s = USE_STMT (use_p);
      gimple_infer_range infer (s);
      for (unsigned x = 0; x < infer.num (); x++)
        {
          if (name == infer.name (x))
            add_range (name, gimple_bb (s), infer.range (x));
        }
    }
}