/* Gimple range inference implementation.
   Copyright (C) 2022 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "insn-codes.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "gimple-range.h"
#include "value-range-storage.h"
#include "tree-cfg.h"
#include "target.h"
#include "attribs.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "cfganal.h"
#include "tree-dfa.h"

// Adapted from infer_nonnull_range_by_dereference and check_loadstore
// to process nonnull ssa_name OP in S.  DATA contains a pointer to a
// stmt range inference instance.

static bool
non_null_loadstore (gimple *, tree op, tree, void *data)
{
  if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
    {
      /* Some address spaces may legitimately dereference zero.  */
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op));
      if (!targetm.addr_space.zero_address_valid (as))
        {
          tree ssa = TREE_OPERAND (op, 0);
          ((gimple_infer_range *)data)->add_nonzero (ssa);
        }
    }
  return false;
}

// Process an ASSUME call to see if there are any inferred ranges available.

void
gimple_infer_range::check_assume_func (gcall *call)
{
  tree arg;
  unsigned i;
  tree assume_id = TREE_OPERAND (gimple_call_arg (call, 0), 0);
  if (!assume_id)
    return;
  struct function *fun = DECL_STRUCT_FUNCTION (assume_id);
  if (!fun)
    return;
  // Loop over arguments, matching them to the assume parameters.
  for (arg = DECL_ARGUMENTS (assume_id), i = 1;
       arg && i < gimple_call_num_args (call);
       i++, arg = DECL_CHAIN (arg))
    {
      tree op = gimple_call_arg (call, i);
      tree type = TREE_TYPE (op);
      if (gimple_range_ssa_p (op) && Value_Range::supports_type_p (type))
        {
          tree default_def = ssa_default_def (fun, arg);
          if (!default_def || type != TREE_TYPE (default_def))
            continue;
          // Query the global range of the default def in the assume function.
          Value_Range assume_range (type);
          global_ranges.range_of_expr (assume_range, default_def);
          // If there is a non-varying result, add it as an inferred range.
          if (!assume_range.varying_p ())
            {
              add_range (op, assume_range);
              if (dump_file)
                {
                  print_generic_expr (dump_file, assume_id, TDF_SLIM);
                  fprintf (dump_file, " assume inferred range of ");
                  print_generic_expr (dump_file, op, TDF_SLIM);
                  fprintf (dump_file, " (param ");
                  print_generic_expr (dump_file, arg, TDF_SLIM);
                  fprintf (dump_file, ") = ");
                  assume_range.dump (dump_file);
                  fputc ('\n', dump_file);
                }
            }
        }
    }
}
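
// For illustration: given C++23 source roughly like
//   void use (int x) { [[assume (x > 10)]]; ... }
// the assume predicate is outlined into a separate artificial function, and
// the caller retains an IFN_ASSUME call whose first argument identifies that
// function and whose remaining arguments mirror its parameters.  The loop
// above matches each call argument to the corresponding parameter's default
// definition and imports its global range (here x > 10, so [11, +INF]) as an
// inferred range for the argument at the call site.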

// Add NAME and RANGE to the range inference summary.

void
gimple_infer_range::add_range (tree name, vrange &range)
{
  m_names[num_args] = name;
  m_ranges[num_args] = range;
  if (num_args < size_limit - 1)
    num_args++;
}

// Add a nonzero range for NAME to the range inference summary.

void
gimple_infer_range::add_nonzero (tree name)
{
  if (!gimple_range_ssa_p (name))
    return;
  int_range<2> nz;
  nz.set_nonzero (TREE_TYPE (name));
  add_range (name, nz);
}
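
// For a pointer NAME, set_nonzero records a range excluding zero, so a
// dereference seen by non_null_loadstore is summarized as "NAME cannot be
// null on exit from this statement".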

// Process S for range inference and fill in the summary list.
// This is the routine where new inferred ranges should be added.

gimple_infer_range::gimple_infer_range (gimple *s)
{
  num_args = 0;

  if (is_a<gphi *> (s))
    return;

  if (is_a<gcall *> (s) && flag_delete_null_pointer_checks)
    {
      tree fntype = gimple_call_fntype (s);
      bitmap nonnullargs = get_nonnull_args (fntype);
      // Process any non-null arguments.
      if (nonnullargs)
        {
          for (unsigned i = 0; i < gimple_call_num_args (s); i++)
            {
              if (bitmap_empty_p (nonnullargs)
                  || bitmap_bit_p (nonnullargs, i))
                {
                  tree op = gimple_call_arg (s, i);
                  if (POINTER_TYPE_P (TREE_TYPE (op)))
                    add_nonzero (op);
                }
            }
          BITMAP_FREE (nonnullargs);
        }
      // Fallthru and walk load/store ops now.
    }

  // Check for inferred ranges from ASSUME calls.
  if (is_a<gcall *> (s) && gimple_call_internal_p (s)
      && gimple_call_internal_fn (s) == IFN_ASSUME)
    check_assume_func (as_a<gcall *> (s));

  // Look for possible non-null values.
  if (flag_delete_null_pointer_checks && gimple_code (s) != GIMPLE_ASM
      && !gimple_clobber_p (s))
    walk_stmt_load_store_ops (s, (void *)this, non_null_loadstore,
                              non_null_loadstore);
}
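
// A client typically constructs a gimple_infer_range for a statement and
// walks the recorded (name, range) pairs, roughly as in this sketch, where
// record () stands in for whatever the client does with each pair:
//
//   gimple_infer_range infer (stmt);
//   for (unsigned x = 0; x < infer.num (); x++)
//     record (infer.name (x), infer.range (x));
//
// infer_range_manager::register_all_uses below performs exactly this walk.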

// -------------------------------------------------------------------------

// This class is an element in the list of inferred ranges.

class exit_range
{
public:
  tree name;
  vrange *range;
  exit_range *next;
};

// If there is an element which matches SSA, return a pointer to the element.
// Otherwise return NULL.

exit_range *
infer_range_manager::exit_range_head::find_ptr (tree ssa)
{
  // Return NULL if SSA is not in this list.
  if (!m_names || !bitmap_bit_p (m_names, SSA_NAME_VERSION (ssa)))
    return NULL;
  for (exit_range *ptr = head; ptr != NULL; ptr = ptr->next)
    if (ptr->name == ssa)
      return ptr;
  // Should be unreachable.
  gcc_unreachable ();
  return NULL;
}

// Construct a range infer manager.  DO_SEARCH indicates whether an immediate
// use scan should be made the first time a name is processed.  This is for
// on-demand clients who may not visit every statement and may miss uses.

infer_range_manager::infer_range_manager (bool do_search)
{
  bitmap_obstack_initialize (&m_bitmaps);
  m_on_exit.create (0);
  m_on_exit.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);
  // m_seen == NULL indicates no scanning.  Otherwise the bit indicates a
  // scan has been performed on NAME.
  if (do_search)
    m_seen = BITMAP_ALLOC (&m_bitmaps);
  else
    m_seen = NULL;
  obstack_init (&m_list_obstack);
  // Non-zero elements are very common, so cache them for each ssa-name.
  m_nonzero.create (0);
  m_nonzero.safe_grow_cleared (num_ssa_names + 1);
  m_range_allocator = new obstack_vrange_allocator;
}

// Destruct a range infer manager.

infer_range_manager::~infer_range_manager ()
{
  m_nonzero.release ();
  obstack_free (&m_list_obstack, NULL);
  m_on_exit.release ();
  bitmap_obstack_release (&m_bitmaps);
  delete m_range_allocator;
}

// Return a non-zero range value of the appropriate type for NAME from
// the cache, creating it if necessary.

const vrange&
infer_range_manager::get_nonzero (tree name)
{
  unsigned v = SSA_NAME_VERSION (name);
  if (v >= m_nonzero.length ())
    m_nonzero.safe_grow_cleared (num_ssa_names + 20);
  if (!m_nonzero[v])
    {
      m_nonzero[v] = m_range_allocator->alloc_vrange (TREE_TYPE (name));
      m_nonzero[v]->set_nonzero (TREE_TYPE (name));
    }
  return *(m_nonzero[v]);
}
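
// Nearly every dereference of a given name infers the identical non-null
// range, so sharing one allocated copy per SSA name avoids creating a fresh
// range record at every inference site.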

// Return TRUE if NAME has a range inference in block BB.

bool
infer_range_manager::has_range_p (tree name, basic_block bb)
{
  // Check if this is an immediate use search model.
  if (m_seen && !bitmap_bit_p (m_seen, SSA_NAME_VERSION (name)))
    register_all_uses (name);

  if (bb->index >= (int)m_on_exit.length ())
    return false;
  if (!m_on_exit[bb->index].m_names)
    return false;
  if (!bitmap_bit_p (m_on_exit[bb->index].m_names, SSA_NAME_VERSION (name)))
    return false;
  return true;
}

// Return TRUE if NAME has a range inference in block BB, and adjust range R
// to include it.

bool
infer_range_manager::maybe_adjust_range (vrange &r, tree name, basic_block bb)
{
  if (!has_range_p (name, bb))
    return false;
  exit_range *ptr = m_on_exit[bb->index].find_ptr (name);
  gcc_checking_assert (ptr);
  // Return true if this exit range changes R, otherwise false.
  return r.intersect (*(ptr->range));
}
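
// For example, if R is currently VARYING for pointer NAME and BB contains a
// dereference of NAME, the recorded non-null range is intersected into R,
// narrowing it to exclude zero, and TRUE is returned.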

// Add range R as an inferred range for NAME in block BB.

void
infer_range_manager::add_range (tree name, basic_block bb, const vrange &r)
{
  if (bb->index >= (int)m_on_exit.length ())
    m_on_exit.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);

  // Create the summary list bitmap if it doesn't exist.
  if (!m_on_exit[bb->index].m_names)
    m_on_exit[bb->index].m_names = BITMAP_ALLOC (&m_bitmaps);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " on-exit update ");
      print_generic_expr (dump_file, name, TDF_SLIM);
      fprintf (dump_file, " in BB%d : ", bb->index);
      r.dump (dump_file);
      fprintf (dump_file, "\n");
    }

  // If NAME already has a range, intersect them and done.
  exit_range *ptr = m_on_exit[bb->index].find_ptr (name);
  if (ptr)
    {
      Value_Range cur (r);
      // If no new info is added, just return.
      if (!cur.intersect (*(ptr->range)))
        return;
      if (ptr->range->fits_p (cur))
        *(ptr->range) = cur;
      else
        {
          vrange &v = cur;
          ptr->range = m_range_allocator->clone (v);
        }
      return;
    }

  // Otherwise create a record.
  bitmap_set_bit (m_on_exit[bb->index].m_names, SSA_NAME_VERSION (name));
  ptr = (exit_range *)obstack_alloc (&m_list_obstack, sizeof (exit_range));
  ptr->range = m_range_allocator->clone (r);
  ptr->name = name;
  ptr->next = m_on_exit[bb->index].head;
  m_on_exit[bb->index].head = ptr;
}
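
// Note the storage strategy above: when the intersected result still fits in
// the existing allocation, it is updated in place; otherwise a fresh copy is
// cloned from the allocator and the old storage is simply abandoned until the
// manager and its allocator are destroyed.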

// Add a non-zero inferred range for NAME in block BB.

void
infer_range_manager::add_nonzero (tree name, basic_block bb)
{
  add_range (name, bb, get_nonzero (name));
}

// Follow immediate use chains and find all inferred ranges for NAME.

void
infer_range_manager::register_all_uses (tree name)
{
  gcc_checking_assert (m_seen);

  // Check if we've already processed this name.
  unsigned v = SSA_NAME_VERSION (name);
  if (bitmap_bit_p (m_seen, v))
    return;
  bitmap_set_bit (m_seen, v);

  use_operand_p use_p;
  imm_use_iterator iter;

  // Loop over each immediate use and see if it has an inferred range.
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    {
      gimple *s = USE_STMT (use_p);
      gimple_infer_range infer (s);
      for (unsigned x = 0; x < infer.num (); x++)
        {
          if (name == infer.name (x))
            add_range (name, gimple_bb (s), infer.range (x));
        }
    }
}