]>
Commit | Line | Data |
---|---|---|
89fb70a3 | 1 | /* SCC value numbering for trees |
5624e564 | 2 | Copyright (C) 2006-2015 Free Software Foundation, Inc. |
89fb70a3 DB |
3 | Contributed by Daniel Berlin <dan@dberlin.org> |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9dcd6f09 | 9 | the Free Software Foundation; either version 3, or (at your option) |
89fb70a3 DB |
10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
89fb70a3 DB |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
40e23961 MC |
25 | #include "hash-set.h" |
26 | #include "machmode.h" | |
27 | #include "vec.h" | |
28 | #include "double-int.h" | |
29 | #include "input.h" | |
30 | #include "alias.h" | |
31 | #include "symtab.h" | |
32 | #include "wide-int.h" | |
33 | #include "inchash.h" | |
89fb70a3 | 34 | #include "tree.h" |
40e23961 | 35 | #include "fold-const.h" |
d8a2d370 | 36 | #include "stor-layout.h" |
60393bbc | 37 | #include "predict.h" |
60393bbc AM |
38 | #include "hard-reg-set.h" |
39 | #include "input.h" | |
40 | #include "function.h" | |
41 | #include "dominance.h" | |
42 | #include "cfg.h" | |
43 | #include "cfganal.h" | |
89fb70a3 | 44 | #include "basic-block.h" |
cf835838 | 45 | #include "gimple-pretty-print.h" |
89fb70a3 | 46 | #include "tree-inline.h" |
2fb9a547 AM |
47 | #include "hash-table.h" |
48 | #include "tree-ssa-alias.h" | |
49 | #include "internal-fn.h" | |
6d8eb96b | 50 | #include "inchash.h" |
2fb9a547 AM |
51 | #include "gimple-fold.h" |
52 | #include "tree-eh.h" | |
53 | #include "gimple-expr.h" | |
54 | #include "is-a.h" | |
18f429e2 | 55 | #include "gimple.h" |
45b0be94 | 56 | #include "gimplify.h" |
442b4905 AM |
57 | #include "gimple-ssa.h" |
58 | #include "tree-phinodes.h" | |
59 | #include "ssa-iterators.h" | |
d8a2d370 | 60 | #include "stringpool.h" |
442b4905 | 61 | #include "tree-ssanames.h" |
d8a2d370 | 62 | #include "expr.h" |
442b4905 AM |
63 | #include "tree-dfa.h" |
64 | #include "tree-ssa.h" | |
7ee2468b | 65 | #include "dumpfile.h" |
89fb70a3 | 66 | #include "alloc-pool.h" |
89fb70a3 | 67 | #include "flags.h" |
89fb70a3 | 68 | #include "cfgloop.h" |
863d2a57 | 69 | #include "params.h" |
c2979eaf | 70 | #include "tree-ssa-propagate.h" |
89fb70a3 | 71 | #include "tree-ssa-sccvn.h" |
a764d660 RB |
72 | #include "tree-cfg.h" |
73 | #include "domwalk.h" | |
8403c2cf RB |
74 | #include "ipa-ref.h" |
75 | #include "plugin-api.h" | |
76 | #include "cgraph.h" | |
89fb70a3 DB |
77 | |
78 | /* This algorithm is based on the SCC algorithm presented by Keith | |
79 | Cooper and L. Taylor Simpson in "SCC-Based Value numbering" | |
80 | (http://citeseer.ist.psu.edu/41805.html). In | |
81 | straight line code, it is equivalent to a regular hash based value | |
82 | numbering that is performed in reverse postorder. | |
83 | ||
84 | For code with cycles, there are two alternatives, both of which | |
85 | require keeping the hashtables separate from the actual list of | |
86 | value numbers for SSA names. | |
87 | ||
88 | 1. Iterate value numbering in an RPO walk of the blocks, removing | |
89 | all the entries from the hashtable after each iteration (but | |
90 | keeping the SSA name->value number mapping between iterations). | |
91 | Iterate until it does not change. | |
92 | ||
93 | 2. Perform value numbering as part of an SCC walk on the SSA graph, | |
94 | iterating only the cycles in the SSA graph until they do not change | |
95 | (using a separate, optimistic hashtable for value numbering the SCC | |
96 | operands). | |
97 | ||
98 | The second is not just faster in practice (because most SSA graph | |
99 | cycles do not involve all the variables in the graph), it also has | |
100 | some nice properties. | |
101 | ||
102 | One of these nice properties is that when we pop an SCC off the | |
103 | stack, we are guaranteed to have processed all the operands coming from | |
104 | *outside of that SCC*, so we do not need to do anything special to | |
105 | ensure they have value numbers. | |
106 | ||
107 | Another nice property is that the SCC walk is done as part of a DFS | |
108 | of the SSA graph, which makes it easy to perform combining and | |
109 | simplifying operations at the same time. | |
110 | ||
111 | The code below is deliberately written in a way that makes it easy | |
112 | to separate the SCC walk from the other work it does. | |
113 | ||
114 | In order to propagate constants through the code, we track which | |
115 | expressions contain constants, and use those while folding. In | |
116 | theory, we could also track expressions whose value numbers are | |
117 | replaced, in case we end up folding based on expression | |
118 | identities. | |
119 | ||
120 | In order to value number memory, we assign value numbers to vuses. | |
121 | This enables us to note that, for example, stores to the same | |
122 | address of the same value from the same starting memory states are | |
070b797d | 123 | equivalent. |
89fb70a3 DB |
124 | TODO: |
125 | ||
126 | 1. We can iterate only the changing portions of the SCC's, but | |
127 | I have not seen an SCC big enough for this to be a win. | |
128 | 2. If you differentiate between phi nodes for loops and phi nodes | |
129 | for if-then-else, you can properly consider phi nodes in different | |
130 | blocks for equivalence. | |
131 | 3. We could value number vuses in more cases, particularly, whole | |
132 | structure copies. | |
133 | */ | |
134 | ||
bf190e8d LC |
135 | |
136 | /* vn_nary_op hashtable helpers. */ | |
137 | ||
138 | struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s> | |
139 | { | |
140 | typedef vn_nary_op_s value_type; | |
141 | typedef vn_nary_op_s compare_type; | |
142 | static inline hashval_t hash (const value_type *); | |
143 | static inline bool equal (const value_type *, const compare_type *); | |
144 | }; | |
145 | ||
146 | /* Return the computed hashcode for nary operation P1. */ | |
147 | ||
148 | inline hashval_t | |
149 | vn_nary_op_hasher::hash (const value_type *vno1) | |
150 | { | |
151 | return vno1->hashcode; | |
152 | } | |
153 | ||
154 | /* Compare nary operations P1 and P2 and return true if they are | |
155 | equivalent. */ | |
156 | ||
157 | inline bool | |
158 | vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2) | |
159 | { | |
160 | return vn_nary_op_eq (vno1, vno2); | |
161 | } | |
162 | ||
c203e8a7 | 163 | typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type; |
bf190e8d LC |
164 | typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type; |
165 | ||
166 | ||
167 | /* vn_phi hashtable helpers. */ | |
168 | ||
169 | static int | |
170 | vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2); | |
171 | ||
172 | struct vn_phi_hasher | |
173 | { | |
174 | typedef vn_phi_s value_type; | |
175 | typedef vn_phi_s compare_type; | |
176 | static inline hashval_t hash (const value_type *); | |
177 | static inline bool equal (const value_type *, const compare_type *); | |
178 | static inline void remove (value_type *); | |
179 | }; | |
180 | ||
181 | /* Return the computed hashcode for phi operation P1. */ | |
182 | ||
183 | inline hashval_t | |
184 | vn_phi_hasher::hash (const value_type *vp1) | |
185 | { | |
186 | return vp1->hashcode; | |
187 | } | |
188 | ||
189 | /* Compare two phi entries for equality, ignoring VN_TOP arguments. */ | |
190 | ||
191 | inline bool | |
192 | vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2) | |
193 | { | |
194 | return vn_phi_eq (vp1, vp2); | |
195 | } | |
196 | ||
197 | /* Free a phi operation structure VP. */ | |
198 | ||
199 | inline void | |
200 | vn_phi_hasher::remove (value_type *phi) | |
201 | { | |
202 | phi->phiargs.release (); | |
203 | } | |
204 | ||
c203e8a7 | 205 | typedef hash_table<vn_phi_hasher> vn_phi_table_type; |
bf190e8d LC |
206 | typedef vn_phi_table_type::iterator vn_phi_iterator_type; |
207 | ||
208 | ||
209 | /* Compare two reference operands P1 and P2 for equality. Return true if | |
210 | they are equal, and false otherwise. */ | |
211 | ||
212 | static int | |
213 | vn_reference_op_eq (const void *p1, const void *p2) | |
214 | { | |
215 | const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1; | |
216 | const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2; | |
217 | ||
218 | return (vro1->opcode == vro2->opcode | |
219 | /* We do not care for differences in type qualification. */ | |
220 | && (vro1->type == vro2->type | |
221 | || (vro1->type && vro2->type | |
222 | && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type), | |
223 | TYPE_MAIN_VARIANT (vro2->type)))) | |
224 | && expressions_equal_p (vro1->op0, vro2->op0) | |
225 | && expressions_equal_p (vro1->op1, vro2->op1) | |
226 | && expressions_equal_p (vro1->op2, vro2->op2)); | |
227 | } | |
228 | ||
229 | /* Free a reference operation structure VP. */ | |
230 | ||
231 | static inline void | |
232 | free_reference (vn_reference_s *vr) | |
233 | { | |
234 | vr->operands.release (); | |
235 | } | |
236 | ||
237 | ||
238 | /* vn_reference hashtable helpers. */ | |
239 | ||
240 | struct vn_reference_hasher | |
241 | { | |
242 | typedef vn_reference_s value_type; | |
243 | typedef vn_reference_s compare_type; | |
244 | static inline hashval_t hash (const value_type *); | |
245 | static inline bool equal (const value_type *, const compare_type *); | |
246 | static inline void remove (value_type *); | |
247 | }; | |
248 | ||
249 | /* Return the hashcode for a given reference operation P1. */ | |
250 | ||
251 | inline hashval_t | |
252 | vn_reference_hasher::hash (const value_type *vr1) | |
253 | { | |
254 | return vr1->hashcode; | |
255 | } | |
256 | ||
257 | inline bool | |
258 | vn_reference_hasher::equal (const value_type *v, const compare_type *c) | |
259 | { | |
260 | return vn_reference_eq (v, c); | |
261 | } | |
262 | ||
263 | inline void | |
264 | vn_reference_hasher::remove (value_type *v) | |
265 | { | |
266 | free_reference (v); | |
267 | } | |
268 | ||
c203e8a7 | 269 | typedef hash_table<vn_reference_hasher> vn_reference_table_type; |
bf190e8d LC |
270 | typedef vn_reference_table_type::iterator vn_reference_iterator_type; |
271 | ||
272 | ||
/* The set of hashtables and alloc_pool's for their items.  One such
   set exists for the proven ("valid") information and one for the
   optimistic information used while iterating SCCs.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;		/* N-ary operation entries.  */
  vn_phi_table_type *phis;		/* PHI node entries.  */
  vn_reference_table_type *references;	/* Memory reference entries.  */
  struct obstack nary_obstack;		/* Backing storage for nary entries.  */
  alloc_pool phis_pool;			/* Allocation pool for phi entries.  */
  alloc_pool references_pool;		/* Allocation pool for references.  */
} *vn_tables_t;
284 | ||
bf190e8d LC |
285 | |
286 | /* vn_constant hashtable helpers. */ | |
287 | ||
288 | struct vn_constant_hasher : typed_free_remove <vn_constant_s> | |
289 | { | |
290 | typedef vn_constant_s value_type; | |
291 | typedef vn_constant_s compare_type; | |
292 | static inline hashval_t hash (const value_type *); | |
293 | static inline bool equal (const value_type *, const compare_type *); | |
294 | }; | |
295 | ||
296 | /* Hash table hash function for vn_constant_t. */ | |
297 | ||
298 | inline hashval_t | |
299 | vn_constant_hasher::hash (const value_type *vc1) | |
300 | { | |
301 | return vc1->hashcode; | |
302 | } | |
303 | ||
304 | /* Hash table equality function for vn_constant_t. */ | |
305 | ||
306 | inline bool | |
307 | vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2) | |
308 | { | |
309 | if (vc1->hashcode != vc2->hashcode) | |
310 | return false; | |
311 | ||
312 | return vn_constant_eq_with_type (vc1->constant, vc2->constant); | |
313 | } | |
314 | ||
c203e8a7 | 315 | static hash_table<vn_constant_hasher> *constant_to_value_id; |
c9145754 | 316 | static bitmap constant_value_ids; |
89fb70a3 | 317 | |
89fb70a3 DB |
318 | |
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

/* Shorthand for the current value number of SSA name X.  */
#define SSA_VAL(x) (VN_INFO ((x))->valnum)
341 | ||
d1c0308e RB |
342 | /* Return the SSA value of the VUSE x, supporting released VDEFs |
343 | during elimination which will value-number the VDEF to the | |
344 | associated VUSE (but not substitute in the whole lattice). */ | |
345 | ||
346 | static inline tree | |
347 | vuse_ssa_val (tree x) | |
348 | { | |
349 | if (!x) | |
350 | return NULL_TREE; | |
351 | ||
352 | do | |
353 | { | |
354 | x = SSA_VAL (x); | |
355 | } | |
356 | while (SSA_NAME_IN_FREE_LIST (x)); | |
357 | ||
358 | return x; | |
359 | } | |
360 | ||
89fb70a3 DB |
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;



/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
89fb70a3 DB |
384 | |
385 | /* Return the value numbering information for a given SSA name. */ | |
386 | ||
387 | vn_ssa_aux_t | |
388 | VN_INFO (tree name) | |
389 | { | |
9771b263 | 390 | vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)]; |
7a40b8b1 | 391 | gcc_checking_assert (res); |
c9145754 | 392 | return res; |
89fb70a3 DB |
393 | } |
394 | ||
395 | /* Set the value numbering info for a given SSA name to a given | |
396 | value. */ | |
397 | ||
398 | static inline void | |
399 | VN_INFO_SET (tree name, vn_ssa_aux_t value) | |
400 | { | |
9771b263 | 401 | vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value; |
89fb70a3 DB |
402 | } |
403 | ||
cbfb21c1 SB |
404 | /* Initialize the value numbering info for a given SSA name. |
405 | This should be called just once for every SSA name. */ | |
89fb70a3 DB |
406 | |
407 | vn_ssa_aux_t | |
408 | VN_INFO_GET (tree name) | |
409 | { | |
cbfb21c1 SB |
410 | vn_ssa_aux_t newinfo; |
411 | ||
3d9a9f94 | 412 | newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux); |
cbfb21c1 | 413 | memset (newinfo, 0, sizeof (struct vn_ssa_aux)); |
9771b263 DN |
414 | if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()) |
415 | vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1); | |
416 | vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo; | |
89fb70a3 DB |
417 | return newinfo; |
418 | } | |
419 | ||
420 | ||
726a989a RB |
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  /* VN_TOP means "not yet visited"; the name itself is the best we have.  */
  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Only rebuild VOP-less references whose single operand is an
	 SSA name; other references need memory state to interpret.  */
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      /* Vector CONSTRUCTORs are valid representative expressions.  */
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
506 | ||
17742d62 RG |
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		/* Invariant addresses act as constants; other ADDR_EXPRs
		   are references, and CONSTRUCTORs are nary.  */
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
726a989a | 564 | |
bb9e4199 RG |
565 | /* Lookup a value id for CONSTANT and return it. If it does not |
566 | exist returns 0. */ | |
567 | ||
568 | unsigned int | |
569 | get_constant_value_id (tree constant) | |
570 | { | |
bf190e8d | 571 | vn_constant_s **slot; |
bb9e4199 | 572 | struct vn_constant_s vc; |
726a989a RB |
573 | |
574 | vc.hashcode = vn_hash_constant_with_type (constant); | |
bb9e4199 | 575 | vc.constant = constant; |
c203e8a7 | 576 | slot = constant_to_value_id->find_slot (&vc, NO_INSERT); |
bb9e4199 | 577 | if (slot) |
bf190e8d | 578 | return (*slot)->value_id; |
bb9e4199 RG |
579 | return 0; |
580 | } | |
581 | ||
c9145754 DB |
582 | /* Lookup a value id for CONSTANT, and if it does not exist, create a |
583 | new one and return it. If it does exist, return it. */ | |
584 | ||
585 | unsigned int | |
586 | get_or_alloc_constant_value_id (tree constant) | |
587 | { | |
bf190e8d | 588 | vn_constant_s **slot; |
a7d04a53 RG |
589 | struct vn_constant_s vc; |
590 | vn_constant_t vcp; | |
b8698a0f | 591 | |
a7d04a53 RG |
592 | vc.hashcode = vn_hash_constant_with_type (constant); |
593 | vc.constant = constant; | |
c203e8a7 | 594 | slot = constant_to_value_id->find_slot (&vc, INSERT); |
c9145754 | 595 | if (*slot) |
bf190e8d | 596 | return (*slot)->value_id; |
a7d04a53 RG |
597 | |
598 | vcp = XNEW (struct vn_constant_s); | |
599 | vcp->hashcode = vc.hashcode; | |
600 | vcp->constant = constant; | |
601 | vcp->value_id = get_next_value_id (); | |
bf190e8d | 602 | *slot = vcp; |
a7d04a53 RG |
603 | bitmap_set_bit (constant_value_ids, vcp->value_id); |
604 | return vcp->value_id; | |
c9145754 DB |
605 | } |
606 | ||
/* Return true if V is a value id for a constant.  Membership is
   tracked in the constant_value_ids bitmap, populated by
   get_or_alloc_constant_value_id.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
614 | ||
/* Compute the hash for a reference operand VRO1, mixing it into
   HSTATE.  The opcode and any present operands contribute; the type
   deliberately does not (compare vn_reference_op_eq).  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
628 | ||
89fb70a3 DB |
/* Compute a hash for the reference operation VR1 and return it.
   Runs of operands with known constant offsets are folded into a
   single accumulated offset so that equivalent access paths with
   different decompositions hash the same (mirrors the offset
   accumulation in vn_reference_eq).  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  /* Accumulated constant offset of the current run; -1 means "no run
     in progress".  */
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      /* Track whether we are directly behind a MEM_REF so that
	 MEM_REF/ADDR_EXPR pairs can be hashed as the plain decl.  */
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  /* Known offset: extend (or start) the accumulated run.  */
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  /* Run ended: flush the nonzero accumulated offset, then hash
	     this operand itself.  */
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      /* Hash &decl behind a MEM_REF as the decl itself.  */
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
680 | ||
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  Operand runs
   with known constant offsets are compared by accumulated offset, so
   differently decomposed but equal access paths still match.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  /* For integral types the precision has to match as well; a type
     whose precision is narrower than its size reads differently.  */
  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  /* Walk both operand vectors in parallel with independent cursors,
     summing runs of known-offset operands before comparing.  */
  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Stop at the first operand without a known offset.  */
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      /* Compare an ADDR_EXPR behind a MEM_REF as if it were the
	 underlying decl, via a temporary operand.  */
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
775 | ||
726a989a | 776 | /* Copy the operations present in load/store REF into RESULT, a vector of |
89fb70a3 DB |
777 | vn_reference_op_s's. */ |
778 | ||
26f3a4e1 | 779 | static void |
9771b263 | 780 | copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result) |
89fb70a3 | 781 | { |
9f59420d RG |
782 | if (TREE_CODE (ref) == TARGET_MEM_REF) |
783 | { | |
784 | vn_reference_op_s temp; | |
785 | ||
39e843e8 RB |
786 | result->reserve (3); |
787 | ||
9f59420d | 788 | memset (&temp, 0, sizeof (temp)); |
6d6c9525 | 789 | temp.type = TREE_TYPE (ref); |
9f59420d | 790 | temp.opcode = TREE_CODE (ref); |
150e3929 RG |
791 | temp.op0 = TMR_INDEX (ref); |
792 | temp.op1 = TMR_STEP (ref); | |
793 | temp.op2 = TMR_OFFSET (ref); | |
70f34814 | 794 | temp.off = -1; |
39e843e8 | 795 | result->quick_push (temp); |
9f59420d RG |
796 | |
797 | memset (&temp, 0, sizeof (temp)); | |
798 | temp.type = NULL_TREE; | |
4d948885 RG |
799 | temp.opcode = ERROR_MARK; |
800 | temp.op0 = TMR_INDEX2 (ref); | |
801 | temp.off = -1; | |
39e843e8 | 802 | result->quick_push (temp); |
4d948885 RG |
803 | |
804 | memset (&temp, 0, sizeof (temp)); | |
805 | temp.type = NULL_TREE; | |
806 | temp.opcode = TREE_CODE (TMR_BASE (ref)); | |
807 | temp.op0 = TMR_BASE (ref); | |
70f34814 | 808 | temp.off = -1; |
39e843e8 | 809 | result->quick_push (temp); |
9f59420d RG |
810 | return; |
811 | } | |
812 | ||
89fb70a3 | 813 | /* For non-calls, store the information that makes up the address. */ |
1eadb567 | 814 | tree orig = ref; |
89fb70a3 DB |
815 | while (ref) |
816 | { | |
817 | vn_reference_op_s temp; | |
818 | ||
819 | memset (&temp, 0, sizeof (temp)); | |
6d6c9525 | 820 | temp.type = TREE_TYPE (ref); |
89fb70a3 | 821 | temp.opcode = TREE_CODE (ref); |
70f34814 | 822 | temp.off = -1; |
89fb70a3 DB |
823 | |
824 | switch (temp.opcode) | |
825 | { | |
4ec0a198 TV |
826 | case MODIFY_EXPR: |
827 | temp.op0 = TREE_OPERAND (ref, 1); | |
828 | break; | |
842679dc TV |
829 | case WITH_SIZE_EXPR: |
830 | temp.op0 = TREE_OPERAND (ref, 1); | |
831 | temp.off = 0; | |
832 | break; | |
70f34814 RG |
833 | case MEM_REF: |
834 | /* The base address gets its own vn_reference_op_s structure. */ | |
835 | temp.op0 = TREE_OPERAND (ref, 1); | |
9541ffee | 836 | if (tree_fits_shwi_p (TREE_OPERAND (ref, 1))) |
eb1ce453 | 837 | temp.off = tree_to_shwi (TREE_OPERAND (ref, 1)); |
70f34814 | 838 | break; |
89fb70a3 DB |
839 | case BIT_FIELD_REF: |
840 | /* Record bits and position. */ | |
841 | temp.op0 = TREE_OPERAND (ref, 1); | |
842 | temp.op1 = TREE_OPERAND (ref, 2); | |
843 | break; | |
844 | case COMPONENT_REF: | |
ba2e1892 RG |
845 | /* The field decl is enough to unambiguously specify the field, |
846 | a matching type is not necessary and a mismatching type | |
847 | is always a spurious difference. */ | |
848 | temp.type = NULL_TREE; | |
b45d2719 RG |
849 | temp.op0 = TREE_OPERAND (ref, 1); |
850 | temp.op1 = TREE_OPERAND (ref, 2); | |
70f34814 RG |
851 | { |
852 | tree this_offset = component_ref_field_offset (ref); | |
853 | if (this_offset | |
854 | && TREE_CODE (this_offset) == INTEGER_CST) | |
855 | { | |
856 | tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1)); | |
857 | if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0) | |
858 | { | |
807e902e KZ |
859 | offset_int off |
860 | = (wi::to_offset (this_offset) | |
861 | + wi::lrshift (wi::to_offset (bit_offset), | |
862 | LOG2_BITS_PER_UNIT)); | |
863 | if (wi::fits_shwi_p (off) | |
1eadb567 RB |
864 | /* Probibit value-numbering zero offset components |
865 | of addresses the same before the pass folding | |
866 | __builtin_object_size had a chance to run | |
867 | (checking cfun->after_inlining does the | |
868 | trick here). */ | |
869 | && (TREE_CODE (orig) != ADDR_EXPR | |
807e902e | 870 | || off != 0 |
1eadb567 | 871 | || cfun->after_inlining)) |
807e902e | 872 | temp.off = off.to_shwi (); |
70f34814 RG |
873 | } |
874 | } | |
875 | } | |
89fb70a3 DB |
876 | break; |
877 | case ARRAY_RANGE_REF: | |
878 | case ARRAY_REF: | |
879 | /* Record index as operand. */ | |
880 | temp.op0 = TREE_OPERAND (ref, 1); | |
e52201b6 RG |
881 | /* Always record lower bounds and element size. */ |
882 | temp.op1 = array_ref_low_bound (ref); | |
883 | temp.op2 = array_ref_element_size (ref); | |
70f34814 RG |
884 | if (TREE_CODE (temp.op0) == INTEGER_CST |
885 | && TREE_CODE (temp.op1) == INTEGER_CST | |
886 | && TREE_CODE (temp.op2) == INTEGER_CST) | |
887 | { | |
807e902e KZ |
888 | offset_int off = ((wi::to_offset (temp.op0) |
889 | - wi::to_offset (temp.op1)) | |
890 | * wi::to_offset (temp.op2)); | |
891 | if (wi::fits_shwi_p (off)) | |
892 | temp.off = off.to_shwi(); | |
70f34814 | 893 | } |
89fb70a3 | 894 | break; |
6d6c9525 RG |
895 | case VAR_DECL: |
896 | if (DECL_HARD_REGISTER (ref)) | |
897 | { | |
898 | temp.op0 = ref; | |
899 | break; | |
900 | } | |
901 | /* Fallthru. */ | |
902 | case PARM_DECL: | |
903 | case CONST_DECL: | |
904 | case RESULT_DECL: | |
905 | /* Canonicalize decls to MEM[&decl] which is what we end up with | |
906 | when valueizing MEM[ptr] with ptr = &decl. */ | |
907 | temp.opcode = MEM_REF; | |
908 | temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0); | |
909 | temp.off = 0; | |
9771b263 | 910 | result->safe_push (temp); |
6d6c9525 | 911 | temp.opcode = ADDR_EXPR; |
39e843e8 | 912 | temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref); |
6d6c9525 RG |
913 | temp.type = TREE_TYPE (temp.op0); |
914 | temp.off = -1; | |
915 | break; | |
4794afa5 DB |
916 | case STRING_CST: |
917 | case INTEGER_CST: | |
918 | case COMPLEX_CST: | |
919 | case VECTOR_CST: | |
26b70b9f | 920 | case REAL_CST: |
0747aae4 | 921 | case FIXED_CST: |
bb0c55f6 | 922 | case CONSTRUCTOR: |
89fb70a3 DB |
923 | case SSA_NAME: |
924 | temp.op0 = ref; | |
925 | break; | |
ce94d354 RG |
926 | case ADDR_EXPR: |
927 | if (is_gimple_min_invariant (ref)) | |
928 | { | |
929 | temp.op0 = ref; | |
930 | break; | |
931 | } | |
8403c2cf | 932 | break; |
4794afa5 DB |
933 | /* These are only interesting for their operands, their |
934 | existence, and their type. They will never be the last | |
935 | ref in the chain of references (IE they require an | |
936 | operand), so we don't have to put anything | |
937 | for op* as it will be handled by the iteration */ | |
4794afa5 DB |
938 | case REALPART_EXPR: |
939 | case VIEW_CONVERT_EXPR: | |
70f34814 RG |
940 | temp.off = 0; |
941 | break; | |
942 | case IMAGPART_EXPR: | |
943 | /* This is only interesting for its constant offset. */ | |
944 | temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref))); | |
89fb70a3 | 945 | break; |
4794afa5 DB |
946 | default: |
947 | gcc_unreachable (); | |
89fb70a3 | 948 | } |
9771b263 | 949 | result->safe_push (temp); |
89fb70a3 | 950 | |
ce94d354 | 951 | if (REFERENCE_CLASS_P (ref) |
4ec0a198 | 952 | || TREE_CODE (ref) == MODIFY_EXPR |
842679dc | 953 | || TREE_CODE (ref) == WITH_SIZE_EXPR |
ce94d354 RG |
954 | || (TREE_CODE (ref) == ADDR_EXPR |
955 | && !is_gimple_min_invariant (ref))) | |
89fb70a3 DB |
956 | ref = TREE_OPERAND (ref, 0); |
957 | else | |
958 | ref = NULL_TREE; | |
959 | } | |
960 | } | |
961 | ||
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.

   The ops vector is walked outermost-access-first; OFFSET accumulates the
   bit offset of the access relative to the ultimate base object, and
   MAX_SIZE degrades to -1 whenever a component with unknown offset is
   seen (meaning "access size/position unknown" to the oracle).  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  /* Points at the slot where the next inner base expression is to be
     stored; NULL once the base is final.  */
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    /* For BIT_FIELD_REF op0 records the number of bits (see the
       copy_reference_ops_from_ref recording scheme).  */
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      /* &DECL feeding the MEM_REF: strip the indirection and use
		 the DECL itself as base, folding in the MEM_REF offset
		 recorded on the previous op.  */
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  /* op1 holds the bit position.  */
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    /* A non-NULL op1 is a variable field offset we cannot
	       evaluate here.  */
	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      /* offset += (index - low_bound) * element_size in bits.  */
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  /* Real part lives at offset 0 — nothing to add.  */
	  break;

	case IMAGPART_EXPR:
	  /* Imaginary part follows the real part of equal size.  */
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
1137 | ||
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.

   The resulting op order is: [non-SSA lhs] call-descriptor args...,
   and that order participates in the reference hash, so it must stay
   stable across calls.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  /* Record the EH landing pad so calls in different EH regions do not
     value-number the same.  */
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
1184 | ||
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.

   Expects (*ops)[*i_p] to be an ADDR_EXPR op preceded by a MEM_REF op
   (callers check this); folds &OBJ.foo.bar into the MEM_REF offset so
   the address operand becomes plain &OBJ.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  /* Only rewrite if stripping the handled components actually changed
     the base; otherwise the ops are already canonical.  */
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      /* Keep the cached byte offset in sync with the new constant.  */
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
89fb70a3 | 1215 | |
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.

   Like vn_reference_fold_indirect, but the address operand is an
   SSA name: look at its defining statement and forward-propagate an
   ADDR_EXPR or POINTER_PLUS_EXPR into the preceding MEM_REF offset,
   then recurse as long as progress is possible.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* Only handle &MEM[...]; a decl base here would be the job of
	 vn_reference_fold_indirect.  */
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      /* ptr = base_ptr + const_off: fold the constant into the MEM_REF
	 offset and continue from base_ptr.  */
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  /* Valueize the new address; if it became a constant the opcode must
     follow suit.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
1288 | ||
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.

   Two cases are handled: folding a builtin call with constant
   arguments, and reading a constant value out of a constant
   initializer (or an all-constant operand chain).  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      /* Only bother folding when at least one argument is constant —
	 otherwise build_call_expr cannot simplify anything.  */
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  /* Strip a useless conversion the folder may wrap around
	     the result.  */
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      /* Accumulate the byte offset of the access; bail out if any
	 component's offset is unknown.  The walk stops just past the
	 innermost MEM_REF, which designates the base.  */
      for (i = 0; i < operands.length (); ++i)
	{
	  if (operands[i].off == -1)
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      /* Step back onto the base op (the MEM_REF or, if the loop ran
	 off the end, the last op).  */
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      /* base[1] is the op following the MEM_REF, i.e. its address.  */
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  ctor = ctor_for_folding (decl);
	}
      /* ctor_for_folding returning NULL means the decl is known to be
	 zero-initialized.  */
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else
	    {
	      /* No decl: encode the constant to bytes and reinterpret
		 them in the access type.  */
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      if (native_encode_expr (ctor, buf, size, off) > 0)
		return native_interpret_expr (ref->type, buf, size);
	    }
	}
    }

  return NULL_TREE;
}
1401 | ||
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.

   Note the fold helpers called below may shrink the vector and adjust
   the loop index I through their i_p argument.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  /* off = (index - low_bound) * element_size, cached only when
	     it fits a signed HOST_WIDE_INT.  */
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}
1478 | ||
9771b263 DN |
1479 | static vec<vn_reference_op_s> |
1480 | valueize_refs (vec<vn_reference_op_s> orig) | |
3ceaf2f5 RG |
1481 | { |
1482 | bool tem; | |
1483 | return valueize_refs_1 (orig, &tem); | |
1484 | } | |
1485 | ||
/* Scratch vector reused by the valueize_shared_reference_ops_from_*
   helpers to avoid re-allocating on every lookup.  */
static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.

   NOTE: the returned vector is invalidated by the next call to any
   of the shared-ops helpers.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  /* Re-assign: valueization may have re-allocated the vector.  */
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}
1504 | ||
1505 | /* Create a vector of vn_reference_op_s structures from CALL, a | |
1506 | call statement. The vector is shared among all callers of | |
1507 | this function. */ | |
1508 | ||
9771b263 | 1509 | static vec<vn_reference_op_s> |
538dd0b7 | 1510 | valueize_shared_reference_ops_from_call (gcall *call) |
aa7069aa RG |
1511 | { |
1512 | if (!call) | |
6e1aa848 | 1513 | return vNULL; |
9771b263 | 1514 | shared_lookup_references.truncate (0); |
aa7069aa RG |
1515 | copy_reference_ops_from_call (call, &shared_lookup_references); |
1516 | shared_lookup_references = valueize_refs (shared_lookup_references); | |
1517 | return shared_lookup_references; | |
1518 | } | |
1519 | ||
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  /* During the optimistic iteration also fall back to the table of
     facts already proven valid.  */
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
1544 | ||
/* If non-NULL, the walker records the last VUSE visited here so the
   caller can insert with the most precise vuse.  */
static tree *last_vuse_ptr;
/* The walk kind in effect for the current lookup, and the default one
   installed at pass entry.  */
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.

   Returns the found hash table entry, NULL to continue walking, or
   (void *)-1 to abort the walk.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  The hashcode incorporates the vuse's SSA
     version additively, so subtract the old and add the new one
     rather than rehashing from scratch.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
c9145754 | 1585 | |
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.

   On insertion the operands vector is copied, so the caller retains
   ownership of OPERANDS.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  /* Not found: allocate (or reuse) a value id matching VALUE's kind
     and insert a fresh entry.  */
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
1615 | ||
01df5c8a RG |
1616 | /* Callback for walk_non_aliased_vuses. Tries to perform a lookup |
1617 | from the statement defining VUSE and if not successful tries to | |
073a8998 | 1618 | translate *REFP and VR_ through an aggregate copy at the definition |
01df5c8a RG |
1619 | of VUSE. */ |
1620 | ||
1621 | static void * | |
50f0aa20 RB |
1622 | vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_, |
1623 | bool disambiguate_only) | |
01df5c8a RG |
1624 | { |
1625 | vn_reference_t vr = (vn_reference_t)vr_; | |
1626 | gimple def_stmt = SSA_NAME_DEF_STMT (vuse); | |
01df5c8a | 1627 | tree base; |
0f900dfa | 1628 | HOST_WIDE_INT offset, maxsize; |
9771b263 | 1629 | static vec<vn_reference_op_s> |
6e1aa848 | 1630 | lhs_ops = vNULL; |
8ea34dab RG |
1631 | ao_ref lhs_ref; |
1632 | bool lhs_ref_ok = false; | |
01df5c8a | 1633 | |
4fa4929e RG |
1634 | /* First try to disambiguate after value-replacing in the definitions LHS. */ |
1635 | if (is_gimple_assign (def_stmt)) | |
1636 | { | |
9771b263 | 1637 | vec<vn_reference_op_s> tem; |
4fa4929e | 1638 | tree lhs = gimple_assign_lhs (def_stmt); |
25aa059e | 1639 | bool valueized_anything = false; |
8ea34dab | 1640 | /* Avoid re-allocation overhead. */ |
9771b263 | 1641 | lhs_ops.truncate (0); |
8ea34dab RG |
1642 | copy_reference_ops_from_ref (lhs, &lhs_ops); |
1643 | tem = lhs_ops; | |
25aa059e | 1644 | lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything); |
8ea34dab | 1645 | gcc_assert (lhs_ops == tem); |
25aa059e RG |
1646 | if (valueized_anything) |
1647 | { | |
1648 | lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref, | |
1649 | get_alias_set (lhs), | |
1650 | TREE_TYPE (lhs), lhs_ops); | |
1651 | if (lhs_ref_ok | |
1652 | && !refs_may_alias_p_1 (ref, &lhs_ref, true)) | |
1653 | return NULL; | |
1654 | } | |
1655 | else | |
1656 | { | |
1657 | ao_ref_init (&lhs_ref, lhs); | |
1658 | lhs_ref_ok = true; | |
1659 | } | |
4fa4929e | 1660 | } |
50f0aa20 RB |
1661 | else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL) |
1662 | && gimple_call_num_args (def_stmt) <= 4) | |
1663 | { | |
1664 | /* For builtin calls valueize its arguments and call the | |
1665 | alias oracle again. Valueization may improve points-to | |
1666 | info of pointers and constify size and position arguments. | |
1667 | Originally this was motivated by PR61034 which has | |
1668 | conditional calls to free falsely clobbering ref because | |
1669 | of imprecise points-to info of the argument. */ | |
1670 | tree oldargs[4]; | |
61dd7fbc | 1671 | bool valueized_anything = false; |
50f0aa20 RB |
1672 | for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i) |
1673 | { | |
1674 | oldargs[i] = gimple_call_arg (def_stmt, i); | |
1675 | if (TREE_CODE (oldargs[i]) == SSA_NAME | |
1676 | && VN_INFO (oldargs[i])->valnum != oldargs[i]) | |
1677 | { | |
1678 | gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum); | |
1679 | valueized_anything = true; | |
1680 | } | |
1681 | } | |
1682 | if (valueized_anything) | |
1683 | { | |
538dd0b7 DM |
1684 | bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt), |
1685 | ref); | |
50f0aa20 RB |
1686 | for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i) |
1687 | gimple_call_set_arg (def_stmt, i, oldargs[i]); | |
1688 | if (!res) | |
1689 | return NULL; | |
1690 | } | |
1691 | } | |
1692 | ||
1693 | if (disambiguate_only) | |
1694 | return (void *)-1; | |
4fa4929e | 1695 | |
b45d2719 RG |
1696 | base = ao_ref_base (ref); |
1697 | offset = ref->offset; | |
b45d2719 | 1698 | maxsize = ref->max_size; |
01df5c8a RG |
1699 | |
1700 | /* If we cannot constrain the size of the reference we cannot | |
1701 | test if anything kills it. */ | |
1702 | if (maxsize == -1) | |
1703 | return (void *)-1; | |
1704 | ||
47598145 MM |
1705 | /* We can't deduce anything useful from clobbers. */ |
1706 | if (gimple_clobber_p (def_stmt)) | |
1707 | return (void *)-1; | |
1708 | ||
01df5c8a | 1709 | /* def_stmt may-defs *ref. See if we can derive a value for *ref |
47598145 | 1710 | from that definition. |
01df5c8a | 1711 | 1) Memset. */ |
b45d2719 | 1712 | if (is_gimple_reg_type (vr->type) |
c7ee7b45 | 1713 | && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET) |
01df5c8a | 1714 | && integer_zerop (gimple_call_arg (def_stmt, 1)) |
cc269bb6 | 1715 | && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)) |
01df5c8a RG |
1716 | && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR) |
1717 | { | |
1718 | tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0); | |
1719 | tree base2; | |
1720 | HOST_WIDE_INT offset2, size2, maxsize2; | |
1721 | base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2); | |
eb1ce453 | 1722 | size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8; |
01df5c8a | 1723 | if ((unsigned HOST_WIDE_INT)size2 / 8 |
eb1ce453 | 1724 | == tree_to_uhwi (gimple_call_arg (def_stmt, 2)) |
9c8cbc74 | 1725 | && maxsize2 != -1 |
01df5c8a RG |
1726 | && operand_equal_p (base, base2, 0) |
1727 | && offset2 <= offset | |
1728 | && offset2 + size2 >= offset + maxsize) | |
b45d2719 | 1729 | { |
e8160c9a | 1730 | tree val = build_zero_cst (vr->type); |
9179ed9d | 1731 | return vn_reference_lookup_or_insert_for_pieces |
b55eb410 | 1732 | (vuse, vr->set, vr->type, vr->operands, val); |
b45d2719 | 1733 | } |
01df5c8a RG |
1734 | } |
1735 | ||
1736 | /* 2) Assignment from an empty CONSTRUCTOR. */ | |
b45d2719 | 1737 | else if (is_gimple_reg_type (vr->type) |
01df5c8a RG |
1738 | && gimple_assign_single_p (def_stmt) |
1739 | && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR | |
1740 | && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0) | |
1741 | { | |
1742 | tree base2; | |
1743 | HOST_WIDE_INT offset2, size2, maxsize2; | |
1744 | base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt), | |
1745 | &offset2, &size2, &maxsize2); | |
9c8cbc74 EB |
1746 | if (maxsize2 != -1 |
1747 | && operand_equal_p (base, base2, 0) | |
01df5c8a RG |
1748 | && offset2 <= offset |
1749 | && offset2 + size2 >= offset + maxsize) | |
b45d2719 | 1750 | { |
e8160c9a | 1751 | tree val = build_zero_cst (vr->type); |
9179ed9d | 1752 | return vn_reference_lookup_or_insert_for_pieces |
b55eb410 | 1753 | (vuse, vr->set, vr->type, vr->operands, val); |
b45d2719 | 1754 | } |
01df5c8a RG |
1755 | } |
1756 | ||
c867aba0 RG |
1757 | /* 3) Assignment from a constant. We can use folds native encode/interpret |
1758 | routines to extract the assigned bits. */ | |
b2e51979 RG |
1759 | else if (vn_walk_kind == VN_WALKREWRITE |
1760 | && CHAR_BIT == 8 && BITS_PER_UNIT == 8 | |
c867aba0 RG |
1761 | && ref->size == maxsize |
1762 | && maxsize % BITS_PER_UNIT == 0 | |
1763 | && offset % BITS_PER_UNIT == 0 | |
1764 | && is_gimple_reg_type (vr->type) | |
1765 | && gimple_assign_single_p (def_stmt) | |
1766 | && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))) | |
1767 | { | |
1768 | tree base2; | |
1769 | HOST_WIDE_INT offset2, size2, maxsize2; | |
1770 | base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt), | |
1771 | &offset2, &size2, &maxsize2); | |
1772 | if (maxsize2 != -1 | |
1773 | && maxsize2 == size2 | |
1774 | && size2 % BITS_PER_UNIT == 0 | |
1775 | && offset2 % BITS_PER_UNIT == 0 | |
1776 | && operand_equal_p (base, base2, 0) | |
1777 | && offset2 <= offset | |
1778 | && offset2 + size2 >= offset + maxsize) | |
1779 | { | |
1780 | /* We support up to 512-bit values (for V8DFmode). */ | |
1781 | unsigned char buffer[64]; | |
1782 | int len; | |
1783 | ||
1784 | len = native_encode_expr (gimple_assign_rhs1 (def_stmt), | |
1785 | buffer, sizeof (buffer)); | |
1786 | if (len > 0) | |
1787 | { | |
1788 | tree val = native_interpret_expr (vr->type, | |
1789 | buffer | |
1790 | + ((offset - offset2) | |
1791 | / BITS_PER_UNIT), | |
1792 | ref->size / BITS_PER_UNIT); | |
1793 | if (val) | |
9179ed9d | 1794 | return vn_reference_lookup_or_insert_for_pieces |
b55eb410 | 1795 | (vuse, vr->set, vr->type, vr->operands, val); |
c867aba0 RG |
1796 | } |
1797 | } | |
1798 | } | |
1799 | ||
0147184e RG |
1800 | /* 4) Assignment from an SSA name which definition we may be able |
1801 | to access pieces from. */ | |
1802 | else if (ref->size == maxsize | |
1803 | && is_gimple_reg_type (vr->type) | |
1804 | && gimple_assign_single_p (def_stmt) | |
1805 | && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME) | |
1806 | { | |
1807 | tree rhs1 = gimple_assign_rhs1 (def_stmt); | |
1808 | gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1); | |
1809 | if (is_gimple_assign (def_stmt2) | |
1810 | && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR | |
1811 | || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR) | |
1812 | && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1)))) | |
1813 | { | |
1814 | tree base2; | |
1815 | HOST_WIDE_INT offset2, size2, maxsize2, off; | |
1816 | base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt), | |
1817 | &offset2, &size2, &maxsize2); | |
1818 | off = offset - offset2; | |
1819 | if (maxsize2 != -1 | |
1820 | && maxsize2 == size2 | |
1821 | && operand_equal_p (base, base2, 0) | |
1822 | && offset2 <= offset | |
1823 | && offset2 + size2 >= offset + maxsize) | |
1824 | { | |
1825 | tree val = NULL_TREE; | |
1826 | HOST_WIDE_INT elsz | |
1827 | = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1)))); | |
1828 | if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR) | |
1829 | { | |
1830 | if (off == 0) | |
1831 | val = gimple_assign_rhs1 (def_stmt2); | |
1832 | else if (off == elsz) | |
1833 | val = gimple_assign_rhs2 (def_stmt2); | |
1834 | } | |
1835 | else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR | |
1836 | && off % elsz == 0) | |
1837 | { | |
1838 | tree ctor = gimple_assign_rhs1 (def_stmt2); | |
1839 | unsigned i = off / elsz; | |
1840 | if (i < CONSTRUCTOR_NELTS (ctor)) | |
1841 | { | |
1842 | constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i); | |
13396b6e JJ |
1843 | if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE) |
1844 | { | |
1845 | if (TREE_CODE (TREE_TYPE (elt->value)) | |
1846 | != VECTOR_TYPE) | |
1847 | val = elt->value; | |
1848 | } | |
0147184e RG |
1849 | } |
1850 | } | |
1851 | if (val) | |
9179ed9d | 1852 | return vn_reference_lookup_or_insert_for_pieces |
b55eb410 | 1853 | (vuse, vr->set, vr->type, vr->operands, val); |
0147184e RG |
1854 | } |
1855 | } | |
1856 | } | |
1857 | ||
1858 | /* 5) For aggregate copies translate the reference through them if | |
01df5c8a | 1859 | the copy kills ref. */ |
3bc27de7 RG |
1860 | else if (vn_walk_kind == VN_WALKREWRITE |
1861 | && gimple_assign_single_p (def_stmt) | |
01df5c8a | 1862 | && (DECL_P (gimple_assign_rhs1 (def_stmt)) |
70f34814 | 1863 | || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF |
01df5c8a RG |
1864 | || handled_component_p (gimple_assign_rhs1 (def_stmt)))) |
1865 | { | |
1866 | tree base2; | |
9c8cbc74 | 1867 | HOST_WIDE_INT offset2, size2, maxsize2; |
01df5c8a | 1868 | int i, j; |
ef062b13 | 1869 | auto_vec<vn_reference_op_s> rhs; |
01df5c8a | 1870 | vn_reference_op_t vro; |
b45d2719 | 1871 | ao_ref r; |
01df5c8a | 1872 | |
8ea34dab RG |
1873 | if (!lhs_ref_ok) |
1874 | return (void *)-1; | |
1875 | ||
01df5c8a | 1876 | /* See if the assignment kills REF. */ |
8ea34dab RG |
1877 | base2 = ao_ref_base (&lhs_ref); |
1878 | offset2 = lhs_ref.offset; | |
1879 | size2 = lhs_ref.size; | |
9c8cbc74 EB |
1880 | maxsize2 = lhs_ref.max_size; |
1881 | if (maxsize2 == -1 | |
1882 | || (base != base2 && !operand_equal_p (base, base2, 0)) | |
01df5c8a RG |
1883 | || offset2 > offset |
1884 | || offset2 + size2 < offset + maxsize) | |
1885 | return (void *)-1; | |
1886 | ||
8ea34dab RG |
1887 | /* Find the common base of ref and the lhs. lhs_ops already |
1888 | contains valueized operands for the lhs. */ | |
9771b263 DN |
1889 | i = vr->operands.length () - 1; |
1890 | j = lhs_ops.length () - 1; | |
35ecd408 | 1891 | while (j >= 0 && i >= 0 |
9771b263 | 1892 | && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j])) |
01df5c8a RG |
1893 | { |
1894 | i--; | |
1895 | j--; | |
1896 | } | |
35ecd408 | 1897 | |
25aa059e RG |
1898 | /* ??? The innermost op should always be a MEM_REF and we already |
1899 | checked that the assignment to the lhs kills vr. Thus for | |
1900 | aggregate copies using char[] types the vn_reference_op_eq | |
1901 | may fail when comparing types for compatibility. But we really | |
1902 | don't care here - further lookups with the rewritten operands | |
1903 | will simply fail if we messed up types too badly. */ | |
8403c2cf | 1904 | HOST_WIDE_INT extra_off = 0; |
4f9dbaaa | 1905 | if (j == 0 && i >= 0 |
9771b263 | 1906 | && lhs_ops[0].opcode == MEM_REF |
8403c2cf RB |
1907 | && lhs_ops[0].off != -1) |
1908 | { | |
1909 | if (lhs_ops[0].off == vr->operands[i].off) | |
1910 | i--, j--; | |
1911 | else if (vr->operands[i].opcode == MEM_REF | |
1912 | && vr->operands[i].off != -1) | |
1913 | { | |
1914 | extra_off = vr->operands[i].off - lhs_ops[0].off; | |
1915 | i--, j--; | |
1916 | } | |
1917 | } | |
25aa059e | 1918 | |
01df5c8a RG |
1919 | /* i now points to the first additional op. |
1920 | ??? LHS may not be completely contained in VR, one or more | |
1921 | VIEW_CONVERT_EXPRs could be in its way. We could at least | |
1922 | try handling outermost VIEW_CONVERT_EXPRs. */ | |
1923 | if (j != -1) | |
1924 | return (void *)-1; | |
01df5c8a RG |
1925 | |
1926 | /* Now re-write REF to be based on the rhs of the assignment. */ | |
1927 | copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs); | |
8403c2cf RB |
1928 | |
1929 | /* Apply an extra offset to the inner MEM_REF of the RHS. */ | |
1930 | if (extra_off != 0) | |
1931 | { | |
1932 | if (rhs.length () < 2 | |
1933 | || rhs[0].opcode != MEM_REF | |
1934 | || rhs[0].off == -1) | |
1935 | return (void *)-1; | |
1936 | rhs[0].off += extra_off; | |
1937 | rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0, | |
1938 | build_int_cst (TREE_TYPE (rhs[0].op0), | |
1939 | extra_off)); | |
1940 | } | |
1941 | ||
01df5c8a | 1942 | /* We need to pre-pend vr->operands[0..i] to rhs. */ |
26f3a4e1 | 1943 | vec<vn_reference_op_s> old = vr->operands; |
9771b263 | 1944 | if (i + 1 + rhs.length () > vr->operands.length ()) |
01df5c8a | 1945 | { |
9771b263 | 1946 | vr->operands.safe_grow (i + 1 + rhs.length ()); |
26f3a4e1 RB |
1947 | if (old == shared_lookup_references) |
1948 | shared_lookup_references = vr->operands; | |
01df5c8a RG |
1949 | } |
1950 | else | |
9771b263 DN |
1951 | vr->operands.truncate (i + 1 + rhs.length ()); |
1952 | FOR_EACH_VEC_ELT (rhs, j, vro) | |
1953 | vr->operands[i + 1 + j] = *vro; | |
b55eb410 | 1954 | vr->operands = valueize_refs (vr->operands); |
26f3a4e1 RB |
1955 | if (old == shared_lookup_references) |
1956 | shared_lookup_references = vr->operands; | |
01df5c8a | 1957 | vr->hashcode = vn_reference_compute_hash (vr); |
c7ee7b45 | 1958 | |
8403c2cf RB |
1959 | /* Try folding the new reference to a constant. */ |
1960 | tree val = fully_constant_vn_reference_p (vr); | |
1961 | if (val) | |
1962 | return vn_reference_lookup_or_insert_for_pieces | |
1963 | (vuse, vr->set, vr->type, vr->operands, val); | |
1964 | ||
c7ee7b45 RG |
1965 | /* Adjust *ref from the new operands. */ |
1966 | if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands)) | |
1967 | return (void *)-1; | |
1968 | /* This can happen with bitfields. */ | |
1969 | if (ref->size != r.size) | |
1970 | return (void *)-1; | |
1971 | *ref = r; | |
1972 | ||
1973 | /* Do not update last seen VUSE after translating. */ | |
1974 | last_vuse_ptr = NULL; | |
1975 | ||
1976 | /* Keep looking for the adjusted *REF / VR pair. */ | |
1977 | return NULL; | |
1978 | } | |
1979 | ||
0147184e | 1980 | /* 6) For memcpy copies translate the reference through them if |
c7ee7b45 RG |
1981 | the copy kills ref. */ |
1982 | else if (vn_walk_kind == VN_WALKREWRITE | |
1983 | && is_gimple_reg_type (vr->type) | |
1984 | /* ??? Handle BCOPY as well. */ | |
1985 | && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY) | |
1986 | || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY) | |
1987 | || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)) | |
1988 | && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR | |
1989 | || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME) | |
1990 | && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR | |
1991 | || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME) | |
cc269bb6 | 1992 | && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))) |
c7ee7b45 RG |
1993 | { |
1994 | tree lhs, rhs; | |
1995 | ao_ref r; | |
1996 | HOST_WIDE_INT rhs_offset, copy_size, lhs_offset; | |
1997 | vn_reference_op_s op; | |
1998 | HOST_WIDE_INT at; | |
1999 | ||
2000 | ||
2001 | /* Only handle non-variable, addressable refs. */ | |
2002 | if (ref->size != maxsize | |
2003 | || offset % BITS_PER_UNIT != 0 | |
2004 | || ref->size % BITS_PER_UNIT != 0) | |
2005 | return (void *)-1; | |
2006 | ||
2007 | /* Extract a pointer base and an offset for the destination. */ | |
2008 | lhs = gimple_call_arg (def_stmt, 0); | |
2009 | lhs_offset = 0; | |
2010 | if (TREE_CODE (lhs) == SSA_NAME) | |
2011 | lhs = SSA_VAL (lhs); | |
2012 | if (TREE_CODE (lhs) == ADDR_EXPR) | |
2013 | { | |
2014 | tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0), | |
2015 | &lhs_offset); | |
2016 | if (!tem) | |
2017 | return (void *)-1; | |
2018 | if (TREE_CODE (tem) == MEM_REF | |
cc269bb6 | 2019 | && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))) |
c7ee7b45 RG |
2020 | { |
2021 | lhs = TREE_OPERAND (tem, 0); | |
eb1ce453 | 2022 | lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1)); |
c7ee7b45 RG |
2023 | } |
2024 | else if (DECL_P (tem)) | |
2025 | lhs = build_fold_addr_expr (tem); | |
2026 | else | |
2027 | return (void *)-1; | |
2028 | } | |
2029 | if (TREE_CODE (lhs) != SSA_NAME | |
2030 | && TREE_CODE (lhs) != ADDR_EXPR) | |
2031 | return (void *)-1; | |
2032 | ||
2033 | /* Extract a pointer base and an offset for the source. */ | |
2034 | rhs = gimple_call_arg (def_stmt, 1); | |
2035 | rhs_offset = 0; | |
2036 | if (TREE_CODE (rhs) == SSA_NAME) | |
2037 | rhs = SSA_VAL (rhs); | |
2038 | if (TREE_CODE (rhs) == ADDR_EXPR) | |
2039 | { | |
2040 | tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0), | |
2041 | &rhs_offset); | |
2042 | if (!tem) | |
2043 | return (void *)-1; | |
2044 | if (TREE_CODE (tem) == MEM_REF | |
cc269bb6 | 2045 | && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))) |
c7ee7b45 RG |
2046 | { |
2047 | rhs = TREE_OPERAND (tem, 0); | |
eb1ce453 | 2048 | rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1)); |
c7ee7b45 RG |
2049 | } |
2050 | else if (DECL_P (tem)) | |
2051 | rhs = build_fold_addr_expr (tem); | |
2052 | else | |
2053 | return (void *)-1; | |
2054 | } | |
2055 | if (TREE_CODE (rhs) != SSA_NAME | |
2056 | && TREE_CODE (rhs) != ADDR_EXPR) | |
2057 | return (void *)-1; | |
2058 | ||
eb1ce453 | 2059 | copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2)); |
c7ee7b45 RG |
2060 | |
2061 | /* The bases of the destination and the references have to agree. */ | |
2062 | if ((TREE_CODE (base) != MEM_REF | |
2063 | && !DECL_P (base)) | |
2064 | || (TREE_CODE (base) == MEM_REF | |
2065 | && (TREE_OPERAND (base, 0) != lhs | |
cc269bb6 | 2066 | || !tree_fits_uhwi_p (TREE_OPERAND (base, 1)))) |
c7ee7b45 RG |
2067 | || (DECL_P (base) |
2068 | && (TREE_CODE (lhs) != ADDR_EXPR | |
2069 | || TREE_OPERAND (lhs, 0) != base))) | |
2070 | return (void *)-1; | |
2071 | ||
2072 | /* And the access has to be contained within the memcpy destination. */ | |
2073 | at = offset / BITS_PER_UNIT; | |
2074 | if (TREE_CODE (base) == MEM_REF) | |
eb1ce453 | 2075 | at += tree_to_uhwi (TREE_OPERAND (base, 1)); |
c7ee7b45 RG |
2076 | if (lhs_offset > at |
2077 | || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT) | |
2078 | return (void *)-1; | |
2079 | ||
2080 | /* Make room for 2 operands in the new reference. */ | |
9771b263 | 2081 | if (vr->operands.length () < 2) |
c7ee7b45 | 2082 | { |
9771b263 DN |
2083 | vec<vn_reference_op_s> old = vr->operands; |
2084 | vr->operands.safe_grow_cleared (2); | |
c7ee7b45 RG |
2085 | if (old == shared_lookup_references |
2086 | && vr->operands != old) | |
26f3a4e1 | 2087 | shared_lookup_references = vr->operands; |
c7ee7b45 RG |
2088 | } |
2089 | else | |
9771b263 | 2090 | vr->operands.truncate (2); |
c7ee7b45 RG |
2091 | |
2092 | /* The looked-through reference is a simple MEM_REF. */ | |
2093 | memset (&op, 0, sizeof (op)); | |
2094 | op.type = vr->type; | |
2095 | op.opcode = MEM_REF; | |
2096 | op.op0 = build_int_cst (ptr_type_node, at - rhs_offset); | |
2097 | op.off = at - lhs_offset + rhs_offset; | |
9771b263 | 2098 | vr->operands[0] = op; |
6d6c9525 | 2099 | op.type = TREE_TYPE (rhs); |
c7ee7b45 RG |
2100 | op.opcode = TREE_CODE (rhs); |
2101 | op.op0 = rhs; | |
2102 | op.off = -1; | |
9771b263 | 2103 | vr->operands[1] = op; |
c7ee7b45 | 2104 | vr->hashcode = vn_reference_compute_hash (vr); |
b45d2719 RG |
2105 | |
2106 | /* Adjust *ref from the new operands. */ | |
2107 | if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands)) | |
01df5c8a | 2108 | return (void *)-1; |
03472fdd EB |
2109 | /* This can happen with bitfields. */ |
2110 | if (ref->size != r.size) | |
2111 | return (void *)-1; | |
b45d2719 | 2112 | *ref = r; |
01df5c8a | 2113 | |
d0ca0bcb RG |
2114 | /* Do not update last seen VUSE after translating. */ |
2115 | last_vuse_ptr = NULL; | |
2116 | ||
01df5c8a RG |
2117 | /* Keep looking for the adjusted *REF / VR pair. */ |
2118 | return NULL; | |
2119 | } | |
2120 | ||
2121 | /* Bail out and stop walking. */ | |
2122 | return (void *)-1; | |
2123 | } | |
2124 | ||
c9145754 DB |
2125 | /* Lookup a reference operation by it's parts, in the current hash table. |
2126 | Returns the resulting value number if it exists in the hash table, | |
2127 | NULL_TREE otherwise. VNRESULT will be filled in with the actual | |
2128 | vn_reference_t stored in the hashtable if something is found. */ | |
89fb70a3 DB |
2129 | |
2130 | tree | |
b45d2719 | 2131 | vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type, |
9771b263 | 2132 | vec<vn_reference_op_s> operands, |
3bc27de7 | 2133 | vn_reference_t *vnresult, vn_lookup_kind kind) |
c9145754 DB |
2134 | { |
2135 | struct vn_reference_s vr1; | |
5006671f | 2136 | vn_reference_t tmp; |
12bd5a1e | 2137 | tree cst; |
5006671f RG |
2138 | |
2139 | if (!vnresult) | |
2140 | vnresult = &tmp; | |
2141 | *vnresult = NULL; | |
01df5c8a | 2142 | |
d1c0308e | 2143 | vr1.vuse = vuse_ssa_val (vuse); |
9771b263 DN |
2144 | shared_lookup_references.truncate (0); |
2145 | shared_lookup_references.safe_grow (operands.length ()); | |
2146 | memcpy (shared_lookup_references.address (), | |
2147 | operands.address (), | |
01df5c8a | 2148 | sizeof (vn_reference_op_s) |
9771b263 | 2149 | * operands.length ()); |
01df5c8a RG |
2150 | vr1.operands = operands = shared_lookup_references |
2151 | = valueize_refs (shared_lookup_references); | |
b45d2719 RG |
2152 | vr1.type = type; |
2153 | vr1.set = set; | |
c9145754 | 2154 | vr1.hashcode = vn_reference_compute_hash (&vr1); |
12bd5a1e RG |
2155 | if ((cst = fully_constant_vn_reference_p (&vr1))) |
2156 | return cst; | |
c9145754 | 2157 | |
12bd5a1e | 2158 | vn_reference_lookup_1 (&vr1, vnresult); |
5006671f | 2159 | if (!*vnresult |
3bc27de7 | 2160 | && kind != VN_NOWALK |
5006671f | 2161 | && vr1.vuse) |
53f3815c | 2162 | { |
b45d2719 | 2163 | ao_ref r; |
3bc27de7 | 2164 | vn_walk_kind = kind; |
b45d2719 | 2165 | if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands)) |
01df5c8a | 2166 | *vnresult = |
b45d2719 | 2167 | (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, |
01df5c8a | 2168 | vn_reference_lookup_2, |
92a5094e | 2169 | vn_reference_lookup_3, |
76be46db | 2170 | vuse_ssa_val, &vr1); |
26f3a4e1 | 2171 | gcc_checking_assert (vr1.operands == shared_lookup_references); |
53f3815c RG |
2172 | } |
2173 | ||
5006671f RG |
2174 | if (*vnresult) |
2175 | return (*vnresult)->result; | |
2176 | ||
2177 | return NULL_TREE; | |
c9145754 DB |
2178 | } |
2179 | ||
2180 | /* Lookup OP in the current hash table, and return the resulting value | |
2181 | number if it exists in the hash table. Return NULL_TREE if it does | |
2182 | not exist in the hash table or if the result field of the structure | |
2183 | was NULL.. VNRESULT will be filled in with the vn_reference_t | |
2184 | stored in the hashtable if one exists. */ | |
2185 | ||
2186 | tree | |
3bc27de7 | 2187 | vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind, |
c9145754 | 2188 | vn_reference_t *vnresult) |
89fb70a3 | 2189 | { |
9771b263 | 2190 | vec<vn_reference_op_s> operands; |
89fb70a3 | 2191 | struct vn_reference_s vr1; |
12bd5a1e | 2192 | tree cst; |
3ceaf2f5 | 2193 | bool valuezied_anything; |
5006671f | 2194 | |
c9145754 DB |
2195 | if (vnresult) |
2196 | *vnresult = NULL; | |
89fb70a3 | 2197 | |
d1c0308e | 2198 | vr1.vuse = vuse_ssa_val (vuse); |
3ceaf2f5 RG |
2199 | vr1.operands = operands |
2200 | = valueize_shared_reference_ops_from_ref (op, &valuezied_anything); | |
b45d2719 RG |
2201 | vr1.type = TREE_TYPE (op); |
2202 | vr1.set = get_alias_set (op); | |
89fb70a3 | 2203 | vr1.hashcode = vn_reference_compute_hash (&vr1); |
12bd5a1e RG |
2204 | if ((cst = fully_constant_vn_reference_p (&vr1))) |
2205 | return cst; | |
896c8b96 | 2206 | |
3bc27de7 | 2207 | if (kind != VN_NOWALK |
5006671f RG |
2208 | && vr1.vuse) |
2209 | { | |
2210 | vn_reference_t wvnresult; | |
b45d2719 | 2211 | ao_ref r; |
3ceaf2f5 RG |
2212 | /* Make sure to use a valueized reference if we valueized anything. |
2213 | Otherwise preserve the full reference for advanced TBAA. */ | |
2214 | if (!valuezied_anything | |
2215 | || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type, | |
2216 | vr1.operands)) | |
6d6c9525 | 2217 | ao_ref_init (&r, op); |
3bc27de7 | 2218 | vn_walk_kind = kind; |
5006671f | 2219 | wvnresult = |
b45d2719 | 2220 | (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, |
01df5c8a | 2221 | vn_reference_lookup_2, |
92a5094e | 2222 | vn_reference_lookup_3, |
76be46db | 2223 | vuse_ssa_val, &vr1); |
26f3a4e1 | 2224 | gcc_checking_assert (vr1.operands == shared_lookup_references); |
5006671f RG |
2225 | if (wvnresult) |
2226 | { | |
2227 | if (vnresult) | |
2228 | *vnresult = wvnresult; | |
2229 | return wvnresult->result; | |
2230 | } | |
2231 | ||
2232 | return NULL_TREE; | |
896c8b96 | 2233 | } |
89fb70a3 | 2234 | |
5006671f | 2235 | return vn_reference_lookup_1 (&vr1, vnresult); |
89fb70a3 DB |
2236 | } |
2237 | ||
26f3a4e1 RB |
2238 | /* Lookup CALL in the current hash table and return the entry in |
2239 | *VNRESULT if found. Populates *VR for the hashtable lookup. */ | |
2240 | ||
2241 | void | |
538dd0b7 | 2242 | vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult, |
26f3a4e1 RB |
2243 | vn_reference_t vr) |
2244 | { | |
27732ffd ML |
2245 | if (vnresult) |
2246 | *vnresult = NULL; | |
2247 | ||
26f3a4e1 RB |
2248 | tree vuse = gimple_vuse (call); |
2249 | ||
2250 | vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE; | |
2251 | vr->operands = valueize_shared_reference_ops_from_call (call); | |
2252 | vr->type = gimple_expr_type (call); | |
2253 | vr->set = 0; | |
2254 | vr->hashcode = vn_reference_compute_hash (vr); | |
2255 | vn_reference_lookup_1 (vr, vnresult); | |
2256 | } | |
c9145754 | 2257 | |
89fb70a3 | 2258 | /* Insert OP into the current hash table with a value number of |
c9145754 | 2259 | RESULT, and return the resulting reference structure we created. */ |
89fb70a3 | 2260 | |
26f3a4e1 | 2261 | static vn_reference_t |
4ec0a198 | 2262 | vn_reference_insert (tree op, tree result, tree vuse, tree vdef) |
89fb70a3 | 2263 | { |
bf190e8d | 2264 | vn_reference_s **slot; |
89fb70a3 | 2265 | vn_reference_t vr1; |
39e843e8 | 2266 | bool tem; |
89fb70a3 DB |
2267 | |
2268 | vr1 = (vn_reference_t) pool_alloc (current_info->references_pool); | |
c9145754 DB |
2269 | if (TREE_CODE (result) == SSA_NAME) |
2270 | vr1->value_id = VN_INFO (result)->value_id; | |
2271 | else | |
2272 | vr1->value_id = get_or_alloc_constant_value_id (result); | |
5006671f | 2273 | vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE; |
39e843e8 | 2274 | vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy (); |
b45d2719 RG |
2275 | vr1->type = TREE_TYPE (op); |
2276 | vr1->set = get_alias_set (op); | |
89fb70a3 DB |
2277 | vr1->hashcode = vn_reference_compute_hash (vr1); |
2278 | vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result; | |
4ec0a198 | 2279 | vr1->result_vdef = vdef; |
89fb70a3 | 2280 | |
c203e8a7 TS |
2281 | slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode, |
2282 | INSERT); | |
89fb70a3 DB |
2283 | |
2284 | /* Because we lookup stores using vuses, and value number failures | |
2285 | using the vdefs (see visit_reference_op_store for how and why), | |
2286 | it's possible that on failure we may try to insert an already | |
2287 | inserted store. This is not wrong, there is no ssa name for a | |
2288 | store that we could use as a differentiator anyway. Thus, unlike | |
2289 | the other lookup functions, you cannot gcc_assert (!*slot) | |
2290 | here. */ | |
2291 | ||
8d0eca24 RG |
2292 | /* But free the old slot in case of a collision. */ |
2293 | if (*slot) | |
2294 | free_reference (*slot); | |
89fb70a3 DB |
2295 | |
2296 | *slot = vr1; | |
c9145754 DB |
2297 | return vr1; |
2298 | } | |
2299 | ||
2300 | /* Insert a reference by it's pieces into the current hash table with | |
2301 | a value number of RESULT. Return the resulting reference | |
2302 | structure we created. */ | |
2303 | ||
2304 | vn_reference_t | |
b45d2719 | 2305 | vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type, |
9771b263 | 2306 | vec<vn_reference_op_s> operands, |
c9145754 DB |
2307 | tree result, unsigned int value_id) |
2308 | ||
2309 | { | |
bf190e8d | 2310 | vn_reference_s **slot; |
c9145754 DB |
2311 | vn_reference_t vr1; |
2312 | ||
2313 | vr1 = (vn_reference_t) pool_alloc (current_info->references_pool); | |
5006671f RG |
2314 | vr1->value_id = value_id; |
2315 | vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE; | |
c9145754 | 2316 | vr1->operands = valueize_refs (operands); |
b45d2719 RG |
2317 | vr1->type = type; |
2318 | vr1->set = set; | |
c9145754 DB |
2319 | vr1->hashcode = vn_reference_compute_hash (vr1); |
2320 | if (result && TREE_CODE (result) == SSA_NAME) | |
2321 | result = SSA_VAL (result); | |
2322 | vr1->result = result; | |
2323 | ||
c203e8a7 TS |
2324 | slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode, |
2325 | INSERT); | |
b8698a0f | 2326 | |
c9145754 | 2327 | /* At this point we should have all the things inserted that we have |
5006671f RG |
2328 | seen before, and we should never try inserting something that |
2329 | already exists. */ | |
c9145754 DB |
2330 | gcc_assert (!*slot); |
2331 | if (*slot) | |
2332 | free_reference (*slot); | |
2333 | ||
2334 | *slot = vr1; | |
2335 | return vr1; | |
89fb70a3 DB |
2336 | } |
2337 | ||
49a1fb2d | 2338 | /* Compute and return the hash value for nary operation VBO1. */ |
89fb70a3 | 2339 | |
26f3a4e1 | 2340 | static hashval_t |
49a1fb2d | 2341 | vn_nary_op_compute_hash (const vn_nary_op_t vno1) |
89fb70a3 | 2342 | { |
4e44a6e8 | 2343 | inchash::hash hstate; |
49a1fb2d | 2344 | unsigned i; |
89fb70a3 | 2345 | |
49a1fb2d RG |
2346 | for (i = 0; i < vno1->length; ++i) |
2347 | if (TREE_CODE (vno1->op[i]) == SSA_NAME) | |
2348 | vno1->op[i] = SSA_VAL (vno1->op[i]); | |
89fb70a3 | 2349 | |
49a1fb2d RG |
2350 | if (vno1->length == 2 |
2351 | && commutative_tree_code (vno1->opcode) | |
2352 | && tree_swap_operands_p (vno1->op[0], vno1->op[1], false)) | |
2353 | { | |
2354 | tree temp = vno1->op[0]; | |
2355 | vno1->op[0] = vno1->op[1]; | |
2356 | vno1->op[1] = temp; | |
2357 | } | |
89fb70a3 | 2358 | |
4e44a6e8 | 2359 | hstate.add_int (vno1->opcode); |
49a1fb2d | 2360 | for (i = 0; i < vno1->length; ++i) |
4e44a6e8 | 2361 | inchash::add_expr (vno1->op[i], hstate); |
89fb70a3 | 2362 | |
4e44a6e8 | 2363 | return hstate.end (); |
89fb70a3 DB |
2364 | } |
2365 | ||
bf190e8d | 2366 | /* Compare nary operations VNO1 and VNO2 and return true if they are |
89fb70a3 DB |
2367 | equivalent. */ |
2368 | ||
bf190e8d LC |
2369 | bool |
2370 | vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2) | |
89fb70a3 | 2371 | { |
49a1fb2d RG |
2372 | unsigned i; |
2373 | ||
85169114 PB |
2374 | if (vno1->hashcode != vno2->hashcode) |
2375 | return false; | |
2376 | ||
5a7d7f9c RG |
2377 | if (vno1->length != vno2->length) |
2378 | return false; | |
2379 | ||
49a1fb2d | 2380 | if (vno1->opcode != vno2->opcode |
63a14fa3 | 2381 | || !types_compatible_p (vno1->type, vno2->type)) |
49a1fb2d RG |
2382 | return false; |
2383 | ||
2384 | for (i = 0; i < vno1->length; ++i) | |
2385 | if (!expressions_equal_p (vno1->op[i], vno2->op[i])) | |
2386 | return false; | |
2387 | ||
2388 | return true; | |
89fb70a3 DB |
2389 | } |
2390 | ||
9ad6bebe | 2391 | /* Initialize VNO from the pieces provided. */ |
89fb70a3 | 2392 | |
9ad6bebe NF |
2393 | static void |
2394 | init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length, | |
5a7d7f9c | 2395 | enum tree_code code, tree type, tree *ops) |
9ad6bebe NF |
2396 | { |
2397 | vno->opcode = code; | |
2398 | vno->length = length; | |
2399 | vno->type = type; | |
5a7d7f9c | 2400 | memcpy (&vno->op[0], ops, sizeof (tree) * length); |
9ad6bebe NF |
2401 | } |
2402 | ||
2403 | /* Initialize VNO from OP. */ | |
2404 | ||
2405 | static void | |
2406 | init_vn_nary_op_from_op (vn_nary_op_t vno, tree op) | |
2407 | { | |
2408 | unsigned i; | |
2409 | ||
2410 | vno->opcode = TREE_CODE (op); | |
2411 | vno->length = TREE_CODE_LENGTH (TREE_CODE (op)); | |
2412 | vno->type = TREE_TYPE (op); | |
2413 | for (i = 0; i < vno->length; ++i) | |
2414 | vno->op[i] = TREE_OPERAND (op, i); | |
2415 | } | |
2416 | ||
5a7d7f9c RG |
2417 | /* Return the number of operands for a vn_nary ops structure from STMT. */ |
2418 | ||
2419 | static unsigned int | |
2420 | vn_nary_length_from_stmt (gimple stmt) | |
2421 | { | |
2422 | switch (gimple_assign_rhs_code (stmt)) | |
2423 | { | |
2424 | case REALPART_EXPR: | |
2425 | case IMAGPART_EXPR: | |
2426 | case VIEW_CONVERT_EXPR: | |
2427 | return 1; | |
2428 | ||
91af9dc9 RG |
2429 | case BIT_FIELD_REF: |
2430 | return 3; | |
2431 | ||
5a7d7f9c RG |
2432 | case CONSTRUCTOR: |
2433 | return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)); | |
2434 | ||
2435 | default: | |
2436 | return gimple_num_ops (stmt) - 1; | |
2437 | } | |
2438 | } | |
2439 | ||
9ad6bebe NF |
2440 | /* Initialize VNO from STMT. */ |
2441 | ||
2442 | static void | |
2443 | init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt) | |
2444 | { | |
2445 | unsigned i; | |
2446 | ||
2447 | vno->opcode = gimple_assign_rhs_code (stmt); | |
9ad6bebe | 2448 | vno->type = gimple_expr_type (stmt); |
5a7d7f9c RG |
2449 | switch (vno->opcode) |
2450 | { | |
2451 | case REALPART_EXPR: | |
2452 | case IMAGPART_EXPR: | |
2453 | case VIEW_CONVERT_EXPR: | |
2454 | vno->length = 1; | |
2455 | vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0); | |
2456 | break; | |
2457 | ||
91af9dc9 RG |
2458 | case BIT_FIELD_REF: |
2459 | vno->length = 3; | |
2460 | vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0); | |
2461 | vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1); | |
2462 | vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2); | |
2463 | break; | |
2464 | ||
5a7d7f9c RG |
2465 | case CONSTRUCTOR: |
2466 | vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)); | |
2467 | for (i = 0; i < vno->length; ++i) | |
2468 | vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value; | |
2469 | break; | |
2470 | ||
2471 | default: | |
91af9dc9 | 2472 | gcc_checking_assert (!gimple_assign_single_p (stmt)); |
5a7d7f9c RG |
2473 | vno->length = gimple_num_ops (stmt) - 1; |
2474 | for (i = 0; i < vno->length; ++i) | |
2475 | vno->op[i] = gimple_op (stmt, i + 1); | |
2476 | } | |
9ad6bebe NF |
2477 | } |
2478 | ||
2479 | /* Compute the hashcode for VNO and look for it in the hash table; | |
2480 | return the resulting value number if it exists in the hash table. | |
2481 | Return NULL_TREE if it does not exist in the hash table or if the | |
2482 | result field of the operation is NULL. VNRESULT will contain the | |
2483 | vn_nary_op_t from the hashtable if it exists. */ | |
2484 | ||
2485 | static tree | |
2486 | vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult) | |
c9145754 | 2487 | { |
bf190e8d | 2488 | vn_nary_op_s **slot; |
9ad6bebe | 2489 | |
c9145754 DB |
2490 | if (vnresult) |
2491 | *vnresult = NULL; | |
9ad6bebe NF |
2492 | |
2493 | vno->hashcode = vn_nary_op_compute_hash (vno); | |
c203e8a7 TS |
2494 | slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode, |
2495 | NO_INSERT); | |
c9145754 | 2496 | if (!slot && current_info == optimistic_info) |
c203e8a7 TS |
2497 | slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, |
2498 | NO_INSERT); | |
c9145754 DB |
2499 | if (!slot) |
2500 | return NULL_TREE; | |
2501 | if (vnresult) | |
bf190e8d LC |
2502 | *vnresult = *slot; |
2503 | return (*slot)->result; | |
c9145754 DB |
2504 | } |
2505 | ||
9ad6bebe NF |
2506 | /* Lookup a n-ary operation by its pieces and return the resulting value |
2507 | number if it exists in the hash table. Return NULL_TREE if it does | |
2508 | not exist in the hash table or if the result field of the operation | |
2509 | is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable | |
2510 | if it exists. */ | |
2511 | ||
2512 | tree | |
2513 | vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code, | |
5a7d7f9c | 2514 | tree type, tree *ops, vn_nary_op_t *vnresult) |
9ad6bebe | 2515 | { |
5a7d7f9c RG |
2516 | vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s, |
2517 | sizeof_vn_nary_op (length)); | |
2518 | init_vn_nary_op_from_pieces (vno1, length, code, type, ops); | |
2519 | return vn_nary_op_lookup_1 (vno1, vnresult); | |
9ad6bebe NF |
2520 | } |
2521 | ||
c9145754 DB |
2522 | /* Lookup OP in the current hash table, and return the resulting value |
2523 | number if it exists in the hash table. Return NULL_TREE if it does | |
2524 | not exist in the hash table or if the result field of the operation | |
2525 | is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable | |
2526 | if it exists. */ | |
2527 | ||
2528 | tree | |
2529 | vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult) | |
89fb70a3 | 2530 | { |
5a7d7f9c RG |
2531 | vn_nary_op_t vno1 |
2532 | = XALLOCAVAR (struct vn_nary_op_s, | |
2533 | sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op)))); | |
2534 | init_vn_nary_op_from_op (vno1, op); | |
2535 | return vn_nary_op_lookup_1 (vno1, vnresult); | |
89fb70a3 DB |
2536 | } |
2537 | ||
726a989a RB |
2538 | /* Lookup the rhs of STMT in the current hash table, and return the resulting |
2539 | value number if it exists in the hash table. Return NULL_TREE if | |
2540 | it does not exist in the hash table. VNRESULT will contain the | |
2541 | vn_nary_op_t from the hashtable if it exists. */ | |
2542 | ||
2543 | tree | |
2544 | vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult) | |
2545 | { | |
5a7d7f9c RG |
2546 | vn_nary_op_t vno1 |
2547 | = XALLOCAVAR (struct vn_nary_op_s, | |
2548 | sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt))); | |
2549 | init_vn_nary_op_from_stmt (vno1, stmt); | |
2550 | return vn_nary_op_lookup_1 (vno1, vnresult); | |
9ad6bebe NF |
2551 | } |
2552 | ||
2553 | /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */ | |
2554 | ||
2555 | static vn_nary_op_t | |
2556 | alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack) | |
2557 | { | |
2558 | return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length)); | |
2559 | } | |
2560 | ||
2561 | /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's | |
2562 | obstack. */ | |
2563 | ||
2564 | static vn_nary_op_t | |
2565 | alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id) | |
2566 | { | |
2567 | vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, | |
2568 | ¤t_info->nary_obstack); | |
2569 | ||
2570 | vno1->value_id = value_id; | |
2571 | vno1->length = length; | |
2572 | vno1->result = result; | |
2573 | ||
2574 | return vno1; | |
2575 | } | |
2576 | ||
2577 | /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute | |
2578 | VNO->HASHCODE first. */ | |
2579 | ||
2580 | static vn_nary_op_t | |
c203e8a7 | 2581 | vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table, |
bf190e8d | 2582 | bool compute_hash) |
9ad6bebe | 2583 | { |
bf190e8d | 2584 | vn_nary_op_s **slot; |
9ad6bebe NF |
2585 | |
2586 | if (compute_hash) | |
2587 | vno->hashcode = vn_nary_op_compute_hash (vno); | |
2588 | ||
c203e8a7 | 2589 | slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT); |
9ad6bebe NF |
2590 | gcc_assert (!*slot); |
2591 | ||
2592 | *slot = vno; | |
2593 | return vno; | |
726a989a RB |
2594 | } |
2595 | ||
c9145754 DB |
2596 | /* Insert a n-ary operation into the current hash table using it's |
2597 | pieces. Return the vn_nary_op_t structure we created and put in | |
2598 | the hashtable. */ | |
2599 | ||
2600 | vn_nary_op_t | |
2601 | vn_nary_op_insert_pieces (unsigned int length, enum tree_code code, | |
5a7d7f9c RG |
2602 | tree type, tree *ops, |
2603 | tree result, unsigned int value_id) | |
c9145754 | 2604 | { |
5a7d7f9c RG |
2605 | vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id); |
2606 | init_vn_nary_op_from_pieces (vno1, length, code, type, ops); | |
9ad6bebe | 2607 | return vn_nary_op_insert_into (vno1, current_info->nary, true); |
c9145754 DB |
2608 | } |
2609 | ||
89fb70a3 | 2610 | /* Insert OP into the current hash table with a value number of |
c9145754 DB |
2611 | RESULT. Return the vn_nary_op_t structure we created and put in |
2612 | the hashtable. */ | |
89fb70a3 | 2613 | |
c9145754 | 2614 | vn_nary_op_t |
49a1fb2d | 2615 | vn_nary_op_insert (tree op, tree result) |
89fb70a3 | 2616 | { |
49a1fb2d | 2617 | unsigned length = TREE_CODE_LENGTH (TREE_CODE (op)); |
49a1fb2d | 2618 | vn_nary_op_t vno1; |
49a1fb2d | 2619 | |
9ad6bebe NF |
2620 | vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id); |
2621 | init_vn_nary_op_from_op (vno1, op); | |
2622 | return vn_nary_op_insert_into (vno1, current_info->nary, true); | |
89fb70a3 DB |
2623 | } |
2624 | ||
726a989a RB |
2625 | /* Insert the rhs of STMT into the current hash table with a value number of |
2626 | RESULT. */ | |
2627 | ||
2628 | vn_nary_op_t | |
2629 | vn_nary_op_insert_stmt (gimple stmt, tree result) | |
2630 | { | |
5a7d7f9c RG |
2631 | vn_nary_op_t vno1 |
2632 | = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt), | |
2633 | result, VN_INFO (result)->value_id); | |
9ad6bebe NF |
2634 | init_vn_nary_op_from_stmt (vno1, stmt); |
2635 | return vn_nary_op_insert_into (vno1, current_info->nary, true); | |
726a989a RB |
2636 | } |
2637 | ||
89fb70a3 DB |
2638 | /* Compute a hashcode for PHI operation VP1 and return it. */ |
2639 | ||
2640 | static inline hashval_t | |
2641 | vn_phi_compute_hash (vn_phi_t vp1) | |
2642 | { | |
4e44a6e8 | 2643 | inchash::hash hstate (vp1->block->index); |
89fb70a3 DB |
2644 | int i; |
2645 | tree phi1op; | |
1d295886 | 2646 | tree type; |
89fb70a3 | 2647 | |
1d295886 RG |
2648 | /* If all PHI arguments are constants we need to distinguish |
2649 | the PHI node via its type. */ | |
24d63016 | 2650 | type = vp1->type; |
4e44a6e8 | 2651 | hstate.merge_hash (vn_hash_type (type)); |
1d295886 | 2652 | |
9771b263 | 2653 | FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op) |
89fb70a3 DB |
2654 | { |
2655 | if (phi1op == VN_TOP) | |
2656 | continue; | |
4e44a6e8 | 2657 | inchash::add_expr (phi1op, hstate); |
89fb70a3 DB |
2658 | } |
2659 | ||
4e44a6e8 | 2660 | return hstate.end (); |
89fb70a3 DB |
2661 | } |
2662 | ||
89fb70a3 DB |
2663 | /* Compare two phi entries for equality, ignoring VN_TOP arguments. */ |
2664 | ||
2665 | static int | |
bf190e8d | 2666 | vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2) |
89fb70a3 | 2667 | { |
85169114 PB |
2668 | if (vp1->hashcode != vp2->hashcode) |
2669 | return false; | |
2670 | ||
89fb70a3 DB |
2671 | if (vp1->block == vp2->block) |
2672 | { | |
2673 | int i; | |
2674 | tree phi1op; | |
2675 | ||
1d295886 RG |
2676 | /* If the PHI nodes do not have compatible types |
2677 | they are not the same. */ | |
24d63016 | 2678 | if (!types_compatible_p (vp1->type, vp2->type)) |
1d295886 RG |
2679 | return false; |
2680 | ||
89fb70a3 DB |
2681 | /* Any phi in the same block will have it's arguments in the |
2682 | same edge order, because of how we store phi nodes. */ | |
9771b263 | 2683 | FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op) |
89fb70a3 | 2684 | { |
9771b263 | 2685 | tree phi2op = vp2->phiargs[i]; |
89fb70a3 DB |
2686 | if (phi1op == VN_TOP || phi2op == VN_TOP) |
2687 | continue; | |
2688 | if (!expressions_equal_p (phi1op, phi2op)) | |
2689 | return false; | |
2690 | } | |
2691 | return true; | |
2692 | } | |
2693 | return false; | |
2694 | } | |
2695 | ||
9771b263 | 2696 | static vec<tree> shared_lookup_phiargs; |
89fb70a3 DB |
2697 | |
2698 | /* Lookup PHI in the current hash table, and return the resulting | |
2699 | value number if it exists in the hash table. Return NULL_TREE if | |
2700 | it does not exist in the hash table. */ | |
2701 | ||
de081cfd | 2702 | static tree |
726a989a | 2703 | vn_phi_lookup (gimple phi) |
89fb70a3 | 2704 | { |
bf190e8d | 2705 | vn_phi_s **slot; |
89fb70a3 | 2706 | struct vn_phi_s vp1; |
726a989a | 2707 | unsigned i; |
89fb70a3 | 2708 | |
9771b263 | 2709 | shared_lookup_phiargs.truncate (0); |
89fb70a3 DB |
2710 | |
2711 | /* Canonicalize the SSA_NAME's to their value number. */ | |
726a989a | 2712 | for (i = 0; i < gimple_phi_num_args (phi); i++) |
89fb70a3 DB |
2713 | { |
2714 | tree def = PHI_ARG_DEF (phi, i); | |
2715 | def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def; | |
9771b263 | 2716 | shared_lookup_phiargs.safe_push (def); |
89fb70a3 | 2717 | } |
24d63016 | 2718 | vp1.type = TREE_TYPE (gimple_phi_result (phi)); |
89fb70a3 | 2719 | vp1.phiargs = shared_lookup_phiargs; |
726a989a | 2720 | vp1.block = gimple_bb (phi); |
89fb70a3 | 2721 | vp1.hashcode = vn_phi_compute_hash (&vp1); |
c203e8a7 TS |
2722 | slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode, |
2723 | NO_INSERT); | |
27fa4044 | 2724 | if (!slot && current_info == optimistic_info) |
c203e8a7 TS |
2725 | slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode, |
2726 | NO_INSERT); | |
89fb70a3 DB |
2727 | if (!slot) |
2728 | return NULL_TREE; | |
bf190e8d | 2729 | return (*slot)->result; |
89fb70a3 DB |
2730 | } |
2731 | ||
2732 | /* Insert PHI into the current hash table with a value number of | |
2733 | RESULT. */ | |
2734 | ||
c9145754 | 2735 | static vn_phi_t |
726a989a | 2736 | vn_phi_insert (gimple phi, tree result) |
89fb70a3 | 2737 | { |
bf190e8d | 2738 | vn_phi_s **slot; |
89fb70a3 | 2739 | vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool); |
726a989a | 2740 | unsigned i; |
6e1aa848 | 2741 | vec<tree> args = vNULL; |
89fb70a3 DB |
2742 | |
2743 | /* Canonicalize the SSA_NAME's to their value number. */ | |
726a989a | 2744 | for (i = 0; i < gimple_phi_num_args (phi); i++) |
89fb70a3 DB |
2745 | { |
2746 | tree def = PHI_ARG_DEF (phi, i); | |
2747 | def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def; | |
9771b263 | 2748 | args.safe_push (def); |
89fb70a3 | 2749 | } |
c9145754 | 2750 | vp1->value_id = VN_INFO (result)->value_id; |
24d63016 | 2751 | vp1->type = TREE_TYPE (gimple_phi_result (phi)); |
89fb70a3 | 2752 | vp1->phiargs = args; |
726a989a | 2753 | vp1->block = gimple_bb (phi); |
89fb70a3 DB |
2754 | vp1->result = result; |
2755 | vp1->hashcode = vn_phi_compute_hash (vp1); | |
2756 | ||
c203e8a7 | 2757 | slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT); |
89fb70a3 DB |
2758 | |
2759 | /* Because we iterate over phi operations more than once, it's | |
2760 | possible the slot might already exist here, hence no assert.*/ | |
2761 | *slot = vp1; | |
c9145754 | 2762 | return vp1; |
89fb70a3 DB |
2763 | } |
2764 | ||
2765 | ||
2766 | /* Print set of components in strongly connected component SCC to OUT. */ | |
2767 | ||
2768 | static void | |
9771b263 | 2769 | print_scc (FILE *out, vec<tree> scc) |
89fb70a3 DB |
2770 | { |
2771 | tree var; | |
2772 | unsigned int i; | |
2773 | ||
0eb09f31 | 2774 | fprintf (out, "SCC consists of:"); |
9771b263 | 2775 | FOR_EACH_VEC_ELT (scc, i, var) |
89fb70a3 | 2776 | { |
89fb70a3 | 2777 | fprintf (out, " "); |
0eb09f31 | 2778 | print_generic_expr (out, var, 0); |
89fb70a3 DB |
2779 | } |
2780 | fprintf (out, "\n"); | |
2781 | } | |
2782 | ||
2783 | /* Set the value number of FROM to TO, return true if it has changed | |
2784 | as a result. */ | |
2785 | ||
2786 | static inline bool | |
2787 | set_ssa_val_to (tree from, tree to) | |
2788 | { | |
90bc4623 | 2789 | tree currval = SSA_VAL (from); |
d1de852b | 2790 | HOST_WIDE_INT toff, coff; |
89fb70a3 | 2791 | |
a764d660 RB |
2792 | /* The only thing we allow as value numbers are ssa_names |
2793 | and invariants. So assert that here. We don't allow VN_TOP | |
2794 | as visiting a stmt should produce a value-number other than | |
2795 | that. | |
2796 | ??? Still VN_TOP can happen for unreachable code, so force | |
2797 | it to varying in that case. Not all code is prepared to | |
2798 | get VN_TOP on valueization. */ | |
2799 | if (to == VN_TOP) | |
2800 | { | |
2801 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2802 | fprintf (dump_file, "Forcing value number to varying on " | |
2803 | "receiving VN_TOP\n"); | |
2804 | to = from; | |
2805 | } | |
2806 | ||
2807 | gcc_assert (to != NULL_TREE | |
2808 | && (TREE_CODE (to) == SSA_NAME | |
2809 | || is_gimple_min_invariant (to))); | |
2810 | ||
90bc4623 RG |
2811 | if (from != to) |
2812 | { | |
2813 | if (currval == from) | |
2814 | { | |
2815 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2816 | { | |
2817 | fprintf (dump_file, "Not changing value number of "); | |
2818 | print_generic_expr (dump_file, from, 0); | |
2819 | fprintf (dump_file, " from VARYING to "); | |
2820 | print_generic_expr (dump_file, to, 0); | |
2821 | fprintf (dump_file, "\n"); | |
2822 | } | |
2823 | return false; | |
2824 | } | |
2825 | else if (TREE_CODE (to) == SSA_NAME | |
2826 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to)) | |
2827 | to = from; | |
2828 | } | |
fe4fefa0 | 2829 | |
89fb70a3 DB |
2830 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2831 | { | |
2832 | fprintf (dump_file, "Setting value number of "); | |
2833 | print_generic_expr (dump_file, from, 0); | |
2834 | fprintf (dump_file, " to "); | |
2835 | print_generic_expr (dump_file, to, 0); | |
89fb70a3 DB |
2836 | } |
2837 | ||
d1de852b RB |
2838 | if (currval != to |
2839 | && !operand_equal_p (currval, to, 0) | |
2840 | /* ??? For addresses involving volatile objects or types operand_equal_p | |
2841 | does not reliably detect ADDR_EXPRs as equal. We know we are only | |
2842 | getting invariant gimple addresses here, so can use | |
2843 | get_addr_base_and_unit_offset to do this comparison. */ | |
2844 | && !(TREE_CODE (currval) == ADDR_EXPR | |
2845 | && TREE_CODE (to) == ADDR_EXPR | |
2846 | && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff) | |
2847 | == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff)) | |
2848 | && coff == toff)) | |
89fb70a3 | 2849 | { |
5006671f | 2850 | VN_INFO (from)->valnum = to; |
8495c94f RG |
2851 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2852 | fprintf (dump_file, " (changed)\n"); | |
89fb70a3 DB |
2853 | return true; |
2854 | } | |
8495c94f RG |
2855 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2856 | fprintf (dump_file, "\n"); | |
89fb70a3 DB |
2857 | return false; |
2858 | } | |
2859 | ||
00115921 TV |
2860 | /* Mark as processed all the definitions in the defining stmt of USE, or |
2861 | the USE itself. */ | |
2862 | ||
2863 | static void | |
2864 | mark_use_processed (tree use) | |
2865 | { | |
2866 | ssa_op_iter iter; | |
2867 | def_operand_p defp; | |
2868 | gimple stmt = SSA_NAME_DEF_STMT (use); | |
2869 | ||
2870 | if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI) | |
2871 | { | |
2872 | VN_INFO (use)->use_processed = true; | |
2873 | return; | |
2874 | } | |
2875 | ||
2876 | FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS) | |
2877 | { | |
2878 | tree def = DEF_FROM_PTR (defp); | |
2879 | ||
2880 | VN_INFO (def)->use_processed = true; | |
2881 | } | |
2882 | } | |
2883 | ||
89fb70a3 DB |
2884 | /* Set all definitions in STMT to value number to themselves. |
2885 | Return true if a value number changed. */ | |
2886 | ||
2887 | static bool | |
726a989a | 2888 | defs_to_varying (gimple stmt) |
89fb70a3 DB |
2889 | { |
2890 | bool changed = false; | |
2891 | ssa_op_iter iter; | |
2892 | def_operand_p defp; | |
2893 | ||
2894 | FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS) | |
2895 | { | |
2896 | tree def = DEF_FROM_PTR (defp); | |
89fb70a3 DB |
2897 | changed |= set_ssa_val_to (def, def); |
2898 | } | |
2899 | return changed; | |
2900 | } | |
2901 | ||
26fa9076 | 2902 | static bool expr_has_constants (tree expr); |
3d45dd59 | 2903 | |
89fb70a3 DB |
2904 | /* Visit a copy between LHS and RHS, return true if the value number |
2905 | changed. */ | |
2906 | ||
2907 | static bool | |
2908 | visit_copy (tree lhs, tree rhs) | |
2909 | { | |
89fb70a3 DB |
2910 | /* The copy may have a more interesting constant filled expression |
2911 | (we don't, since we know our RHS is just an SSA name). */ | |
0d5a1b56 RB |
2912 | VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants; |
2913 | VN_INFO (lhs)->expr = VN_INFO (rhs)->expr; | |
2914 | ||
2915 | /* And finally valueize. */ | |
2916 | rhs = SSA_VAL (rhs); | |
89fb70a3 DB |
2917 | |
2918 | return set_ssa_val_to (lhs, rhs); | |
2919 | } | |
2920 | ||
2262707f | 2921 | /* Visit a nary operator RHS, value number it, and return true if the |
89fb70a3 DB |
2922 | value number of LHS has changed as a result. */ |
2923 | ||
2924 | static bool | |
2262707f | 2925 | visit_nary_op (tree lhs, gimple stmt) |
89fb70a3 DB |
2926 | { |
2927 | bool changed = false; | |
726a989a | 2928 | tree result = vn_nary_op_lookup_stmt (stmt, NULL); |
89fb70a3 DB |
2929 | |
2930 | if (result) | |
2262707f | 2931 | changed = set_ssa_val_to (lhs, result); |
726a989a RB |
2932 | else |
2933 | { | |
2934 | changed = set_ssa_val_to (lhs, lhs); | |
2935 | vn_nary_op_insert_stmt (stmt, lhs); | |
2936 | } | |
2937 | ||
2938 | return changed; | |
2939 | } | |
2940 | ||
2941 | /* Visit a call STMT storing into LHS. Return true if the value number | |
2942 | of the LHS has changed as a result. */ | |
2943 | ||
2944 | static bool | |
538dd0b7 | 2945 | visit_reference_op_call (tree lhs, gcall *stmt) |
89fb70a3 DB |
2946 | { |
2947 | bool changed = false; | |
726a989a | 2948 | struct vn_reference_s vr1; |
00115921 | 2949 | vn_reference_t vnresult = NULL; |
00115921 | 2950 | tree vdef = gimple_vdef (stmt); |
89fb70a3 | 2951 | |
6867d9a9 TV |
2952 | /* Non-ssa lhs is handled in copy_reference_ops_from_call. */ |
2953 | if (lhs && TREE_CODE (lhs) != SSA_NAME) | |
2954 | lhs = NULL_TREE; | |
2955 | ||
26f3a4e1 | 2956 | vn_reference_lookup_call (stmt, &vnresult, &vr1); |
00115921 | 2957 | if (vnresult) |
89fb70a3 | 2958 | { |
4583fada | 2959 | if (vnresult->result_vdef && vdef) |
00115921 TV |
2960 | changed |= set_ssa_val_to (vdef, vnresult->result_vdef); |
2961 | ||
2962 | if (!vnresult->result && lhs) | |
2963 | vnresult->result = lhs; | |
2964 | ||
2965 | if (vnresult->result && lhs) | |
2966 | { | |
2967 | changed |= set_ssa_val_to (lhs, vnresult->result); | |
2968 | ||
2969 | if (VN_INFO (vnresult->result)->has_constants) | |
2970 | VN_INFO (lhs)->has_constants = true; | |
2971 | } | |
89fb70a3 DB |
2972 | } |
2973 | else | |
2974 | { | |
726a989a | 2975 | vn_reference_t vr2; |
26f3a4e1 | 2976 | vn_reference_s **slot; |
00115921 TV |
2977 | if (vdef) |
2978 | changed |= set_ssa_val_to (vdef, vdef); | |
2979 | if (lhs) | |
2980 | changed |= set_ssa_val_to (lhs, lhs); | |
726a989a | 2981 | vr2 = (vn_reference_t) pool_alloc (current_info->references_pool); |
5006671f | 2982 | vr2->vuse = vr1.vuse; |
26f3a4e1 RB |
2983 | /* As we are not walking the virtual operand chain we know the |
2984 | shared_lookup_references are still original so we can re-use | |
2985 | them here. */ | |
2986 | vr2->operands = vr1.operands.copy (); | |
b45d2719 RG |
2987 | vr2->type = vr1.type; |
2988 | vr2->set = vr1.set; | |
726a989a RB |
2989 | vr2->hashcode = vr1.hashcode; |
2990 | vr2->result = lhs; | |
00115921 | 2991 | vr2->result_vdef = vdef; |
c203e8a7 TS |
2992 | slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode, |
2993 | INSERT); | |
26f3a4e1 | 2994 | gcc_assert (!*slot); |
726a989a | 2995 | *slot = vr2; |
89fb70a3 DB |
2996 | } |
2997 | ||
2998 | return changed; | |
2999 | } | |
3000 | ||
3001 | /* Visit a load from a reference operator RHS, part of STMT, value number it, | |
3002 | and return true if the value number of the LHS has changed as a result. */ | |
3003 | ||
3004 | static bool | |
726a989a | 3005 | visit_reference_op_load (tree lhs, tree op, gimple stmt) |
89fb70a3 DB |
3006 | { |
3007 | bool changed = false; | |
d0ca0bcb RG |
3008 | tree last_vuse; |
3009 | tree result; | |
3010 | ||
3011 | last_vuse = gimple_vuse (stmt); | |
3012 | last_vuse_ptr = &last_vuse; | |
1ec87690 RG |
3013 | result = vn_reference_lookup (op, gimple_vuse (stmt), |
3014 | default_vn_walk_kind, NULL); | |
d0ca0bcb | 3015 | last_vuse_ptr = NULL; |
89fb70a3 | 3016 | |
3d45dd59 RG |
3017 | /* We handle type-punning through unions by value-numbering based |
3018 | on offset and size of the access. Be prepared to handle a | |
3019 | type-mismatch here via creating a VIEW_CONVERT_EXPR. */ | |
3020 | if (result | |
3021 | && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op))) | |
3022 | { | |
3023 | /* We will be setting the value number of lhs to the value number | |
3024 | of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result). | |
3025 | So first simplify and lookup this expression to see if it | |
3026 | is already available. */ | |
3027 | tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result); | |
ecb4e37b RG |
3028 | if ((CONVERT_EXPR_P (val) |
3029 | || TREE_CODE (val) == VIEW_CONVERT_EXPR) | |
3030 | && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME) | |
3d45dd59 | 3031 | { |
a5eaec42 | 3032 | tree tem = vn_get_expr_for (TREE_OPERAND (val, 0)); |
ecb4e37b RG |
3033 | if ((CONVERT_EXPR_P (tem) |
3034 | || TREE_CODE (tem) == VIEW_CONVERT_EXPR) | |
9bacafeb PB |
3035 | && (tem = fold_unary_ignore_overflow (TREE_CODE (val), |
3036 | TREE_TYPE (val), tem))) | |
3d45dd59 RG |
3037 | val = tem; |
3038 | } | |
3039 | result = val; | |
3040 | if (!is_gimple_min_invariant (val) | |
3041 | && TREE_CODE (val) != SSA_NAME) | |
c9145754 | 3042 | result = vn_nary_op_lookup (val, NULL); |
3d45dd59 RG |
3043 | /* If the expression is not yet available, value-number lhs to |
3044 | a new SSA_NAME we create. */ | |
70f34814 | 3045 | if (!result) |
3d45dd59 | 3046 | { |
70b5e7dc RG |
3047 | result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (), |
3048 | "vntemp"); | |
3d45dd59 RG |
3049 | /* Initialize value-number information properly. */ |
3050 | VN_INFO_GET (result)->valnum = result; | |
726a989a | 3051 | VN_INFO (result)->value_id = get_next_value_id (); |
3d45dd59 | 3052 | VN_INFO (result)->expr = val; |
26fa9076 | 3053 | VN_INFO (result)->has_constants = expr_has_constants (val); |
3d45dd59 RG |
3054 | VN_INFO (result)->needs_insertion = true; |
3055 | /* As all "inserted" statements are singleton SCCs, insert | |
3056 | to the valid table. This is strictly needed to | |
3057 | avoid re-generating new value SSA_NAMEs for the same | |
3058 | expression during SCC iteration over and over (the | |
3059 | optimistic table gets cleared after each iteration). | |
3060 | We do not need to insert into the optimistic table, as | |
3061 | lookups there will fall back to the valid table. */ | |
3062 | if (current_info == optimistic_info) | |
3063 | { | |
3064 | current_info = valid_info; | |
3065 | vn_nary_op_insert (val, result); | |
3066 | current_info = optimistic_info; | |
3067 | } | |
3068 | else | |
3069 | vn_nary_op_insert (val, result); | |
3070 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3071 | { | |
3072 | fprintf (dump_file, "Inserting name "); | |
3073 | print_generic_expr (dump_file, result, 0); | |
3074 | fprintf (dump_file, " for expression "); | |
3075 | print_generic_expr (dump_file, val, 0); | |
3076 | fprintf (dump_file, "\n"); | |
3077 | } | |
3078 | } | |
3079 | } | |
3080 | ||
89fb70a3 DB |
3081 | if (result) |
3082 | { | |
3083 | changed = set_ssa_val_to (lhs, result); | |
b80280f2 RG |
3084 | if (TREE_CODE (result) == SSA_NAME |
3085 | && VN_INFO (result)->has_constants) | |
3086 | { | |
3087 | VN_INFO (lhs)->expr = VN_INFO (result)->expr; | |
3088 | VN_INFO (lhs)->has_constants = true; | |
3089 | } | |
89fb70a3 DB |
3090 | } |
3091 | else | |
3092 | { | |
3093 | changed = set_ssa_val_to (lhs, lhs); | |
4ec0a198 | 3094 | vn_reference_insert (op, lhs, last_vuse, NULL_TREE); |
89fb70a3 DB |
3095 | } |
3096 | ||
3097 | return changed; | |
3098 | } | |
3099 | ||
3100 | ||
3101 | /* Visit a store to a reference operator LHS, part of STMT, value number it, | |
3102 | and return true if the value number of the LHS has changed as a result. */ | |
3103 | ||
3104 | static bool | |
726a989a | 3105 | visit_reference_op_store (tree lhs, tree op, gimple stmt) |
89fb70a3 DB |
3106 | { |
3107 | bool changed = false; | |
4ec0a198 TV |
3108 | vn_reference_t vnresult = NULL; |
3109 | tree result, assign; | |
89fb70a3 | 3110 | bool resultsame = false; |
4ec0a198 TV |
3111 | tree vuse = gimple_vuse (stmt); |
3112 | tree vdef = gimple_vdef (stmt); | |
89fb70a3 DB |
3113 | |
3114 | /* First we want to lookup using the *vuses* from the store and see | |
3115 | if there the last store to this location with the same address | |
3116 | had the same value. | |
3117 | ||
3118 | The vuses represent the memory state before the store. If the | |
3119 | memory state, address, and value of the store is the same as the | |
3120 | last store to this location, then this store will produce the | |
3121 | same memory state as that store. | |
3122 | ||
3123 | In this case the vdef versions for this store are value numbered to those | |
3124 | vuse versions, since they represent the same memory state after | |
3125 | this store. | |
3126 | ||
3127 | Otherwise, the vdefs for the store are used when inserting into | |
3128 | the table, since the store generates a new memory state. */ | |
3129 | ||
4ec0a198 | 3130 | result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL); |
89fb70a3 DB |
3131 | |
3132 | if (result) | |
3133 | { | |
3134 | if (TREE_CODE (result) == SSA_NAME) | |
3135 | result = SSA_VAL (result); | |
5be891a4 RG |
3136 | if (TREE_CODE (op) == SSA_NAME) |
3137 | op = SSA_VAL (op); | |
89fb70a3 DB |
3138 | resultsame = expressions_equal_p (result, op); |
3139 | } | |
3140 | ||
26f3a4e1 RB |
3141 | if ((!result || !resultsame) |
3142 | /* Only perform the following when being called from PRE | |
3143 | which embeds tail merging. */ | |
3144 | && default_vn_walk_kind == VN_WALK) | |
89fb70a3 | 3145 | { |
4ec0a198 TV |
3146 | assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op); |
3147 | vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult); | |
3148 | if (vnresult) | |
3149 | { | |
3150 | VN_INFO (vdef)->use_processed = true; | |
3151 | return set_ssa_val_to (vdef, vnresult->result_vdef); | |
3152 | } | |
3153 | } | |
89fb70a3 | 3154 | |
4ec0a198 TV |
3155 | if (!result || !resultsame) |
3156 | { | |
89fb70a3 DB |
3157 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3158 | { | |
3159 | fprintf (dump_file, "No store match\n"); | |
3160 | fprintf (dump_file, "Value numbering store "); | |
3161 | print_generic_expr (dump_file, lhs, 0); | |
3162 | fprintf (dump_file, " to "); | |
3163 | print_generic_expr (dump_file, op, 0); | |
3164 | fprintf (dump_file, "\n"); | |
3165 | } | |
3166 | /* Have to set value numbers before insert, since insert is | |
3167 | going to valueize the references in-place. */ | |
4ec0a198 | 3168 | if (vdef) |
89fb70a3 | 3169 | { |
89fb70a3 DB |
3170 | changed |= set_ssa_val_to (vdef, vdef); |
3171 | } | |
3172 | ||
9a327766 RG |
3173 | /* Do not insert structure copies into the tables. */ |
3174 | if (is_gimple_min_invariant (op) | |
3175 | || is_gimple_reg (op)) | |
4ec0a198 TV |
3176 | vn_reference_insert (lhs, op, vdef, NULL); |
3177 | ||
26f3a4e1 RB |
3178 | /* Only perform the following when being called from PRE |
3179 | which embeds tail merging. */ | |
3180 | if (default_vn_walk_kind == VN_WALK) | |
3181 | { | |
3182 | assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op); | |
3183 | vn_reference_insert (assign, lhs, vuse, vdef); | |
3184 | } | |
89fb70a3 DB |
3185 | } |
3186 | else | |
3187 | { | |
5006671f RG |
3188 | /* We had a match, so value number the vdef to have the value |
3189 | number of the vuse it came from. */ | |
89fb70a3 DB |
3190 | |
3191 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3192 | fprintf (dump_file, "Store matched earlier value," | |
3193 | "value numbering store vdefs to matching vuses.\n"); | |
3194 | ||
4ec0a198 | 3195 | changed |= set_ssa_val_to (vdef, SSA_VAL (vuse)); |
89fb70a3 DB |
3196 | } |
3197 | ||
3198 | return changed; | |
3199 | } | |
3200 | ||
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  /* VN_TOP is the optimistic "unknown" lattice value; it is equivalent
     to everything, so it can be ignored when comparing arguments.  */
  tree sameval = VN_TOP;
  bool allsame = true;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  Only arguments
     arriving over executable edges participate.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

	/* Look through SSA names to their current value number.  */
	if (TREE_CODE (def) == SSA_NAME)
	  def = SSA_VAL (def);
	if (def == VN_TOP)
	  continue;
	if (sameval == VN_TOP)
	  {
	    sameval = def;
	  }
	else
	  {
	    if (!expressions_equal_p (def, sameval))
	      {
		allsame = false;
		break;
	      }
	  }
      }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    return set_ssa_val_to (PHI_RESULT (phi), sameval);

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    changed = set_ssa_val_to (PHI_RESULT (phi), result);
  else
    {
      /* No equivalent PHI found: record this one and value number the
	 result to itself (VARYING with respect to other names).  */
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
3263 | ||
3264 | /* Return true if EXPR contains constants. */ | |
3265 | ||
3266 | static bool | |
3267 | expr_has_constants (tree expr) | |
3268 | { | |
3269 | switch (TREE_CODE_CLASS (TREE_CODE (expr))) | |
3270 | { | |
3271 | case tcc_unary: | |
3272 | return is_gimple_min_invariant (TREE_OPERAND (expr, 0)); | |
3273 | ||
3274 | case tcc_binary: | |
3275 | return is_gimple_min_invariant (TREE_OPERAND (expr, 0)) | |
3276 | || is_gimple_min_invariant (TREE_OPERAND (expr, 1)); | |
3277 | /* Constants inside reference ops are rarely interesting, but | |
3278 | it can take a lot of looking to find them. */ | |
3279 | case tcc_reference: | |
e9bd9cf3 | 3280 | case tcc_declaration: |
89fb70a3 DB |
3281 | return false; |
3282 | default: | |
3283 | return is_gimple_min_invariant (expr); | |
3284 | } | |
3285 | return false; | |
3286 | } | |
3287 | ||
726a989a RB |
/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  tree tem;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  /* Check each RHS operand in turn; the cases deliberately fall
     through so that a ternary RHS also checks operands 2 and 1, and a
     binary RHS also checks operand 1.  Each operand is looked through
     to its current value number first, so optimistically-constant SSA
     names count as constants.  */
  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_TERNARY_RHS:
      tem = gimple_assign_rhs3 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
	tem = SSA_VAL (tem);
      if (is_gimple_min_invariant (tem))
	return true;
      /* Fallthru.  */

    case GIMPLE_BINARY_RHS:
      tem = gimple_assign_rhs2 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
	tem = SSA_VAL (tem);
      if (is_gimple_min_invariant (tem))
	return true;
      /* Fallthru.  */

    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case GIMPLE_UNARY_RHS:
      tem = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
	tem = SSA_VAL (tem);
      return is_gimple_min_invariant (tem);

    default:
      gcc_unreachable ();
    }
  return false;
}
3330 | ||
89fb70a3 DB |
/* Simplify the binary expression RHS of STMT, and return the result
   if simplified, or NULL_TREE if no simplification was possible or
   the folded form is not a valid GIMPLE RHS.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  op0 = vn_valueize (op0);
  if (TREE_CODE (op0) == SSA_NAME
      && (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (code) == tcc_comparison
	  || code == COMPLEX_EXPR))
    op0 = vn_get_expr_for (op0);

  op1 = vn_valueize (op1);
  if (TREE_CODE (op1) == SSA_NAME
      && (VN_INFO (op1)->has_constants
	  || code == COMPLEX_EXPR))
    op1 = vn_get_expr_for (op1);

  /* Pointer plus constant can be represented as invariant address.
     Do so to allow further propagation, see also tree forwprop.  */
  if (code == POINTER_PLUS_EXPR
      && tree_fits_uhwi_p (op1)
      && TREE_CODE (op0) == ADDR_EXPR
      && is_gimple_min_invariant (op0))
    return build_invariant_address (TREE_TYPE (op0),
				    TREE_OPERAND (op0, 0),
				    tree_to_uhwi (op1));

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  /* Folding may trigger overflow warnings on speculative operands;
     defer them and only emit if the fold result is actually used.  */
  fold_defer_overflow_warnings ();

  result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
3392 | ||
eb2c3940 RG |
/* Simplify the unary expression RHS of STMT, and return the result
   if simplified, or NULL_TREE if nothing changed or the folded form
   is not a valid GIMPLE RHS.  */

static tree
simplify_unary_expression (gassign *stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  For those the operand of interest is
     the inner operand of the reference.  */
  if (code == REALPART_EXPR
      || code == IMAGPART_EXPR
      || code == VIEW_CONVERT_EXPR
      || code == BIT_FIELD_REF)
    op0 = TREE_OPERAND (op0, 0);

  orig_op0 = op0;
  op0 = vn_valueize (op0);
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants)
	op0 = vn_get_expr_for (op0);
      else if (CONVERT_EXPR_CODE_P (code)
	       || code == REALPART_EXPR
	       || code == IMAGPART_EXPR
	       || code == VIEW_CONVERT_EXPR
	       || code == BIT_FIELD_REF)
	{
	  /* We want to do tree-combining on conversion-like expressions.
	     Make sure we feed only SSA_NAMEs or constants to fold though.  */
	  tree tem = vn_get_expr_for (op0);
	  if (UNARY_CLASS_P (tem)
	      || BINARY_CLASS_P (tem)
	      || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	      || TREE_CODE (tem) == SSA_NAME
	      || TREE_CODE (tem) == CONSTRUCTOR
	      || is_gimple_min_invariant (tem))
	    op0 = tem;
	}
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  if (code == BIT_FIELD_REF)
    {
      /* BIT_FIELD_REF is ternary: keep the original size/offset
	 operands and substitute only the valueized base.  */
      tree rhs = gimple_assign_rhs1 (stmt);
      result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
			     op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
    }
  else
    result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}
3457 | ||
89fb70a3 DB |
/* Try to simplify the RHS of STMT using equivalences and constant
   folding.  Returns the simplified tree (an SSA name or invariant, or
   a combined expression) or NULL_TREE.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  /* If that didn't work try combining multiple statements.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Fallthrough for some unary codes that can operate on registers.
	 Other reference codes are not handled here.  */
      if (!(code == REALPART_EXPR
	    || code == IMAGPART_EXPR
	    || code == VIEW_CONVERT_EXPR
	    || code == BIT_FIELD_REF))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}
3503 | ||
/* Visit and value number USE, return true if the value number
   changed.  Dispatches on the defining statement kind: default
   definitions, PHIs, assignments and calls each get their own
   handling; everything else is value numbered to VARYING.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  mark_use_processed (use);

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  A default definition only equals
     itself.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
	changed = visit_phi (stmt);
      else if (gimple_has_volatile_ops (stmt))
	/* Volatile accesses must not be value numbered.  */
	changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
	{
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  tree lhs = gimple_assign_lhs (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  tree simplified;

	  /* Shortcut for copies. Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (code == SSA_NAME
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, rhs1);
	      goto done;
	    }
	  simplified = try_to_simplify (as_a <gassign *> (stmt));
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_gimple_expr (dump_file, stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     expr_has_constants (simplified));
		  else
		    fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valuizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	    }
	  else if (stmt_has_constants (stmt)
		   && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */

	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if ((TREE_CODE (lhs) == SSA_NAME
	       /* We can substitute SSA_NAMEs that are live over
		  abnormal edges with their constant value.  */
	       && !(gimple_assign_copy_p (stmt)
		    && is_gimple_min_invariant (rhs1))
	       && !(simplified
		    && is_gimple_min_invariant (simplified))
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	      /* Stores or copies from SSA_NAMEs that are live over
		 abnormal edges are a problem.  */
	      || (code == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs)
		   || DECL_P (lhs))
	    changed = visit_reference_op_store (lhs, rhs1, stmt);
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if ((gimple_assign_copy_p (stmt)
		   && is_gimple_min_invariant (rhs1))
		  || (simplified
		      && is_gimple_min_invariant (simplified)))
		{
		  VN_INFO (lhs)->has_constants = true;
		  if (simplified)
		    changed = set_ssa_val_to (lhs, simplified);
		  else
		    changed = set_ssa_val_to (lhs, rhs1);
		}
	      else
		{
		  /* First try to lookup the simplified expression.  */
		  if (simplified)
		    {
		      enum gimple_rhs_class rhs_class;


		      rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
		      if ((rhs_class == GIMPLE_UNARY_RHS
			   || rhs_class == GIMPLE_BINARY_RHS
			   || rhs_class == GIMPLE_TERNARY_RHS)
			  && valid_gimple_rhs_p (simplified))
			{
			  tree result = vn_nary_op_lookup (simplified, NULL);
			  if (result)
			    {
			      changed = set_ssa_val_to (lhs, result);
			      goto done;
			    }
			}
		    }

		  /* Otherwise visit the original statement.  */
		  switch (vn_get_stmt_kind (stmt))
		    {
		    case VN_NARY:
		      changed = visit_nary_op (lhs, stmt);
		      break;
		    case VN_REFERENCE:
		      changed = visit_reference_op_load (lhs, rhs1, stmt);
		      break;
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs && TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* Try constant folding based on our current lattice.  */
	      tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
								vn_valueize);
	      if (simplified)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "call ");
		      print_gimple_expr (dump_file, stmt, 0, 0);
		      fprintf (dump_file, " simplified to ");
		      print_generic_expr (dump_file, simplified, 0);
		      if (TREE_CODE (lhs) == SSA_NAME)
			fprintf (dump_file, " has constants %d\n",
				 expr_has_constants (simplified));
		      else
			fprintf (dump_file, "\n");
		    }
		}
	      /* Setting value numbers to constants will occasionally
		 screw up phi congruence because constants are not
		 uniquely associated with a single ssa name that can be
		 looked up.  */
	      if (simplified
		  && is_gimple_min_invariant (simplified))
		{
		  VN_INFO (lhs)->expr = simplified;
		  VN_INFO (lhs)->has_constants = true;
		  changed = set_ssa_val_to (lhs, simplified);
		  /* A folded-away call no longer changes memory: value
		     number its vdef to the incoming vuse.  */
		  if (gimple_vdef (stmt))
		    changed |= set_ssa_val_to (gimple_vdef (stmt),
					       gimple_vuse (stmt));
		  goto done;
		}
	      else if (simplified
		       && TREE_CODE (simplified) == SSA_NAME)
		{
		  changed = visit_copy (lhs, simplified);
		  if (gimple_vdef (stmt))
		    changed |= set_ssa_val_to (gimple_vdef (stmt),
					       gimple_vuse (stmt));
		  goto done;
		}
	      else
		{
		  if (stmt_has_constants (stmt))
		    VN_INFO (lhs)->has_constants = true;
		  else
		    {
		      /* We reset expr and constantness here because we may
			 have been value numbering optimistically, and
			 iterating.  They may become non-constant in this case,
			 even if they were optimistically constant.  */
		      VN_INFO (lhs)->has_constants = false;
		      VN_INFO (lhs)->expr = NULL_TREE;
		    }

		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
		    {
		      changed = defs_to_varying (stmt);
		      goto done;
		    }
		}
	    }

	  if (!gimple_call_internal_p (stmt)
	      && (/* Calls to the same function with the same vuse
		     and the same operands do not necessarily return the same
		     value, unless they're pure or const.  */
		  gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
		  /* If calls have a vdef, subsequent calls won't have
		     the same incoming vuse.  So, if 2 calls with vdef have the
		     same vuse, we know they're not subsequent.
		     We can value number 2 calls to the same function with the
		     same vuse and the same operands which are not subsequent
		     the same, because there is no code in the program that can
		     compare the 2 values...  */
		  || (gimple_vdef (stmt)
		      /* ... unless the call returns a pointer which does
			 not alias with anything else.  In which case the
			 information that the values are distinct are encoded
			 in the IL.  */
		      && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
		      /* Only perform the following when being called from PRE
			 which embeds tail merging.  */
		      && default_vn_walk_kind == VN_WALK)))
	    changed = visit_reference_op_call (lhs, call_stmt);
	  else
	    changed = defs_to_varying (stmt);
	}
      else
	changed = defs_to_varying (stmt);
    }
 done:
  return changed;
}
3779 | ||
3780 | /* Compare two operands by reverse postorder index */ | |
3781 | ||
3782 | static int | |
3783 | compare_ops (const void *pa, const void *pb) | |
3784 | { | |
3785 | const tree opa = *((const tree *)pa); | |
3786 | const tree opb = *((const tree *)pb); | |
726a989a RB |
3787 | gimple opstmta = SSA_NAME_DEF_STMT (opa); |
3788 | gimple opstmtb = SSA_NAME_DEF_STMT (opb); | |
89fb70a3 DB |
3789 | basic_block bba; |
3790 | basic_block bbb; | |
3791 | ||
726a989a | 3792 | if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb)) |
3d8fa148 | 3793 | return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb); |
726a989a | 3794 | else if (gimple_nop_p (opstmta)) |
89fb70a3 | 3795 | return -1; |
726a989a | 3796 | else if (gimple_nop_p (opstmtb)) |
89fb70a3 DB |
3797 | return 1; |
3798 | ||
726a989a RB |
3799 | bba = gimple_bb (opstmta); |
3800 | bbb = gimple_bb (opstmtb); | |
89fb70a3 DB |
3801 | |
3802 | if (!bba && !bbb) | |
3d8fa148 | 3803 | return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb); |
89fb70a3 DB |
3804 | else if (!bba) |
3805 | return -1; | |
3806 | else if (!bbb) | |
3807 | return 1; | |
3808 | ||
3809 | if (bba == bbb) | |
3810 | { | |
726a989a RB |
3811 | if (gimple_code (opstmta) == GIMPLE_PHI |
3812 | && gimple_code (opstmtb) == GIMPLE_PHI) | |
3d8fa148 | 3813 | return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb); |
726a989a | 3814 | else if (gimple_code (opstmta) == GIMPLE_PHI) |
89fb70a3 | 3815 | return -1; |
726a989a | 3816 | else if (gimple_code (opstmtb) == GIMPLE_PHI) |
89fb70a3 | 3817 | return 1; |
3d8fa148 DK |
3818 | else if (gimple_uid (opstmta) != gimple_uid (opstmtb)) |
3819 | return gimple_uid (opstmta) - gimple_uid (opstmtb); | |
3820 | else | |
3821 | return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb); | |
89fb70a3 DB |
3822 | } |
3823 | return rpo_numbers[bba->index] - rpo_numbers[bbb->index]; | |
3824 | } | |
3825 | ||
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.
   NOTE(review): SCC is taken by value, but sorting is visible to the
   caller — presumably vec<tree> is a thin descriptor sharing the
   underlying element storage; confirm against vec.h.  */

static void
sort_scc (vec<tree> scc)
{
  scc.qsort (compare_ops);
}
3836 | ||
880ad25f | 3837 | /* Insert the no longer used nary ONARY to the hash INFO. */ |
c82e0b3b | 3838 | |
880ad25f RG |
3839 | static void |
3840 | copy_nary (vn_nary_op_t onary, vn_tables_t info) | |
c82e0b3b | 3841 | { |
9ad6bebe NF |
3842 | size_t size = sizeof_vn_nary_op (onary->length); |
3843 | vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length, | |
3844 | &info->nary_obstack); | |
c82e0b3b | 3845 | memcpy (nary, onary, size); |
9ad6bebe | 3846 | vn_nary_op_insert_into (nary, info->nary, false); |
c82e0b3b RG |
3847 | } |
3848 | ||
880ad25f | 3849 | /* Insert the no longer used phi OPHI to the hash INFO. */ |
c82e0b3b | 3850 | |
880ad25f RG |
3851 | static void |
3852 | copy_phi (vn_phi_t ophi, vn_tables_t info) | |
c82e0b3b | 3853 | { |
880ad25f | 3854 | vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool); |
bf190e8d | 3855 | vn_phi_s **slot; |
c82e0b3b | 3856 | memcpy (phi, ophi, sizeof (*phi)); |
9771b263 | 3857 | ophi->phiargs.create (0); |
c203e8a7 | 3858 | slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT); |
880ad25f | 3859 | gcc_assert (!*slot); |
c82e0b3b | 3860 | *slot = phi; |
c82e0b3b RG |
3861 | } |
3862 | ||
880ad25f | 3863 | /* Insert the no longer used reference OREF to the hash INFO. */ |
c82e0b3b | 3864 | |
880ad25f RG |
3865 | static void |
3866 | copy_reference (vn_reference_t oref, vn_tables_t info) | |
c82e0b3b | 3867 | { |
c82e0b3b | 3868 | vn_reference_t ref; |
bf190e8d | 3869 | vn_reference_s **slot; |
880ad25f | 3870 | ref = (vn_reference_t) pool_alloc (info->references_pool); |
c82e0b3b | 3871 | memcpy (ref, oref, sizeof (*ref)); |
9771b263 | 3872 | oref->operands.create (0); |
c203e8a7 | 3873 | slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT); |
c82e0b3b RG |
3874 | if (*slot) |
3875 | free_reference (*slot); | |
3876 | *slot = ref; | |
c82e0b3b RG |
3877 | } |
3878 | ||
89fb70a3 DB |
/* Process a strongly connected component SCC in the SSA graph:
   iterate value numbering over its members with the optimistic table
   until a fixed point is reached, then migrate the surviving entries
   into the valid table.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
	return;
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
	/* Fallthru to iteration.  */ ;
      else
	{
	  visit_use (use);
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (scc, i, var)
	VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (scc, i, var)
	changed |= visit_use (var);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
			       ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
3959 | ||
6be34936 RG |
3960 | |
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  NAME itself is the root and is popped last.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (scc.length ()
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", scc.length (),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));

      return false;
    }

  /* Process the members in RPO order so iteration converges faster.  */
  if (scc.length () > 1)
    sort_scc (scc);

  process_scc (scc);

  return true;
}
4000 | ||
89fb70a3 DB |
4001 | /* Depth first search on NAME to discover and process SCC's in the SSA |
4002 | graph. | |
4003 | Execution of this algorithm relies on the fact that the SCC's are | |
863d2a57 RG |
4004 | popped off the stack in topological order. |
4005 | Returns true if successful, false if we stopped processing SCC's due | |
fa10beec | 4006 | to resource constraints. */ |
89fb70a3 | 4007 | |
863d2a57 | 4008 | static bool |
89fb70a3 DB |
4009 | DFS (tree name) |
4010 | { | |
6e1aa848 DN |
4011 | vec<ssa_op_iter> itervec = vNULL; |
4012 | vec<tree> namevec = vNULL; | |
6be34936 | 4013 | use_operand_p usep = NULL; |
726a989a RB |
4014 | gimple defstmt; |
4015 | tree use; | |
89fb70a3 | 4016 | ssa_op_iter iter; |
89fb70a3 | 4017 | |
6be34936 | 4018 | start_over: |
89fb70a3 DB |
4019 | /* SCC info */ |
4020 | VN_INFO (name)->dfsnum = next_dfs_num++; | |
4021 | VN_INFO (name)->visited = true; | |
4022 | VN_INFO (name)->low = VN_INFO (name)->dfsnum; | |
4023 | ||
9771b263 | 4024 | sccstack.safe_push (name); |
89fb70a3 DB |
4025 | VN_INFO (name)->on_sccstack = true; |
4026 | defstmt = SSA_NAME_DEF_STMT (name); | |
4027 | ||
4028 | /* Recursively DFS on our operands, looking for SCC's. */ | |
726a989a | 4029 | if (!gimple_nop_p (defstmt)) |
89fb70a3 | 4030 | { |
6be34936 | 4031 | /* Push a new iterator. */ |
538dd0b7 DM |
4032 | if (gphi *phi = dyn_cast <gphi *> (defstmt)) |
4033 | usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES); | |
6be34936 RG |
4034 | else |
4035 | usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES); | |
4036 | } | |
4037 | else | |
81f5094d | 4038 | clear_and_done_ssa_iter (&iter); |
6be34936 RG |
4039 | |
4040 | while (1) | |
4041 | { | |
4042 | /* If we are done processing uses of a name, go up the stack | |
4043 | of iterators and process SCCs as we found them. */ | |
4044 | if (op_iter_done (&iter)) | |
89fb70a3 | 4045 | { |
6be34936 RG |
4046 | /* See if we found an SCC. */ |
4047 | if (VN_INFO (name)->low == VN_INFO (name)->dfsnum) | |
4048 | if (!extract_and_process_scc_for_name (name)) | |
4049 | { | |
9771b263 DN |
4050 | namevec.release (); |
4051 | itervec.release (); | |
6be34936 RG |
4052 | return false; |
4053 | } | |
89fb70a3 | 4054 | |
6be34936 | 4055 | /* Check if we are done. */ |
9771b263 | 4056 | if (namevec.is_empty ()) |
6be34936 | 4057 | { |
9771b263 DN |
4058 | namevec.release (); |
4059 | itervec.release (); | |
6be34936 RG |
4060 | return true; |
4061 | } | |
4062 | ||
4063 | /* Restore the last use walker and continue walking there. */ | |
4064 | use = name; | |
9771b263 DN |
4065 | name = namevec.pop (); |
4066 | memcpy (&iter, &itervec.last (), | |
6be34936 | 4067 | sizeof (ssa_op_iter)); |
9771b263 | 4068 | itervec.pop (); |
6be34936 RG |
4069 | goto continue_walking; |
4070 | } | |
89fb70a3 | 4071 | |
6be34936 RG |
4072 | use = USE_FROM_PTR (usep); |
4073 | ||
4074 | /* Since we handle phi nodes, we will sometimes get | |
4075 | invariants in the use expression. */ | |
4076 | if (TREE_CODE (use) == SSA_NAME) | |
4077 | { | |
89fb70a3 DB |
4078 | if (! (VN_INFO (use)->visited)) |
4079 | { | |
6be34936 RG |
4080 | /* Recurse by pushing the current use walking state on |
4081 | the stack and starting over. */ | |
9771b263 DN |
4082 | itervec.safe_push (iter); |
4083 | namevec.safe_push (name); | |
6be34936 RG |
4084 | name = use; |
4085 | goto start_over; | |
4086 | ||
4087 | continue_walking: | |
89fb70a3 DB |
4088 | VN_INFO (name)->low = MIN (VN_INFO (name)->low, |
4089 | VN_INFO (use)->low); | |
4090 | } | |
4091 | if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum | |
4092 | && VN_INFO (use)->on_sccstack) | |
4093 | { | |
4094 | VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum, | |
4095 | VN_INFO (name)->low); | |
4096 | } | |
4097 | } | |
863d2a57 | 4098 | |
6be34936 | 4099 | usep = op_iter_next_use (&iter); |
89fb70a3 DB |
4100 | } |
4101 | } | |
4102 | ||
89fb70a3 DB |
4103 | /* Allocate a value number table. */ |
4104 | ||
4105 | static void | |
4106 | allocate_vn_table (vn_tables_t table) | |
4107 | { | |
c203e8a7 TS |
4108 | table->phis = new vn_phi_table_type (23); |
4109 | table->nary = new vn_nary_op_table_type (23); | |
4110 | table->references = new vn_reference_table_type (23); | |
89fb70a3 | 4111 | |
49a1fb2d | 4112 | gcc_obstack_init (&table->nary_obstack); |
89fb70a3 DB |
4113 | table->phis_pool = create_alloc_pool ("VN phis", |
4114 | sizeof (struct vn_phi_s), | |
4115 | 30); | |
4116 | table->references_pool = create_alloc_pool ("VN references", | |
4117 | sizeof (struct vn_reference_s), | |
4118 | 30); | |
4119 | } | |
4120 | ||
4121 | /* Free a value number table. */ | |
4122 | ||
4123 | static void | |
4124 | free_vn_table (vn_tables_t table) | |
4125 | { | |
c203e8a7 TS |
4126 | delete table->phis; |
4127 | table->phis = NULL; | |
4128 | delete table->nary; | |
4129 | table->nary = NULL; | |
4130 | delete table->references; | |
4131 | table->references = NULL; | |
49a1fb2d | 4132 | obstack_free (&table->nary_obstack, NULL); |
89fb70a3 DB |
4133 | free_alloc_pool (table->phis_pool); |
4134 | free_alloc_pool (table->references_pool); | |
4135 | } | |
4136 | ||
4137 | static void | |
4138 | init_scc_vn (void) | |
4139 | { | |
4140 | size_t i; | |
4141 | int j; | |
4142 | int *rpo_numbers_temp; | |
89fb70a3 DB |
4143 | |
4144 | calculate_dominance_info (CDI_DOMINATORS); | |
9771b263 | 4145 | sccstack.create (0); |
c203e8a7 | 4146 | constant_to_value_id = new hash_table<vn_constant_hasher> (23); |
b8698a0f | 4147 | |
c9145754 | 4148 | constant_value_ids = BITMAP_ALLOC (NULL); |
b8698a0f | 4149 | |
89fb70a3 | 4150 | next_dfs_num = 1; |
c9145754 | 4151 | next_value_id = 1; |
b8698a0f | 4152 | |
9771b263 | 4153 | vn_ssa_aux_table.create (num_ssa_names + 1); |
89fb70a3 DB |
4154 | /* VEC_alloc doesn't actually grow it to the right size, it just |
4155 | preallocates the space to do so. */ | |
9771b263 | 4156 | vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1); |
cbfb21c1 SB |
4157 | gcc_obstack_init (&vn_ssa_aux_obstack); |
4158 | ||
9771b263 DN |
4159 | shared_lookup_phiargs.create (0); |
4160 | shared_lookup_references.create (0); | |
8b1c6fd7 | 4161 | rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun)); |
0cae8d31 DM |
4162 | rpo_numbers_temp = |
4163 | XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS); | |
89fb70a3 DB |
4164 | pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false); |
4165 | ||
4166 | /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that | |
4167 | the i'th block in RPO order is bb. We want to map bb's to RPO | |
4168 | numbers, so we need to rearrange this array. */ | |
0cae8d31 | 4169 | for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++) |
89fb70a3 DB |
4170 | rpo_numbers[rpo_numbers_temp[j]] = j; |
4171 | ||
cbfb21c1 | 4172 | XDELETE (rpo_numbers_temp); |
89fb70a3 DB |
4173 | |
4174 | VN_TOP = create_tmp_var_raw (void_type_node, "vn_top"); | |
4175 | ||
4176 | /* Create the VN_INFO structures, and initialize value numbers to | |
4177 | TOP. */ | |
4178 | for (i = 0; i < num_ssa_names; i++) | |
4179 | { | |
4180 | tree name = ssa_name (i); | |
4181 | if (name) | |
4182 | { | |
4183 | VN_INFO_GET (name)->valnum = VN_TOP; | |
726a989a | 4184 | VN_INFO (name)->expr = NULL_TREE; |
c9145754 | 4185 | VN_INFO (name)->value_id = 0; |
89fb70a3 DB |
4186 | } |
4187 | } | |
4188 | ||
908ff6a3 | 4189 | renumber_gimple_stmt_uids (); |
89fb70a3 DB |
4190 | |
4191 | /* Create the valid and optimistic value numbering tables. */ | |
4192 | valid_info = XCNEW (struct vn_tables_s); | |
4193 | allocate_vn_table (valid_info); | |
4194 | optimistic_info = XCNEW (struct vn_tables_s); | |
4195 | allocate_vn_table (optimistic_info); | |
89fb70a3 DB |
4196 | } |
4197 | ||
4198 | void | |
4199 | free_scc_vn (void) | |
4200 | { | |
4201 | size_t i; | |
4202 | ||
c203e8a7 TS |
4203 | delete constant_to_value_id; |
4204 | constant_to_value_id = NULL; | |
c9145754 | 4205 | BITMAP_FREE (constant_value_ids); |
9771b263 DN |
4206 | shared_lookup_phiargs.release (); |
4207 | shared_lookup_references.release (); | |
89fb70a3 | 4208 | XDELETEVEC (rpo_numbers); |
cbfb21c1 | 4209 | |
89fb70a3 DB |
4210 | for (i = 0; i < num_ssa_names; i++) |
4211 | { | |
4212 | tree name = ssa_name (i); | |
3d45dd59 RG |
4213 | if (name |
4214 | && VN_INFO (name)->needs_insertion) | |
4215 | release_ssa_name (name); | |
89fb70a3 | 4216 | } |
cbfb21c1 | 4217 | obstack_free (&vn_ssa_aux_obstack, NULL); |
9771b263 | 4218 | vn_ssa_aux_table.release (); |
cbfb21c1 | 4219 | |
9771b263 | 4220 | sccstack.release (); |
89fb70a3 DB |
4221 | free_vn_table (valid_info); |
4222 | XDELETE (valid_info); | |
4223 | free_vn_table (optimistic_info); | |
4224 | XDELETE (optimistic_info); | |
89fb70a3 DB |
4225 | } |
4226 | ||
9ca966ca | 4227 | /* Set *ID according to RESULT. */ |
9ad6bebe NF |
4228 | |
4229 | static void | |
4230 | set_value_id_for_result (tree result, unsigned int *id) | |
4231 | { | |
9ca966ca RB |
4232 | if (result && TREE_CODE (result) == SSA_NAME) |
4233 | *id = VN_INFO (result)->value_id; | |
4234 | else if (result && is_gimple_min_invariant (result)) | |
4235 | *id = get_or_alloc_constant_value_id (result); | |
4236 | else | |
4237 | *id = get_next_value_id (); | |
9ad6bebe NF |
4238 | } |
4239 | ||
caf55296 | 4240 | /* Set the value ids in the valid hash tables. */ |
c9145754 DB |
4241 | |
4242 | static void | |
4243 | set_hashtable_value_ids (void) | |
4244 | { | |
bf190e8d LC |
4245 | vn_nary_op_iterator_type hin; |
4246 | vn_phi_iterator_type hip; | |
4247 | vn_reference_iterator_type hir; | |
c9145754 DB |
4248 | vn_nary_op_t vno; |
4249 | vn_reference_t vr; | |
4250 | vn_phi_t vp; | |
caf55296 | 4251 | |
c9145754 DB |
4252 | /* Now set the value ids of the things we had put in the hash |
4253 | table. */ | |
4254 | ||
c203e8a7 | 4255 | FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin) |
9ad6bebe | 4256 | set_value_id_for_result (vno->result, &vno->value_id); |
c9145754 | 4257 | |
c203e8a7 | 4258 | FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip) |
9ad6bebe | 4259 | set_value_id_for_result (vp->result, &vp->value_id); |
c9145754 | 4260 | |
c203e8a7 TS |
4261 | FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t, |
4262 | hir) | |
9ad6bebe | 4263 | set_value_id_for_result (vr->result, &vr->value_id); |
c9145754 DB |
4264 | } |
4265 | ||
a764d660 RB |
4266 | class cond_dom_walker : public dom_walker |
4267 | { | |
4268 | public: | |
4269 | cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {} | |
4270 | ||
4271 | virtual void before_dom_children (basic_block); | |
4272 | ||
4273 | bool fail; | |
4274 | }; | |
4275 | ||
4276 | void | |
4277 | cond_dom_walker::before_dom_children (basic_block bb) | |
4278 | { | |
4279 | edge e; | |
4280 | edge_iterator ei; | |
4281 | ||
4282 | if (fail) | |
4283 | return; | |
4284 | ||
1d44def2 RB |
4285 | /* If any of the predecessor edges that do not come from blocks dominated |
4286 | by us are still marked as possibly executable consider this block | |
4287 | reachable. */ | |
a764d660 RB |
4288 | bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun); |
4289 | FOR_EACH_EDGE (e, ei, bb->preds) | |
1d44def2 RB |
4290 | if (!dominated_by_p (CDI_DOMINATORS, e->src, bb)) |
4291 | reachable |= (e->flags & EDGE_EXECUTABLE); | |
a764d660 RB |
4292 | |
4293 | /* If the block is not reachable all outgoing edges are not | |
4294 | executable. */ | |
4295 | if (!reachable) | |
4296 | { | |
4297 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4298 | fprintf (dump_file, "Marking all outgoing edges of unreachable " | |
4299 | "BB %d as not executable\n", bb->index); | |
4300 | ||
4301 | FOR_EACH_EDGE (e, ei, bb->succs) | |
4302 | e->flags &= ~EDGE_EXECUTABLE; | |
4303 | return; | |
4304 | } | |
4305 | ||
4306 | gimple stmt = last_stmt (bb); | |
4307 | if (!stmt) | |
4308 | return; | |
4309 | ||
b2b222b3 RB |
4310 | enum gimple_code code = gimple_code (stmt); |
4311 | if (code != GIMPLE_COND | |
4312 | && code != GIMPLE_SWITCH | |
4313 | && code != GIMPLE_GOTO) | |
4314 | return; | |
4315 | ||
4316 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4317 | { | |
4318 | fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ", | |
4319 | bb->index); | |
4320 | print_gimple_stmt (dump_file, stmt, 0, 0); | |
4321 | } | |
4322 | ||
a764d660 RB |
4323 | /* Value-number the last stmts SSA uses. */ |
4324 | ssa_op_iter i; | |
4325 | tree op; | |
4326 | FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE) | |
4327 | if (VN_INFO (op)->visited == false | |
4328 | && !DFS (op)) | |
4329 | { | |
4330 | fail = true; | |
4331 | return; | |
4332 | } | |
4333 | ||
4334 | /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges | |
4335 | if value-numbering can prove they are not reachable. Handling | |
4336 | computed gotos is also possible. */ | |
4337 | tree val; | |
b2b222b3 | 4338 | switch (code) |
a764d660 RB |
4339 | { |
4340 | case GIMPLE_COND: | |
4341 | { | |
4342 | tree lhs = gimple_cond_lhs (stmt); | |
4343 | tree rhs = gimple_cond_rhs (stmt); | |
4344 | /* Work hard in computing the condition and take into account | |
4345 | the valueization of the defining stmt. */ | |
4346 | if (TREE_CODE (lhs) == SSA_NAME) | |
4347 | lhs = vn_get_expr_for (lhs); | |
4348 | if (TREE_CODE (rhs) == SSA_NAME) | |
4349 | rhs = vn_get_expr_for (rhs); | |
4350 | val = fold_binary (gimple_cond_code (stmt), | |
4351 | boolean_type_node, lhs, rhs); | |
4352 | break; | |
4353 | } | |
4354 | case GIMPLE_SWITCH: | |
538dd0b7 | 4355 | val = gimple_switch_index (as_a <gswitch *> (stmt)); |
a764d660 RB |
4356 | break; |
4357 | case GIMPLE_GOTO: | |
4358 | val = gimple_goto_dest (stmt); | |
4359 | break; | |
4360 | default: | |
b2b222b3 | 4361 | gcc_unreachable (); |
a764d660 RB |
4362 | } |
4363 | if (!val) | |
4364 | return; | |
4365 | ||
4366 | edge taken = find_taken_edge (bb, vn_valueize (val)); | |
4367 | if (!taken) | |
4368 | return; | |
4369 | ||
4370 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4371 | fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as " | |
4372 | "not executable\n", bb->index, bb->index, taken->dest->index); | |
4373 | ||
4374 | FOR_EACH_EDGE (e, ei, bb->succs) | |
4375 | if (e != taken) | |
4376 | e->flags &= ~EDGE_EXECUTABLE; | |
4377 | } | |
4378 | ||
863d2a57 | 4379 | /* Do SCCVN. Returns true if it finished, false if we bailed out |
1ec87690 RG |
4380 | due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies |
4381 | how we use the alias oracle walking during the VN process. */ | |
863d2a57 RG |
4382 | |
4383 | bool | |
1ec87690 | 4384 | run_scc_vn (vn_lookup_kind default_vn_walk_kind_) |
89fb70a3 | 4385 | { |
a764d660 | 4386 | basic_block bb; |
89fb70a3 DB |
4387 | size_t i; |
4388 | tree param; | |
b8698a0f | 4389 | |
1ec87690 RG |
4390 | default_vn_walk_kind = default_vn_walk_kind_; |
4391 | ||
89fb70a3 DB |
4392 | init_scc_vn (); |
4393 | current_info = valid_info; | |
4394 | ||
4395 | for (param = DECL_ARGUMENTS (current_function_decl); | |
4396 | param; | |
910ad8de | 4397 | param = DECL_CHAIN (param)) |
89fb70a3 | 4398 | { |
32244553 RG |
4399 | tree def = ssa_default_def (cfun, param); |
4400 | if (def) | |
b2b222b3 RB |
4401 | { |
4402 | VN_INFO (def)->visited = true; | |
4403 | VN_INFO (def)->valnum = def; | |
4404 | } | |
89fb70a3 DB |
4405 | } |
4406 | ||
a764d660 RB |
4407 | /* Mark all edges as possibly executable. */ |
4408 | FOR_ALL_BB_FN (bb, cfun) | |
4409 | { | |
4410 | edge_iterator ei; | |
4411 | edge e; | |
4412 | FOR_EACH_EDGE (e, ei, bb->succs) | |
4413 | e->flags |= EDGE_EXECUTABLE; | |
4414 | } | |
4415 | ||
4416 | /* Walk all blocks in dominator order, value-numbering the last stmts | |
4417 | SSA uses and decide whether outgoing edges are not executable. */ | |
4418 | cond_dom_walker walker; | |
4419 | walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun)); | |
4420 | if (walker.fail) | |
4421 | { | |
4422 | free_scc_vn (); | |
4423 | return false; | |
4424 | } | |
4425 | ||
4426 | /* Value-number remaining SSA names. */ | |
3d45dd59 | 4427 | for (i = 1; i < num_ssa_names; ++i) |
89fb70a3 DB |
4428 | { |
4429 | tree name = ssa_name (i); | |
4430 | if (name | |
4431 | && VN_INFO (name)->visited == false | |
4432 | && !has_zero_uses (name)) | |
863d2a57 RG |
4433 | if (!DFS (name)) |
4434 | { | |
4435 | free_scc_vn (); | |
4436 | return false; | |
4437 | } | |
89fb70a3 DB |
4438 | } |
4439 | ||
c9145754 | 4440 | /* Initialize the value ids. */ |
b8698a0f | 4441 | |
c9145754 DB |
4442 | for (i = 1; i < num_ssa_names; ++i) |
4443 | { | |
4444 | tree name = ssa_name (i); | |
4445 | vn_ssa_aux_t info; | |
4446 | if (!name) | |
4447 | continue; | |
4448 | info = VN_INFO (name); | |
f116fecf RG |
4449 | if (info->valnum == name |
4450 | || info->valnum == VN_TOP) | |
c9145754 DB |
4451 | info->value_id = get_next_value_id (); |
4452 | else if (is_gimple_min_invariant (info->valnum)) | |
4453 | info->value_id = get_or_alloc_constant_value_id (info->valnum); | |
4454 | } | |
b8698a0f | 4455 | |
bb35348a RB |
4456 | /* Propagate. */ |
4457 | for (i = 1; i < num_ssa_names; ++i) | |
c9145754 | 4458 | { |
bb35348a RB |
4459 | tree name = ssa_name (i); |
4460 | vn_ssa_aux_t info; | |
4461 | if (!name) | |
4462 | continue; | |
4463 | info = VN_INFO (name); | |
4464 | if (TREE_CODE (info->valnum) == SSA_NAME | |
4465 | && info->valnum != name | |
4466 | && info->value_id != VN_INFO (info->valnum)->value_id) | |
4467 | info->value_id = VN_INFO (info->valnum)->value_id; | |
c9145754 | 4468 | } |
b8698a0f | 4469 | |
c9145754 | 4470 | set_hashtable_value_ids (); |
b8698a0f | 4471 | |
89fb70a3 DB |
4472 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4473 | { | |
4474 | fprintf (dump_file, "Value numbers:\n"); | |
4475 | for (i = 0; i < num_ssa_names; i++) | |
4476 | { | |
4477 | tree name = ssa_name (i); | |
caf55296 RG |
4478 | if (name |
4479 | && VN_INFO (name)->visited | |
4480 | && SSA_VAL (name) != name) | |
89fb70a3 DB |
4481 | { |
4482 | print_generic_expr (dump_file, name, 0); | |
4483 | fprintf (dump_file, " = "); | |
caf55296 | 4484 | print_generic_expr (dump_file, SSA_VAL (name), 0); |
89fb70a3 DB |
4485 | fprintf (dump_file, "\n"); |
4486 | } | |
4487 | } | |
4488 | } | |
863d2a57 RG |
4489 | |
4490 | return true; | |
89fb70a3 | 4491 | } |
c9145754 DB |
4492 | |
4493 | /* Return the maximum value id we have ever seen. */ | |
4494 | ||
4495 | unsigned int | |
b8698a0f | 4496 | get_max_value_id (void) |
c9145754 DB |
4497 | { |
4498 | return next_value_id; | |
4499 | } | |
4500 | ||
4501 | /* Return the next unique value id. */ | |
4502 | ||
4503 | unsigned int | |
4504 | get_next_value_id (void) | |
4505 | { | |
4506 | return next_value_id++; | |
4507 | } | |
4508 | ||
4509 | ||
330e765e | 4510 | /* Compare two expressions E1 and E2 and return true if they are equal. */ |
c9145754 DB |
4511 | |
4512 | bool | |
4513 | expressions_equal_p (tree e1, tree e2) | |
4514 | { | |
330e765e | 4515 | /* The obvious case. */ |
c9145754 DB |
4516 | if (e1 == e2) |
4517 | return true; | |
4518 | ||
330e765e EB |
4519 | /* If only one of them is null, they cannot be equal. */ |
4520 | if (!e1 || !e2) | |
4521 | return false; | |
4522 | ||
330e765e EB |
4523 | /* Now perform the actual comparison. */ |
4524 | if (TREE_CODE (e1) == TREE_CODE (e2) | |
4525 | && operand_equal_p (e1, e2, OEP_PURE_SAME)) | |
c9145754 DB |
4526 | return true; |
4527 | ||
4528 | return false; | |
4529 | } | |
4530 | ||
890065bf RG |
4531 | |
4532 | /* Return true if the nary operation NARY may trap. This is a copy | |
4533 | of stmt_could_throw_1_p adjusted to the SCCVN IL. */ | |
4534 | ||
4535 | bool | |
4536 | vn_nary_may_trap (vn_nary_op_t nary) | |
4537 | { | |
4538 | tree type; | |
6141b7db | 4539 | tree rhs2 = NULL_TREE; |
890065bf RG |
4540 | bool honor_nans = false; |
4541 | bool honor_snans = false; | |
4542 | bool fp_operation = false; | |
4543 | bool honor_trapv = false; | |
4544 | bool handled, ret; | |
4545 | unsigned i; | |
4546 | ||
4547 | if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison | |
4548 | || TREE_CODE_CLASS (nary->opcode) == tcc_unary | |
4549 | || TREE_CODE_CLASS (nary->opcode) == tcc_binary) | |
4550 | { | |
4551 | type = nary->type; | |
4552 | fp_operation = FLOAT_TYPE_P (type); | |
4553 | if (fp_operation) | |
4554 | { | |
4555 | honor_nans = flag_trapping_math && !flag_finite_math_only; | |
4556 | honor_snans = flag_signaling_nans != 0; | |
4557 | } | |
4558 | else if (INTEGRAL_TYPE_P (type) | |
4559 | && TYPE_OVERFLOW_TRAPS (type)) | |
4560 | honor_trapv = true; | |
4561 | } | |
6141b7db RG |
4562 | if (nary->length >= 2) |
4563 | rhs2 = nary->op[1]; | |
890065bf RG |
4564 | ret = operation_could_trap_helper_p (nary->opcode, fp_operation, |
4565 | honor_trapv, | |
4566 | honor_nans, honor_snans, rhs2, | |
4567 | &handled); | |
4568 | if (handled | |
4569 | && ret) | |
4570 | return true; | |
4571 | ||
4572 | for (i = 0; i < nary->length; ++i) | |
4573 | if (tree_could_trap_p (nary->op[i])) | |
4574 | return true; | |
4575 | ||
4576 | return false; | |
4577 | } |