]>
Commit | Line | Data |
---|---|---|
e53b6e56 | 1 | /* Preamble and helpers for the autogenerated gimple-match.cc file. |
7adcbafe | 2 | Copyright (C) 2014-2022 Free Software Foundation, Inc. |
3d2cf79f RB |
3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify it under | |
7 | the terms of the GNU General Public License as published by the Free | |
8 | Software Foundation; either version 3, or (at your option) any later | |
9 | version. | |
10 | ||
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GCC; see the file COPYING3. If not see | |
18 | <http://www.gnu.org/licenses/>. */ | |
19 | ||
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
c7131fb2 | 23 | #include "backend.h" |
957060b5 AM |
24 | #include "target.h" |
25 | #include "rtl.h" | |
c7131fb2 AM |
26 | #include "tree.h" |
27 | #include "gimple.h" | |
c7131fb2 | 28 | #include "ssa.h" |
957060b5 | 29 | #include "cgraph.h" |
ebd733a7 | 30 | #include "vec-perm-indices.h" |
40e23961 | 31 | #include "fold-const.h" |
c9e926ce | 32 | #include "fold-const-call.h" |
3d2cf79f | 33 | #include "stor-layout.h" |
ba206889 | 34 | #include "gimple-iterator.h" |
3d2cf79f | 35 | #include "gimple-fold.h" |
36566b39 | 36 | #include "calls.h" |
3d2cf79f RB |
37 | #include "tree-dfa.h" |
38 | #include "builtins.h" | |
3d2cf79f | 39 | #include "gimple-match.h" |
53f3cd25 | 40 | #include "tree-pass.h" |
c9e926ce RS |
41 | #include "internal-fn.h" |
42 | #include "case-cfn-macros.h" | |
a3ca1bc5 | 43 | #include "gimplify.h" |
71f82be9 | 44 | #include "optabs-tree.h" |
6a86928d | 45 | #include "tree-eh.h" |
d398999d | 46 | #include "dbgcnt.h" |
75f89001 | 47 | #include "tm.h" |
45f4e2b0 | 48 | #include "gimple-range.h" |
3d2cf79f RB |
49 | |
50 | /* Forward declarations of the private auto-generated matchers. | |
51 | They expect valueized operands in canonical order and do not | |
52 | perform simplification of all-constant operands. */ | |
5d75ad95 | 53 | static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
3d2cf79f | 54 | code_helper, tree, tree); |
5d75ad95 | 55 | static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
3d2cf79f | 56 | code_helper, tree, tree, tree); |
5d75ad95 | 57 | static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
3d2cf79f | 58 | code_helper, tree, tree, tree, tree); |
0d2b3bca RS |
59 | static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
60 | code_helper, tree, tree, tree, tree, tree); | |
b41d1f6e RS |
61 | static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
62 | code_helper, tree, tree, tree, tree, tree, tree); | |
b7fe158a RB |
63 | static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *, |
64 | tree (*)(tree)); | |
65 | static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *, | |
66 | tree (*)(tree)); | |
67 | static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *, | |
68 | tree (*)(tree)); | |
69 | static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *, | |
70 | tree (*)(tree)); | |
71 | static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *, | |
72 | tree (*)(tree)); | |
3d2cf79f | 73 | |
5d75ad95 | 74 | const unsigned int gimple_match_op::MAX_NUM_OPS; |
3d2cf79f RB |
75 | |
76 | /* Return whether T is a constant that we'll dispatch to fold to | |
77 | evaluate fully constant expressions. */ | |
78 | ||
79 | static inline bool | |
80 | constant_for_folding (tree t) | |
81 | { | |
82 | return (CONSTANT_CLASS_P (t) | |
83 | /* The following is only interesting to string builtins. */ | |
84 | || (TREE_CODE (t) == ADDR_EXPR | |
85 | && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)); | |
86 | } | |
87 | ||
6a86928d RS |
/* Try to convert conditional operation ORIG_OP into an IFN_COND_*
   operation.  Return true on success, storing the new operation in NEW_OP.
   The new call takes the form

     IFN_COND_<OP> (COND, OP0, ..., OPn, ELSE)

   i.e. the condition first, then the original operands, then the
   else value.  */

static bool
convert_conditional_op (gimple_match_op *orig_op,
			gimple_match_op *new_op)
{
  internal_fn ifn;
  if (orig_op->code.is_tree_code ())
    ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
  else
    {
      /* Only internal functions have conditional forms; plain builtins
	 cannot be converted.  */
      auto cfn = combined_fn (orig_op->code);
      if (!internal_fn_p (cfn))
	return false;
      ifn = get_conditional_internal_fn (as_internal_fn (cfn));
    }
  if (ifn == IFN_LAST)
    return false;
  unsigned int num_ops = orig_op->num_ops;
  /* +2 for the condition (slot 0) and the else value (last slot).  */
  new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
  new_op->ops[0] = orig_op->cond.cond;
  for (unsigned int i = 0; i < num_ops; ++i)
    new_op->ops[i + 1] = orig_op->ops[i];
  tree else_value = orig_op->cond.else_value;
  if (!else_value)
    /* No explicit else value: let the target pick its preferred one.  */
    else_value = targetm.preferred_else_value (ifn, orig_op->type,
					       num_ops, orig_op->ops);
  new_op->ops[num_ops + 1] = else_value;
  return true;
}
119 | ||
/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
				 tree (*valueize) (tree))
{
  /* Nothing to do for unconditional results.  */
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
	 gimple value, just use it unconditionally.  This isn't a
	 simplification in itself, since there was no operation to
	 build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
			  && TYPE_OVERFLOW_TRAPS (res_op->type));
      tree_code op_code = (tree_code) res_op->code;
      bool op_could_trap;

      /* COND_EXPR will trap if, and only if, the condition
	 traps and hence we have to check this.  For all other operations, we
	 don't need to consider the operands.  */
      if (op_code == COND_EXPR)
	op_could_trap = generic_expr_could_trap_p (res_op->ops[0]);
      else
	op_could_trap = operation_could_trap_p ((tree_code) res_op->code,
						FLOAT_TYPE_P (res_op->type),
						honor_trapv,
						res_op->op_or_null (1));

      if (!op_could_trap)
	{
	  /* Safe to execute unconditionally: drop the condition.  */
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      new_op.set_op (VEC_COND_EXPR, res_op->type,
		     res_op->cond.cond, res_op->ops[0],
		     res_op->cond.else_value);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}
3d2cf79f RB |
194 | |
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  Single-operand variant.  */

static bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of the single operand.  */
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 1)
	    tem = const_unop (code, res_op->type, res_op->ops[0]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.
     NOTE: function-local static counter; assumes single-threaded
     compilation (GCC's usual model) — verify before threading.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  /* Run the generated matchers on a copy so a failed attempt leaves
     RES_OP untouched.  */
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}
259 | ||
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  Two-operand variant; also
   canonicalizes commutative/comparison operand order.  */

static bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of both operands.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 2)
	    tem = const_binop (code, res_op->type,
			       res_op->ops[0], res_op->ops[1]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order; comparisons additionally need their
     code swapped to preserve semantics.  */
  bool canonicalized = false;
  bool is_comparison
    = (res_op->code.is_tree_code ()
       && TREE_CODE_CLASS (tree_code (res_op->code)) == tcc_comparison);
  if ((is_comparison || commutative_binary_op_p (res_op->code, res_op->type))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      if (is_comparison)
	res_op->code = swap_tree_comparison (tree_code (res_op->code));
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  /* Even without a match, an operand swap counts as a change.  */
  return canonicalized;
}
337 | ||
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  Three-operand variant.  */

static bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of all three operands.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 3)
	    tem = fold_ternary/*_to_constant*/ (code, res_op->type,
						res_op->ops[0], res_op->ops[1],
						res_op->ops[2]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order of the first commutative operand pair,
     if any.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
413 | ||
0d2b3bca RS |
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  Four-operand variant.  */

static bool
gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* No constant folding is defined for four-operand functions.  */

  /* Canonicalize operand order of the first commutative operand pair,
     if any.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2],
		       res_op->ops[3]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
3d2cf79f | 464 | |
b41d1f6e RS |
465 | /* Helper that matches and simplifies the toplevel result from |
466 | a gimple_simplify run (where we don't want to build | |
467 | a stmt in case it's used in in-place folding). Replaces | |
468 | RES_OP with a simplified and/or canonicalized result and | |
469 | returns whether any change was made. */ | |
470 | ||
b7fe158a | 471 | static bool |
b41d1f6e RS |
472 | gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op, |
473 | tree (*valueize)(tree)) | |
474 | { | |
475 | /* No constant folding is defined for five-operand functions. */ | |
476 | ||
e9fff24c RS |
477 | /* Canonicalize operand order. */ |
478 | bool canonicalized = false; | |
479 | int argno = first_commutative_argument (res_op->code, res_op->type); | |
480 | if (argno >= 0 | |
481 | && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1])) | |
482 | { | |
483 | std::swap (res_op->ops[argno], res_op->ops[argno + 1]); | |
484 | canonicalized = true; | |
485 | } | |
486 | ||
b41d1f6e RS |
487 | gimple_match_op res_op2 (*res_op); |
488 | if (gimple_simplify (&res_op2, seq, valueize, | |
489 | res_op->code, res_op->type, | |
490 | res_op->ops[0], res_op->ops[1], res_op->ops[2], | |
491 | res_op->ops[3], res_op->ops[4])) | |
492 | { | |
493 | *res_op = res_op2; | |
494 | return true; | |
495 | } | |
496 | ||
497 | if (maybe_resimplify_conditional_op (seq, res_op, valueize)) | |
498 | return true; | |
499 | ||
e9fff24c | 500 | return canonicalized; |
b41d1f6e RS |
501 | } |
502 | ||
b7fe158a RB |
503 | /* Match and simplify the toplevel valueized operation THIS. |
504 | Replaces THIS with a simplified and/or canonicalized result and | |
505 | returns whether any change was made. */ | |
506 | ||
507 | bool | |
508 | gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree)) | |
509 | { | |
510 | switch (num_ops) | |
511 | { | |
512 | case 1: | |
513 | return gimple_resimplify1 (seq, this, valueize); | |
514 | case 2: | |
515 | return gimple_resimplify2 (seq, this, valueize); | |
516 | case 3: | |
517 | return gimple_resimplify3 (seq, this, valueize); | |
518 | case 4: | |
519 | return gimple_resimplify4 (seq, this, valueize); | |
520 | case 5: | |
521 | return gimple_resimplify5 (seq, this, valueize); | |
522 | default: | |
523 | gcc_unreachable (); | |
524 | } | |
525 | } | |
526 | ||
5d75ad95 RS |
527 | /* If in GIMPLE the operation described by RES_OP should be single-rhs, |
528 | build a GENERIC tree for that expression and update RES_OP accordingly. */ | |
3d2cf79f RB |
529 | |
530 | void | |
5d75ad95 | 531 | maybe_build_generic_op (gimple_match_op *res_op) |
3d2cf79f | 532 | { |
5d75ad95 | 533 | tree_code code = (tree_code) res_op->code; |
de310193 | 534 | tree val; |
3d2cf79f RB |
535 | switch (code) |
536 | { | |
537 | case REALPART_EXPR: | |
538 | case IMAGPART_EXPR: | |
539 | case VIEW_CONVERT_EXPR: | |
de310193 EB |
540 | val = build1 (code, res_op->type, res_op->ops[0]); |
541 | res_op->set_value (val); | |
3d2cf79f RB |
542 | break; |
543 | case BIT_FIELD_REF: | |
de310193 EB |
544 | val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1], |
545 | res_op->ops[2]); | |
546 | REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse; | |
547 | res_op->set_value (val); | |
3d2cf79f RB |
548 | break; |
549 | default:; | |
550 | } | |
551 | } | |
552 | ||
/* Hook for passes (e.g. SCCVN) to intercept result generation; when set,
   it is given the chance to produce the result tree itself.  */
tree (*mprts_hook) (gimple_match_op *);

/* Try to build RES_OP, which is known to be a call to FN.  Return null
   if the target doesn't support the function.  */

static gcall *
build_call_internal (internal_fn fn, gimple_match_op *res_op)
{
  /* Directly-mapped internal fns must be supported by the target for
     the deduced types; otherwise we cannot emit the call.  */
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, res_op->type,
						  res_op->ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
	return NULL;
    }
  /* op_or_null pads unused argument slots with NULL_TREE.  */
  return gimple_build_call_internal (fn, res_op->num_ops,
				     res_op->op_or_null (0),
				     res_op->op_or_null (1),
				     res_op->op_or_null (2),
				     res_op->op_or_null (3),
				     res_op->op_or_null (4));
}
575 | ||
5d75ad95 RS |
576 | /* Push the exploded expression described by RES_OP as a statement to |
577 | SEQ if necessary and return a gimple value denoting the value of the | |
578 | expression. If RES is not NULL then the result will be always RES | |
579 | and even gimple values are pushed to SEQ. */ | |
3d2cf79f RB |
580 | |
581 | tree | |
5d75ad95 | 582 | maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res) |
3d2cf79f | 583 | { |
5d75ad95 RS |
584 | tree *ops = res_op->ops; |
585 | unsigned num_ops = res_op->num_ops; | |
586 | ||
6a86928d RS |
587 | /* The caller should have converted conditional operations into an UNCOND |
588 | form and resimplified as appropriate. The conditional form only | |
589 | survives this far if that conversion failed. */ | |
590 | if (res_op->cond.cond) | |
591 | return NULL_TREE; | |
592 | ||
5d75ad95 | 593 | if (res_op->code.is_tree_code ()) |
3d2cf79f RB |
594 | { |
595 | if (!res | |
5d75ad95 | 596 | && gimple_simplified_result_is_gimple_val (res_op)) |
3d2cf79f | 597 | return ops[0]; |
34050b6b RB |
598 | if (mprts_hook) |
599 | { | |
5d75ad95 | 600 | tree tem = mprts_hook (res_op); |
34050b6b RB |
601 | if (tem) |
602 | return tem; | |
603 | } | |
5d75ad95 RS |
604 | } |
605 | ||
606 | if (!seq) | |
607 | return NULL_TREE; | |
608 | ||
609 | /* Play safe and do not allow abnormals to be mentioned in | |
610 | newly created statements. */ | |
611 | for (unsigned int i = 0; i < num_ops; ++i) | |
612 | if (TREE_CODE (ops[i]) == SSA_NAME | |
613 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])) | |
614 | return NULL_TREE; | |
615 | ||
616 | if (num_ops > 0 && COMPARISON_CLASS_P (ops[0])) | |
617 | for (unsigned int i = 0; i < 2; ++i) | |
618 | if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME | |
619 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))) | |
3d2cf79f | 620 | return NULL_TREE; |
5d75ad95 RS |
621 | |
622 | if (res_op->code.is_tree_code ()) | |
623 | { | |
0c1fb64d | 624 | auto code = tree_code (res_op->code); |
3d2cf79f | 625 | if (!res) |
4aecfe19 RS |
626 | { |
627 | if (gimple_in_ssa_p (cfun)) | |
5d75ad95 | 628 | res = make_ssa_name (res_op->type); |
4aecfe19 | 629 | else |
5d75ad95 | 630 | res = create_tmp_reg (res_op->type); |
4aecfe19 | 631 | } |
5d75ad95 | 632 | maybe_build_generic_op (res_op); |
0c1fb64d | 633 | gimple *new_stmt = gimple_build_assign (res, code, |
5d75ad95 RS |
634 | res_op->op_or_null (0), |
635 | res_op->op_or_null (1), | |
636 | res_op->op_or_null (2)); | |
3d2cf79f RB |
637 | gimple_seq_add_stmt_without_update (seq, new_stmt); |
638 | return res; | |
639 | } | |
640 | else | |
641 | { | |
5d75ad95 | 642 | gcc_assert (num_ops != 0); |
0c1fb64d | 643 | auto fn = combined_fn (res_op->code); |
c9e926ce RS |
644 | gcall *new_stmt = NULL; |
645 | if (internal_fn_p (fn)) | |
646 | { | |
647 | /* Generate the given function if we can. */ | |
648 | internal_fn ifn = as_internal_fn (fn); | |
5d75ad95 | 649 | new_stmt = build_call_internal (ifn, res_op); |
c9e926ce RS |
650 | if (!new_stmt) |
651 | return NULL_TREE; | |
652 | } | |
653 | else | |
654 | { | |
655 | /* Find the function we want to call. */ | |
656 | tree decl = builtin_decl_implicit (as_builtin_fn (fn)); | |
657 | if (!decl) | |
658 | return NULL; | |
659 | ||
660 | /* We can't and should not emit calls to non-const functions. */ | |
661 | if (!(flags_from_decl_or_type (decl) & ECF_CONST)) | |
662 | return NULL; | |
663 | ||
5d75ad95 RS |
664 | new_stmt = gimple_build_call (decl, num_ops, |
665 | res_op->op_or_null (0), | |
666 | res_op->op_or_null (1), | |
0d2b3bca | 667 | res_op->op_or_null (2), |
b41d1f6e RS |
668 | res_op->op_or_null (3), |
669 | res_op->op_or_null (4)); | |
c9e926ce | 670 | } |
3d2cf79f | 671 | if (!res) |
4aecfe19 RS |
672 | { |
673 | if (gimple_in_ssa_p (cfun)) | |
5d75ad95 | 674 | res = make_ssa_name (res_op->type); |
4aecfe19 | 675 | else |
5d75ad95 | 676 | res = create_tmp_reg (res_op->type); |
4aecfe19 | 677 | } |
3d2cf79f RB |
678 | gimple_call_set_lhs (new_stmt, res); |
679 | gimple_seq_add_stmt_without_update (seq, new_stmt); | |
680 | return res; | |
681 | } | |
682 | } | |
683 | ||
684 | ||
685 | /* Public API overloads follow for operation being tree_code or | |
686 | built_in_function and for one to three operands or arguments. | |
687 | They return NULL_TREE if nothing could be simplified or | |
688 | the resulting simplified value with parts pushed to SEQ. | |
689 | If SEQ is NULL then if the simplification needs to create | |
690 | new stmts it will fail. If VALUEIZE is non-NULL then all | |
691 | SSA names will be valueized using that hook prior to | |
692 | applying simplifications. */ | |
693 | ||
694 | /* Unary ops. */ | |
695 | ||
696 | tree | |
697 | gimple_simplify (enum tree_code code, tree type, | |
698 | tree op0, | |
699 | gimple_seq *seq, tree (*valueize)(tree)) | |
700 | { | |
701 | if (constant_for_folding (op0)) | |
702 | { | |
8006f46b | 703 | tree res = const_unop (code, type, op0); |
3d2cf79f RB |
704 | if (res != NULL_TREE |
705 | && CONSTANT_CLASS_P (res)) | |
706 | return res; | |
707 | } | |
708 | ||
5d75ad95 RS |
709 | gimple_match_op res_op; |
710 | if (!gimple_simplify (&res_op, seq, valueize, code, type, op0)) | |
3d2cf79f | 711 | return NULL_TREE; |
5d75ad95 | 712 | return maybe_push_res_to_seq (&res_op, seq); |
3d2cf79f RB |
713 | } |
714 | ||
715 | /* Binary ops. */ | |
716 | ||
717 | tree | |
718 | gimple_simplify (enum tree_code code, tree type, | |
719 | tree op0, tree op1, | |
720 | gimple_seq *seq, tree (*valueize)(tree)) | |
721 | { | |
722 | if (constant_for_folding (op0) && constant_for_folding (op1)) | |
723 | { | |
8006f46b | 724 | tree res = const_binop (code, type, op0, op1); |
3d2cf79f RB |
725 | if (res != NULL_TREE |
726 | && CONSTANT_CLASS_P (res)) | |
727 | return res; | |
728 | } | |
729 | ||
730 | /* Canonicalize operand order both for matching and fallback stmt | |
731 | generation. */ | |
732 | if ((commutative_tree_code (code) | |
733 | || TREE_CODE_CLASS (code) == tcc_comparison) | |
14e72812 | 734 | && tree_swap_operands_p (op0, op1)) |
3d2cf79f | 735 | { |
6b4db501 | 736 | std::swap (op0, op1); |
3d2cf79f RB |
737 | if (TREE_CODE_CLASS (code) == tcc_comparison) |
738 | code = swap_tree_comparison (code); | |
739 | } | |
740 | ||
5d75ad95 RS |
741 | gimple_match_op res_op; |
742 | if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1)) | |
3d2cf79f | 743 | return NULL_TREE; |
5d75ad95 | 744 | return maybe_push_res_to_seq (&res_op, seq); |
3d2cf79f RB |
745 | } |
746 | ||
747 | /* Ternary ops. */ | |
748 | ||
749 | tree | |
750 | gimple_simplify (enum tree_code code, tree type, | |
751 | tree op0, tree op1, tree op2, | |
752 | gimple_seq *seq, tree (*valueize)(tree)) | |
753 | { | |
754 | if (constant_for_folding (op0) && constant_for_folding (op1) | |
755 | && constant_for_folding (op2)) | |
756 | { | |
757 | tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2); | |
758 | if (res != NULL_TREE | |
759 | && CONSTANT_CLASS_P (res)) | |
760 | return res; | |
761 | } | |
762 | ||
763 | /* Canonicalize operand order both for matching and fallback stmt | |
764 | generation. */ | |
765 | if (commutative_ternary_tree_code (code) | |
14e72812 | 766 | && tree_swap_operands_p (op0, op1)) |
6b4db501 | 767 | std::swap (op0, op1); |
3d2cf79f | 768 | |
5d75ad95 RS |
769 | gimple_match_op res_op; |
770 | if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2)) | |
3d2cf79f | 771 | return NULL_TREE; |
5d75ad95 | 772 | return maybe_push_res_to_seq (&res_op, seq); |
3d2cf79f RB |
773 | } |
774 | ||
eb69361d | 775 | /* Builtin or internal function with one argument. */ |
3d2cf79f RB |
776 | |
777 | tree | |
eb69361d | 778 | gimple_simplify (combined_fn fn, tree type, |
3d2cf79f RB |
779 | tree arg0, |
780 | gimple_seq *seq, tree (*valueize)(tree)) | |
781 | { | |
782 | if (constant_for_folding (arg0)) | |
783 | { | |
eb69361d | 784 | tree res = fold_const_call (fn, type, arg0); |
c9e926ce RS |
785 | if (res && CONSTANT_CLASS_P (res)) |
786 | return res; | |
3d2cf79f RB |
787 | } |
788 | ||
5d75ad95 RS |
789 | gimple_match_op res_op; |
790 | if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0)) | |
3d2cf79f | 791 | return NULL_TREE; |
5d75ad95 | 792 | return maybe_push_res_to_seq (&res_op, seq); |
3d2cf79f RB |
793 | } |
794 | ||
eb69361d | 795 | /* Builtin or internal function with two arguments. */ |
3d2cf79f RB |
796 | |
797 | tree | |
eb69361d | 798 | gimple_simplify (combined_fn fn, tree type, |
3d2cf79f RB |
799 | tree arg0, tree arg1, |
800 | gimple_seq *seq, tree (*valueize)(tree)) | |
801 | { | |
802 | if (constant_for_folding (arg0) | |
803 | && constant_for_folding (arg1)) | |
804 | { | |
eb69361d | 805 | tree res = fold_const_call (fn, type, arg0, arg1); |
c9e926ce RS |
806 | if (res && CONSTANT_CLASS_P (res)) |
807 | return res; | |
3d2cf79f RB |
808 | } |
809 | ||
5d75ad95 RS |
810 | gimple_match_op res_op; |
811 | if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1)) | |
3d2cf79f | 812 | return NULL_TREE; |
5d75ad95 | 813 | return maybe_push_res_to_seq (&res_op, seq); |
3d2cf79f RB |
814 | } |
815 | ||
eb69361d | 816 | /* Builtin or internal function with three arguments. */ |
3d2cf79f RB |
817 | |
818 | tree | |
eb69361d | 819 | gimple_simplify (combined_fn fn, tree type, |
3d2cf79f RB |
820 | tree arg0, tree arg1, tree arg2, |
821 | gimple_seq *seq, tree (*valueize)(tree)) | |
822 | { | |
823 | if (constant_for_folding (arg0) | |
824 | && constant_for_folding (arg1) | |
825 | && constant_for_folding (arg2)) | |
826 | { | |
eb69361d | 827 | tree res = fold_const_call (fn, type, arg0, arg1, arg2); |
c9e926ce RS |
828 | if (res && CONSTANT_CLASS_P (res)) |
829 | return res; | |
3d2cf79f RB |
830 | } |
831 | ||
5d75ad95 RS |
832 | gimple_match_op res_op; |
833 | if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2)) | |
3d2cf79f | 834 | return NULL_TREE; |
5d75ad95 | 835 | return maybe_push_res_to_seq (&res_op, seq); |
3d2cf79f RB |
836 | } |
837 | ||
37d486ab RB |
838 | /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting |
839 | VALUEIZED to true if valueization changed OP. */ | |
840 | ||
841 | static inline tree | |
842 | do_valueize (tree op, tree (*valueize)(tree), bool &valueized) | |
843 | { | |
844 | if (valueize && TREE_CODE (op) == SSA_NAME) | |
845 | { | |
846 | tree tem = valueize (op); | |
847 | if (tem && tem != op) | |
848 | { | |
849 | op = tem; | |
850 | valueized = true; | |
851 | } | |
852 | } | |
853 | return op; | |
854 | } | |
3d2cf79f | 855 | |
6a86928d RS |
/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
				gimple_seq *seq, tree (*valueize) (tree))
{
  /* OP becomes the unconditional equivalent of IFN: either a tree code
     (e.g. PLUS_EXPR for IFN_COND_ADD) or an unconditional internal fn.  */
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
	/* No unconditional counterpart exists; nothing to try.  */
	return false;
      op = as_combined_fn (ifn);
    }

  /* Operand layout of the conditional call: ops[0] is the condition,
     ops[num_ops - 1] is the else value, and the num_ops - 2 operands in
     between belong to the unconditional operation.  */
  unsigned int num_ops = res_op->num_ops;
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
					      res_op->ops[num_ops - 1]),
			   op, res_op->type, num_ops - 2);

  /* Copy everything after the condition (including the trailing else
     value) into COND_OP's operand array in one go.  */
  memcpy (cond_op.ops, res_op->ops + 1, (num_ops - 1) * sizeof *cond_op.ops);
  /* Resimplify the unconditional operation with the arity it has.  */
  switch (num_ops - 2)
    {
    case 1:
      if (!gimple_resimplify1 (seq, &cond_op, valueize))
	return false;
      break;
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
	return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
	return false;
      break;
    default:
      gcc_unreachable ();
    }
  /* Commit the simplified form and let it be re-wrapped in a conditional
     operation where needed.  */
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}
915 | ||
33973fa7 RS |
/* Common subroutine of gimple_extract_op and gimple_simplify.  Try to
   describe STMT in RES_OP, returning true on success.  Before recording
   an operand, call:

   - VALUEIZE_CONDITION for a COND_EXPR condition
   - VALUEIZE_OP for every other top-level operand

   Both routines take a tree argument and returns a tree.  */

template<typename ValueizeOp, typename ValueizeCondition>
inline bool
gimple_extract (gimple *stmt, gimple_match_op *res_op,
		ValueizeOp valueize_op,
		ValueizeCondition valueize_condition)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
	switch (gimple_assign_rhs_class (stmt))
	  {
	  case GIMPLE_SINGLE_RHS:
	    /* A single RHS is only describable for a few codes whose
	       operands can be pulled out of the embedded tree.  */
	    if (code == REALPART_EXPR
		|| code == IMAGPART_EXPR
		|| code == VIEW_CONVERT_EXPR)
	      {
		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
		res_op->set_op (code, type, valueize_op (op0));
		return true;
	      }
	    else if (code == BIT_FIELD_REF)
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		tree op0 = valueize_op (TREE_OPERAND (rhs1, 0));
		/* Record size, position and the reverse-storage-order
		   flag alongside the valueized base.  */
		res_op->set_op (code, type, op0,
				TREE_OPERAND (rhs1, 1),
				TREE_OPERAND (rhs1, 2),
				REF_REVERSE_STORAGE_ORDER (rhs1));
		return true;
	      }
	    else if (code == SSA_NAME)
	      {
		tree op0 = gimple_assign_rhs1 (stmt);
		res_op->set_op (TREE_CODE (op0), type, valueize_op (op0));
		return true;
	      }
	    break;
	  case GIMPLE_UNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      res_op->set_op (code, type, valueize_op (rhs1));
	      return true;
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      tree rhs1 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
	      res_op->set_op (code, type, rhs1, rhs2);
	      return true;
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      /* An embedded comparison in a COND_EXPR condition gets the
		 dedicated valueizer; everything else the plain one.  */
	      if (code == COND_EXPR && COMPARISON_CLASS_P (rhs1))
		rhs1 = valueize_condition (rhs1);
	      else
		rhs1 = valueize_op (rhs1);
	      tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
	      tree rhs3 = valueize_op (gimple_assign_rhs3 (stmt));
	      res_op->set_op (code, type, rhs1, rhs2, rhs3);
	      return true;
	    }
	  default:
	    gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
	  && gimple_call_num_args (stmt) >= 1
	  && gimple_call_num_args (stmt) <= 5)
	{
	  combined_fn cfn;
	  if (gimple_call_internal_p (stmt))
	    cfn = as_combined_fn (gimple_call_internal_fn (stmt));
	  else
	    {
	      tree fn = gimple_call_fn (stmt);
	      if (!fn)
		return false;

	      /* Only direct calls to normal builtins with compatible
		 argument types are describable.  */
	      fn = valueize_op (fn);
	      if (TREE_CODE (fn) != ADDR_EXPR
		  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
		return false;

	      tree decl = TREE_OPERAND (fn, 0);
	      if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
		  || !gimple_builtin_call_types_compatible_p (stmt, decl))
		return false;

	      cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
	    }

	  unsigned int num_args = gimple_call_num_args (stmt);
	  res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
	  for (unsigned i = 0; i < num_args; ++i)
	    res_op->ops[i] = valueize_op (gimple_call_arg (stmt, i));
	  return true;
	}
      break;

    case GIMPLE_COND:
      {
	/* Describe the condition being tested as a boolean comparison.  */
	tree lhs = valueize_op (gimple_cond_lhs (stmt));
	tree rhs = valueize_op (gimple_cond_rhs (stmt));
	res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
	return true;
      }

    default:
      break;
    }

  return false;
}
1046 | ||
33973fa7 RS |
1047 | /* Try to describe STMT in RES_OP, returning true on success. |
1048 | For GIMPLE_CONDs, describe the condition that is being tested. | |
1049 | For GIMPLE_ASSIGNs, describe the rhs of the assignment. | |
1050 | For GIMPLE_CALLs, describe the call. */ | |
1051 | ||
1052 | bool | |
1053 | gimple_extract_op (gimple *stmt, gimple_match_op *res_op) | |
1054 | { | |
1055 | auto nop = [](tree op) { return op; }; | |
1056 | return gimple_extract (stmt, res_op, nop, nop); | |
1057 | } | |
1058 | ||
/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
		 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  /* Track whether TOP_VALUEIZE changed anything; if no simplification
     fires, this is the function's return value.  */
  bool valueized = false;
  auto valueize_op = [&](tree op)
    {
      return do_valueize (op, top_valueize, valueized);
    };
  /* For a COND_EXPR condition, valueize its two operands and try to
     resimplify the comparison itself, possibly collapsing it to an
     SSA name or constant.  */
  auto valueize_condition = [&](tree op) -> tree
    {
      bool cond_valueized = false;
      tree lhs = do_valueize (TREE_OPERAND (op, 0), top_valueize,
			      cond_valueized);
      tree rhs = do_valueize (TREE_OPERAND (op, 1), top_valueize,
			      cond_valueized);
      gimple_match_op res_op2 (res_op->cond, TREE_CODE (op),
			       TREE_TYPE (op), lhs, rhs);
      if ((gimple_resimplify2 (seq, &res_op2, valueize)
	   || cond_valueized)
	  && res_op2.code.is_tree_code ())
	{
	  auto code = tree_code (res_op2.code);
	  if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      /* Still a comparison: rebuild it from the (possibly
		 simplified) operands.  */
	      valueized = true;
	      return build2 (code, TREE_TYPE (op),
			     res_op2.ops[0], res_op2.ops[1]);
	    }
	  else if (code == SSA_NAME
		   || code == INTEGER_CST
		   || code == VECTOR_CST)
	    {
	      /* The comparison simplified to a single value.  */
	      valueized = true;
	      return res_op2.ops[0];
	    }
	}
      /* Fall back to plain valueization of the whole condition.  */
      return valueize_op (op);
    };

  if (!gimple_extract (stmt, res_op, valueize_op, valueize_condition))
    return false;

  /* Conditional internal functions get a dedicated simplification path.  */
  if (res_op->code.is_internal_fn ())
    {
      internal_fn ifn = internal_fn (res_op->code);
      if (try_conditional_simplification (ifn, res_op, seq, valueize))
	return true;
    }

  /* Reverse storage order operations are not resimplified here.  */
  if (!res_op->reverse
      && res_op->num_ops
      && res_op->resimplify (seq, valueize))
    return true;

  return valueized;
}
3d2cf79f RB |
1119 | |
1120 | /* Helper for the autogenerated code, valueize OP. */ | |
1121 | ||
1122 | inline tree | |
1123 | do_valueize (tree (*valueize)(tree), tree op) | |
1124 | { | |
1125 | if (valueize && TREE_CODE (op) == SSA_NAME) | |
4f450a2b RB |
1126 | { |
1127 | tree tem = valueize (op); | |
1128 | if (tem) | |
1129 | return tem; | |
1130 | } | |
3d2cf79f RB |
1131 | return op; |
1132 | } | |
1133 | ||
4f450a2b RB |
1134 | /* Helper for the autogenerated code, get at the definition of NAME when |
1135 | VALUEIZE allows that. */ | |
1136 | ||
1137 | inline gimple * | |
1138 | get_def (tree (*valueize)(tree), tree name) | |
1139 | { | |
1140 | if (valueize && ! valueize (name)) | |
1141 | return NULL; | |
1142 | return SSA_NAME_DEF_STMT (name); | |
1143 | } | |
1144 | ||
48451e8f | 1145 | /* Routine to determine if the types T1 and T2 are effectively |
aea417d7 MG |
1146 | the same for GIMPLE. If T1 or T2 is not a type, the test |
1147 | applies to their TREE_TYPE. */ | |
48451e8f JL |
1148 | |
1149 | static inline bool | |
1150 | types_match (tree t1, tree t2) | |
1151 | { | |
aea417d7 MG |
1152 | if (!TYPE_P (t1)) |
1153 | t1 = TREE_TYPE (t1); | |
1154 | if (!TYPE_P (t2)) | |
1155 | t2 = TREE_TYPE (t2); | |
1156 | ||
48451e8f JL |
1157 | return types_compatible_p (t1, t2); |
1158 | } | |
1159 | ||
/* Return if T has a single use.  For GIMPLE, we also allow any
   non-SSA_NAME (ie constants) and zero uses to cope with uses
   that aren't linked up yet.  */

static bool
single_use (const_tree) ATTRIBUTE_PURE;

static bool
single_use (const_tree t)
{
  /* Constants and other non-SSA operands trivially qualify.  */
  if (TREE_CODE (t) != SSA_NAME)
    return true;

  /* Inline return has_zero_uses (t) || has_single_use (t); */
  /* Walk the circular immediate-use list, counting non-debug uses;
     bail out as soon as a second one is seen.  */
  const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (t));
  const ssa_use_operand_t *ptr;
  bool single = false;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
      {
	if (single)
	  return false;
	single = true;
      }
  /* Zero or exactly one non-debug use.  */
  return true;
}
53f3cd25 RS |
1187 | |
1188 | /* Return true if math operations should be canonicalized, | |
1189 | e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */ | |
1190 | ||
1191 | static inline bool | |
1192 | canonicalize_math_p () | |
1193 | { | |
1194 | return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0; | |
1195 | } | |
848bb6fc JJ |
1196 | |
1197 | /* Return true if math operations that are beneficial only after | |
1198 | vectorization should be canonicalized. */ | |
1199 | ||
1200 | static inline bool | |
1201 | canonicalize_math_after_vectorization_p () | |
1202 | { | |
1203 | return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0; | |
1204 | } | |
30a2c10e | 1205 | |
a1ee6d50 MG |
1206 | /* Return true if we can still perform transformations that may introduce |
1207 | vector operations that are not supported by the target. Vector lowering | |
1208 | normally handles those, but after that pass, it becomes unsafe. */ | |
1209 | ||
1210 | static inline bool | |
1211 | optimize_vectors_before_lowering_p () | |
1212 | { | |
1213 | return !cfun || (cfun->curr_properties & PROP_gimple_lvec) == 0; | |
1214 | } | |
1215 | ||
30a2c10e JJ |
/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  /* Non-integer bases cannot hit the exactness problem; optimize.  */
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  /* Look for arg1 defined either directly by a PHI, or by
     phi_res +/- cst1 with phi_res defined by a PHI.  */
  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      if (!is_gimple_assign (def))
	return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  break;
	default:
	  return true;
	}
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
	return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
	return true;
    }

  /* All constant PHI arguments must agree on a single value cst2.  */
  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
	continue;
      else if (cst2 == NULL_TREE)
	cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
	return true;
    }

  /* Fold cst2 +/- cst1 when both were found.  */
  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  /* An exact-integer combined constant means pow is likely exact;
     refuse the transformation.  */
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
			 TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}
98610dc5 JJ |
1283 | |
1284 | /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV | |
1285 | is another division can be optimized. Don't optimize if INNER_DIV | |
1286 | is used in a TRUNC_MOD_EXPR with DIVISOR as second operand. */ | |
1287 | ||
1288 | static bool | |
1289 | optimize_successive_divisions_p (tree divisor, tree inner_div) | |
1290 | { | |
1291 | if (!gimple_in_ssa_p (cfun)) | |
1292 | return false; | |
1293 | ||
1294 | imm_use_iterator imm_iter; | |
1295 | use_operand_p use_p; | |
1296 | FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div) | |
1297 | { | |
1298 | gimple *use_stmt = USE_STMT (use_p); | |
1299 | if (!is_gimple_assign (use_stmt) | |
1300 | || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR | |
1301 | || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0)) | |
1302 | continue; | |
1303 | return false; | |
1304 | } | |
1305 | return true; | |
1306 | } | |
30213ae9 RS |
1307 | |
1308 | /* Return a canonical form for CODE when operating on TYPE. The idea | |
1309 | is to remove redundant ways of representing the same operation so | |
1310 | that code_helpers can be hashed and compared for equality. | |
1311 | ||
1312 | The only current canonicalization is to replace built-in functions | |
1313 | with internal functions, in cases where internal-fn.def defines | |
1314 | such an internal function. | |
1315 | ||
1316 | Note that the new code_helper cannot necessarily be used in place of | |
1317 | the original code_helper. For example, the new code_helper might be | |
1318 | an internal function that the target does not support. */ | |
1319 | ||
1320 | code_helper | |
1321 | canonicalize_code (code_helper code, tree type) | |
1322 | { | |
1323 | if (code.is_fn_code ()) | |
1324 | return associated_internal_fn (combined_fn (code), type); | |
1325 | return code; | |
1326 | } | |
1327 | ||
1328 | /* Return true if CODE is a binary operation and if CODE is commutative when | |
1329 | operating on type TYPE. */ | |
1330 | ||
1331 | bool | |
1332 | commutative_binary_op_p (code_helper code, tree type) | |
1333 | { | |
1334 | if (code.is_tree_code ()) | |
1335 | return commutative_tree_code (tree_code (code)); | |
1336 | auto cfn = combined_fn (code); | |
1337 | return commutative_binary_fn_p (associated_internal_fn (cfn, type)); | |
1338 | } | |
1339 | ||
1340 | /* Return true if CODE represents a ternary operation and if the first two | |
1341 | operands are commutative when CODE is operating on TYPE. */ | |
1342 | ||
1343 | bool | |
1344 | commutative_ternary_op_p (code_helper code, tree type) | |
1345 | { | |
1346 | if (code.is_tree_code ()) | |
1347 | return commutative_ternary_tree_code (tree_code (code)); | |
1348 | auto cfn = combined_fn (code); | |
1349 | return commutative_ternary_fn_p (associated_internal_fn (cfn, type)); | |
1350 | } | |
1351 | ||
1352 | /* If CODE is commutative in two consecutive operands, return the | |
1353 | index of the first, otherwise return -1. */ | |
1354 | ||
1355 | int | |
1356 | first_commutative_argument (code_helper code, tree type) | |
1357 | { | |
1358 | if (code.is_tree_code ()) | |
1359 | { | |
1360 | auto tcode = tree_code (code); | |
1361 | if (commutative_tree_code (tcode) | |
1362 | || commutative_ternary_tree_code (tcode)) | |
1363 | return 0; | |
1364 | return -1; | |
1365 | } | |
1366 | auto cfn = combined_fn (code); | |
1367 | return first_commutative_argument (associated_internal_fn (cfn, type)); | |
1368 | } | |
1369 | ||
1370 | /* Return true if CODE is a binary operation that is associative when | |
1371 | operating on type TYPE. */ | |
1372 | ||
1373 | bool | |
1374 | associative_binary_op_p (code_helper code, tree type) | |
1375 | { | |
1376 | if (code.is_tree_code ()) | |
1377 | return associative_tree_code (tree_code (code)); | |
1378 | auto cfn = combined_fn (code); | |
1379 | return associative_binary_fn_p (associated_internal_fn (cfn, type)); | |
1380 | } | |
1381 | ||
/* Return true if the target directly supports operation CODE on type TYPE.
   QUERY_TYPE acts as for optab_for_tree_code.  */

bool
directly_supported_p (code_helper code, tree type, optab_subtype query_type)
{
  /* Tree codes are answered through the optab machinery.  */
  if (code.is_tree_code ())
    {
      direct_optab optab = optab_for_tree_code (tree_code (code), type,
						query_type);
      return (optab != unknown_optab
	      && optab_handler (optab, TYPE_MODE (type)) != CODE_FOR_nothing);
    }
  /* For function codes, QUERY_TYPE must be consistent with TYPE's
     vectorness; internal fns take no separate subtype query.  */
  gcc_assert (query_type == optab_default
	      || (query_type == optab_vector && VECTOR_TYPE_P (type))
	      || (query_type == optab_scalar && !VECTOR_TYPE_P (type)));
  internal_fn ifn = associated_internal_fn (combined_fn (code), type);
  return (direct_internal_fn_p (ifn)
	  && direct_internal_fn_supported_p (ifn, type, OPTIMIZE_FOR_SPEED));
}
1402 | ||
e53b6e56 | 1403 | /* A wrapper around the internal-fn.cc versions of get_conditional_internal_fn |
30213ae9 RS |
1404 | for a code_helper CODE operating on type TYPE. */ |
1405 | ||
1406 | internal_fn | |
1407 | get_conditional_internal_fn (code_helper code, tree type) | |
1408 | { | |
1409 | if (code.is_tree_code ()) | |
1410 | return get_conditional_internal_fn (tree_code (code)); | |
1411 | auto cfn = combined_fn (code); | |
1412 | return get_conditional_internal_fn (associated_internal_fn (cfn, type)); | |
1413 | } |