]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/gimple-match-head.c
* doc/extend.texi (Common Function Attributes): Clarify
[thirdparty/gcc.git] / gcc / gimple-match-head.c
1 /* Preamble and helpers for the autogenerated gimple-match.c file.
2 Copyright (C) 2014-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "vec-perm-indices.h"
31 #include "fold-const.h"
32 #include "fold-const-call.h"
33 #include "stor-layout.h"
34 #include "gimple-fold.h"
35 #include "calls.h"
36 #include "tree-dfa.h"
37 #include "builtins.h"
38 #include "gimple-match.h"
39 #include "tree-pass.h"
40 #include "internal-fn.h"
41 #include "case-cfn-macros.h"
42 #include "gimplify.h"
43 #include "optabs-tree.h"
44 #include "tree-eh.h"
45
46
/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  One overload
   exists per operand count (one to five operands).  */
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
			     code_helper, tree, tree, tree, tree, tree, tree);

/* Out-of-line definition of the static data member declared (and
   initialized) in class gimple_match_op.  */
const unsigned int gimple_match_op::MAX_NUM_OPS;
62
63 /* Return whether T is a constant that we'll dispatch to fold to
64 evaluate fully constant expressions. */
65
66 static inline bool
67 constant_for_folding (tree t)
68 {
69 return (CONSTANT_CLASS_P (t)
70 /* The following is only interesting to string builtins. */
71 || (TREE_CODE (t) == ADDR_EXPR
72 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
73 }
74
75 /* Try to convert conditional operation ORIG_OP into an IFN_COND_*
76 operation. Return true on success, storing the new operation in NEW_OP. */
77
78 static bool
79 convert_conditional_op (gimple_match_op *orig_op,
80 gimple_match_op *new_op)
81 {
82 internal_fn ifn;
83 if (orig_op->code.is_tree_code ())
84 ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
85 else
86 {
87 combined_fn cfn = orig_op->code;
88 if (!internal_fn_p (cfn))
89 return false;
90 ifn = get_conditional_internal_fn (as_internal_fn (cfn));
91 }
92 if (ifn == IFN_LAST)
93 return false;
94 unsigned int num_ops = orig_op->num_ops;
95 new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
96 new_op->ops[0] = orig_op->cond.cond;
97 for (unsigned int i = 0; i < num_ops; ++i)
98 new_op->ops[i + 1] = orig_op->ops[i];
99 tree else_value = orig_op->cond.else_value;
100 if (!else_value)
101 else_value = targetm.preferred_else_value (ifn, orig_op->type,
102 num_ops, orig_op->ops);
103 new_op->ops[num_ops + 1] = else_value;
104 return true;
105 }
106
/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
				 tree (*valueize) (tree))
{
  /* Nothing to do for unconditional operations.  */
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
	 gimple value, just use it unconditionally.  This isn't a
	 simplification in itself, since there was no operation to
	 build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
			  && TYPE_OVERFLOW_TRAPS (res_op->type));
      if (!operation_could_trap_p ((tree_code) res_op->code,
				   FLOAT_TYPE_P (res_op->type),
				   honor_trapv, res_op->op_or_null (1)))
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      new_op.set_op (VEC_COND_EXPR, res_op->type,
		     res_op->cond.cond, res_op->ops[0],
		     res_op->cond.else_value);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}
169
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try full constant folding of the single operand.  */
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	tem = const_unop (res_op->code, res_op->type, res_op->ops[0]);
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  /* Strip the TREE_OVERFLOW flag folding may have set.  */
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  /* Dispatch to the generated matchers on a copy so RES_OP survives
     a failed match unchanged.  */
  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}
229
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try full constant folding of both operands.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	tem = const_binop (res_op->code, res_op->type,
			   res_op->ops[0], res_op->ops[1]);
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  /* Strip the TREE_OVERFLOW flag folding may have set.  */
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_op->code.is_tree_code ()
      && (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  || commutative_tree_code (res_op->code))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      /* Swapping comparison operands requires mirroring the code.  */
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison)
	res_op->code = swap_tree_comparison (res_op->code);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
301
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try full constant folding of all three operands.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	tem = fold_ternary/*_to_constant*/ (res_op->code, res_op->type,
					    res_op->ops[0], res_op->ops[1],
					    res_op->ops[2]);
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  /* Strip the TREE_OVERFLOW flag folding may have set.  */
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order; for ternary codes only the first two
     operands may commute, and the code never changes.  */
  bool canonicalized = false;
  if (res_op->code.is_tree_code ()
      && commutative_ternary_tree_code (res_op->code)
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
372
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* No constant folding is defined for four-operand functions.  */

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  /* Dispatch to the generated matchers on a copy so RES_OP survives
     a failed match unchanged.  */
  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2],
		       res_op->ops[3]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}
413
414 /* Helper that matches and simplifies the toplevel result from
415 a gimple_simplify run (where we don't want to build
416 a stmt in case it's used in in-place folding). Replaces
417 RES_OP with a simplified and/or canonicalized result and
418 returns whether any change was made. */
419
420 bool
421 gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
422 tree (*valueize)(tree))
423 {
424 /* No constant folding is defined for five-operand functions. */
425
426 gimple_match_op res_op2 (*res_op);
427 if (gimple_simplify (&res_op2, seq, valueize,
428 res_op->code, res_op->type,
429 res_op->ops[0], res_op->ops[1], res_op->ops[2],
430 res_op->ops[3], res_op->ops[4]))
431 {
432 *res_op = res_op2;
433 return true;
434 }
435
436 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
437 return true;
438
439 return false;
440 }
441
442 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
443 build a GENERIC tree for that expression and update RES_OP accordingly. */
444
445 void
446 maybe_build_generic_op (gimple_match_op *res_op)
447 {
448 tree_code code = (tree_code) res_op->code;
449 tree val;
450 switch (code)
451 {
452 case REALPART_EXPR:
453 case IMAGPART_EXPR:
454 case VIEW_CONVERT_EXPR:
455 val = build1 (code, res_op->type, res_op->ops[0]);
456 res_op->set_value (val);
457 break;
458 case BIT_FIELD_REF:
459 val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
460 res_op->ops[2]);
461 REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
462 res_op->set_value (val);
463 break;
464 default:;
465 }
466 }
467
/* Optional hook consulted by maybe_push_res_to_seq before materializing
   a tree-code result itself; a NULL_TREE return declines.  */
tree (*mprts_hook) (gimple_match_op *);
469
470 /* Try to build RES_OP, which is known to be a call to FN. Return null
471 if the target doesn't support the function. */
472
473 static gcall *
474 build_call_internal (internal_fn fn, gimple_match_op *res_op)
475 {
476 if (direct_internal_fn_p (fn))
477 {
478 tree_pair types = direct_internal_fn_types (fn, res_op->type,
479 res_op->ops);
480 if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
481 return NULL;
482 }
483 return gimple_build_call_internal (fn, res_op->num_ops,
484 res_op->op_or_null (0),
485 res_op->op_or_null (1),
486 res_op->op_or_null (2),
487 res_op->op_or_null (3),
488 res_op->op_or_null (4));
489 }
490
491 /* Push the exploded expression described by RES_OP as a statement to
492 SEQ if necessary and return a gimple value denoting the value of the
493 expression. If RES is not NULL then the result will be always RES
494 and even gimple values are pushed to SEQ. */
495
496 tree
497 maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
498 {
499 tree *ops = res_op->ops;
500 unsigned num_ops = res_op->num_ops;
501
502 /* The caller should have converted conditional operations into an UNCOND
503 form and resimplified as appropriate. The conditional form only
504 survives this far if that conversion failed. */
505 if (res_op->cond.cond)
506 return NULL_TREE;
507
508 if (res_op->code.is_tree_code ())
509 {
510 if (!res
511 && gimple_simplified_result_is_gimple_val (res_op))
512 return ops[0];
513 if (mprts_hook)
514 {
515 tree tem = mprts_hook (res_op);
516 if (tem)
517 return tem;
518 }
519 }
520
521 if (!seq)
522 return NULL_TREE;
523
524 /* Play safe and do not allow abnormals to be mentioned in
525 newly created statements. */
526 for (unsigned int i = 0; i < num_ops; ++i)
527 if (TREE_CODE (ops[i]) == SSA_NAME
528 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
529 return NULL_TREE;
530
531 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
532 for (unsigned int i = 0; i < 2; ++i)
533 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
534 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
535 return NULL_TREE;
536
537 if (res_op->code.is_tree_code ())
538 {
539 if (!res)
540 {
541 if (gimple_in_ssa_p (cfun))
542 res = make_ssa_name (res_op->type);
543 else
544 res = create_tmp_reg (res_op->type);
545 }
546 maybe_build_generic_op (res_op);
547 gimple *new_stmt = gimple_build_assign (res, res_op->code,
548 res_op->op_or_null (0),
549 res_op->op_or_null (1),
550 res_op->op_or_null (2));
551 gimple_seq_add_stmt_without_update (seq, new_stmt);
552 return res;
553 }
554 else
555 {
556 gcc_assert (num_ops != 0);
557 combined_fn fn = res_op->code;
558 gcall *new_stmt = NULL;
559 if (internal_fn_p (fn))
560 {
561 /* Generate the given function if we can. */
562 internal_fn ifn = as_internal_fn (fn);
563 new_stmt = build_call_internal (ifn, res_op);
564 if (!new_stmt)
565 return NULL_TREE;
566 }
567 else
568 {
569 /* Find the function we want to call. */
570 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
571 if (!decl)
572 return NULL;
573
574 /* We can't and should not emit calls to non-const functions. */
575 if (!(flags_from_decl_or_type (decl) & ECF_CONST))
576 return NULL;
577
578 new_stmt = gimple_build_call (decl, num_ops,
579 res_op->op_or_null (0),
580 res_op->op_or_null (1),
581 res_op->op_or_null (2),
582 res_op->op_or_null (3),
583 res_op->op_or_null (4));
584 }
585 if (!res)
586 {
587 if (gimple_in_ssa_p (cfun))
588 res = make_ssa_name (res_op->type);
589 else
590 res = create_tmp_reg (res_op->type);
591 }
592 gimple_call_set_lhs (new_stmt, res);
593 gimple_seq_add_stmt_without_update (seq, new_stmt);
594 return res;
595 }
596 }
597
598
599 /* Public API overloads follow for operation being tree_code or
600 built_in_function and for one to three operands or arguments.
601 They return NULL_TREE if nothing could be simplified or
602 the resulting simplified value with parts pushed to SEQ.
603 If SEQ is NULL then if the simplification needs to create
604 new stmts it will fail. If VALUEIZE is non-NULL then all
605 SSA names will be valueized using that hook prior to
606 applying simplifications. */
607
608 /* Unary ops. */
609
610 tree
611 gimple_simplify (enum tree_code code, tree type,
612 tree op0,
613 gimple_seq *seq, tree (*valueize)(tree))
614 {
615 if (constant_for_folding (op0))
616 {
617 tree res = const_unop (code, type, op0);
618 if (res != NULL_TREE
619 && CONSTANT_CLASS_P (res))
620 return res;
621 }
622
623 gimple_match_op res_op;
624 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
625 return NULL_TREE;
626 return maybe_push_res_to_seq (&res_op, seq);
627 }
628
629 /* Binary ops. */
630
631 tree
632 gimple_simplify (enum tree_code code, tree type,
633 tree op0, tree op1,
634 gimple_seq *seq, tree (*valueize)(tree))
635 {
636 if (constant_for_folding (op0) && constant_for_folding (op1))
637 {
638 tree res = const_binop (code, type, op0, op1);
639 if (res != NULL_TREE
640 && CONSTANT_CLASS_P (res))
641 return res;
642 }
643
644 /* Canonicalize operand order both for matching and fallback stmt
645 generation. */
646 if ((commutative_tree_code (code)
647 || TREE_CODE_CLASS (code) == tcc_comparison)
648 && tree_swap_operands_p (op0, op1))
649 {
650 std::swap (op0, op1);
651 if (TREE_CODE_CLASS (code) == tcc_comparison)
652 code = swap_tree_comparison (code);
653 }
654
655 gimple_match_op res_op;
656 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
657 return NULL_TREE;
658 return maybe_push_res_to_seq (&res_op, seq);
659 }
660
661 /* Ternary ops. */
662
663 tree
664 gimple_simplify (enum tree_code code, tree type,
665 tree op0, tree op1, tree op2,
666 gimple_seq *seq, tree (*valueize)(tree))
667 {
668 if (constant_for_folding (op0) && constant_for_folding (op1)
669 && constant_for_folding (op2))
670 {
671 tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
672 if (res != NULL_TREE
673 && CONSTANT_CLASS_P (res))
674 return res;
675 }
676
677 /* Canonicalize operand order both for matching and fallback stmt
678 generation. */
679 if (commutative_ternary_tree_code (code)
680 && tree_swap_operands_p (op0, op1))
681 std::swap (op0, op1);
682
683 gimple_match_op res_op;
684 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
685 return NULL_TREE;
686 return maybe_push_res_to_seq (&res_op, seq);
687 }
688
689 /* Builtin or internal function with one argument. */
690
691 tree
692 gimple_simplify (combined_fn fn, tree type,
693 tree arg0,
694 gimple_seq *seq, tree (*valueize)(tree))
695 {
696 if (constant_for_folding (arg0))
697 {
698 tree res = fold_const_call (fn, type, arg0);
699 if (res && CONSTANT_CLASS_P (res))
700 return res;
701 }
702
703 gimple_match_op res_op;
704 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
705 return NULL_TREE;
706 return maybe_push_res_to_seq (&res_op, seq);
707 }
708
709 /* Builtin or internal function with two arguments. */
710
711 tree
712 gimple_simplify (combined_fn fn, tree type,
713 tree arg0, tree arg1,
714 gimple_seq *seq, tree (*valueize)(tree))
715 {
716 if (constant_for_folding (arg0)
717 && constant_for_folding (arg1))
718 {
719 tree res = fold_const_call (fn, type, arg0, arg1);
720 if (res && CONSTANT_CLASS_P (res))
721 return res;
722 }
723
724 gimple_match_op res_op;
725 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
726 return NULL_TREE;
727 return maybe_push_res_to_seq (&res_op, seq);
728 }
729
730 /* Builtin or internal function with three arguments. */
731
732 tree
733 gimple_simplify (combined_fn fn, tree type,
734 tree arg0, tree arg1, tree arg2,
735 gimple_seq *seq, tree (*valueize)(tree))
736 {
737 if (constant_for_folding (arg0)
738 && constant_for_folding (arg1)
739 && constant_for_folding (arg2))
740 {
741 tree res = fold_const_call (fn, type, arg0, arg1, arg2);
742 if (res && CONSTANT_CLASS_P (res))
743 return res;
744 }
745
746 gimple_match_op res_op;
747 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
748 return NULL_TREE;
749 return maybe_push_res_to_seq (&res_op, seq);
750 }
751
752 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
753 VALUEIZED to true if valueization changed OP. */
754
755 static inline tree
756 do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
757 {
758 if (valueize && TREE_CODE (op) == SSA_NAME)
759 {
760 tree tem = valueize (op);
761 if (tem && tem != op)
762 {
763 op = tem;
764 valueized = true;
765 }
766 }
767 return op;
768 }
769
/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
				gimple_seq *seq, tree (*valueize) (tree))
{
  /* Determine the unconditional operation: either a tree code or,
     failing that, an unconditional internal function.  */
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
	return false;
      op = as_combined_fn (ifn);
    }

  /* Build the unconditional operation from the conditional call's
     middle operands; ops[0] is the mask and ops[num_ops - 1] the
     "else" value, which become the gimple_match_cond.  */
  unsigned int num_ops = res_op->num_ops;
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
					      res_op->ops[num_ops - 1]),
			   op, res_op->type, num_ops - 2);
  for (unsigned int i = 1; i < num_ops - 1; ++i)
    cond_op.ops[i - 1] = res_op->ops[i];
  switch (num_ops - 2)
    {
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
	return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
	return false;
      break;
    default:
      gcc_unreachable ();
    }
  /* Success: adopt the simplified operation and try to fold it back
     into a conditional form.  */
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}
825
/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  Explodes STMT's operands into
   RES_OP (valueizing them with TOP_VALUEIZE) and dispatches to the
   matching gimple_resimplifyN helper; returns whether RES_OP was
   changed (simplified or merely valueized).  */

bool
gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
		 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
	switch (gimple_assign_rhs_class (stmt))
	  {
	  case GIMPLE_SINGLE_RHS:
	    /* Wrapping references are decomposed into a unary or
	       ternary operation on the wrapped operand.  */
	    if (code == REALPART_EXPR
		|| code == IMAGPART_EXPR
		|| code == VIEW_CONVERT_EXPR)
	      {
		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
		bool valueized = false;
		op0 = do_valueize (op0, top_valueize, valueized);
		res_op->set_op (code, type, op0);
		return (gimple_resimplify1 (seq, res_op, valueize)
			|| valueized);
	      }
	    else if (code == BIT_FIELD_REF)
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		tree op0 = TREE_OPERAND (rhs1, 0);
		bool valueized = false;
		op0 = do_valueize (op0, top_valueize, valueized);
		res_op->set_op (code, type, op0,
				TREE_OPERAND (rhs1, 1),
				TREE_OPERAND (rhs1, 2),
				REF_REVERSE_STORAGE_ORDER (rhs1));
		/* Do not try to simplify reverse storage order
		   accesses.  */
		if (res_op->reverse)
		  return valueized;
		return (gimple_resimplify3 (seq, res_op, valueize)
			|| valueized);
	      }
	    else if (code == SSA_NAME
		     && top_valueize)
	      {
		/* A plain SSA name copy simplifies to its value.  */
		tree op0 = gimple_assign_rhs1 (stmt);
		tree valueized = top_valueize (op0);
		if (!valueized || op0 == valueized)
		  return false;
		res_op->set_op (TREE_CODE (op0), type, valueized);
		return true;
	      }
	    break;
	  case GIMPLE_UNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      bool valueized = false;
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      res_op->set_op (code, type, rhs1);
	      return (gimple_resimplify1 (seq, res_op, valueize)
		      || valueized);
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      bool valueized = false;
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      rhs2 = do_valueize (rhs2, top_valueize, valueized);
	      res_op->set_op (code, type, rhs1, rhs2);
	      return (gimple_resimplify2 (seq, res_op, valueize)
		      || valueized);
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      bool valueized = false;
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      /* If this is a [VEC_]COND_EXPR first try to simplify an
		 embedded GENERIC condition.  */
	      if (code == COND_EXPR
		  || code == VEC_COND_EXPR)
		{
		  if (COMPARISON_CLASS_P (rhs1))
		    {
		      tree lhs = TREE_OPERAND (rhs1, 0);
		      tree rhs = TREE_OPERAND (rhs1, 1);
		      lhs = do_valueize (lhs, top_valueize, valueized);
		      rhs = do_valueize (rhs, top_valueize, valueized);
		      gimple_match_op res_op2 (res_op->cond, TREE_CODE (rhs1),
					       TREE_TYPE (rhs1), lhs, rhs);
		      if ((gimple_resimplify2 (seq, &res_op2, valueize)
			   || valueized)
			  && res_op2.code.is_tree_code ())
			{
			  valueized = true;
			  /* Rebuild the GENERIC condition from the
			     simplified result; only comparisons and
			     gimple values are usable here.  */
			  if (TREE_CODE_CLASS ((enum tree_code) res_op2.code)
			      == tcc_comparison)
			    rhs1 = build2 (res_op2.code, TREE_TYPE (rhs1),
					   res_op2.ops[0], res_op2.ops[1]);
			  else if (res_op2.code == SSA_NAME
				   || res_op2.code == INTEGER_CST
				   || res_op2.code == VECTOR_CST)
			    rhs1 = res_op2.ops[0];
			  else
			    valueized = false;
			}
		    }
		}
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      tree rhs3 = gimple_assign_rhs3 (stmt);
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      rhs2 = do_valueize (rhs2, top_valueize, valueized);
	      rhs3 = do_valueize (rhs3, top_valueize, valueized);
	      res_op->set_op (code, type, rhs1, rhs2, rhs3);
	      return (gimple_resimplify3 (seq, res_op, valueize)
		      || valueized);
	    }
	  default:
	    gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_CALL:
      /* ???  This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
	  && gimple_call_num_args (stmt) >= 1
	  && gimple_call_num_args (stmt) <= 5)
	{
	  bool valueized = false;
	  combined_fn cfn;
	  if (gimple_call_internal_p (stmt))
	    cfn = as_combined_fn (gimple_call_internal_fn (stmt));
	  else
	    {
	      /* Only known-well-formed calls to normal builtins are
		 candidates for simplification.  */
	      tree fn = gimple_call_fn (stmt);
	      if (!fn)
		return false;

	      fn = do_valueize (fn, top_valueize, valueized);
	      if (TREE_CODE (fn) != ADDR_EXPR
		  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
		return false;

	      tree decl = TREE_OPERAND (fn, 0);
	      if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
		  || !gimple_builtin_call_types_compatible_p (stmt, decl))
		return false;

	      cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
	    }

	  unsigned int num_args = gimple_call_num_args (stmt);
	  res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
	  for (unsigned i = 0; i < num_args; ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      res_op->ops[i] = do_valueize (arg, top_valueize, valueized);
	    }
	  /* Conditional internal functions get a shot at simplifying
	     their unconditional equivalent first.  */
	  if (internal_fn_p (cfn)
	      && try_conditional_simplification (as_internal_fn (cfn),
						 res_op, seq, valueize))
	    return true;
	  switch (num_args)
	    {
	    case 1:
	      return (gimple_resimplify1 (seq, res_op, valueize)
		      || valueized);
	    case 2:
	      return (gimple_resimplify2 (seq, res_op, valueize)
		      || valueized);
	    case 3:
	      return (gimple_resimplify3 (seq, res_op, valueize)
		      || valueized);
	    case 4:
	      return (gimple_resimplify4 (seq, res_op, valueize)
		      || valueized);
	    case 5:
	      return (gimple_resimplify5 (seq, res_op, valueize)
		      || valueized);
	    default:
	      gcc_unreachable ();
	    }
	}
      break;

    case GIMPLE_COND:
      {
	/* A condition is treated as a boolean binary operation.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	bool valueized = false;
	lhs = do_valueize (lhs, top_valueize, valueized);
	rhs = do_valueize (rhs, top_valueize, valueized);
	res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
	return (gimple_resimplify2 (seq, res_op, valueize)
		|| valueized);
      }

    default:
      break;
    }

  return false;
}
1030
1031
1032 /* Helper for the autogenerated code, valueize OP. */
1033
1034 inline tree
1035 do_valueize (tree (*valueize)(tree), tree op)
1036 {
1037 if (valueize && TREE_CODE (op) == SSA_NAME)
1038 {
1039 tree tem = valueize (op);
1040 if (tem)
1041 return tem;
1042 }
1043 return op;
1044 }
1045
1046 /* Helper for the autogenerated code, get at the definition of NAME when
1047 VALUEIZE allows that. */
1048
1049 inline gimple *
1050 get_def (tree (*valueize)(tree), tree name)
1051 {
1052 if (valueize && ! valueize (name))
1053 return NULL;
1054 return SSA_NAME_DEF_STMT (name);
1055 }
1056
1057 /* Routine to determine if the types T1 and T2 are effectively
1058 the same for GIMPLE. If T1 or T2 is not a type, the test
1059 applies to their TREE_TYPE. */
1060
1061 static inline bool
1062 types_match (tree t1, tree t2)
1063 {
1064 if (!TYPE_P (t1))
1065 t1 = TREE_TYPE (t1);
1066 if (!TYPE_P (t2))
1067 t2 = TREE_TYPE (t2);
1068
1069 return types_compatible_p (t1, t2);
1070 }
1071
1072 /* Return if T has a single use. For GIMPLE, we also allow any
1073 non-SSA_NAME (ie constants) and zero uses to cope with uses
1074 that aren't linked up yet. */
1075
1076 static inline bool
1077 single_use (tree t)
1078 {
1079 return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
1080 }
1081
1082 /* Return true if math operations should be canonicalized,
1083 e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
1084
1085 static inline bool
1086 canonicalize_math_p ()
1087 {
1088 return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
1089 }
1090
1091 /* Return true if math operations that are beneficial only after
1092 vectorization should be canonicalized. */
1093
1094 static inline bool
1095 canonicalize_math_after_vectorization_p ()
1096 {
1097 return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
1098 }
1099
/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  /* A non-integer base never hits the exactness concern.  */
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  /* Look for arg1 = PHI <...> directly, or arg1 = phi_res +/- cst1
     with phi_res = PHI <...>.  */
  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      if (!is_gimple_assign (def))
	return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  break;
	default:
	  return true;
	}
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
	return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
	return true;
    }

  /* Find the unique REAL_CST among the PHI arguments, if any; bail out
     (allowing the optimization) when two different constants appear.  */
  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
	continue;
      else if (cst2 == NULL_TREE)
	cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
	return true;
    }

  /* Combine cst2 +/- cst1 when both are present, then refuse the
     optimization if the resulting exponent is an exact integer.  */
  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
			 TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}
1167
/* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
   is another division can be optimized.  Don't optimize if INNER_DIV
   is used in a TRUNC_MOD_EXPR with DIVISOR as second operand.  */

static bool
optimize_successive_divisions_p (tree divisor, tree inner_div)
{
  /* Immediate uses are only available in SSA form.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Scan all uses of INNER_DIV for a matching TRUNC_MOD_EXPR.  */
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (!is_gimple_assign (use_stmt)
	  || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
	  || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
	continue;
      return false;
    }
  return true;
}