/* Preamble and helpers for the autogenerated gimple-match.c file.
   Copyright (C) 2014-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
#include "vec-perm-indices.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "calls.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "gimple-match.h"
#include "tree-pass.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimplify.h"
#include "optabs-tree.h"
#include "tree-eh.h"
#include "dbgcnt.h"

/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  */
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree, tree, tree);
static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));

const unsigned int gimple_match_op::MAX_NUM_OPS;

/* Return whether T is a constant that we'll dispatch to fold to
   evaluate fully constant expressions.  */

static inline bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
          /* The following is only interesting to string builtins.  */
          || (TREE_CODE (t) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}
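
/* Illustrative note (not part of the upstream file): INTEGER_CSTs,
   REAL_CSTs, VECTOR_CSTs and the like satisfy constant_for_folding,
   and so does the address of a string literal such as &"abc"[0]
   (an ADDR_EXPR wrapping a STRING_CST), which is what the string
   builtins care about.  An SSA name or a generic ADDR_EXPR does not.  */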

/* Try to convert conditional operation ORIG_OP into an IFN_COND_*
   operation.  Return true on success, storing the new operation in NEW_OP.  */

static bool
convert_conditional_op (gimple_match_op *orig_op,
                        gimple_match_op *new_op)
{
  internal_fn ifn;
  if (orig_op->code.is_tree_code ())
    ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
  else
    {
      combined_fn cfn = orig_op->code;
      if (!internal_fn_p (cfn))
        return false;
      ifn = get_conditional_internal_fn (as_internal_fn (cfn));
    }
  if (ifn == IFN_LAST)
    return false;
  unsigned int num_ops = orig_op->num_ops;
  new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
  new_op->ops[0] = orig_op->cond.cond;
  for (unsigned int i = 0; i < num_ops; ++i)
    new_op->ops[i + 1] = orig_op->ops[i];
  tree else_value = orig_op->cond.else_value;
  if (!else_value)
    else_value = targetm.preferred_else_value (ifn, orig_op->type,
                                               num_ops, orig_op->ops);
  new_op->ops[num_ops + 1] = else_value;
  return true;
}
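
/* Illustrative sketch (not part of the upstream file): if ORIG_OP
   describes (plus A B) guarded by condition COND with else value ELSE,
   i.e. the conditional form of an addition, the conversion above
   produces the equivalent call

     IFN_COND_ADD (COND, A, B, ELSE)

   with COND prepended and ELSE appended to the original operands.  If
   no else value was recorded, the target hook
   targetm.preferred_else_value supplies one.  */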

/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
                                 tree (*valueize) (tree))
{
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
         gimple value, just use it unconditionally.  This isn't a
         simplification in itself, since there was no operation to
         build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
        {
          res_op->cond.cond = NULL_TREE;
          return false;
        }

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
                          && TYPE_OVERFLOW_TRAPS (res_op->type));
      if (!operation_could_trap_p ((tree_code) res_op->code,
                                   FLOAT_TYPE_P (res_op->type),
                                   honor_trapv, res_op->op_or_null (1)))
        {
          res_op->cond.cond = NULL_TREE;
          return false;
        }
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      new_op.set_op (VEC_COND_EXPR, res_op->type,
                     res_op->cond.cond, res_op->ops[0],
                     res_op->cond.else_value);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}
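
/* Illustrative sketch (not part of the upstream file): suppose a
   conditional simplification reduced IFN_COND_ADD (C, X, 0, ELSE) to
   the plain value X, so RES_OP now describes the gimple value X with
   the condition C and else value ELSE still attached.  For a vector
   type the function above rewrites that as

     VEC_COND_EXPR <C, X, ELSE>

   and resimplifies it; if the result is not yet a gimple value it
   instead falls back to rebuilding an IFN_COND_* call via
   convert_conditional_op.  */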

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        tem = const_unop (res_op->code, res_op->type, res_op->ops[0]);
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        tem = const_binop (res_op->code, res_op->type,
                           res_op->ops[0], res_op->ops[1]);
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_op->code.is_tree_code ()
      && (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
          || commutative_tree_code (res_op->code))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison)
        res_op->code = swap_tree_comparison (res_op->code);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
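
/* Illustrative sketch (not part of the upstream file): the
   canonicalization step above relies on tree_swap_operands_p, so for
   example a result describing (plus 4 _1) becomes (plus _1 4) with the
   constant last, and a comparison such as (lt 4 _1) becomes (gt _1 4),
   where the comparison code is swapped along with the operands.  Even
   when no further pattern matches, the function still reports the
   canonicalization as a change by returning true.  */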

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        tem = fold_ternary/*_to_constant*/ (res_op->code, res_op->type,
                                            res_op->ops[0], res_op->ops[1],
                                            res_op->ops[2]);
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_op->code.is_tree_code ()
      && commutative_ternary_tree_code (res_op->code)
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  /* No constant folding is defined for four-operand functions.  */

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2],
                       res_op->ops[3]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  /* No constant folding is defined for five-operand functions.  */

  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2],
                       res_op->ops[3], res_op->ops[4]))
    {
      *res_op = res_op2;
      return true;
    }

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}

/* Match and simplify the toplevel valueized operation THIS.
   Replaces THIS with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
{
  switch (num_ops)
    {
    case 1:
      return gimple_resimplify1 (seq, this, valueize);
    case 2:
      return gimple_resimplify2 (seq, this, valueize);
    case 3:
      return gimple_resimplify3 (seq, this, valueize);
    case 4:
      return gimple_resimplify4 (seq, this, valueize);
    case 5:
      return gimple_resimplify5 (seq, this, valueize);
    default:
      gcc_unreachable ();
    }
}

/* If in GIMPLE the operation described by RES_OP should be single-rhs,
   build a GENERIC tree for that expression and update RES_OP accordingly.  */

void
maybe_build_generic_op (gimple_match_op *res_op)
{
  tree_code code = (tree_code) res_op->code;
  tree val;
  switch (code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      val = build1 (code, res_op->type, res_op->ops[0]);
      res_op->set_value (val);
      break;
    case BIT_FIELD_REF:
      val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
                    res_op->ops[2]);
      REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
      res_op->set_value (val);
      break;
    default:;
    }
}
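
/* Illustrative sketch (not part of the upstream file): a result
   described in exploded form as (REALPART_EXPR, type, _1) is not a
   separate unary operation in GIMPLE but a single-rhs reference, so
   the function above rebuilds it as the GENERIC tree REALPART_EXPR <_1>
   and stores that as the single value of RES_OP.  BIT_FIELD_REF is
   handled the same way, additionally carrying over the
   reverse-storage-order flag.  */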

tree (*mprts_hook) (gimple_match_op *);

/* Try to build RES_OP, which is known to be a call to FN.  Return null
   if the target doesn't support the function.  */

static gcall *
build_call_internal (internal_fn fn, gimple_match_op *res_op)
{
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, res_op->type,
                                                  res_op->ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
        return NULL;
    }
  return gimple_build_call_internal (fn, res_op->num_ops,
                                     res_op->op_or_null (0),
                                     res_op->op_or_null (1),
                                     res_op->op_or_null (2),
                                     res_op->op_or_null (3),
                                     res_op->op_or_null (4));
}

/* Push the exploded expression described by RES_OP as a statement to
   SEQ if necessary and return a gimple value denoting the value of the
   expression.  If RES is not NULL then the result will always be RES
   and even gimple values are pushed to SEQ.  */

tree
maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
{
  tree *ops = res_op->ops;
  unsigned num_ops = res_op->num_ops;

  /* The caller should have converted conditional operations into an UNCOND
     form and resimplified as appropriate.  The conditional form only
     survives this far if that conversion failed.  */
  if (res_op->cond.cond)
    return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      if (!res
          && gimple_simplified_result_is_gimple_val (res_op))
        return ops[0];
      if (mprts_hook)
        {
          tree tem = mprts_hook (res_op);
          if (tem)
            return tem;
        }
    }

  if (!seq)
    return NULL_TREE;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
        && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
      return NULL_TREE;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
        return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (res_op->type);
          else
            res = create_tmp_reg (res_op->type);
        }
      maybe_build_generic_op (res_op);
      gimple *new_stmt = gimple_build_assign (res, res_op->code,
                                              res_op->op_or_null (0),
                                              res_op->op_or_null (1),
                                              res_op->op_or_null (2));
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      gcc_assert (num_ops != 0);
      combined_fn fn = res_op->code;
      gcall *new_stmt = NULL;
      if (internal_fn_p (fn))
        {
          /* Generate the given function if we can.  */
          internal_fn ifn = as_internal_fn (fn);
          new_stmt = build_call_internal (ifn, res_op);
          if (!new_stmt)
            return NULL_TREE;
        }
      else
        {
          /* Find the function we want to call.  */
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          if (!decl)
            return NULL;

          /* We can't and should not emit calls to non-const functions.  */
          if (!(flags_from_decl_or_type (decl) & ECF_CONST))
            return NULL;

          new_stmt = gimple_build_call (decl, num_ops,
                                        res_op->op_or_null (0),
                                        res_op->op_or_null (1),
                                        res_op->op_or_null (2),
                                        res_op->op_or_null (3),
                                        res_op->op_or_null (4));
        }
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (res_op->type);
          else
            res = create_tmp_reg (res_op->type);
        }
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}
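
/* Illustrative sketch (not part of the upstream file): given a RES_OP
   describing (plus _1 _2) in some integer type, a caller that passes a
   non-NULL SEQ and no preallocated RES gets back a fresh SSA name, with
   a statement along the lines of

     _3 = _1 + _2;

   appended to SEQ.  If RES_OP already describes a bare gimple value and
   RES is NULL, that value is returned directly and nothing is pushed.  */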


/* Public API overloads follow, for operations given as a tree_code or
   a combined_fn and for one to three operands or arguments.
   They return NULL_TREE if nothing could be simplified, otherwise
   the resulting simplified value with any needed statements pushed
   to SEQ.  If SEQ is NULL the simplification fails whenever it would
   need to create new stmts.  If VALUEIZE is non-NULL then all
   SSA names will be valueized using that hook prior to
   applying simplifications.  */

/* Unary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}
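
/* Illustrative sketch (not part of the upstream file): a pass that
   wants to know whether ~(~x) simplifies could use the unary overload
   above as

     gimple_seq stmts = NULL;
     tree val = gimple_simplify (BIT_NOT_EXPR, TREE_TYPE (name),
                                 name, &stmts, follow_ssa_edge);

   where NAME and FOLLOW_SSA_EDGE stand for whatever operand and
   valueization hook the pass uses (NULL is also allowed for the hook).
   VAL is NULL_TREE if nothing simplified; otherwise it is a gimple
   value, with any statements needed to compute it appended to STMTS.  */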

/* Binary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        code = swap_tree_comparison (code);
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Ternary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1, tree op2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    std::swap (op0, op1);

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with one argument.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree res = fold_const_call (fn, type, arg0);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with two arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0, tree arg1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1))
    {
      tree res = fold_const_call (fn, type, arg0, arg1);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with three arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0, tree arg1, tree arg2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree res = fold_const_call (fn, type, arg0, arg1, arg2);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
   VALUEIZED to true if valueization changed OP.  */

static inline tree
do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem && tem != op)
        {
          op = tem;
          valueized = true;
        }
    }
  return op;
}
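
/* Illustrative note (not part of the upstream file): with a hook that
   maps _2 to the constant 3, do_valueize (_2, hook, valueized) returns
   3 and sets VALUEIZED, while do_valueize (_2, NULL, valueized) and
   calls on non-SSA operands return the operand unchanged.  The flag is
   only ever set, never cleared, so a single flag can accumulate the
   effect of valueizing several operands.  */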

/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
                                gimple_seq *seq, tree (*valueize) (tree))
{
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
        return false;
      op = as_combined_fn (ifn);
    }

  unsigned int num_ops = res_op->num_ops;
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
                                              res_op->ops[num_ops - 1]),
                           op, res_op->type, num_ops - 2);
  for (unsigned int i = 1; i < num_ops - 1; ++i)
    cond_op.ops[i - 1] = res_op->ops[i];
  switch (num_ops - 2)
    {
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
        return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
        return false;
      break;
    default:
      gcc_unreachable ();
    }
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}

/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
                 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree type = TREE_TYPE (gimple_assign_lhs (stmt));
        switch (gimple_assign_rhs_class (stmt))
          {
          case GIMPLE_SINGLE_RHS:
            if (code == REALPART_EXPR
                || code == IMAGPART_EXPR
                || code == VIEW_CONVERT_EXPR)
              {
                tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                res_op->set_op (code, type, op0);
                return (gimple_resimplify1 (seq, res_op, valueize)
                        || valueized);
              }
            else if (code == BIT_FIELD_REF)
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                tree op0 = TREE_OPERAND (rhs1, 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                res_op->set_op (code, type, op0,
                                TREE_OPERAND (rhs1, 1),
                                TREE_OPERAND (rhs1, 2),
                                REF_REVERSE_STORAGE_ORDER (rhs1));
                if (res_op->reverse)
                  return valueized;
                return (gimple_resimplify3 (seq, res_op, valueize)
                        || valueized);
              }
            else if (code == SSA_NAME
                     && top_valueize)
              {
                tree op0 = gimple_assign_rhs1 (stmt);
                tree valueized = top_valueize (op0);
                if (!valueized || op0 == valueized)
                  return false;
                res_op->set_op (TREE_CODE (op0), type, valueized);
                return true;
              }
            break;
          case GIMPLE_UNARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              res_op->set_op (code, type, rhs1);
              return (gimple_resimplify1 (seq, res_op, valueize)
                      || valueized);
            }
          case GIMPLE_BINARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              tree rhs2 = gimple_assign_rhs2 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              res_op->set_op (code, type, rhs1, rhs2);
              return (gimple_resimplify2 (seq, res_op, valueize)
                      || valueized);
            }
          case GIMPLE_TERNARY_RHS:
            {
              bool valueized = false;
              tree rhs1 = gimple_assign_rhs1 (stmt);
              /* If this is a [VEC_]COND_EXPR first try to simplify an
                 embedded GENERIC condition.  */
              if (code == COND_EXPR
                  || code == VEC_COND_EXPR)
                {
                  if (COMPARISON_CLASS_P (rhs1))
                    {
                      tree lhs = TREE_OPERAND (rhs1, 0);
                      tree rhs = TREE_OPERAND (rhs1, 1);
                      lhs = do_valueize (lhs, top_valueize, valueized);
                      rhs = do_valueize (rhs, top_valueize, valueized);
                      gimple_match_op res_op2 (res_op->cond, TREE_CODE (rhs1),
                                               TREE_TYPE (rhs1), lhs, rhs);
                      if ((gimple_resimplify2 (seq, &res_op2, valueize)
                           || valueized)
                          && res_op2.code.is_tree_code ())
                        {
                          valueized = true;
                          if (TREE_CODE_CLASS ((enum tree_code) res_op2.code)
                              == tcc_comparison)
                            rhs1 = build2 (res_op2.code, TREE_TYPE (rhs1),
                                           res_op2.ops[0], res_op2.ops[1]);
                          else if (res_op2.code == SSA_NAME
                                   || res_op2.code == INTEGER_CST
                                   || res_op2.code == VECTOR_CST)
                            rhs1 = res_op2.ops[0];
                          else
                            valueized = false;
                        }
                    }
                }
              tree rhs2 = gimple_assign_rhs2 (stmt);
              tree rhs3 = gimple_assign_rhs3 (stmt);
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              rhs3 = do_valueize (rhs3, top_valueize, valueized);
              res_op->set_op (code, type, rhs1, rhs2, rhs3);
              return (gimple_resimplify3 (seq, res_op, valueize)
                      || valueized);
            }
          default:
            gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
          && gimple_call_num_args (stmt) >= 1
          && gimple_call_num_args (stmt) <= 5)
        {
          bool valueized = false;
          combined_fn cfn;
          if (gimple_call_internal_p (stmt))
            cfn = as_combined_fn (gimple_call_internal_fn (stmt));
          else
            {
              tree fn = gimple_call_fn (stmt);
              if (!fn)
                return false;

              fn = do_valueize (fn, top_valueize, valueized);
              if (TREE_CODE (fn) != ADDR_EXPR
                  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
                return false;

              tree decl = TREE_OPERAND (fn, 0);
              if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
                  || !gimple_builtin_call_types_compatible_p (stmt, decl))
                return false;

              cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
            }

          unsigned int num_args = gimple_call_num_args (stmt);
          res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
          for (unsigned i = 0; i < num_args; ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              res_op->ops[i] = do_valueize (arg, top_valueize, valueized);
            }
          if (internal_fn_p (cfn)
              && try_conditional_simplification (as_internal_fn (cfn),
                                                 res_op, seq, valueize))
            return true;
          switch (num_args)
            {
            case 1:
              return (gimple_resimplify1 (seq, res_op, valueize)
                      || valueized);
            case 2:
              return (gimple_resimplify2 (seq, res_op, valueize)
                      || valueized);
            case 3:
              return (gimple_resimplify3 (seq, res_op, valueize)
                      || valueized);
            case 4:
              return (gimple_resimplify4 (seq, res_op, valueize)
                      || valueized);
            case 5:
              return (gimple_resimplify5 (seq, res_op, valueize)
                      || valueized);
            default:
              gcc_unreachable ();
            }
        }
      break;

    case GIMPLE_COND:
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        bool valueized = false;
        lhs = do_valueize (lhs, top_valueize, valueized);
        rhs = do_valueize (rhs, top_valueize, valueized);
        res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
        return (gimple_resimplify2 (seq, res_op, valueize)
                || valueized);
      }

    default:
      break;
    }

  return false;
}
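
/* Illustrative sketch (not part of the upstream file): this entry point
   is what fold_stmt-style callers drive, roughly

     gimple_seq stmts = NULL;
     gimple_match_op res_op;
     if (gimple_simplify (stmt, &res_op, &stmts, valueize, valueize)
         && maybe_push_res_to_seq (&res_op, &stmts, NULL_TREE))
       {
         ... replace STMT with the simplified form, inserting STMTS ...
       }

   Note the two hooks: TOP_VALUEIZE is applied to the operands of STMT
   itself, while VALUEIZE is handed down to the generated matchers and
   the resimplify helpers for use when they follow SSA definitions.  */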


/* Helper for the autogenerated code, valueize OP.  */

inline tree
do_valueize (tree (*valueize)(tree), tree op)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem)
        return tem;
    }
  return op;
}

/* Helper for the autogenerated code, get at the definition of NAME when
   VALUEIZE allows that.  */

inline gimple *
get_def (tree (*valueize)(tree), tree name)
{
  if (valueize && ! valueize (name))
    return NULL;
  return SSA_NAME_DEF_STMT (name);
}

/* Routine to determine if the types T1 and T2 are effectively
   the same for GIMPLE.  If T1 or T2 is not a type, the test
   applies to their TREE_TYPE.  */

static inline bool
types_match (tree t1, tree t2)
{
  if (!TYPE_P (t1))
    t1 = TREE_TYPE (t1);
  if (!TYPE_P (t2))
    t2 = TREE_TYPE (t2);

  return types_compatible_p (t1, t2);
}

/* Return whether T has a single use.  For GIMPLE, we also allow any
   non-SSA_NAME (i.e. constants) and zero uses to cope with uses
   that aren't linked up yet.  */

static inline bool
single_use (tree t)
{
  return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
}

/* Return true if math operations should be canonicalized,
   e.g. sqrt(sqrt(x)) -> pow(x, 0.25).  */

static inline bool
canonicalize_math_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
}

/* Return true if math operations that are beneficial only after
   vectorization should be canonicalized.  */

static inline bool
canonicalize_math_after_vectorization_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
}

/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      if (!is_gimple_assign (def))
        return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          break;
        default:
          return true;
        }
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
          || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
        return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
        return true;
    }

  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
        continue;
      else if (cst2 == NULL_TREE)
        cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
        return true;
    }

  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
                         TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}
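
/* Illustrative sketch (not part of the upstream file): for a loop like

     x = 2.0;
     do
       {
         x = x - 1.0;
         ... pow (10.0, x) ...
       }
     while (...);

   ARG1 is defined as phi_res - 1.0 with phi_res = PHI <2.0, ...>, and
   2.0 - 1.0 is an exact integer, so the function above returns false
   and the call is kept as pow (10.0, x) rather than rewritten as
   exp (log (10.0) * x), preserving the likely-exact result.  */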

/* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
   is another division can be optimized.  Don't optimize if INNER_DIV
   is used in a TRUNC_MOD_EXPR with DIVISOR as second operand.  */

static bool
optimize_successive_divisions_p (tree divisor, tree inner_div)
{
  if (!gimple_in_ssa_p (cfun))
    return false;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (!is_gimple_assign (use_stmt)
          || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
          || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
        continue;
      return false;
    }
  return true;
}
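
/* Illustrative sketch (not part of the upstream file): for

     tmp = x / a;
     q = tmp / b;
     r = tmp % b;

   the division tmp / b is paired with the modulo tmp % b, so the
   predicate above returns false and the division chain is kept as is;
   without the "tmp % b" use it returns true and the match.pd pattern
   that calls it may fold q into a single division by a * b.  */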