/* gcc/c/c-fold.c — from the GCC source tree (thirdparty mirror, git.ipfire.org).
   Related fix: PR c/66618 (failure to diagnose non-constant initializer for
   a static object).  */
1 /* Support for fully folding sub-trees of an expression for C compiler.
2 Copyright (C) 1992-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "target.h"
24 #include "function.h"
25 #include "bitmap.h"
26 #include "c-tree.h"
27 #include "intl.h"
28 #include "gimplify.h"
29
30 static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool,
31 bool);
32
33 /* If DISABLE is true, stop issuing warnings. This is used when
34 parsing code that we know will not be executed. This function may
35 be called multiple times, and works as a stack. */
36
37 static void
38 c_disable_warnings (bool disable)
39 {
40 if (disable)
41 {
42 ++c_inhibit_evaluation_warnings;
43 fold_defer_overflow_warnings ();
44 }
45 }
46
47 /* If ENABLE is true, reenable issuing warnings. */
48
49 static void
50 c_enable_warnings (bool enable)
51 {
52 if (enable)
53 {
54 --c_inhibit_evaluation_warnings;
55 fold_undefer_and_ignore_overflow_warnings ();
56 }
57 }
58
59 /* Try to fold ARRAY_REF ary[index] if possible and not handled by
60 normal fold, return NULL_TREE otherwise. */
61
62 static tree
63 c_fold_array_ref (tree type, tree ary, tree index)
64 {
65 if (TREE_CODE (ary) != STRING_CST
66 || TREE_CODE (index) != INTEGER_CST
67 || TREE_OVERFLOW (index)
68 || TREE_CODE (TREE_TYPE (ary)) != ARRAY_TYPE
69 || !tree_fits_uhwi_p (index))
70 return NULL_TREE;
71
72 tree elem_type = TREE_TYPE (TREE_TYPE (ary));
73 unsigned elem_nchars = (TYPE_PRECISION (elem_type)
74 / TYPE_PRECISION (char_type_node));
75 unsigned len = (unsigned) TREE_STRING_LENGTH (ary) / elem_nchars;
76 tree nelts = array_type_nelts (TREE_TYPE (ary));
77 bool dummy1 = true, dummy2 = true;
78 nelts = c_fully_fold_internal (nelts, true, &dummy1, &dummy2, false, false);
79 unsigned HOST_WIDE_INT i = tree_to_uhwi (index);
80 if (!tree_int_cst_le (index, nelts)
81 || i >= len
82 || i + elem_nchars > len)
83 return NULL_TREE;
84
85 if (elem_nchars == 1)
86 return build_int_cst (type, TREE_STRING_POINTER (ary)[i]);
87
88 const unsigned char *ptr
89 = ((const unsigned char *)TREE_STRING_POINTER (ary) + i * elem_nchars);
90 return native_interpret_expr (type, ptr, elem_nchars);
91 }
92
93 /* Fully fold EXPR, an expression that was not folded (beyond integer
94 constant expressions and null pointer constants) when being built
95 up. If IN_INIT, this is in a static initializer and certain
96 changes are made to the folding done. Clear *MAYBE_CONST if
97 MAYBE_CONST is not NULL and EXPR is definitely not a constant
98 expression because it contains an evaluated operator (in C99) or an
99 operator outside of sizeof returning an integer constant (in C90)
100 not permitted in constant expressions, or because it contains an
101 evaluated arithmetic overflow. (*MAYBE_CONST should typically be
102 set to true by callers before calling this function.) Return the
103 folded expression. Function arguments have already been folded
104 before calling this function, as have the contents of SAVE_EXPR,
105 TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
106 C_MAYBE_CONST_EXPR. LVAL is true if it should be treated as an
107 lvalue. */
108
109 tree
110 c_fully_fold (tree expr, bool in_init, bool *maybe_const, bool lval)
111 {
112 tree ret;
113 tree eptype = NULL_TREE;
114 bool dummy = true;
115 bool maybe_const_itself = true;
116 location_t loc = EXPR_LOCATION (expr);
117
118 if (!maybe_const)
119 maybe_const = &dummy;
120 if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
121 {
122 eptype = TREE_TYPE (expr);
123 expr = TREE_OPERAND (expr, 0);
124 }
125 ret = c_fully_fold_internal (expr, in_init, maybe_const,
126 &maybe_const_itself, false, lval);
127 if (eptype)
128 ret = fold_convert_loc (loc, eptype, ret);
129 *maybe_const &= maybe_const_itself;
130 return ret;
131 }
132
/* Internal helper for c_fully_fold.  EXPR and IN_INIT are as for
   c_fully_fold.  *MAYBE_CONST_OPERANDS is cleared because of operands
   not permitted, while *MAYBE_CONST_ITSELF is cleared because of
   arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
   both evaluated and unevaluated subexpressions while
   *MAYBE_CONST_ITSELF is carried from only evaluated
   subexpressions).  FOR_INT_CONST indicates if EXPR is an expression
   with integer constant operands, and if any of the operands doesn't
   get folded to an integer constant, don't fold the expression itself.
   LVAL indicates folding of lvalue, where we can't replace it with
   an rvalue.  */

static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
		       bool *maybe_const_itself, bool for_int_const, bool lval)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  /* Per-operand constancy flags, used for the short-circuiting codes
     (&&, ||, ?:) where unevaluated operands are tracked separately.  */
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  /* Preserve any existing no-warning bit across the rebuild below.  */
  bool nowarning = TREE_NO_WARNING (expr);
  bool unused_p;
  /* Whether operand 0 must be folded as an lvalue (assignment targets,
     increment/decrement operands, ADDR_EXPR operands).  */
  bool op0_lval = false;
  source_range old_range;

  /* Constants, declarations, statements, errors, and anything else not
     counted as an expression cannot usefully be folded further at this
     point.  */
  if (!IS_EXPR_CODE_CLASS (kind) || kind == tcc_statement)
    {
      /* Except for variables which we can optimize to its initializer.  */
      if (VAR_P (expr) && !lval && (optimize || in_init))
	{
	  ret = decl_constant_value (expr);
	  /* Avoid unwanted tree sharing between the initializer and current
	     function's body where the tree can be modified e.g. by the
	     gimplifier.  */
	  if (ret != expr && TREE_STATIC (expr))
	    ret = unshare_expr (ret);
	  return ret;
	}
      return expr;
    }

  /* Remember the source range so it can be reinstated on the folded
     replacement tree at the end.  */
  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
	*maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
	{
	  /* Integer-operands expressions disqualify constancy of the
	     expression itself; fold the inner expression with
	     FOR_INT_CONST set.  */
	  *maybe_const_itself = false;
	  inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
					 maybe_const_itself, true, lval);
	}
      if (pre && !in_init)
	ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
	ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
	 C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
	ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
	{
	  /* Carry qualifier-derived flags over to the rebuilt ref.  */
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      goto out;

    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      /* Fold "foo"[2] in initializers.  */
      if (!lval && in_init)
	{
	  ret = c_fold_array_ref (TREE_TYPE (expr), op0, op1);
	  if (ret)
	    goto out;
	  ret = expr;
	}
      if (op0 != orig_op0 || op1 != orig_op1)
	ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      if (!lval)
	ret = fold (ret);
      goto out;

    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
      op0_lval = true;
      /* FALLTHRU */
    case COMPOUND_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* Binary operations evaluating both arguments (increment and
	 decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const,
				   op0_lval);
      STRIP_TYPE_NOPS (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
	 expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
	op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				     maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);

      /* With FOR_INT_CONST, do not fold unless both operands folded to
	 integer constants; EXPR is returned unchanged.  */
      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
			    || TREE_CODE (op1) != INTEGER_CST))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      /* Warn only for overflow newly introduced by this fold, not one
	 already present on an operand.  */
      if (TREE_OVERFLOW_P (ret)
	  && !TREE_OVERFLOW_P (op0)
	  && !TREE_OVERFLOW_P (op1))
	overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret, expr);
      /* Shift warnings below fire only when folding revealed a constant
	 that was not a literal in the source (orig operand non-constant,
	 folded operand constant), to avoid duplicating front-end
	 diagnostics already issued on literal operands.  */
      if (code == LSHIFT_EXPR
	  && TREE_CODE (orig_op0) != INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	  && TREE_CODE (op0) == INTEGER_CST
	  && c_inhibit_evaluation_warnings == 0
	  && tree_int_cst_sgn (op0) < 0)
	warning_at (loc, OPT_Wshift_negative_value,
		    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
	  && c_inhibit_evaluation_warnings == 0)
	{
	  if (tree_int_cst_sgn (op1) < 0)
	    warning_at (loc, OPT_Wshift_count_negative,
			(code == LSHIFT_EXPR
			 ? G_("left shift count is negative")
			 : G_("right shift count is negative")));
	  else if ((TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
		    || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (orig_op0)))
		      >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			(code == LSHIFT_EXPR
			 ? G_("left shift count >= width of type")
			 : G_("right shift count >= width of type")));
	  else if (TREE_CODE (TREE_TYPE (orig_op0)) == VECTOR_TYPE
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0))))
		      >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			code == LSHIFT_EXPR
			? G_("left shift count >= width of vector element")
			: G_("right shift count >= width of vector element"));
	}
      if (code == LSHIFT_EXPR
	  /* If either OP0 has been folded to INTEGER_CST...  */
	  && ((TREE_CODE (orig_op0) != INTEGER_CST
	       && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	       && TREE_CODE (op0) == INTEGER_CST)
	      /* ...or if OP1 has been folded to INTEGER_CST...  */
	      || (TREE_CODE (orig_op1) != INTEGER_CST
		  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
		  && TREE_CODE (op1) == INTEGER_CST))
	  && c_inhibit_evaluation_warnings == 0)
	/* ...then maybe we can detect an overflow.  */
	maybe_warn_shift_overflow (loc, op0, op1);
      if ((code == TRUNC_DIV_EXPR
	   || code == CEIL_DIV_EXPR
	   || code == FLOOR_DIV_EXPR
	   || code == EXACT_DIV_EXPR
	   || code == TRUNC_MOD_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
	warn_for_div_by_zero (loc, op1);
      goto out;

    case ADDR_EXPR:
      /* The operand of & is an lvalue; fold it as such.  */
      op0_lval = true;
      goto unary;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* __real__/__imag__ are lvalues iff the whole expression is.  */
      op0_lval = lval;
      /* FALLTHRU */
    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case CONJ_EXPR:
    unary:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const,
				   op0_lval);
      STRIP_TYPE_NOPS (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
	goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
	 not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
	  && code == ADDR_EXPR
	  && (op1 = get_base_address (op0)) != NULL_TREE
	  && INDIRECT_REF_P (op1)
	  && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
	ret = fold_convert_loc (loc, TREE_TYPE (expr), fold_offsetof_1 (op0));
      else if (op0 != orig_op0 || in_init)
	ret = in_init
	  ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
	  : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
	ret = fold (expr);
      if (code == INDIRECT_REF
	  && ret != expr
	  && INDIRECT_REF_P (ret))
	{
	  /* Preserve qualifier/side-effect flags on a rebuilt deref.  */
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      switch (code)
	{
	case FIX_TRUNC_EXPR:
	case FLOAT_EXPR:
	CASE_CONVERT:
	  /* Don't warn about explicit conversions.  We will already
	     have warned about suspect implicit conversions.  */
	  break;

	default:
	  if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
	    overflow_warning (EXPR_LOCATION (expr), ret, op0);
	  break;
	}
      goto out;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
	 arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op0);

      /* OP1 is unevaluated when OP0 short-circuits the result; suppress
	 warnings while folding it in that case.  */
      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
			  ? truthvalue_false_node
			  : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
	      || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      /* Propagate constancy: OP0 always counts; OP1 counts for
	 *MAYBE_CONST_OPERANDS unless C99 allows the unevaluated operand
	 to be non-constant, and for *MAYBE_CONST_ITSELF unless OP0
	 short-circuits evaluation.  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_itself &= op1_const_self;
      goto out;

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const, false);

      STRIP_TYPE_NOPS (op0);
      /* Suppress warnings in whichever arm a constant condition makes
	 unevaluated.  */
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Only the evaluated operand must be an INTEGER_CST.  */
	      || (op0 == truthvalue_true_node
		  ? TREE_CODE (op1) != INTEGER_CST
		  : TREE_CODE (op2) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      /* Constancy propagation mirrors TRUTH_ANDIF/ORIF above, per arm.  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_operands &= op2_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_itself &= op2_const_self;
      goto out;

    case VEC_COND_EXPR:
      /* Unlike COND_EXPR, all three operands are always evaluated.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
	 encountered must remove the EXCESS_PRECISION_EXPR around
	 inner operands and possibly put one around the whole
	 expression or possibly convert to the semantic type (which
	 c_fully_fold does); we cannot tell at this stage which is
	 appropriate in any particular case.  */
      gcc_unreachable ();

    case SAVE_EXPR:
      /* Make sure to fold the contents of a SAVE_EXPR exactly once.  */
      op0 = TREE_OPERAND (expr, 0);
      if (!SAVE_EXPR_FOLDED_P (expr))
	{
	  op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				       maybe_const_itself, for_int_const,
				       false);
	  TREE_OPERAND (expr, 0) = op0;
	  SAVE_EXPR_FOLDED_P (expr) = true;
	}
      /* Return the SAVE_EXPR operand if it is invariant.  */
      if (tree_invariant_p (op0))
	ret = op0;
      goto out;

    default:
      /* Various codes may appear through folding built-in functions
	 and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
    {
      /* Reinstate the no-warning bit, wrapping in a NOP_EXPR if the
	 folded tree cannot carry a location/flag itself.  */
      if (!CAN_HAVE_LOCATION_P (ret))
	ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
    }
  if (ret != expr)
    {
      /* Carry the original location and source range onto the
	 replacement tree.  */
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
	set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}