gcc/convert.c
1 /* Utility routines for data type conversion for GCC.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* These routines are somewhat language-independent utility functions
22 intended to be called by the language-specific convert () functions. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "target.h"
28 #include "tree.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "convert.h"
33 #include "langhooks.h"
34 #include "builtins.h"
35 #include "ubsan.h"
36
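/* Build a unary or binary expression with code CODE at location LOC and
   type TYPE from the given operand(s), folding the result only when FOLD_P
   is true. */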
37 #define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
38 ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR) \
39 : build1_loc (LOC, CODE, TYPE, EXPR))
40 #define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
41 ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2) \
42 : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))
43
44 /* Convert EXPR to some pointer or reference type TYPE.
45 EXPR must be pointer, reference, integer, enumeral, or literal zero;
46 in other cases error is called. If FOLD_P is true, try to fold the
47 expression. */
48
49 static tree
50 convert_to_pointer_1 (tree type, tree expr, bool fold_p)
51 {
52 location_t loc = EXPR_LOCATION (expr);
53 if (TREE_TYPE (expr) == type)
54 return expr;
55
56 switch (TREE_CODE (TREE_TYPE (expr)))
57 {
58 case POINTER_TYPE:
59 case REFERENCE_TYPE:
60 {
61 /* If the pointers point to different address spaces, conversion needs
62 to be done via an ADDR_SPACE_CONVERT_EXPR instead of a NOP_EXPR. */
63 addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
64 addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
65
66 if (to_as == from_as)
67 return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
68 else
69 return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
70 type, expr);
71 }
72
73 case INTEGER_TYPE:
74 case ENUMERAL_TYPE:
75 case BOOLEAN_TYPE:
76 {
77 /* If the input precision differs from the target pointer type
78 precision, first convert the input expression to an integer type of
79 the target precision. Some targets, e.g. VMS, need several pointer
80 sizes to coexist so the latter isn't necessarily POINTER_SIZE. */
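/* For example, converting a 64-bit integer to a 32-bit pointer first
   narrows the value to a 32-bit integer, then converts that integer to
   the pointer type. */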
81 unsigned int pprec = TYPE_PRECISION (type);
82 unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));
83
84 if (eprec != pprec)
85 expr
86 = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
87 lang_hooks.types.type_for_size (pprec, 0),
88 expr);
89 }
90 return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);
91
92 default:
93 error ("cannot convert to a pointer type");
94 return convert_to_pointer_1 (type, integer_zero_node, fold_p);
95 }
96 }
97
98 /* A wrapper around convert_to_pointer_1 that always folds the
99 expression. */
100
101 tree
102 convert_to_pointer (tree type, tree expr)
103 {
104 return convert_to_pointer_1 (type, expr, true);
105 }
106
107 /* A wrapper around convert_to_pointer_1 that only folds the
108 expression if DOFOLD, or if it is CONSTANT_CLASS_P. */
109
110 tree
111 convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
112 {
113 return convert_to_pointer_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
114 }
115
116 /* Convert EXPR to some floating-point type TYPE.
117
118 EXPR must be float, fixed-point, integer, or enumeral;
119 in other cases error is called. If FOLD_P is true, try to fold
120 the expression. */
121
122 static tree
123 convert_to_real_1 (tree type, tree expr, bool fold_p)
124 {
125 enum built_in_function fcode = builtin_mathfn_code (expr);
126 tree itype = TREE_TYPE (expr);
127 location_t loc = EXPR_LOCATION (expr);
128
129 if (TREE_CODE (expr) == COMPOUND_EXPR)
130 {
131 tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
132 if (t == TREE_OPERAND (expr, 1))
133 return expr;
134 return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
135 TREE_OPERAND (expr, 0), t);
136 }
137
138 /* Disable until we figure out how to decide whether the functions are
139 present in the runtime library. */
140 /* Convert (float)sqrt((double)x) where x is float into sqrtf(x) */
141 if (optimize
142 && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
143 || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
144 {
145 switch (fcode)
146 {
147 #define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
148 CASE_MATHFN (COSH)
149 CASE_MATHFN (EXP)
150 CASE_MATHFN (EXP10)
151 CASE_MATHFN (EXP2)
152 CASE_MATHFN (EXPM1)
153 CASE_MATHFN (GAMMA)
154 CASE_MATHFN (J0)
155 CASE_MATHFN (J1)
156 CASE_MATHFN (LGAMMA)
157 CASE_MATHFN (POW10)
158 CASE_MATHFN (SINH)
159 CASE_MATHFN (TGAMMA)
160 CASE_MATHFN (Y0)
161 CASE_MATHFN (Y1)
162 /* The above functions may set errno differently with float
163 input or output so this transformation is not safe with
164 -fmath-errno. */
165 if (flag_errno_math)
166 break;
167 CASE_MATHFN (ACOS)
168 CASE_MATHFN (ACOSH)
169 CASE_MATHFN (ASIN)
170 CASE_MATHFN (ASINH)
171 CASE_MATHFN (ATAN)
172 CASE_MATHFN (ATANH)
173 CASE_MATHFN (CBRT)
174 CASE_MATHFN (COS)
175 CASE_MATHFN (ERF)
176 CASE_MATHFN (ERFC)
177 CASE_MATHFN (LOG)
178 CASE_MATHFN (LOG10)
179 CASE_MATHFN (LOG2)
180 CASE_MATHFN (LOG1P)
181 CASE_MATHFN (SIN)
182 CASE_MATHFN (TAN)
183 CASE_MATHFN (TANH)
184 /* This conversion is not safe for the above functions. */
185 if (!flag_unsafe_math_optimizations)
186 break;
187 CASE_MATHFN (SQRT)
188 CASE_MATHFN (FABS)
189 CASE_MATHFN (LOGB)
190 #undef CASE_MATHFN
191 {
192 tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
193 tree newtype = type;
194
195 /* We have (outertype)sqrt((innertype)x). Choose the wider of the two
196 types as the safe type for the operation. */
197 if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
198 newtype = TREE_TYPE (arg0);
199
200 /* We consider converting
201
202 (T1) sqrtT2 ((T2) exprT3)
203 to
204 (T1) sqrtT4 ((T4) exprT3),
205
206 where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
207 and T4 is NEWTYPE. All of those types are floating-point types.
208 T4 (NEWTYPE) should be narrower than T2 (ITYPE). This conversion
209 is safe only if P1 >= P2*2+2, where P1 and P2 are the precisions of
210 T2 and T4. See the following URL for a reference:
211 http://stackoverflow.com/questions/9235456/determining-
212 floating-point-square-root
213 */
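/* For instance, with IEEE single (P = 24), double (P = 53) and x86
   extended (P = 64) formats: (float) sqrt ((double) f) with f float may
   be narrowed to sqrtf (f), since 53 >= 2*24 + 2; but
   (double) sqrtl ((long double) d) with d double is not narrowed, since
   64 < 2*53 + 2. */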
214 if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
215 && !flag_unsafe_math_optimizations)
216 {
217 /* The following conversion is unsafe even if the precision condition
218 below is satisfied:
219
220 (float) sqrtl ((long double) double_val) -> (float) sqrt (double_val)
221 */
222 if (TYPE_MODE (type) != TYPE_MODE (newtype))
223 break;
224
225 int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
226 int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
227 if (p1 < p2 * 2 + 2)
228 break;
229 }
230
231 /* Be careful about integer to fp conversions.
232 These may still overflow. */
233 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
234 && TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
235 && (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
236 || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
237 {
238 tree fn = mathfn_built_in (newtype, fcode);
239 if (fn)
240 {
241 tree arg = convert_to_real_1 (newtype, arg0, fold_p);
242 expr = build_call_expr (fn, 1, arg);
243 if (newtype == type)
244 return expr;
245 }
246 }
247 }
248 default:
249 break;
250 }
251 }
252
253 /* Propagate the cast into the operation. */
254 if (itype != type && FLOAT_TYPE_P (type))
255 switch (TREE_CODE (expr))
256 {
257 /* Convert (float)-x into -(float)x. This is safe for
258 round-to-nearest rounding mode when the inner type is float. */
259 case ABS_EXPR:
260 case NEGATE_EXPR:
261 if (!flag_rounding_math
262 && FLOAT_TYPE_P (itype)
263 && TYPE_PRECISION (type) < TYPE_PRECISION (itype))
264 {
265 tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
266 fold_p);
267 return build1 (TREE_CODE (expr), type, arg);
268 }
269 break;
270 /* Convert (outertype)((innertype0)a+(innertype1)b)
271 into ((newtype)a+(newtype)b) where newtype
272 is the widest mode from all of these. */
273 case PLUS_EXPR:
274 case MINUS_EXPR:
275 case MULT_EXPR:
276 case RDIV_EXPR:
277 {
278 tree arg0 = strip_float_extensions (TREE_OPERAND (expr, 0));
279 tree arg1 = strip_float_extensions (TREE_OPERAND (expr, 1));
280
281 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
282 && FLOAT_TYPE_P (TREE_TYPE (arg1))
283 && DECIMAL_FLOAT_TYPE_P (itype) == DECIMAL_FLOAT_TYPE_P (type))
284 {
285 tree newtype = type;
286
287 if (TYPE_MODE (TREE_TYPE (arg0)) == SDmode
288 || TYPE_MODE (TREE_TYPE (arg1)) == SDmode
289 || TYPE_MODE (type) == SDmode)
290 newtype = dfloat32_type_node;
291 if (TYPE_MODE (TREE_TYPE (arg0)) == DDmode
292 || TYPE_MODE (TREE_TYPE (arg1)) == DDmode
293 || TYPE_MODE (type) == DDmode)
294 newtype = dfloat64_type_node;
295 if (TYPE_MODE (TREE_TYPE (arg0)) == TDmode
296 || TYPE_MODE (TREE_TYPE (arg1)) == TDmode
297 || TYPE_MODE (type) == TDmode)
298 newtype = dfloat128_type_node;
299 if (newtype == dfloat32_type_node
300 || newtype == dfloat64_type_node
301 || newtype == dfloat128_type_node)
302 {
303 expr = build2 (TREE_CODE (expr), newtype,
304 convert_to_real_1 (newtype, arg0,
305 fold_p),
306 convert_to_real_1 (newtype, arg1,
307 fold_p));
308 if (newtype == type)
309 return expr;
310 break;
311 }
312
313 if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (newtype))
314 newtype = TREE_TYPE (arg0);
315 if (TYPE_PRECISION (TREE_TYPE (arg1)) > TYPE_PRECISION (newtype))
316 newtype = TREE_TYPE (arg1);
317 /* Sometimes this transformation is safe (cannot
318 change results through affecting double rounding
319 cases) and sometimes it is not. If NEWTYPE is
320 wider than TYPE, e.g. (float)((long double)double
321 + (long double)double) converted to
322 (float)(double + double), the transformation is
323 unsafe regardless of the details of the types
324 involved; double rounding can arise if the result
325 of NEWTYPE arithmetic is a NEWTYPE value half way
326 between two representable TYPE values but the
327 exact value is sufficiently different (in the
328 right direction) for this difference to be
329 visible in ITYPE arithmetic. If NEWTYPE is the
330 same as TYPE, however, the transformation may be
331 safe depending on the types involved: it is safe
332 if the ITYPE has strictly more than twice as many
333 mantissa bits as TYPE, can represent infinities
334 and NaNs if the TYPE can, and has sufficient
335 exponent range for the product or ratio of two
336 values representable in the TYPE to be within the
337 range of normal values of ITYPE. */
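/* For example, with IEEE single and double formats,
   (float)((double) f1 * (double) f2) with f1 and f2 float can be computed
   directly in float: since 53 >= 2*24 + 2, double rounding cannot change
   the result.  This is the kind of condition that
   real_can_shorten_arithmetic checks below. */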
338 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
339 && (flag_unsafe_math_optimizations
340 || (TYPE_PRECISION (newtype) == TYPE_PRECISION (type)
341 && real_can_shorten_arithmetic (TYPE_MODE (itype),
342 TYPE_MODE (type))
343 && !excess_precision_type (newtype))))
344 {
345 expr = build2 (TREE_CODE (expr), newtype,
346 convert_to_real_1 (newtype, arg0,
347 fold_p),
348 convert_to_real_1 (newtype, arg1,
349 fold_p));
350 if (newtype == type)
351 return expr;
352 }
353 }
354 }
355 break;
356 default:
357 break;
358 }
359
360 switch (TREE_CODE (TREE_TYPE (expr)))
361 {
362 case REAL_TYPE:
363 /* Ignore the conversion if we don't need to store intermediate
364 results and neither type is a decimal float. */
365 return build1_loc (loc,
366 (flag_float_store
367 || DECIMAL_FLOAT_TYPE_P (type)
368 || DECIMAL_FLOAT_TYPE_P (itype))
369 ? CONVERT_EXPR : NOP_EXPR, type, expr);
370
371 case INTEGER_TYPE:
372 case ENUMERAL_TYPE:
373 case BOOLEAN_TYPE:
374 return build1 (FLOAT_EXPR, type, expr);
375
376 case FIXED_POINT_TYPE:
377 return build1 (FIXED_CONVERT_EXPR, type, expr);
378
379 case COMPLEX_TYPE:
380 return convert (type,
381 maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
382 TREE_TYPE (TREE_TYPE (expr)),
383 expr));
384
385 case POINTER_TYPE:
386 case REFERENCE_TYPE:
387 error ("pointer value used where a floating point value was expected");
388 return convert_to_real_1 (type, integer_zero_node, fold_p);
389
390 default:
391 error ("aggregate value used where a float was expected");
392 return convert_to_real_1 (type, integer_zero_node, fold_p);
393 }
394 }
395
396 /* A wrapper around convert_to_real_1 that always folds the
397 expression. */
398
399 tree
400 convert_to_real (tree type, tree expr)
401 {
402 return convert_to_real_1 (type, expr, true);
403 }
404
405 /* A wrapper around convert_to_real_1 that only folds the
406 expression if DOFOLD, or if it is CONSTANT_CLASS_P. */
407
408 tree
409 convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
410 {
411 return convert_to_real_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
412 }
413
414 /* Convert EXPR to some integer (or enum) type TYPE.
415
416 EXPR must be pointer, integer, discrete (enum, char, or bool), float,
417 fixed-point or vector; in other cases error is called.
418
419 If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.
420
421 The result of this is always supposed to be a newly created tree node
422 not in use in any existing structure. */
423
424 static tree
425 convert_to_integer_1 (tree type, tree expr, bool dofold)
426 {
427 enum tree_code ex_form = TREE_CODE (expr);
428 tree intype = TREE_TYPE (expr);
429 unsigned int inprec = element_precision (intype);
430 unsigned int outprec = element_precision (type);
431 location_t loc = EXPR_LOCATION (expr);
432
433 /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
434 be. Consider `enum E { a, b = (enum E) 3 };'. */
435 if (!COMPLETE_TYPE_P (type))
436 {
437 error ("conversion to incomplete type");
438 return error_mark_node;
439 }
440
441 if (ex_form == COMPOUND_EXPR)
442 {
443 tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
444 if (t == TREE_OPERAND (expr, 1))
445 return expr;
446 return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
447 TREE_OPERAND (expr, 0), t);
448 }
449
450 /* Convert e.g. (long)round(d) -> lround(d). */
451 /* If we're converting to char, we may encounter differing behavior
452 between converting from double->char vs double->long->char.
453 We're in "undefined" territory but we prefer to be conservative,
454 so only proceed in "unsafe" math mode. */
455 if (optimize
456 && (flag_unsafe_math_optimizations
457 || (long_integer_type_node
458 && outprec >= TYPE_PRECISION (long_integer_type_node))))
459 {
460 tree s_expr = strip_float_extensions (expr);
461 tree s_intype = TREE_TYPE (s_expr);
462 const enum built_in_function fcode = builtin_mathfn_code (s_expr);
463 tree fn = 0;
464
465 switch (fcode)
466 {
467 CASE_FLT_FN (BUILT_IN_CEIL):
468 /* Only convert in ISO C99 mode. */
469 if (!targetm.libc_has_function (function_c99_misc))
470 break;
471 if (outprec < TYPE_PRECISION (integer_type_node)
472 || (outprec == TYPE_PRECISION (integer_type_node)
473 && !TYPE_UNSIGNED (type)))
474 fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
475 else if (outprec == TYPE_PRECISION (long_integer_type_node)
476 && !TYPE_UNSIGNED (type))
477 fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
478 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
479 && !TYPE_UNSIGNED (type))
480 fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
481 break;
482
483 CASE_FLT_FN (BUILT_IN_FLOOR):
484 /* Only convert in ISO C99 mode. */
485 if (!targetm.libc_has_function (function_c99_misc))
486 break;
487 if (outprec < TYPE_PRECISION (integer_type_node)
488 || (outprec == TYPE_PRECISION (integer_type_node)
489 && !TYPE_UNSIGNED (type)))
490 fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
491 else if (outprec == TYPE_PRECISION (long_integer_type_node)
492 && !TYPE_UNSIGNED (type))
493 fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
494 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
495 && !TYPE_UNSIGNED (type))
496 fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
497 break;
498
499 CASE_FLT_FN (BUILT_IN_ROUND):
500 /* Only convert in ISO C99 mode and with -fno-math-errno. */
501 if (!targetm.libc_has_function (function_c99_misc) || flag_errno_math)
502 break;
503 if (outprec < TYPE_PRECISION (integer_type_node)
504 || (outprec == TYPE_PRECISION (integer_type_node)
505 && !TYPE_UNSIGNED (type)))
506 fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
507 else if (outprec == TYPE_PRECISION (long_integer_type_node)
508 && !TYPE_UNSIGNED (type))
509 fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
510 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
511 && !TYPE_UNSIGNED (type))
512 fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
513 break;
514
515 CASE_FLT_FN (BUILT_IN_NEARBYINT):
516 /* Only convert nearbyint* if we can ignore math exceptions. */
517 if (flag_trapping_math)
518 break;
519 /* ... Fall through ... */
520 CASE_FLT_FN (BUILT_IN_RINT):
521 /* Only convert in ISO C99 mode and with -fno-math-errno. */
522 if (!targetm.libc_has_function (function_c99_misc) || flag_errno_math)
523 break;
524 if (outprec < TYPE_PRECISION (integer_type_node)
525 || (outprec == TYPE_PRECISION (integer_type_node)
526 && !TYPE_UNSIGNED (type)))
527 fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
528 else if (outprec == TYPE_PRECISION (long_integer_type_node)
529 && !TYPE_UNSIGNED (type))
530 fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
531 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
532 && !TYPE_UNSIGNED (type))
533 fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
534 break;
535
536 CASE_FLT_FN (BUILT_IN_TRUNC):
537 return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0), dofold);
538
539 default:
540 break;
541 }
542
543 if (fn)
544 {
545 tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
546 return convert_to_integer_1 (type, newexpr, dofold);
547 }
548 }
549
550 /* Convert (int)logb(d) -> ilogb(d). */
551 if (optimize
552 && flag_unsafe_math_optimizations
553 && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
554 && integer_type_node
555 && (outprec > TYPE_PRECISION (integer_type_node)
556 || (outprec == TYPE_PRECISION (integer_type_node)
557 && !TYPE_UNSIGNED (type))))
558 {
559 tree s_expr = strip_float_extensions (expr);
560 tree s_intype = TREE_TYPE (s_expr);
561 const enum built_in_function fcode = builtin_mathfn_code (s_expr);
562 tree fn = 0;
563
564 switch (fcode)
565 {
566 CASE_FLT_FN (BUILT_IN_LOGB):
567 fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
568 break;
569
570 default:
571 break;
572 }
573
574 if (fn)
575 {
576 tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
577 return convert_to_integer_1 (type, newexpr, dofold);
578 }
579 }
580
581 switch (TREE_CODE (intype))
582 {
583 case POINTER_TYPE:
584 case REFERENCE_TYPE:
585 if (integer_zerop (expr))
586 return build_int_cst (type, 0);
587
588 /* Convert to an unsigned integer of the correct width first, and from
589 there widen/truncate to the required type. Some targets support the
590 coexistence of multiple valid pointer sizes, so fetch the one we need
591 from the type. */
592 if (!dofold)
593 return build1 (CONVERT_EXPR, type, expr);
594 expr = fold_build1 (CONVERT_EXPR,
595 lang_hooks.types.type_for_size
596 (TYPE_PRECISION (intype), 0),
597 expr);
598 return fold_convert (type, expr);
599
600 case INTEGER_TYPE:
601 case ENUMERAL_TYPE:
602 case BOOLEAN_TYPE:
603 case OFFSET_TYPE:
604 /* If this is a logical operation, which just returns 0 or 1, we can
605 change the type of the expression. */
606
607 if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
608 {
609 expr = copy_node (expr);
610 TREE_TYPE (expr) = type;
611 return expr;
612 }
613
614 /* If we are widening the type, put in an explicit conversion.
615 Similarly if we are not changing the width. After this, we know
616 we are truncating EXPR. */
617
618 else if (outprec >= inprec)
619 {
620 enum tree_code code;
621
622 /* If the precision of the EXPR's type is K bits and the
623 destination mode has more bits, and the sign is changing,
624 it is not safe to use a NOP_EXPR. For example, suppose
625 that EXPR's type is a 3-bit unsigned integer type, the
626 TYPE is a 3-bit signed integer type, and the machine mode
627 for the types is 8-bit QImode. In that case, the
628 conversion necessitates an explicit sign-extension. In
629 the signed-to-unsigned case the high-order bits have to
630 be cleared. */
631 if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
632 && (TYPE_PRECISION (TREE_TYPE (expr))
633 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (expr)))))
634 code = CONVERT_EXPR;
635 else
636 code = NOP_EXPR;
637
638 return maybe_fold_build1_loc (dofold, loc, code, type, expr);
639 }
640
641 /* If TYPE is an enumeral type or a type with a precision less
642 than the number of bits in its mode, do the conversion to the
643 type corresponding to its mode, then do a nop conversion
644 to TYPE. */
645 else if (TREE_CODE (type) == ENUMERAL_TYPE
646 || outprec != GET_MODE_PRECISION (TYPE_MODE (type)))
647 return build1 (NOP_EXPR, type,
648 convert (lang_hooks.types.type_for_mode
649 (TYPE_MODE (type), TYPE_UNSIGNED (type)),
650 expr));
651
652 /* Here detect when we can distribute the truncation down past some
653 arithmetic. For example, if adding two longs and converting to an
654 int, we can equally well convert both to ints and then add.
655 For the operations handled here, such truncation distribution
656 is always safe.
657 It is desirable in these cases:
658 1) when truncating down to full-word from a larger size;
659 2) when truncating takes no work;
660 3) when at least one operand of the arithmetic has been extended
661 (as by C's default conversions). In this case we need two conversions
662 if we do the arithmetic as already requested, so we might as well
663 truncate both and then combine. Perhaps that way we need only one.
664
665 Note that in general we cannot do the arithmetic in a type
666 shorter than the desired result of conversion, even if the operands
667 are both extended from a shorter type, because they might overflow
668 if combined in that type. The exceptions to this--the times when
669 two narrow values can be combined in their narrow type even to
670 make a wider result--are handled by "shorten" in build_binary_op. */
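/* For example, (short) ((int) s1 + (int) s2) with s1 and s2 short can be
   computed as a single 16-bit addition; the code below performs such a
   narrowed operation in the corresponding unsigned type whenever signed
   overflow could otherwise be introduced. */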
671
672 if (dofold)
673 switch (ex_form)
674 {
675 case RSHIFT_EXPR:
676 /* We can pass truncation down through right shifting
677 when the shift count is a nonpositive constant. */
678 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
679 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
680 goto trunc1;
681 break;
682
683 case LSHIFT_EXPR:
684 /* We can pass truncation down through left shifting
685 when the shift count is a nonnegative constant and
686 the target type is unsigned. */
687 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
688 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
689 && TYPE_UNSIGNED (type)
690 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
691 {
692 /* If the shift count is less than the width of the truncated type,
693 really shift. */
694 if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
695 /* In this case, shifting is like multiplication. */
696 goto trunc1;
697 else
698 {
699 /* If it is >= that width, result is zero.
700 Handling this with trunc1 would give the wrong result:
701 (int) ((long long) a << 32) is well defined (as 0)
702 but (int) a << 32 is undefined and would get a
703 warning. */
704
705 tree t = build_int_cst (type, 0);
706
707 /* If the original expression had side-effects, we must
708 preserve it. */
709 if (TREE_SIDE_EFFECTS (expr))
710 return build2 (COMPOUND_EXPR, type, expr, t);
711 else
712 return t;
713 }
714 }
715 break;
716
717 case TRUNC_DIV_EXPR:
718 {
719 tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
720 tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);
721
722 /* Don't distribute unless the output precision is at least as big
723 as the actual inputs and it has the same signedness. */
724 if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
725 && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
726 /* If the signedness of arg0 and arg1 doesn't match,
727 we can't necessarily find a type to compare them in. */
728 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
729 == TYPE_UNSIGNED (TREE_TYPE (arg1)))
730 /* Do not change the sign of the division. */
731 && (TYPE_UNSIGNED (TREE_TYPE (expr))
732 == TYPE_UNSIGNED (TREE_TYPE (arg0)))
733 /* Either require unsigned division or a division by
734 a constant that is not -1. */
735 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
736 || (TREE_CODE (arg1) == INTEGER_CST
737 && !integer_all_onesp (arg1))))
738 goto trunc1;
739 break;
740 }
741
742 case MAX_EXPR:
743 case MIN_EXPR:
744 case MULT_EXPR:
745 {
746 tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
747 tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);
748
749 /* Don't distribute unless the output precision is at least as big
750 as the actual inputs. Otherwise, the comparison of the
751 truncated values will be wrong. */
752 if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
753 && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
754 /* If the signedness of arg0 and arg1 doesn't match,
755 we can't necessarily find a type to compare them in. */
756 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
757 == TYPE_UNSIGNED (TREE_TYPE (arg1))))
758 goto trunc1;
759 break;
760 }
761
762 case PLUS_EXPR:
763 case MINUS_EXPR:
764 case BIT_AND_EXPR:
765 case BIT_IOR_EXPR:
766 case BIT_XOR_EXPR:
767 trunc1:
768 {
769 tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
770 tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);
771
772 /* Do not try to narrow operands of pointer subtraction;
773 that will interfere with other folding. */
774 if (ex_form == MINUS_EXPR
775 && CONVERT_EXPR_P (arg0)
776 && CONVERT_EXPR_P (arg1)
777 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
778 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
779 break;
780
781 if (outprec >= BITS_PER_WORD
782 || TRULY_NOOP_TRUNCATION (outprec, inprec)
783 || inprec > TYPE_PRECISION (TREE_TYPE (arg0))
784 || inprec > TYPE_PRECISION (TREE_TYPE (arg1)))
785 {
786 /* Do the arithmetic in type TYPEX,
787 then convert result to TYPE. */
788 tree typex = type;
789
790 /* Can't do arithmetic in enumeral types
791 so use an integer type that will hold the values. */
792 if (TREE_CODE (typex) == ENUMERAL_TYPE)
793 typex
794 = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
795 TYPE_UNSIGNED (typex));
796
797 /* But now perhaps TYPEX is as wide as INPREC.
798 In that case, do nothing special here.
799 (Otherwise we would recurse infinitely in convert.) */
800 if (TYPE_PRECISION (typex) != inprec)
801 {
802 /* Don't do unsigned arithmetic where signed was wanted,
803 or vice versa.
804 Exception: if both of the original operands were
805 unsigned then we can safely do the work as unsigned.
806 Exception: shift operations take their type solely
807 from the first argument.
808 Exception: the LSHIFT_EXPR case above requires that
809 we perform this operation unsigned lest we produce
810 signed-overflow undefinedness.
811 And we may need to do it as unsigned
812 if we truncate to the original size. */
813 if (TYPE_UNSIGNED (TREE_TYPE (expr))
814 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
815 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
816 || ex_form == LSHIFT_EXPR
817 || ex_form == RSHIFT_EXPR
818 || ex_form == LROTATE_EXPR
819 || ex_form == RROTATE_EXPR))
820 || ex_form == LSHIFT_EXPR
821 /* If we have !flag_wrapv, and either ARG0 or
822 ARG1 is of a signed type, we have to do
823 PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
824 type in case the operation in outprec precision
825 could overflow. Otherwise, we would introduce
826 signed-overflow undefinedness. */
827 || ((!TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
828 || !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
829 && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
830 > outprec)
831 || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
832 > outprec))
833 && (ex_form == PLUS_EXPR
834 || ex_form == MINUS_EXPR
835 || ex_form == MULT_EXPR)))
836 {
837 if (!TYPE_UNSIGNED (typex))
838 typex = unsigned_type_for (typex);
839 }
840 else
841 {
842 if (TYPE_UNSIGNED (typex))
843 typex = signed_type_for (typex);
844 }
845 /* We should do away with all this once we have a proper
846 type promotion/demotion pass, see PR45397. */
847 expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
848 convert (typex, arg0),
849 convert (typex, arg1));
850 return convert (type, expr);
851 }
852 }
853 }
854 break;
855
856 case NEGATE_EXPR:
857 case BIT_NOT_EXPR:
858 /* This is not correct for ABS_EXPR,
859 since we must test the sign before truncation. */
860 {
861 /* Do the arithmetic in type TYPEX,
862 then convert result to TYPE. */
863 tree typex = type;
864
865 /* Can't do arithmetic in enumeral types
866 so use an integer type that will hold the values. */
867 if (TREE_CODE (typex) == ENUMERAL_TYPE)
868 typex
869 = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
870 TYPE_UNSIGNED (typex));
871
872 if (!TYPE_UNSIGNED (typex))
873 typex = unsigned_type_for (typex);
874 return convert (type,
875 fold_build1 (ex_form, typex,
876 convert (typex,
877 TREE_OPERAND (expr, 0))));
878 }
879
880 CASE_CONVERT:
881 /* Don't introduce a
882 "can't convert between vector values of different size" error. */
883 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == VECTOR_TYPE
884 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (expr, 0))))
885 != GET_MODE_SIZE (TYPE_MODE (type))))
886 break;
887 /* If truncating after truncating, might as well do all at once.
888 If truncating after extending, we may get rid of wasted work. */
889 return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));
890
891 case COND_EXPR:
892 /* It is sometimes worthwhile to push the narrowing down through
893 the conditional, and doing so never loses. A COND_EXPR may have a throw
894 as one operand, which then has void type. Just leave void
895 operands as they are. */
896 return
897 fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
898 VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
899 ? TREE_OPERAND (expr, 1)
900 : convert (type, TREE_OPERAND (expr, 1)),
901 VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
902 ? TREE_OPERAND (expr, 2)
903 : convert (type, TREE_OPERAND (expr, 2)));
904
905 default:
906 break;
907 }
908
909 /* When parsing long initializers, we might end up with a lot of casts.
910 Shortcut this. */
911 if (TREE_CODE (expr) == INTEGER_CST)
912 return fold_convert (type, expr);
913 return build1 (CONVERT_EXPR, type, expr);
914
915 case REAL_TYPE:
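/* Under -fsanitize=float-cast-overflow, the truncation is preceded by a
   runtime check that the value fits in TYPE; e.g. (int) d is diagnosed at
   run time when the double D is outside the range of int. */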
916 if (flag_sanitize & SANITIZE_FLOAT_CAST
917 && do_ubsan_in_current_function ())
918 {
919 expr = save_expr (expr);
920 tree check = ubsan_instrument_float_cast (loc, type, expr);
921 expr = build1 (FIX_TRUNC_EXPR, type, expr);
922 if (check == NULL_TREE)
923 return expr;
924 return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
925 TREE_TYPE (expr), check, expr);
926 }
927 else
928 return build1 (FIX_TRUNC_EXPR, type, expr);
929
930 case FIXED_POINT_TYPE:
931 return build1 (FIXED_CONVERT_EXPR, type, expr);
932
933 case COMPLEX_TYPE:
934 expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
935 TREE_TYPE (TREE_TYPE (expr)), expr);
936 return convert (type, expr);
937
938 case VECTOR_TYPE:
939 if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
940 {
941 error ("can%'t convert a vector of type %qT"
942 " to type %qT which has different size",
943 TREE_TYPE (expr), type);
944 return error_mark_node;
945 }
946 return build1 (VIEW_CONVERT_EXPR, type, expr);
947
948 default:
949 error ("aggregate value used where an integer was expected");
950 return convert (type, integer_zero_node);
951 }
952 }
953
954 /* Convert EXPR to some integer (or enum) type TYPE.
955
956 EXPR must be pointer, integer, discrete (enum, char, or bool), float,
957 fixed-point or vector; in other cases error is called.
958
959 The result of this is always supposed to be a newly created tree node
960 not in use in any existing structure. */
961
962 tree
963 convert_to_integer (tree type, tree expr)
964 {
965 return convert_to_integer_1 (type, expr, true);
966 }
967
968 /* A wrapper around convert_to_integer_1 that only folds the
969 expression if DOFOLD, or if it is CONSTANT_CLASS_P. */
970
971 tree
972 convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
973 {
974 return convert_to_integer_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
975 }
976
977 /* Convert EXPR to the complex type TYPE in the usual ways. If FOLD_P is
978 true, try to fold the expression. */
979
980 static tree
981 convert_to_complex_1 (tree type, tree expr, bool fold_p)
982 {
983 location_t loc = EXPR_LOCATION (expr);
984 tree subtype = TREE_TYPE (type);
985
986 switch (TREE_CODE (TREE_TYPE (expr)))
987 {
988 case REAL_TYPE:
989 case FIXED_POINT_TYPE:
990 case INTEGER_TYPE:
991 case ENUMERAL_TYPE:
992 case BOOLEAN_TYPE:
993 return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
994 convert (subtype, integer_zero_node));
995
996 case COMPLEX_TYPE:
997 {
998 tree elt_type = TREE_TYPE (TREE_TYPE (expr));
999
1000 if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
1001 return expr;
1002 else if (TREE_CODE (expr) == COMPOUND_EXPR)
1003 {
1004 tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
1005 fold_p);
1006 if (t == TREE_OPERAND (expr, 1))
1007 return expr;
1008 return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
1009 TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
1010 }
1011 else if (TREE_CODE (expr) == COMPLEX_EXPR)
1012 return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1013 convert (subtype,
1014 TREE_OPERAND (expr, 0)),
1015 convert (subtype,
1016 TREE_OPERAND (expr, 1)));
1017 else
1018 {
1019 expr = save_expr (expr);
1020 tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
1021 TREE_TYPE (TREE_TYPE (expr)),
1022 expr);
1023 tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
1024 TREE_TYPE (TREE_TYPE (expr)),
1025 expr);
1026 return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1027 convert (subtype, realp),
1028 convert (subtype, imagp));
1029 }
1030 }
1031
1032 case POINTER_TYPE:
1033 case REFERENCE_TYPE:
1034 error ("pointer value used where a complex was expected");
1035 return convert_to_complex_1 (type, integer_zero_node, fold_p);
1036
1037 default:
1038 error ("aggregate value used where a complex was expected");
1039 return convert_to_complex_1 (type, integer_zero_node, fold_p);
1040 }
1041 }
1042
1043 /* A wrapper around convert_to_complex_1 that always folds the
1044 expression. */
1045
1046 tree
1047 convert_to_complex (tree type, tree expr)
1048 {
1049 return convert_to_complex_1 (type, expr, true);
1050 }
1051
1052 /* A wrapper around convert_to_complex_1 that only folds the
1053 expression if DOFOLD, or if it is CONSTANT_CLASS_P. */
1054
1055 tree
1056 convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
1057 {
1058 return convert_to_complex_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
1059 }
1060
1061 /* Convert EXPR to the vector type TYPE in the usual ways. */
1062
1063 tree
1064 convert_to_vector (tree type, tree expr)
1065 {
1066 switch (TREE_CODE (TREE_TYPE (expr)))
1067 {
1068 case INTEGER_TYPE:
1069 case VECTOR_TYPE:
1070 if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
1071 {
1072 error ("can%'t convert a value of type %qT"
1073 " to vector type %qT which has different size",
1074 TREE_TYPE (expr), type);
1075 return error_mark_node;
1076 }
1077 return build1 (VIEW_CONVERT_EXPR, type, expr);
1078
1079 default:
1080 error ("can%'t convert value to a vector");
1081 return error_mark_node;
1082 }
1083 }
1084
1085 /* Convert EXPR to some fixed-point type TYPE.
1086
1087 EXPR must be fixed-point, float, integer, or enumeral;
1088 in other cases error is called. */
1089
1090 tree
1091 convert_to_fixed (tree type, tree expr)
1092 {
1093 if (integer_zerop (expr))
1094 {
1095 tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
1096 return fixed_zero_node;
1097 }
1098 else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
1099 {
1100 tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
1101 return fixed_one_node;
1102 }
1103
1104 switch (TREE_CODE (TREE_TYPE (expr)))
1105 {
1106 case FIXED_POINT_TYPE:
1107 case INTEGER_TYPE:
1108 case ENUMERAL_TYPE:
1109 case BOOLEAN_TYPE:
1110 case REAL_TYPE:
1111 return build1 (FIXED_CONVERT_EXPR, type, expr);
1112
1113 case COMPLEX_TYPE:
1114 return convert (type,
1115 fold_build1 (REALPART_EXPR,
1116 TREE_TYPE (TREE_TYPE (expr)), expr));
1117
1118 default:
1119 error ("aggregate value used where a fixed-point was expected");
1120 return error_mark_node;
1121 }
1122 }