]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/convert.cc
check undefine_p for one more vr
[thirdparty/gcc.git] / gcc / convert.cc
1 /* Utility routines for data type conversion for GCC.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* These routines are somewhat language-independent utility function
22 intended to be called by the language-specific convert () functions. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "target.h"
28 #include "tree.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "convert.h"
33 #include "langhooks.h"
34 #include "builtins.h"
35 #include "ubsan.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "asan.h"
39 #include "selftest.h"
40
/* Build a unary (resp. binary) expression of code CODE at LOC.  When
   FOLD_P is true the expression is folded via fold_buildN_loc; otherwise
   it is built unfolded with buildN_loc.  */
#define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
  ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR) \
   : build1_loc (LOC, CODE, TYPE, EXPR))
#define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
  ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2) \
   : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))
47
48 /* Convert EXPR to some pointer or reference type TYPE.
49 EXPR must be pointer, reference, integer, enumeral, or literal zero;
50 in other cases error is called. If FOLD_P is true, try to fold the
51 expression. */
52
53 static tree
54 convert_to_pointer_1 (tree type, tree expr, bool fold_p)
55 {
56 location_t loc = EXPR_LOCATION (expr);
57 if (TREE_TYPE (expr) == type)
58 return expr;
59
60 switch (TREE_CODE (TREE_TYPE (expr)))
61 {
62 case POINTER_TYPE:
63 case REFERENCE_TYPE:
64 {
65 /* If the pointers point to different address spaces, conversion needs
66 to be done via a ADDR_SPACE_CONVERT_EXPR instead of a NOP_EXPR. */
67 addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
68 addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
69
70 if (to_as == from_as)
71 return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
72 else
73 return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
74 type, expr);
75 }
76
77 case INTEGER_TYPE:
78 case ENUMERAL_TYPE:
79 case BOOLEAN_TYPE:
80 case BITINT_TYPE:
81 {
82 /* If the input precision differs from the target pointer type
83 precision, first convert the input expression to an integer type of
84 the target precision. Some targets, e.g. VMS, need several pointer
85 sizes to coexist so the latter isn't necessarily POINTER_SIZE. */
86 unsigned int pprec = TYPE_PRECISION (type);
87 unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));
88
89 if (eprec != pprec)
90 expr
91 = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
92 lang_hooks.types.type_for_size (pprec, 0),
93 expr);
94 }
95 return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);
96
97 default:
98 error ("cannot convert to a pointer type");
99 return convert_to_pointer_1 (type, integer_zero_node, fold_p);
100 }
101 }
102
103 /* Subroutine of the various convert_to_*_maybe_fold routines.
104
105 If a location wrapper has been folded to a constant (presumably of
106 a different type), re-wrap the new constant with a location wrapper. */
107
108 tree
109 preserve_any_location_wrapper (tree result, tree orig_expr)
110 {
111 if (CONSTANT_CLASS_P (result) && location_wrapper_p (orig_expr))
112 {
113 if (result == TREE_OPERAND (orig_expr, 0))
114 return orig_expr;
115 else
116 return maybe_wrap_with_location (result, EXPR_LOCATION (orig_expr));
117 }
118
119 return result;
120 }
121
122 /* A wrapper around convert_to_pointer_1 that always folds the
123 expression. */
124
125 tree
126 convert_to_pointer (tree type, tree expr)
127 {
128 return convert_to_pointer_1 (type, expr, true);
129 }
130
131 /* A wrapper around convert_to_pointer_1 that only folds the
132 expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P. */
133
134 tree
135 convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
136 {
137 tree result
138 = convert_to_pointer_1 (type, expr,
139 dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
140 return preserve_any_location_wrapper (result, expr);
141 }
142
/* Convert EXPR to some floating-point type TYPE.

   EXPR must be float, fixed-point, integer, or enumeral;
   in other cases error is called.  If FOLD_P is true, try to fold
   the expression.  */

static tree
convert_to_real_1 (tree type, tree expr, bool fold_p)
{
  enum built_in_function fcode = builtin_mathfn_code (expr);
  tree itype = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Push the conversion into the right-hand operand of a COMPOUND_EXPR.  */
  if (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }

  /* Disable until we figure out how to decide whether the functions are
     present in runtime.  */
  /* Convert (float)sqrt((double)x) where x is float into sqrtf(x) */
  if (optimize
      && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
	  || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
    {
      switch (fcode)
	{
#define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
	CASE_MATHFN (COSH)
	CASE_MATHFN (EXP)
	CASE_MATHFN (EXP10)
	CASE_MATHFN (EXP2)
	CASE_MATHFN (EXPM1)
	CASE_MATHFN (GAMMA)
	CASE_MATHFN (J0)
	CASE_MATHFN (J1)
	CASE_MATHFN (LGAMMA)
	CASE_MATHFN (POW10)
	CASE_MATHFN (SINH)
	CASE_MATHFN (TGAMMA)
	CASE_MATHFN (Y0)
	CASE_MATHFN (Y1)
	  /* The above functions may set errno differently with float
	     input or output so this transformation is not safe with
	     -fmath-errno.  */
	  if (flag_errno_math)
	    break;
	  gcc_fallthrough ();
	CASE_MATHFN (ACOS)
	CASE_MATHFN (ACOSH)
	CASE_MATHFN (ASIN)
	CASE_MATHFN (ASINH)
	CASE_MATHFN (ATAN)
	CASE_MATHFN (ATANH)
	CASE_MATHFN (CBRT)
	CASE_MATHFN (COS)
	CASE_MATHFN (ERF)
	CASE_MATHFN (ERFC)
	CASE_MATHFN (LOG)
	CASE_MATHFN (LOG10)
	CASE_MATHFN (LOG2)
	CASE_MATHFN (LOG1P)
	CASE_MATHFN (SIN)
	CASE_MATHFN (TAN)
	CASE_MATHFN (TANH)
	  /* The above functions are not safe to do this conversion.  */
	  if (!flag_unsafe_math_optimizations)
	    break;
	  gcc_fallthrough ();
	CASE_MATHFN (SQRT)
	CASE_MATHFN (FABS)
	CASE_MATHFN (LOGB)
#undef CASE_MATHFN
	  /* Only single-argument calls on scalar floats are handled.  */
	  if (call_expr_nargs (expr) != 1
	      || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (expr, 0))))
	    break;
	  {
	    tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
	    tree newtype = type;

	    /* We have (outertype)sqrt((innertype)x).  Choose the wider mode
	       from the both as the safe type for operation.  */
	    if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
	      newtype = TREE_TYPE (arg0);

	    /* We consider to convert

		   (T1) sqrtT2 ((T2) exprT3)
	       to
		   (T1) sqrtT4 ((T4) exprT3)

	       , where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
	       and T4 is NEWTYPE.  All those types are of floating-point types.
	       T4 (NEWTYPE) should be narrower than T2 (ITYPE).  This conversion
	       is safe only if P1 >= P2*2+2, where P1 and P2 are precisions of
	       T2 and T4.  See the following URL for a reference:
	       http://stackoverflow.com/questions/9235456/determining-
	       floating-point-square-root
	       */
	    if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
		&& !flag_unsafe_math_optimizations)
	      {
		/* The following conversion is unsafe even the precision condition
		   below is satisfied:

		   (float) sqrtl ((long double) double_val) -> (float) sqrt (double_val)
		  */
		if (TYPE_MODE (type) != TYPE_MODE (newtype))
		  break;

		int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
		int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
		if (p1 < p2 * 2 + 2)
		  break;
	      }

	    /* Be careful about integer to fp conversions.
	       These may overflow still.  */
	    if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		&& TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
		&& (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
		    || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
	      {
		tree fn = mathfn_built_in (newtype, fcode);
		if (fn)
		  {
		    tree arg = convert_to_real_1 (newtype, arg0, fold_p);
		    expr = build_call_expr (fn, 1, arg);
		    /* If NEWTYPE is wider than TYPE, fall through so the
		       outer conversion to TYPE is still emitted below.  */
		    if (newtype == type)
		      return expr;
		  }
	      }
	  }
	default:
	  break;
	}
    }

  /* Propagate the cast into the operation.  */
  if (itype != type && FLOAT_TYPE_P (type))
    switch (TREE_CODE (expr))
      {
	/* Convert (float)-x into -(float)x.  This is safe for
	   round-to-nearest rounding mode when the inner type is float.  */
	case ABS_EXPR:
	case NEGATE_EXPR:
	  if (!flag_rounding_math
	      && FLOAT_TYPE_P (itype)
	      && TYPE_PRECISION (type) < TYPE_PRECISION (itype))
	    {
	      tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
					    fold_p);
	      return build1 (TREE_CODE (expr), type, arg);
	    }
	  break;
	default:
	  break;
      }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
      /* Ignore the conversion if we don't need to store intermediate
	 results and neither type is a decimal float.  */
      return build1_loc (loc,
			 (flag_float_store
			  || DECIMAL_FLOAT_TYPE_P (type)
			  || DECIMAL_FLOAT_TYPE_P (itype))
			 ? CONVERT_EXPR : NOP_EXPR, type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case BITINT_TYPE:
      return build1 (FLOAT_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      /* Convert only the real part; the imaginary part is dropped.  */
      return convert (type,
		      maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
					     TREE_TYPE (TREE_TYPE (expr)),
					     expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a floating-point was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a floating-point was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);
    }
}
342
343 /* A wrapper around convert_to_real_1 that always folds the
344 expression. */
345
346 tree
347 convert_to_real (tree type, tree expr)
348 {
349 return convert_to_real_1 (type, expr, true);
350 }
351
352 /* A wrapper around convert_to_real_1 that only folds the
353 expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P. */
354
355 tree
356 convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
357 {
358 tree result
359 = convert_to_real_1 (type, expr,
360 dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
361 return preserve_any_location_wrapper (result, expr);
362 }
363
/* Try to narrow EX_FORM ARG0 ARG1 in narrowed arg types producing a
   result in TYPE.  Returns the narrowed expression, or NULL_TREE when
   narrowing is not possible or would not be safe (e.g. it would hide
   signed-overflow bugs from -fsanitize=signed-integer-overflow).  */

static tree
do_narrow (location_t loc,
	   enum tree_code ex_form, tree type, tree arg0, tree arg1,
	   tree expr, unsigned inprec, unsigned outprec, bool dofold)
{
  /* Do the arithmetic in type TYPEX,
     then convert result to TYPE.  */
  tree typex = type;

  /* Can't do arithmetic in enumeral types
     so use an integer type that will hold the values.  */
  if (TREE_CODE (typex) == ENUMERAL_TYPE)
    typex = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
					    TYPE_UNSIGNED (typex));

  /* The type demotion below might cause doing unsigned arithmetic
     instead of signed, and thus hide overflow bugs.  */
  if ((ex_form == PLUS_EXPR || ex_form == MINUS_EXPR)
      && !TYPE_UNSIGNED (typex)
      && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
    return NULL_TREE;

  /* Similarly for multiplication, but in that case it can be
     problematic even if typex is unsigned type - 0xffff * 0xffff
     overflows in int.  */
  if (ex_form == MULT_EXPR
      && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (expr))
      && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
    return NULL_TREE;

  /* But now perhaps TYPEX is as wide as INPREC.
     In that case, do nothing special here.
     (Otherwise would recurse infinitely in convert.  */
  if (TYPE_PRECISION (typex) != inprec)
    {
      /* Don't do unsigned arithmetic where signed was wanted,
	 or vice versa.
	 Exception: if both of the original operands were
	 unsigned then we can safely do the work as unsigned.
	 Exception: shift operations take their type solely
	 from the first argument.
	 Exception: the LSHIFT_EXPR case above requires that
	 we perform this operation unsigned lest we produce
	 signed-overflow undefinedness.
	 And we may need to do it as unsigned
	 if we truncate to the original size.  */
      if (TYPE_UNSIGNED (TREE_TYPE (expr))
	  || (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && (TYPE_UNSIGNED (TREE_TYPE (arg1))
		  || ex_form == LSHIFT_EXPR
		  || ex_form == RSHIFT_EXPR
		  || ex_form == LROTATE_EXPR
		  || ex_form == RROTATE_EXPR))
	  || ex_form == LSHIFT_EXPR
	  /* If we have !flag_wrapv, and either ARG0 or
	     ARG1 is of a signed type, we have to do
	     PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
	     type in case the operation in outprec precision
	     could overflow.  Otherwise, we would introduce
	     signed-overflow undefinedness.  */
	  || ((!(INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
	       || !(INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		    && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1))))
	      && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
		   > outprec)
		  || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
		      > outprec))
	      && (ex_form == PLUS_EXPR
		  || ex_form == MINUS_EXPR
		  || ex_form == MULT_EXPR)))
	{
	  if (!TYPE_UNSIGNED (typex))
	    typex = unsigned_type_for (typex);
	}
      else
	{
	  if (TYPE_UNSIGNED (typex))
	    typex = signed_type_for (typex);
	}
      /* We should do away with all this once we have a proper
	 type promotion/demotion pass, see PR45397.  */
      expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
				    convert (typex, arg0),
				    convert (typex, arg1));
      return convert (type, expr);
    }

  return NULL_TREE;
}
457
/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

static tree
convert_to_integer_1 (tree type, tree expr, bool dofold)
{
  enum tree_code ex_form = TREE_CODE (expr);
  tree intype = TREE_TYPE (expr);
  unsigned int inprec = element_precision (intype);
  unsigned int outprec = element_precision (type);
  location_t loc = EXPR_LOCATION (expr);

  /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
     be.  Consider `enum E = { a, b = (enum E) 3 };'.  */
  if (!COMPLETE_TYPE_P (type))
    {
      error ("conversion to incomplete type");
      return error_mark_node;
    }

  /* Push the conversion into the right-hand operand of a COMPOUND_EXPR.  */
  if (ex_form == COMPOUND_EXPR)
    {
      tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }

  /* Convert e.g. (long)round(d) -> lround(d).  */
  /* If we're converting to char, we may encounter differing behavior
     between converting from double->char vs double->long->char.
     We're in "undefined" territory but we prefer to be conservative,
     so only proceed in "unsafe" math mode.  */
  if (optimize
      && (flag_unsafe_math_optimizations
	  || (long_integer_type_node
	      && outprec >= TYPE_PRECISION (long_integer_type_node))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype)
	      || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
	  /* Only convert nearbyint* if we can ignore math exceptions.  */
	  if (flag_trapping_math)
	    break;
	  gcc_fallthrough ();
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc, intype)
	      || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
	  break;

	CASE_FLT_FN (BUILT_IN_TRUNC):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	  /* (int) trunc (x) rounds toward zero just like the cast does,
	     so drop the call entirely and convert the argument.  */
	  if (call_expr_nargs (s_expr) != 1
	      || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	    break;
	  return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0),
				       dofold);

	default:
	  break;
	}

      if (fn
	  && call_expr_nargs (s_expr) == 1
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }

  /* Convert (int)logb(d) -> ilogb(d).  */
  if (optimize
      && flag_unsafe_math_optimizations
      && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
      && integer_type_node
      && (outprec > TYPE_PRECISION (integer_type_node)
	  || (outprec == TYPE_PRECISION (integer_type_node)
	      && !TYPE_UNSIGNED (type))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LOGB):
	  fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
	  break;

	default:
	  break;
	}

      if (fn
	  && call_expr_nargs (s_expr) == 1
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }

  switch (TREE_CODE (intype))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      if (integer_zerop (expr)
	  && !TREE_OVERFLOW (tree_strip_any_location_wrapper (expr)))
	return build_int_cst (type, 0);

      /* Convert to an unsigned integer of the correct width first, and from
	 there widen/truncate to the required type.  Some targets support the
	 coexistence of multiple valid pointer sizes, so fetch the one we need
	 from the type.  */
      if (!dofold)
	return build1 (CONVERT_EXPR, type, expr);
      expr = fold_build1 (CONVERT_EXPR,
			  lang_hooks.types.type_for_size
			  (TYPE_PRECISION (intype), 0),
			  expr);
      return fold_convert (type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case OFFSET_TYPE:
    case BITINT_TYPE:
      /* If this is a logical operation, which just returns 0 or 1, we can
	 change the type of the expression.  */

      if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* If we are widening the type, put in an explicit conversion.
	 Similarly if we are not changing the width.  After this, we know
	 we are truncating EXPR.  */

      else if (outprec >= inprec)
	{
	  enum tree_code code;

	  /* If the precision of the EXPR's type is K bits and the
	     destination mode has more bits, and the sign is changing,
	     it is not safe to use a NOP_EXPR.  For example, suppose
	     that EXPR's type is a 3-bit unsigned integer type, the
	     TYPE is a 3-bit signed integer type, and the machine mode
	     for the types is 8-bit QImode.  In that case, the
	     conversion necessitates an explicit sign-extension.  In
	     the signed-to-unsigned case the high-order bits have to
	     be cleared.  */
	  if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
	      && !type_has_mode_precision_p (TREE_TYPE (expr)))
	    code = CONVERT_EXPR;
	  else
	    code = NOP_EXPR;

	  return maybe_fold_build1_loc (dofold, loc, code, type, expr);
	}

      /* If TYPE is an enumeral type or a type with a precision less
	 than the number of bits in its mode, do the conversion to the
	 type corresponding to its mode, then do a nop conversion
	 to TYPE.  */
      else if (TREE_CODE (type) == ENUMERAL_TYPE
	       || (TREE_CODE (type) != BITINT_TYPE
		   && maybe_ne (outprec,
				GET_MODE_PRECISION (TYPE_MODE (type)))))
	{
	  expr
	    = convert_to_integer_1 (lang_hooks.types.type_for_mode
				    (TYPE_MODE (type), TYPE_UNSIGNED (type)),
				    expr, dofold);
	  return maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr);
	}

      /* Here detect when we can distribute the truncation down past some
	 arithmetic.  For example, if adding two longs and converting to an
	 int, we can equally well convert both to ints and then add.
	 For the operations handled here, such truncation distribution
	 is always safe.
	 It is desirable in these cases:
	 1) when truncating down to full-word from a larger size
	 2) when truncating takes no work.
	 3) when at least one operand of the arithmetic has been extended
	 (as by C's default conversions).  In this case we need two conversions
	 if we do the arithmetic as already requested, so we might as well
	 truncate both and then combine.  Perhaps that way we need only one.

	 Note that in general we cannot do the arithmetic in a type
	 shorter than the desired result of conversion, even if the operands
	 are both extended from a shorter type, because they might overflow
	 if combined in that type.  The exceptions to this--the times when
	 two narrow values can be combined in their narrow type even to
	 make a wider result--are handled by "shorten" in build_binary_op.  */

      if (dofold)
	switch (ex_form)
	  {
	  case RSHIFT_EXPR:
	    /* We can pass truncation down through right shifting
	       when the shift count is a nonpositive constant.  */
	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
	      goto trunc1;
	    break;

	  case LSHIFT_EXPR:
	    /* We can pass truncation down through left shifting
	       when the shift count is a nonnegative constant and
	       the target type is unsigned.  */
	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
		&& TYPE_UNSIGNED (type)
		&& TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
	      {
		/* If shift count is less than the width of the truncated type,
		   really shift.  */
		if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
		  /* In this case, shifting is like multiplication.  */
		  goto trunc1;
		else
		  {
		    /* If it is >= that width, result is zero.
		       Handling this with trunc1 would give the wrong result:
		       (int) ((long long) a << 32) is well defined (as 0)
		       but (int) a << 32 is undefined and would get a
		       warning.  */

		    tree t = build_int_cst (type, 0);

		    /* If the original expression had side-effects, we must
		       preserve it.  */
		    if (TREE_SIDE_EFFECTS (expr))
		      return build2 (COMPOUND_EXPR, type, expr, t);
		    else
		      return t;
		  }
	      }
	    break;

	  case TRUNC_DIV_EXPR:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), NULL_TREE);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), NULL_TREE);

	      /* Don't distribute unless the output precision is at least as
		 big as the actual inputs and it has the same signedness.  */
	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		  /* If signedness of arg0 and arg1 don't match,
		     we can't necessarily find a type to compare them in.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  /* Do not change the sign of the division.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (expr))
		      == TYPE_UNSIGNED (TREE_TYPE (arg0)))
		  /* Either require unsigned division or a division by
		     a constant that is not -1.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      || (TREE_CODE (arg1) == INTEGER_CST
			  && !integer_all_onesp (arg1))))
		{
		  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
					expr, inprec, outprec, dofold);
		  if (tem)
		    return tem;
		}
	      break;
	    }

	  case MAX_EXPR:
	  case MIN_EXPR:
	  case MULT_EXPR:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	      /* Don't distribute unless the output precision is at least as
		 big as the actual inputs.  Otherwise, the comparison of the
		 truncated values will be wrong.  */
	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		  /* If signedness of arg0 and arg1 don't match,
		     we can't necessarily find a type to compare them in.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (arg1))))
		goto trunc1;
	      break;
	    }

	  case PLUS_EXPR:
	  case MINUS_EXPR:
	  case BIT_AND_EXPR:
	  case BIT_IOR_EXPR:
	  case BIT_XOR_EXPR:
	  trunc1:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	      /* Do not try to narrow operands of pointer subtraction;
		 that will interfere with other folding.  */
	      if (ex_form == MINUS_EXPR
		  && CONVERT_EXPR_P (arg0)
		  && CONVERT_EXPR_P (arg1)
		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
		break;

	      tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
				    expr, inprec, outprec, dofold);
	      if (tem)
		return tem;
	    }
	    break;

	  case NEGATE_EXPR:
	    /* Using unsigned arithmetic for signed types may hide overflow
	       bugs.  */
	    if (!TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (expr, 0)))
		&& sanitize_flags_p (SANITIZE_SI_OVERFLOW))
	      break;
	    /* Fall through.  */
	  case BIT_NOT_EXPR:
	    /* This is not correct for ABS_EXPR,
	       since we must test the sign before truncation.  */
	    {
	      /* Do the arithmetic in type TYPEX,
		 then convert result to TYPE.  */
	      tree typex = type;

	      /* Can't do arithmetic in enumeral types
		 so use an integer type that will hold the values.  */
	      if (TREE_CODE (typex) == ENUMERAL_TYPE)
		typex
		  = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
						    TYPE_UNSIGNED (typex));

	      if (!TYPE_UNSIGNED (typex))
		typex = unsigned_type_for (typex);
	      return convert (type,
			      fold_build1 (ex_form, typex,
					   convert (typex,
						    TREE_OPERAND (expr, 0))));
	    }

	  CASE_CONVERT:
	    {
	      tree argtype = TREE_TYPE (TREE_OPERAND (expr, 0));
	      /* Don't introduce a "can't convert between vector values
		 of different size" error.  */
	      if (TREE_CODE (argtype) == VECTOR_TYPE
		  && maybe_ne (GET_MODE_SIZE (TYPE_MODE (argtype)),
			       GET_MODE_SIZE (TYPE_MODE (type))))
		break;
	    }
	    /* If truncating after truncating, might as well do all at once.
	       If truncating after extending, we may get rid of wasted work.  */
	    return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));

	  case COND_EXPR:
	    /* It is sometimes worthwhile to push the narrowing down through
	       the conditional and never loses.  A COND_EXPR may have a throw
	       as one operand, which then has void type.  Just leave void
	       operands as they are.  */
	    return
	      fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
			   ? TREE_OPERAND (expr, 1)
			   : convert (type, TREE_OPERAND (expr, 1)),
			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
			   ? TREE_OPERAND (expr, 2)
			   : convert (type, TREE_OPERAND (expr, 2)));

	  default:
	    break;
	  }

      /* When parsing long initializers, we might end up with a lot of casts.
	 Shortcut this.  */
      if (TREE_CODE (tree_strip_any_location_wrapper (expr)) == INTEGER_CST)
	return fold_convert (type, expr);
      return build1 (CONVERT_EXPR, type, expr);

    case REAL_TYPE:
      if (sanitize_flags_p (SANITIZE_FLOAT_CAST)
	  && current_function_decl != NULL_TREE)
	{
	  expr = save_expr (expr);
	  tree check = ubsan_instrument_float_cast (loc, type, expr);
	  expr = build1 (FIX_TRUNC_EXPR, type, expr);
	  if (check == NULL_TREE)
	    return expr;
	  return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
					TREE_TYPE (expr), check, expr);
	}
      else
	return build1 (FIX_TRUNC_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      /* Convert only the real part; the imaginary part is dropped.  */
      expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
				    TREE_TYPE (TREE_TYPE (expr)), expr);
      return convert (type, expr);

    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
	{
	  error ("cannot convert a vector of type %qT"
		 " to type %qT which has different size",
		 TREE_TYPE (expr), type);
	  return error_mark_node;
	}
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("aggregate value used where an integer was expected");
      return convert (type, integer_zero_node);
    }
}
965
966 /* Convert EXPR to some integer (or enum) type TYPE.
967
968 EXPR must be pointer, integer, discrete (enum, char, or bool), float,
969 fixed-point or vector; in other cases error is called.
970
971 The result of this is always supposed to be a newly created tree node
972 not in use in any existing structure. */
973
974 tree
975 convert_to_integer (tree type, tree expr)
976 {
977 return convert_to_integer_1 (type, expr, true);
978 }
979
980 /* A wrapper around convert_to_complex_1 that only folds the
981 expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P. */
982
983 tree
984 convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
985 {
986 tree result
987 = convert_to_integer_1 (type, expr,
988 dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
989 return preserve_any_location_wrapper (result, expr);
990 }
991
992 /* Convert EXPR to the complex type TYPE in the usual ways. If FOLD_P is
993 true, try to fold the expression. */
994
995 static tree
996 convert_to_complex_1 (tree type, tree expr, bool fold_p)
997 {
998 location_t loc = EXPR_LOCATION (expr);
999 tree subtype = TREE_TYPE (type);
1000
1001 switch (TREE_CODE (TREE_TYPE (expr)))
1002 {
1003 case REAL_TYPE:
1004 case FIXED_POINT_TYPE:
1005 case INTEGER_TYPE:
1006 case ENUMERAL_TYPE:
1007 case BOOLEAN_TYPE:
1008 case BITINT_TYPE:
1009 return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
1010 convert (subtype, integer_zero_node));
1011
1012 case COMPLEX_TYPE:
1013 {
1014 tree elt_type = TREE_TYPE (TREE_TYPE (expr));
1015
1016 if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
1017 return expr;
1018 else if (TREE_CODE (expr) == COMPOUND_EXPR)
1019 {
1020 tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
1021 fold_p);
1022 if (t == TREE_OPERAND (expr, 1))
1023 return expr;
1024 return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
1025 TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
1026 }
1027 else if (TREE_CODE (expr) == COMPLEX_EXPR)
1028 return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1029 convert (subtype,
1030 TREE_OPERAND (expr, 0)),
1031 convert (subtype,
1032 TREE_OPERAND (expr, 1)));
1033 else
1034 {
1035 expr = save_expr (expr);
1036 tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
1037 TREE_TYPE (TREE_TYPE (expr)),
1038 expr);
1039 tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
1040 TREE_TYPE (TREE_TYPE (expr)),
1041 expr);
1042 return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1043 convert (subtype, realp),
1044 convert (subtype, imagp));
1045 }
1046 }
1047
1048 case POINTER_TYPE:
1049 case REFERENCE_TYPE:
1050 error ("pointer value used where a complex was expected");
1051 return convert_to_complex_1 (type, integer_zero_node, fold_p);
1052
1053 default:
1054 error ("aggregate value used where a complex was expected");
1055 return convert_to_complex_1 (type, integer_zero_node, fold_p);
1056 }
1057 }
1058
1059 /* A wrapper around convert_to_complex_1 that always folds the
1060 expression. */
1061
1062 tree
1063 convert_to_complex (tree type, tree expr)
1064 {
1065 return convert_to_complex_1 (type, expr, true);
1066 }
1067
1068 /* A wrapper around convert_to_complex_1 that only folds the
1069 expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P. */
1070
1071 tree
1072 convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
1073 {
1074 tree result
1075 = convert_to_complex_1 (type, expr,
1076 dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
1077 return preserve_any_location_wrapper (result, expr);
1078 }
1079
1080 /* Convert EXPR to the vector type TYPE in the usual ways. */
1081
1082 tree
1083 convert_to_vector (tree type, tree expr)
1084 {
1085 switch (TREE_CODE (TREE_TYPE (expr)))
1086 {
1087 case INTEGER_TYPE:
1088 case VECTOR_TYPE:
1089 if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
1090 {
1091 error ("cannot convert a value of type %qT"
1092 " to vector type %qT which has different size",
1093 TREE_TYPE (expr), type);
1094 return error_mark_node;
1095 }
1096 return build1 (VIEW_CONVERT_EXPR, type, expr);
1097
1098 default:
1099 error ("cannot convert value to a vector");
1100 return error_mark_node;
1101 }
1102 }
1103
1104 /* Convert EXPR to some fixed-point type TYPE.
1105
1106 EXPR must be fixed-point, float, integer, or enumeral;
1107 in other cases error is called. */
1108
1109 tree
1110 convert_to_fixed (tree type, tree expr)
1111 {
1112 if (integer_zerop (expr))
1113 {
1114 tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
1115 return fixed_zero_node;
1116 }
1117 else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
1118 {
1119 tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
1120 return fixed_one_node;
1121 }
1122
1123 switch (TREE_CODE (TREE_TYPE (expr)))
1124 {
1125 case FIXED_POINT_TYPE:
1126 case INTEGER_TYPE:
1127 case ENUMERAL_TYPE:
1128 case BOOLEAN_TYPE:
1129 case REAL_TYPE:
1130 return build1 (FIXED_CONVERT_EXPR, type, expr);
1131
1132 case COMPLEX_TYPE:
1133 return convert (type,
1134 fold_build1 (REALPART_EXPR,
1135 TREE_TYPE (TREE_TYPE (expr)), expr));
1136
1137 default:
1138 error ("aggregate value used where a fixed-point was expected");
1139 return error_mark_node;
1140 }
1141 }
1142
1143 #if CHECKING_P
1144
1145 namespace selftest {
1146
1147 /* Selftests for conversions. */
1148
1149 static void
1150 test_convert_to_integer_maybe_fold (tree orig_type, tree new_type)
1151 {
1152 /* Calling convert_to_integer_maybe_fold on an INTEGER_CST. */
1153
1154 tree orig_cst = build_int_cst (orig_type, 42);
1155
1156 /* Verify that convert_to_integer_maybe_fold on a constant returns a new
1157 constant of the new type, unless the types are the same, in which
1158 case verify it's a no-op. */
1159 {
1160 tree result = convert_to_integer_maybe_fold (new_type,
1161 orig_cst, false);
1162 if (orig_type != new_type)
1163 {
1164 ASSERT_EQ (TREE_TYPE (result), new_type);
1165 ASSERT_EQ (TREE_CODE (result), INTEGER_CST);
1166 }
1167 else
1168 ASSERT_EQ (result, orig_cst);
1169 }
1170
1171 /* Calling convert_to_integer_maybe_fold on a location wrapper around
1172 an INTEGER_CST.
1173
1174 Verify that convert_to_integer_maybe_fold on a location wrapper
1175 around a constant returns a new location wrapper around an equivalent
1176 constant, both of the new type, unless the types are the same,
1177 in which case the original wrapper should be returned. */
1178 {
1179 const location_t loc = BUILTINS_LOCATION;
1180 tree wrapped_orig_cst = maybe_wrap_with_location (orig_cst, loc);
1181 tree result
1182 = convert_to_integer_maybe_fold (new_type, wrapped_orig_cst, false);
1183 ASSERT_EQ (TREE_TYPE (result), new_type);
1184 ASSERT_EQ (EXPR_LOCATION (result), loc);
1185 ASSERT_TRUE (location_wrapper_p (result));
1186 ASSERT_EQ (TREE_TYPE (TREE_OPERAND (result, 0)), new_type);
1187 ASSERT_EQ (TREE_CODE (TREE_OPERAND (result, 0)), INTEGER_CST);
1188
1189 if (orig_type == new_type)
1190 ASSERT_EQ (result, wrapped_orig_cst);
1191 }
1192 }
1193
1194 /* Verify that convert_to_integer_maybe_fold preserves locations. */
1195
1196 static void
1197 test_convert_to_integer_maybe_fold ()
1198 {
1199 /* char -> long. */
1200 test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);
1201
1202 /* char -> char. */
1203 test_convert_to_integer_maybe_fold (char_type_node, char_type_node);
1204
1205 /* long -> char. */
1206 test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);
1207
1208 /* long -> long. */
1209 test_convert_to_integer_maybe_fold (long_integer_type_node,
1210 long_integer_type_node);
1211 }
1212
/* Run all of the selftests within this file.  */

void
convert_cc_tests ()
{
  /* Exercises convert_to_integer_maybe_fold over several type pairs.  */
  test_convert_to_integer_maybe_fold ();
}
1220
1221 } // namespace selftest
1222
1223 #endif /* CHECKING_P */