]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/convert.c
[Ada] Revert change for gnatprove that is no longer needed
[thirdparty/gcc.git] / gcc / convert.c
1 /* Utility routines for data type conversion for GCC.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* These routines are somewhat language-independent utility function
22 intended to be called by the language-specific convert () functions. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "target.h"
28 #include "tree.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "convert.h"
33 #include "langhooks.h"
34 #include "builtins.h"
35 #include "ubsan.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "asan.h"
39 #include "selftest.h"
40
/* Build a unary (build1) or binary (build2) tree node of CODE and TYPE
   at location LOC; when FOLD_P is true, also try to simplify the result
   via the fold_buildN_loc machinery, otherwise build the node verbatim.  */
#define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
  ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR)	\
   : build1_loc (LOC, CODE, TYPE, EXPR))
#define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
  ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2)	\
   : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))
47
48 /* Convert EXPR to some pointer or reference type TYPE.
49 EXPR must be pointer, reference, integer, enumeral, or literal zero;
50 in other cases error is called. If FOLD_P is true, try to fold the
51 expression. */
52
53 static tree
54 convert_to_pointer_1 (tree type, tree expr, bool fold_p)
55 {
56 location_t loc = EXPR_LOCATION (expr);
57 if (TREE_TYPE (expr) == type)
58 return expr;
59
60 switch (TREE_CODE (TREE_TYPE (expr)))
61 {
62 case POINTER_TYPE:
63 case REFERENCE_TYPE:
64 {
65 /* If the pointers point to different address spaces, conversion needs
66 to be done via a ADDR_SPACE_CONVERT_EXPR instead of a NOP_EXPR. */
67 addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
68 addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
69
70 if (to_as == from_as)
71 return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
72 else
73 return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
74 type, expr);
75 }
76
77 case INTEGER_TYPE:
78 case ENUMERAL_TYPE:
79 case BOOLEAN_TYPE:
80 {
81 /* If the input precision differs from the target pointer type
82 precision, first convert the input expression to an integer type of
83 the target precision. Some targets, e.g. VMS, need several pointer
84 sizes to coexist so the latter isn't necessarily POINTER_SIZE. */
85 unsigned int pprec = TYPE_PRECISION (type);
86 unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));
87
88 if (eprec != pprec)
89 expr
90 = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
91 lang_hooks.types.type_for_size (pprec, 0),
92 expr);
93 }
94 return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);
95
96 default:
97 error ("cannot convert to a pointer type");
98 return convert_to_pointer_1 (type, integer_zero_node, fold_p);
99 }
100 }
101
102 /* Subroutine of the various convert_to_*_maybe_fold routines.
103
104 If a location wrapper has been folded to a constant (presumably of
105 a different type), re-wrap the new constant with a location wrapper. */
106
107 tree
108 preserve_any_location_wrapper (tree result, tree orig_expr)
109 {
110 if (CONSTANT_CLASS_P (result) && location_wrapper_p (orig_expr))
111 {
112 if (result == TREE_OPERAND (orig_expr, 0))
113 return orig_expr;
114 else
115 return maybe_wrap_with_location (result, EXPR_LOCATION (orig_expr));
116 }
117
118 return result;
119 }
120
/* A wrapper around convert_to_pointer_1 that always folds the
   expression.  */

tree
convert_to_pointer (tree type, tree expr)
{
  return convert_to_pointer_1 (type, expr, /*fold_p=*/true);
}
129
/* A wrapper around convert_to_pointer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_pointer_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  /* If folding collapsed a location wrapper into a bare constant,
     re-wrap it so the source location is not lost.  */
  return preserve_any_location_wrapper (result, expr);
}
141
/* Convert EXPR to some floating-point type TYPE.

   EXPR must be float, fixed-point, integer, or enumeral;
   in other cases error is called.  If FOLD_P is true, try to fold
   the expression.  */

static tree
convert_to_real_1 (tree type, tree expr, bool fold_p)
{
  enum built_in_function fcode = builtin_mathfn_code (expr);
  tree itype = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* For a COMPOUND_EXPR, convert only the value operand (operand 1) and
     rebuild the pair only if the conversion changed anything.  */
  if (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }

  /* Disable until we figure out how to decide whether the functions are
     present in runtime.  */
  /* Convert (float)sqrt((double)x) where x is float into sqrtf(x) */
  /* NOTE(review): the "Disable" comment above looks stale -- the
     transformation below is in fact active under -O; confirm intent.  */
  if (optimize
      && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
	  || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
    {
      switch (fcode)
	{
#define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
	CASE_MATHFN (COSH)
	CASE_MATHFN (EXP)
	CASE_MATHFN (EXP10)
	CASE_MATHFN (EXP2)
	CASE_MATHFN (EXPM1)
	CASE_MATHFN (GAMMA)
	CASE_MATHFN (J0)
	CASE_MATHFN (J1)
	CASE_MATHFN (LGAMMA)
	CASE_MATHFN (POW10)
	CASE_MATHFN (SINH)
	CASE_MATHFN (TGAMMA)
	CASE_MATHFN (Y0)
	CASE_MATHFN (Y1)
	  /* The above functions may set errno differently with float
	     input or output so this transformation is not safe with
	     -fmath-errno.  */
	  if (flag_errno_math)
	    break;
	  gcc_fallthrough ();
	CASE_MATHFN (ACOS)
	CASE_MATHFN (ACOSH)
	CASE_MATHFN (ASIN)
	CASE_MATHFN (ASINH)
	CASE_MATHFN (ATAN)
	CASE_MATHFN (ATANH)
	CASE_MATHFN (CBRT)
	CASE_MATHFN (COS)
	CASE_MATHFN (ERF)
	CASE_MATHFN (ERFC)
	CASE_MATHFN (LOG)
	CASE_MATHFN (LOG10)
	CASE_MATHFN (LOG2)
	CASE_MATHFN (LOG1P)
	CASE_MATHFN (SIN)
	CASE_MATHFN (TAN)
	CASE_MATHFN (TANH)
	  /* The above functions are not safe to do this conversion.  */
	  if (!flag_unsafe_math_optimizations)
	    break;
	  gcc_fallthrough ();
	CASE_MATHFN (SQRT)
	CASE_MATHFN (FABS)
	CASE_MATHFN (LOGB)
#undef CASE_MATHFN
	  /* Only single-argument calls with scalar float argument are
	     handled below.  */
	  if (call_expr_nargs (expr) != 1
	      || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (expr, 0))))
	    break;
	  {
	    tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
	    tree newtype = type;

	    /* We have (outertype)sqrt((innertype)x).  Choose the wider mode
	       from the both as the safe type for operation.  */
	    if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
	      newtype = TREE_TYPE (arg0);

	    /* We consider to convert

	       (T1) sqrtT2 ((T2) exprT3)
	       to
	       (T1) sqrtT4 ((T4) exprT3)

	       , where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
	       and T4 is NEWTYPE.  All those types are of floating-point types.
	       T4 (NEWTYPE) should be narrower than T2 (ITYPE).  This conversion
	       is safe only if P1 >= P2*2+2, where P1 and P2 are precisions of
	       T2 and T4.  See the following URL for a reference:
	       http://stackoverflow.com/questions/9235456/determining-
	       floating-point-square-root
	       */
	    if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
		&& !flag_unsafe_math_optimizations)
	      {
		/* The following conversion is unsafe even the precision condition
		   below is satisfied:

		   (float) sqrtl ((long double) double_val) -> (float) sqrt (double_val)
		   */
		if (TYPE_MODE (type) != TYPE_MODE (newtype))
		  break;

		int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
		int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
		if (p1 < p2 * 2 + 2)
		  break;
	      }

	    /* Be careful about integer to fp conversions.
	       These may overflow still.  */
	    if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		&& TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
		&& (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
		    || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
	      {
		tree fn = mathfn_built_in (newtype, fcode);
		if (fn)
		  {
		    tree arg = convert_to_real_1 (newtype, arg0, fold_p);
		    expr = build_call_expr (fn, 1, arg);
		    if (newtype == type)
		      return expr;
		  }
	      }
	  }
	default:
	  break;
	}
    }

  /* Propagate the cast into the operation.  */
  if (itype != type && FLOAT_TYPE_P (type))
    switch (TREE_CODE (expr))
      {
	/* Convert (float)-x into -(float)x.  This is safe for
	   round-to-nearest rounding mode when the inner type is float.  */
      case ABS_EXPR:
      case NEGATE_EXPR:
	if (!flag_rounding_math
	    && FLOAT_TYPE_P (itype)
	    && TYPE_PRECISION (type) < TYPE_PRECISION (itype))
	  {
	    tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
					  fold_p);
	    return build1 (TREE_CODE (expr), type, arg);
	  }
	break;
      default:
	break;
      }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
      /* Ignore the conversion if we don't need to store intermediate
	 results and neither type is a decimal float.  */
      return build1_loc (loc,
			 (flag_float_store
			  || DECIMAL_FLOAT_TYPE_P (type)
			  || DECIMAL_FLOAT_TYPE_P (itype))
			 ? CONVERT_EXPR : NOP_EXPR, type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return build1 (FLOAT_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      /* Converting complex to real keeps only the real part.  */
      return convert (type,
		      maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
					     TREE_TYPE (TREE_TYPE (expr)),
					     expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a floating-point was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a floating-point was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);
    }
}
340
/* A wrapper around convert_to_real_1 that always folds the
   expression.  */

tree
convert_to_real (tree type, tree expr)
{
  return convert_to_real_1 (type, expr, /*fold_p=*/true);
}
349
/* A wrapper around convert_to_real_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_real_1 (type, expr,
			 dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  /* If folding collapsed a location wrapper into a bare constant,
     re-wrap it so the source location is not lost.  */
  return preserve_any_location_wrapper (result, expr);
}
361
/* Try to narrow EX_FORM ARG0 ARG1 in narrowed arg types producing a
   result in TYPE.

   EXPR is the original (wide) expression, INPREC/OUTPREC the input and
   output precisions, and DOFOLD controls whether the rebuilt operation
   is folded.  Returns the narrowed expression converted to TYPE, or
   NULL_TREE when narrowing is not possible or not safe.  */

static tree
do_narrow (location_t loc,
	   enum tree_code ex_form, tree type, tree arg0, tree arg1,
	   tree expr, unsigned inprec, unsigned outprec, bool dofold)
{
  /* Do the arithmetic in type TYPEX,
     then convert result to TYPE.  */
  tree typex = type;

  /* Can't do arithmetic in enumeral types
     so use an integer type that will hold the values.  */
  if (TREE_CODE (typex) == ENUMERAL_TYPE)
    typex = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
					    TYPE_UNSIGNED (typex));

  /* The type demotion below might cause doing unsigned arithmetic
     instead of signed, and thus hide overflow bugs.  */
  if ((ex_form == PLUS_EXPR || ex_form == MINUS_EXPR)
      && !TYPE_UNSIGNED (typex)
      && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
    return NULL_TREE;

  /* But now perhaps TYPEX is as wide as INPREC.
     In that case, do nothing special here.
     (Otherwise would recurse infinitely in convert.  */
  if (TYPE_PRECISION (typex) != inprec)
    {
      /* Don't do unsigned arithmetic where signed was wanted,
	 or vice versa.
	 Exception: if both of the original operands were
	 unsigned then we can safely do the work as unsigned.
	 Exception: shift operations take their type solely
	 from the first argument.
	 Exception: the LSHIFT_EXPR case above requires that
	 we perform this operation unsigned lest we produce
	 signed-overflow undefinedness.
	 And we may need to do it as unsigned
	 if we truncate to the original size.  */
      if (TYPE_UNSIGNED (TREE_TYPE (expr))
	  || (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && (TYPE_UNSIGNED (TREE_TYPE (arg1))
		  || ex_form == LSHIFT_EXPR
		  || ex_form == RSHIFT_EXPR
		  || ex_form == LROTATE_EXPR
		  || ex_form == RROTATE_EXPR))
	  || ex_form == LSHIFT_EXPR
	  /* If we have !flag_wrapv, and either ARG0 or
	     ARG1 is of a signed type, we have to do
	     PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
	     type in case the operation in outprec precision
	     could overflow.  Otherwise, we would introduce
	     signed-overflow undefinedness.  */
	  || ((!(INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
	       || !(INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		    && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1))))
	      && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
		   > outprec)
		  || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
		      > outprec))
	      && (ex_form == PLUS_EXPR
		  || ex_form == MINUS_EXPR
		  || ex_form == MULT_EXPR)))
	{
	  if (!TYPE_UNSIGNED (typex))
	    typex = unsigned_type_for (typex);
	}
      else
	{
	  if (TYPE_UNSIGNED (typex))
	    typex = signed_type_for (typex);
	}
      /* We should do away with all this once we have a proper
	 type promotion/demotion pass, see PR45397.  */
      expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
				    convert (typex, arg0),
				    convert (typex, arg1));
      return convert (type, expr);
    }

  return NULL_TREE;
}
447
/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

static tree
convert_to_integer_1 (tree type, tree expr, bool dofold)
{
  enum tree_code ex_form = TREE_CODE (expr);
  tree intype = TREE_TYPE (expr);
  unsigned int inprec = element_precision (intype);
  unsigned int outprec = element_precision (type);
  location_t loc = EXPR_LOCATION (expr);

  /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
     be.  Consider `enum E = { a, b = (enum E) 3 };'.  */
  if (!COMPLETE_TYPE_P (type))
    {
      error ("conversion to incomplete type");
      return error_mark_node;
    }

  /* For a COMPOUND_EXPR, convert only the value operand (operand 1) and
     rebuild the pair only if the conversion changed anything.  */
  if (ex_form == COMPOUND_EXPR)
    {
      tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }

  /* Convert e.g. (long)round(d) -> lround(d).  */
  /* If we're converting to char, we may encounter differing behavior
     between converting from double->char vs double->long->char.
     We're in "undefined" territory but we prefer to be conservative,
     so only proceed in "unsafe" math mode.  */
  if (optimize
      && (flag_unsafe_math_optimizations
	  || (long_integer_type_node
	      && outprec >= TYPE_PRECISION (long_integer_type_node))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc)
	      || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
	  /* Only convert nearbyint* if we can ignore math exceptions.  */
	  if (flag_trapping_math)
	    break;
	  gcc_fallthrough ();
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc)
	      || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
	  break;

	CASE_FLT_FN (BUILT_IN_TRUNC):
	CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	  if (call_expr_nargs (s_expr) != 1
	      || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	    break;
	  /* trunc() rounds toward zero, exactly like the float->int cast
	     itself, so the call can simply be dropped.  */
	  return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0),
				       dofold);

	default:
	  break;
	}

      if (fn
	  && call_expr_nargs (s_expr) == 1
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }

  /* Convert (int)logb(d) -> ilogb(d).  */
  if (optimize
      && flag_unsafe_math_optimizations
      && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
      && integer_type_node
      && (outprec > TYPE_PRECISION (integer_type_node)
	  || (outprec == TYPE_PRECISION (integer_type_node)
	      && !TYPE_UNSIGNED (type))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LOGB):
	  fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
	  break;

	default:
	  break;
	}

      if (fn
	  && call_expr_nargs (s_expr) == 1
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }

  switch (TREE_CODE (intype))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      if (integer_zerop (expr)
	  && !TREE_OVERFLOW (tree_strip_any_location_wrapper (expr)))
	return build_int_cst (type, 0);

      /* Convert to an unsigned integer of the correct width first, and from
	 there widen/truncate to the required type.  Some targets support the
	 coexistence of multiple valid pointer sizes, so fetch the one we need
	 from the type.  */
      if (!dofold)
	return build1 (CONVERT_EXPR, type, expr);
      expr = fold_build1 (CONVERT_EXPR,
			  lang_hooks.types.type_for_size
			  (TYPE_PRECISION (intype), 0),
			  expr);
      return fold_convert (type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      /* If this is a logical operation, which just returns 0 or 1, we can
	 change the type of the expression.  */

      if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* If we are widening the type, put in an explicit conversion.
	 Similarly if we are not changing the width.  After this, we know
	 we are truncating EXPR.  */

      else if (outprec >= inprec)
	{
	  enum tree_code code;

	  /* If the precision of the EXPR's type is K bits and the
	     destination mode has more bits, and the sign is changing,
	     it is not safe to use a NOP_EXPR.  For example, suppose
	     that EXPR's type is a 3-bit unsigned integer type, the
	     TYPE is a 3-bit signed integer type, and the machine mode
	     for the types is 8-bit QImode.  In that case, the
	     conversion necessitates an explicit sign-extension.  In
	     the signed-to-unsigned case the high-order bits have to
	     be cleared.  */
	  if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
	      && !type_has_mode_precision_p (TREE_TYPE (expr)))
	    code = CONVERT_EXPR;
	  else
	    code = NOP_EXPR;

	  return maybe_fold_build1_loc (dofold, loc, code, type, expr);
	}

      /* If TYPE is an enumeral type or a type with a precision less
	 than the number of bits in its mode, do the conversion to the
	 type corresponding to its mode, then do a nop conversion
	 to TYPE.  */
      else if (TREE_CODE (type) == ENUMERAL_TYPE
	       || maybe_ne (outprec, GET_MODE_PRECISION (TYPE_MODE (type))))
	{
	  expr
	    = convert_to_integer_1 (lang_hooks.types.type_for_mode
				    (TYPE_MODE (type), TYPE_UNSIGNED (type)),
				    expr, dofold);
	  return maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr);
	}

      /* Here detect when we can distribute the truncation down past some
	 arithmetic.  For example, if adding two longs and converting to an
	 int, we can equally well convert both to ints and then add.
	 For the operations handled here, such truncation distribution
	 is always safe.
	 It is desirable in these cases:
	 1) when truncating down to full-word from a larger size
	 2) when truncating takes no work.
	 3) when at least one operand of the arithmetic has been extended
	 (as by C's default conversions).  In this case we need two conversions
	 if we do the arithmetic as already requested, so we might as well
	 truncate both and then combine.  Perhaps that way we need only one.

	 Note that in general we cannot do the arithmetic in a type
	 shorter than the desired result of conversion, even if the operands
	 are both extended from a shorter type, because they might overflow
	 if combined in that type.  The exceptions to this--the times when
	 two narrow values can be combined in their narrow type even to
	 make a wider result--are handled by "shorten" in build_binary_op.  */

      if (dofold)
	switch (ex_form)
	  {
	  case RSHIFT_EXPR:
	    /* We can pass truncation down through right shifting
	       when the shift count is a nonpositive constant.  */
	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
	      goto trunc1;
	    break;

	  case LSHIFT_EXPR:
	    /* We can pass truncation down through left shifting
	       when the shift count is a nonnegative constant and
	       the target type is unsigned.  */
	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
		&& TYPE_UNSIGNED (type)
		&& TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
	      {
		/* If shift count is less than the width of the truncated type,
		   really shift.  */
		if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
		  /* In this case, shifting is like multiplication.  */
		  goto trunc1;
		else
		  {
		    /* If it is >= that width, result is zero.
		       Handling this with trunc1 would give the wrong result:
		       (int) ((long long) a << 32) is well defined (as 0)
		       but (int) a << 32 is undefined and would get a
		       warning.  */

		    tree t = build_int_cst (type, 0);

		    /* If the original expression had side-effects, we must
		       preserve it.  */
		    if (TREE_SIDE_EFFECTS (expr))
		      return build2 (COMPOUND_EXPR, type, expr, t);
		    else
		      return t;
		  }
	      }
	    break;

	  case TRUNC_DIV_EXPR:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), NULL_TREE);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), NULL_TREE);

	      /* Don't distribute unless the output precision is at least as
		 big as the actual inputs and it has the same signedness.  */
	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		  /* If signedness of arg0 and arg1 don't match,
		     we can't necessarily find a type to compare them in.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  /* Do not change the sign of the division.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (expr))
		      == TYPE_UNSIGNED (TREE_TYPE (arg0)))
		  /* Either require unsigned division or a division by
		     a constant that is not -1.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      || (TREE_CODE (arg1) == INTEGER_CST
			  && !integer_all_onesp (arg1))))
		{
		  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
					expr, inprec, outprec, dofold);
		  if (tem)
		    return tem;
		}
	      break;
	    }

	  case MAX_EXPR:
	  case MIN_EXPR:
	  case MULT_EXPR:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	      /* Don't distribute unless the output precision is at least as
		 big as the actual inputs.  Otherwise, the comparison of the
		 truncated values will be wrong.  */
	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		  /* If signedness of arg0 and arg1 don't match,
		     we can't necessarily find a type to compare them in.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (arg1))))
		goto trunc1;
	      break;
	    }

	  case PLUS_EXPR:
	  case MINUS_EXPR:
	  case BIT_AND_EXPR:
	  case BIT_IOR_EXPR:
	  case BIT_XOR_EXPR:
	  trunc1:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	      /* Do not try to narrow operands of pointer subtraction;
		 that will interfere with other folding.  */
	      if (ex_form == MINUS_EXPR
		  && CONVERT_EXPR_P (arg0)
		  && CONVERT_EXPR_P (arg1)
		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
		break;

	      if (outprec >= BITS_PER_WORD
		  || targetm.truly_noop_truncation (outprec, inprec)
		  || inprec > TYPE_PRECISION (TREE_TYPE (arg0))
		  || inprec > TYPE_PRECISION (TREE_TYPE (arg1)))
		{
		  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
					expr, inprec, outprec, dofold);
		  if (tem)
		    return tem;
		}
	    }
	    break;

	  case NEGATE_EXPR:
	    /* Using unsigned arithmetic for signed types may hide overflow
	       bugs.  */
	    if (!TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (expr, 0)))
		&& sanitize_flags_p (SANITIZE_SI_OVERFLOW))
	      break;
	    /* Fall through.  */
	  case BIT_NOT_EXPR:
	    /* This is not correct for ABS_EXPR,
	       since we must test the sign before truncation.  */
	    {
	      /* Do the arithmetic in type TYPEX,
		 then convert result to TYPE.  */
	      tree typex = type;

	      /* Can't do arithmetic in enumeral types
		 so use an integer type that will hold the values.  */
	      if (TREE_CODE (typex) == ENUMERAL_TYPE)
		typex
		  = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
						    TYPE_UNSIGNED (typex));

	      if (!TYPE_UNSIGNED (typex))
		typex = unsigned_type_for (typex);
	      return convert (type,
			      fold_build1 (ex_form, typex,
					   convert (typex,
						    TREE_OPERAND (expr, 0))));
	    }

	  CASE_CONVERT:
	    {
	      tree argtype = TREE_TYPE (TREE_OPERAND (expr, 0));
	      /* Don't introduce a "can't convert between vector values
		 of different size" error.  */
	      if (TREE_CODE (argtype) == VECTOR_TYPE
		  && maybe_ne (GET_MODE_SIZE (TYPE_MODE (argtype)),
			       GET_MODE_SIZE (TYPE_MODE (type))))
		break;
	    }
	    /* If truncating after truncating, might as well do all at once.
	       If truncating after extending, we may get rid of wasted work.  */
	    return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));

	  case COND_EXPR:
	    /* It is sometimes worthwhile to push the narrowing down through
	       the conditional and never loses.  A COND_EXPR may have a throw
	       as one operand, which then has void type.  Just leave void
	       operands as they are.  */
	    return
	      fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
			   ? TREE_OPERAND (expr, 1)
			   : convert (type, TREE_OPERAND (expr, 1)),
			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
			   ? TREE_OPERAND (expr, 2)
			   : convert (type, TREE_OPERAND (expr, 2)));

	  default:
	    break;
	  }

      /* When parsing long initializers, we might end up with a lot of casts.
	 Shortcut this.  */
      if (TREE_CODE (tree_strip_any_location_wrapper (expr)) == INTEGER_CST)
	return fold_convert (type, expr);
      return build1 (CONVERT_EXPR, type, expr);

    case REAL_TYPE:
      /* Under -fsanitize=float-cast-overflow, pair the truncation with a
	 runtime range check built by ubsan.  */
      if (sanitize_flags_p (SANITIZE_FLOAT_CAST)
	  && current_function_decl != NULL_TREE)
	{
	  expr = save_expr (expr);
	  tree check = ubsan_instrument_float_cast (loc, type, expr);
	  expr = build1 (FIX_TRUNC_EXPR, type, expr);
	  if (check == NULL_TREE)
	    return expr;
	  return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
					TREE_TYPE (expr), check, expr);
	}
      else
	return build1 (FIX_TRUNC_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      /* Converting complex to integer keeps only the real part.  */
      expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
				    TREE_TYPE (TREE_TYPE (expr)), expr);
      return convert (type, expr);

    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
	{
	  error ("cannot convert a vector of type %qT"
		 " to type %qT which has different size",
		 TREE_TYPE (expr), type);
	  return error_mark_node;
	}
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("aggregate value used where an integer was expected");
      return convert (type, integer_zero_node);
    }
}
958
959 /* Convert EXPR to some integer (or enum) type TYPE.
960
961 EXPR must be pointer, integer, discrete (enum, char, or bool), float,
962 fixed-point or vector; in other cases error is called.
963
964 The result of this is always supposed to be a newly created tree node
965 not in use in any existing structure. */
966
967 tree
968 convert_to_integer (tree type, tree expr)
969 {
970 return convert_to_integer_1 (type, expr, true);
971 }
972
/* A wrapper around convert_to_integer_1 that only folds the
   expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */

tree
convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
{
  tree result
    = convert_to_integer_1 (type, expr,
			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
  /* Keep any location wrapper that was present on EXPR on the result.  */
  return preserve_any_location_wrapper (result, expr);
}
984
985 /* Convert EXPR to the complex type TYPE in the usual ways. If FOLD_P is
986 true, try to fold the expression. */
987
988 static tree
989 convert_to_complex_1 (tree type, tree expr, bool fold_p)
990 {
991 location_t loc = EXPR_LOCATION (expr);
992 tree subtype = TREE_TYPE (type);
993
994 switch (TREE_CODE (TREE_TYPE (expr)))
995 {
996 case REAL_TYPE:
997 case FIXED_POINT_TYPE:
998 case INTEGER_TYPE:
999 case ENUMERAL_TYPE:
1000 case BOOLEAN_TYPE:
1001 return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
1002 convert (subtype, integer_zero_node));
1003
1004 case COMPLEX_TYPE:
1005 {
1006 tree elt_type = TREE_TYPE (TREE_TYPE (expr));
1007
1008 if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
1009 return expr;
1010 else if (TREE_CODE (expr) == COMPOUND_EXPR)
1011 {
1012 tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
1013 fold_p);
1014 if (t == TREE_OPERAND (expr, 1))
1015 return expr;
1016 return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
1017 TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
1018 }
1019 else if (TREE_CODE (expr) == COMPLEX_EXPR)
1020 return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1021 convert (subtype,
1022 TREE_OPERAND (expr, 0)),
1023 convert (subtype,
1024 TREE_OPERAND (expr, 1)));
1025 else
1026 {
1027 expr = save_expr (expr);
1028 tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
1029 TREE_TYPE (TREE_TYPE (expr)),
1030 expr);
1031 tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
1032 TREE_TYPE (TREE_TYPE (expr)),
1033 expr);
1034 return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1035 convert (subtype, realp),
1036 convert (subtype, imagp));
1037 }
1038 }
1039
1040 case POINTER_TYPE:
1041 case REFERENCE_TYPE:
1042 error ("pointer value used where a complex was expected");
1043 return convert_to_complex_1 (type, integer_zero_node, fold_p);
1044
1045 default:
1046 error ("aggregate value used where a complex was expected");
1047 return convert_to_complex_1 (type, integer_zero_node, fold_p);
1048 }
1049 }
1050
1051 /* A wrapper around convert_to_complex_1 that always folds the
1052 expression. */
1053
1054 tree
1055 convert_to_complex (tree type, tree expr)
1056 {
1057 return convert_to_complex_1 (type, expr, true);
1058 }
1059
1060 /* A wrapper around convert_to_complex_1 that only folds the
1061 expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P. */
1062
1063 tree
1064 convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
1065 {
1066 tree result
1067 = convert_to_complex_1 (type, expr,
1068 dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
1069 return preserve_any_location_wrapper (result, expr);
1070 }
1071
1072 /* Convert EXPR to the vector type TYPE in the usual ways. */
1073
1074 tree
1075 convert_to_vector (tree type, tree expr)
1076 {
1077 switch (TREE_CODE (TREE_TYPE (expr)))
1078 {
1079 case INTEGER_TYPE:
1080 case VECTOR_TYPE:
1081 if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
1082 {
1083 error ("cannot convert a value of type %qT"
1084 " to vector type %qT which has different size",
1085 TREE_TYPE (expr), type);
1086 return error_mark_node;
1087 }
1088 return build1 (VIEW_CONVERT_EXPR, type, expr);
1089
1090 default:
1091 error ("cannot convert value to a vector");
1092 return error_mark_node;
1093 }
1094 }
1095
1096 /* Convert EXPR to some fixed-point type TYPE.
1097
1098 EXPR must be fixed-point, float, integer, or enumeral;
1099 in other cases error is called. */
1100
1101 tree
1102 convert_to_fixed (tree type, tree expr)
1103 {
1104 if (integer_zerop (expr))
1105 {
1106 tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
1107 return fixed_zero_node;
1108 }
1109 else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
1110 {
1111 tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
1112 return fixed_one_node;
1113 }
1114
1115 switch (TREE_CODE (TREE_TYPE (expr)))
1116 {
1117 case FIXED_POINT_TYPE:
1118 case INTEGER_TYPE:
1119 case ENUMERAL_TYPE:
1120 case BOOLEAN_TYPE:
1121 case REAL_TYPE:
1122 return build1 (FIXED_CONVERT_EXPR, type, expr);
1123
1124 case COMPLEX_TYPE:
1125 return convert (type,
1126 fold_build1 (REALPART_EXPR,
1127 TREE_TYPE (TREE_TYPE (expr)), expr));
1128
1129 default:
1130 error ("aggregate value used where a fixed-point was expected");
1131 return error_mark_node;
1132 }
1133 }
1134
1135 #if CHECKING_P
1136
1137 namespace selftest {
1138
1139 /* Selftests for conversions. */
1140
1141 static void
1142 test_convert_to_integer_maybe_fold (tree orig_type, tree new_type)
1143 {
1144 /* Calling convert_to_integer_maybe_fold on an INTEGER_CST. */
1145
1146 tree orig_cst = build_int_cst (orig_type, 42);
1147
1148 /* Verify that convert_to_integer_maybe_fold on a constant returns a new
1149 constant of the new type, unless the types are the same, in which
1150 case verify it's a no-op. */
1151 {
1152 tree result = convert_to_integer_maybe_fold (new_type,
1153 orig_cst, false);
1154 if (orig_type != new_type)
1155 {
1156 ASSERT_EQ (TREE_TYPE (result), new_type);
1157 ASSERT_EQ (TREE_CODE (result), INTEGER_CST);
1158 }
1159 else
1160 ASSERT_EQ (result, orig_cst);
1161 }
1162
1163 /* Calling convert_to_integer_maybe_fold on a location wrapper around
1164 an INTEGER_CST.
1165
1166 Verify that convert_to_integer_maybe_fold on a location wrapper
1167 around a constant returns a new location wrapper around an equivalent
1168 constant, both of the new type, unless the types are the same,
1169 in which case the original wrapper should be returned. */
1170 {
1171 const location_t loc = BUILTINS_LOCATION;
1172 tree wrapped_orig_cst = maybe_wrap_with_location (orig_cst, loc);
1173 tree result
1174 = convert_to_integer_maybe_fold (new_type, wrapped_orig_cst, false);
1175 ASSERT_EQ (TREE_TYPE (result), new_type);
1176 ASSERT_EQ (EXPR_LOCATION (result), loc);
1177 ASSERT_TRUE (location_wrapper_p (result));
1178 ASSERT_EQ (TREE_TYPE (TREE_OPERAND (result, 0)), new_type);
1179 ASSERT_EQ (TREE_CODE (TREE_OPERAND (result, 0)), INTEGER_CST);
1180
1181 if (orig_type == new_type)
1182 ASSERT_EQ (result, wrapped_orig_cst);
1183 }
1184 }
1185
1186 /* Verify that convert_to_integer_maybe_fold preserves locations. */
1187
1188 static void
1189 test_convert_to_integer_maybe_fold ()
1190 {
1191 /* char -> long. */
1192 test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);
1193
1194 /* char -> char. */
1195 test_convert_to_integer_maybe_fold (char_type_node, char_type_node);
1196
1197 /* long -> char. */
1198 test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);
1199
1200 /* long -> long. */
1201 test_convert_to_integer_maybe_fold (long_integer_type_node,
1202 long_integer_type_node);
1203 }
1204
1205 /* Run all of the selftests within this file. */
1206
1207 void
1208 convert_c_tests ()
1209 {
1210 test_convert_to_integer_maybe_fold ();
1211 }
1212
1213 } // namespace selftest
1214
1215 #endif /* CHECKING_P */