1/* Utility routines for data type conversion for GCC.
2 Copyright (C) 1987-2024 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20
21/* These routines are somewhat language-independent utility function
22 intended to be called by the language-specific convert () functions. */
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "target.h"
28#include "tree.h"
29#include "diagnostic-core.h"
30#include "fold-const.h"
31#include "stor-layout.h"
32#include "convert.h"
33#include "langhooks.h"
34#include "builtins.h"
35#include "ubsan.h"
36#include "stringpool.h"
37#include "attribs.h"
38#include "asan.h"
39#include "selftest.h"
40
/* Build a unary (build1) or binary (build2) tree node at location LOC,
   folding it via fold_build*_loc only when FOLD_P is true; otherwise the
   node is built unfolded.  Used so the convert_to_*_1 workers can share
   one code path for both folding and non-folding callers.  */
#define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
  ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR) \
   : build1_loc (LOC, CODE, TYPE, EXPR))
#define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
  ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2) \
   : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))
47
48/* Convert EXPR to some pointer or reference type TYPE.
49 EXPR must be pointer, reference, integer, enumeral, or literal zero;
50 in other cases error is called. If FOLD_P is true, try to fold the
51 expression. */
52
53static tree
54convert_to_pointer_1 (tree type, tree expr, bool fold_p)
55{
56 location_t loc = EXPR_LOCATION (expr);
57 if (TREE_TYPE (expr) == type)
58 return expr;
59
60 switch (TREE_CODE (TREE_TYPE (expr)))
61 {
62 case POINTER_TYPE:
63 case REFERENCE_TYPE:
64 {
65 /* If the pointers point to different address spaces, conversion needs
66 to be done via a ADDR_SPACE_CONVERT_EXPR instead of a NOP_EXPR. */
67 addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
68 addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
69
70 if (to_as == from_as)
71 return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
72 else
73 return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
74 type, expr);
75 }
76
77 case INTEGER_TYPE:
78 case ENUMERAL_TYPE:
79 case BOOLEAN_TYPE:
80 case BITINT_TYPE:
81 {
82 /* If the input precision differs from the target pointer type
83 precision, first convert the input expression to an integer type of
84 the target precision. Some targets, e.g. VMS, need several pointer
85 sizes to coexist so the latter isn't necessarily POINTER_SIZE. */
86 unsigned int pprec = TYPE_PRECISION (type);
87 unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));
88
89 if (eprec != pprec)
90 expr
91 = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
92 lang_hooks.types.type_for_size (pprec, 0),
93 expr);
94 }
95 return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);
96
97 default:
98 error ("cannot convert to a pointer type");
99 return error_mark_node;
100 }
101}
102
103/* Subroutine of the various convert_to_*_maybe_fold routines.
104
105 If a location wrapper has been folded to a constant (presumably of
106 a different type), re-wrap the new constant with a location wrapper. */
107
108tree
109preserve_any_location_wrapper (tree result, tree orig_expr)
110{
111 if (CONSTANT_CLASS_P (result) && location_wrapper_p (exp: orig_expr))
112 {
113 if (result == TREE_OPERAND (orig_expr, 0))
114 return orig_expr;
115 else
116 return maybe_wrap_with_location (result, EXPR_LOCATION (orig_expr));
117 }
118
119 return result;
120}
121
122/* A wrapper around convert_to_pointer_1 that always folds the
123 expression. */
124
125tree
126convert_to_pointer (tree type, tree expr)
127{
128 return convert_to_pointer_1 (type, expr, fold_p: true);
129}
130
131/* A wrapper around convert_to_pointer_1 that only folds the
132 expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P. */
133
134tree
135convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
136{
137 tree result
138 = convert_to_pointer_1 (type, expr,
139 fold_p: dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
140 return preserve_any_location_wrapper (result, orig_expr: expr);
141}
142
143/* Convert EXPR to some floating-point type TYPE.
144
145 EXPR must be float, fixed-point, integer, or enumeral;
146 in other cases error is called. If FOLD_P is true, try to fold
147 the expression. */
148
149static tree
150convert_to_real_1 (tree type, tree expr, bool fold_p)
151{
152 enum built_in_function fcode = builtin_mathfn_code (expr);
153 tree itype = TREE_TYPE (expr);
154 location_t loc = EXPR_LOCATION (expr);
155
156 if (TREE_CODE (expr) == COMPOUND_EXPR)
157 {
158 tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
159 if (t == TREE_OPERAND (expr, 1))
160 return expr;
161 return build2_loc (EXPR_LOCATION (expr), code: COMPOUND_EXPR, TREE_TYPE (t),
162 TREE_OPERAND (expr, 0), arg1: t);
163 }
164
165 /* Disable until we figure out how to decide whether the functions are
166 present in runtime. */
167 /* Convert (float)sqrt((double)x) where x is float into sqrtf(x) */
168 if (optimize
169 && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
170 || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
171 {
172 switch (fcode)
173 {
174#define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
175 CASE_MATHFN (COSH)
176 CASE_MATHFN (EXP)
177 CASE_MATHFN (EXP10)
178 CASE_MATHFN (EXP2)
179 CASE_MATHFN (EXPM1)
180 CASE_MATHFN (GAMMA)
181 CASE_MATHFN (J0)
182 CASE_MATHFN (J1)
183 CASE_MATHFN (LGAMMA)
184 CASE_MATHFN (POW10)
185 CASE_MATHFN (SINH)
186 CASE_MATHFN (TGAMMA)
187 CASE_MATHFN (Y0)
188 CASE_MATHFN (Y1)
189 /* The above functions may set errno differently with float
190 input or output so this transformation is not safe with
191 -fmath-errno. */
192 if (flag_errno_math)
193 break;
194 gcc_fallthrough ();
195 CASE_MATHFN (ACOS)
196 CASE_MATHFN (ACOSH)
197 CASE_MATHFN (ASIN)
198 CASE_MATHFN (ASINH)
199 CASE_MATHFN (ATAN)
200 CASE_MATHFN (ATANH)
201 CASE_MATHFN (CBRT)
202 CASE_MATHFN (COS)
203 CASE_MATHFN (ERF)
204 CASE_MATHFN (ERFC)
205 CASE_MATHFN (LOG)
206 CASE_MATHFN (LOG10)
207 CASE_MATHFN (LOG2)
208 CASE_MATHFN (LOG1P)
209 CASE_MATHFN (SIN)
210 CASE_MATHFN (TAN)
211 CASE_MATHFN (TANH)
212 /* The above functions are not safe to do this conversion. */
213 if (!flag_unsafe_math_optimizations)
214 break;
215 gcc_fallthrough ();
216 CASE_MATHFN (SQRT)
217 CASE_MATHFN (FABS)
218 CASE_MATHFN (LOGB)
219#undef CASE_MATHFN
220 if (call_expr_nargs (expr) != 1
221 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (expr, 0))))
222 break;
223 {
224 tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
225 tree newtype = type;
226
227 /* We have (outertype)sqrt((innertype)x). Choose the wider mode
228 from the both as the safe type for operation. */
229 if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
230 newtype = TREE_TYPE (arg0);
231
232 /* We consider to convert
233
234 (T1) sqrtT2 ((T2) exprT3)
235 to
236 (T1) sqrtT4 ((T4) exprT3)
237
238 , where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
239 and T4 is NEWTYPE. All those types are of floating-point types.
240 T4 (NEWTYPE) should be narrower than T2 (ITYPE). This conversion
241 is safe only if P1 >= P2*2+2, where P1 and P2 are precisions of
242 T2 and T4. See the following URL for a reference:
243 http://stackoverflow.com/questions/9235456/determining-
244 floating-point-square-root
245 */
246 if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
247 && !flag_unsafe_math_optimizations)
248 {
249 /* The following conversion is unsafe even the precision condition
250 below is satisfied:
251
252 (float) sqrtl ((long double) double_val) -> (float) sqrt (double_val)
253 */
254 if (TYPE_MODE (type) != TYPE_MODE (newtype))
255 break;
256
257 int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
258 int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
259 if (p1 < p2 * 2 + 2)
260 break;
261 }
262
263 /* Be careful about integer to fp conversions.
264 These may overflow still. */
265 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
266 && TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
267 && (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
268 || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
269 {
270 tree fn = mathfn_built_in (newtype, fn: fcode);
271 if (fn)
272 {
273 tree arg = convert_to_real_1 (type: newtype, expr: arg0, fold_p);
274 expr = build_call_expr (fn, 1, arg);
275 if (newtype == type)
276 return expr;
277 }
278 }
279 }
280 default:
281 break;
282 }
283 }
284
285 /* Propagate the cast into the operation. */
286 if (itype != type && FLOAT_TYPE_P (type))
287 switch (TREE_CODE (expr))
288 {
289 /* Convert (float)-x into -(float)x. This is safe for
290 round-to-nearest rounding mode when the inner type is float. */
291 case ABS_EXPR:
292 case NEGATE_EXPR:
293 if (!flag_rounding_math
294 && FLOAT_TYPE_P (itype)
295 && element_precision (type) < element_precision (itype))
296 {
297 tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
298 fold_p);
299 return build1 (TREE_CODE (expr), type, arg);
300 }
301 break;
302 default:
303 break;
304 }
305
306 switch (TREE_CODE (TREE_TYPE (expr)))
307 {
308 case REAL_TYPE:
309 /* Ignore the conversion if we don't need to store intermediate
310 results and neither type is a decimal float. */
311 return build1_loc (loc,
312 code: (flag_float_store
313 || DECIMAL_FLOAT_TYPE_P (type)
314 || DECIMAL_FLOAT_TYPE_P (itype))
315 ? CONVERT_EXPR : NOP_EXPR, type, arg1: expr);
316
317 case INTEGER_TYPE:
318 case ENUMERAL_TYPE:
319 case BOOLEAN_TYPE:
320 case BITINT_TYPE:
321 return build1 (FLOAT_EXPR, type, expr);
322
323 case FIXED_POINT_TYPE:
324 return build1 (FIXED_CONVERT_EXPR, type, expr);
325
326 case COMPLEX_TYPE:
327 return convert (type,
328 maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
329 TREE_TYPE (TREE_TYPE (expr)),
330 expr));
331
332 case POINTER_TYPE:
333 case REFERENCE_TYPE:
334 error ("pointer value used where a floating-point was expected");
335 return error_mark_node;
336
337 case VECTOR_TYPE:
338 error ("vector value used where a floating-point was expected");
339 return error_mark_node;
340
341 default:
342 error ("aggregate value used where a floating-point was expected");
343 return error_mark_node;
344 }
345}
346
347/* A wrapper around convert_to_real_1 that always folds the
348 expression. */
349
350tree
351convert_to_real (tree type, tree expr)
352{
353 return convert_to_real_1 (type, expr, fold_p: true);
354}
355
356/* A wrapper around convert_to_real_1 that only folds the
357 expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P. */
358
359tree
360convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
361{
362 tree result
363 = convert_to_real_1 (type, expr,
364 fold_p: dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
365 return preserve_any_location_wrapper (result, orig_expr: expr);
366}
367
368/* Try to narrow EX_FORM ARG0 ARG1 in narrowed arg types producing a
369 result in TYPE. */
370
371static tree
372do_narrow (location_t loc,
373 enum tree_code ex_form, tree type, tree arg0, tree arg1,
374 tree expr, unsigned inprec, unsigned outprec, bool dofold)
375{
376 /* Do the arithmetic in type TYPEX,
377 then convert result to TYPE. */
378 tree typex = type;
379
380 /* Can't do arithmetic in enumeral types
381 so use an integer type that will hold the values. */
382 if (TREE_CODE (typex) == ENUMERAL_TYPE)
383 typex = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
384 TYPE_UNSIGNED (typex));
385
386 /* The type demotion below might cause doing unsigned arithmetic
387 instead of signed, and thus hide overflow bugs. */
388 if ((ex_form == PLUS_EXPR || ex_form == MINUS_EXPR)
389 && !TYPE_UNSIGNED (typex)
390 && sanitize_flags_p (flag: SANITIZE_SI_OVERFLOW))
391 return NULL_TREE;
392
393 /* Similarly for multiplication, but in that case it can be
394 problematic even if typex is unsigned type - 0xffff * 0xffff
395 overflows in int. */
396 if (ex_form == MULT_EXPR
397 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (expr))
398 && sanitize_flags_p (flag: SANITIZE_SI_OVERFLOW))
399 return NULL_TREE;
400
401 /* But now perhaps TYPEX is as wide as INPREC.
402 In that case, do nothing special here.
403 (Otherwise would recurse infinitely in convert. */
404 if (TYPE_PRECISION (typex) != inprec)
405 {
406 /* Don't do unsigned arithmetic where signed was wanted,
407 or vice versa.
408 Exception: if both of the original operands were
409 unsigned then we can safely do the work as unsigned.
410 Exception: shift operations take their type solely
411 from the first argument.
412 Exception: the LSHIFT_EXPR case above requires that
413 we perform this operation unsigned lest we produce
414 signed-overflow undefinedness.
415 And we may need to do it as unsigned
416 if we truncate to the original size. */
417 if (TYPE_UNSIGNED (TREE_TYPE (expr))
418 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
419 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
420 || ex_form == LSHIFT_EXPR
421 || ex_form == RSHIFT_EXPR
422 || ex_form == LROTATE_EXPR
423 || ex_form == RROTATE_EXPR))
424 || ex_form == LSHIFT_EXPR
425 /* If we have !flag_wrapv, and either ARG0 or
426 ARG1 is of a signed type, we have to do
427 PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
428 type in case the operation in outprec precision
429 could overflow. Otherwise, we would introduce
430 signed-overflow undefinedness. */
431 || ((!(INTEGRAL_TYPE_P (TREE_TYPE (arg0))
432 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
433 || !(INTEGRAL_TYPE_P (TREE_TYPE (arg1))
434 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1))))
435 && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
436 > outprec)
437 || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
438 > outprec))
439 && (ex_form == PLUS_EXPR
440 || ex_form == MINUS_EXPR
441 || ex_form == MULT_EXPR)))
442 {
443 if (!TYPE_UNSIGNED (typex))
444 typex = unsigned_type_for (typex);
445 }
446 else
447 {
448 if (TYPE_UNSIGNED (typex))
449 typex = signed_type_for (typex);
450 }
451 /* We should do away with all this once we have a proper
452 type promotion/demotion pass, see PR45397. */
453 expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
454 convert (typex, arg0),
455 convert (typex, arg1));
456 return convert (type, expr);
457 }
458
459 return NULL_TREE;
460}
461
462/* Convert EXPR to some integer (or enum) type TYPE.
463
464 EXPR must be pointer, integer, discrete (enum, char, or bool), float,
465 fixed-point or vector; in other cases error is called.
466
467 If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.
468
469 The result of this is always supposed to be a newly created tree node
470 not in use in any existing structure. */
471
472static tree
473convert_to_integer_1 (tree type, tree expr, bool dofold)
474{
475 enum tree_code ex_form = TREE_CODE (expr);
476 tree intype = TREE_TYPE (expr);
477 unsigned int inprec = element_precision (intype);
478 unsigned int outprec = element_precision (type);
479 location_t loc = EXPR_LOCATION (expr);
480
481 /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
482 be. Consider `enum E = { a, b = (enum E) 3 };'. */
483 if (!COMPLETE_TYPE_P (type))
484 {
485 error ("conversion to incomplete type");
486 return error_mark_node;
487 }
488
489 if (ex_form == COMPOUND_EXPR)
490 {
491 tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
492 if (t == TREE_OPERAND (expr, 1))
493 return expr;
494 return build2_loc (EXPR_LOCATION (expr), code: COMPOUND_EXPR, TREE_TYPE (t),
495 TREE_OPERAND (expr, 0), arg1: t);
496 }
497
498 /* Convert e.g. (long)round(d) -> lround(d). */
499 /* If we're converting to char, we may encounter differing behavior
500 between converting from double->char vs double->long->char.
501 We're in "undefined" territory but we prefer to be conservative,
502 so only proceed in "unsafe" math mode. */
503 if (optimize
504 && (flag_unsafe_math_optimizations
505 || (long_integer_type_node
506 && outprec >= TYPE_PRECISION (long_integer_type_node))))
507 {
508 tree s_expr = strip_float_extensions (expr);
509 tree s_intype = TREE_TYPE (s_expr);
510 const enum built_in_function fcode = builtin_mathfn_code (s_expr);
511 tree fn = 0;
512
513 switch (fcode)
514 {
515 CASE_FLT_FN (BUILT_IN_CEIL):
516 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
517 /* Only convert in ISO C99 mode. */
518 if (!targetm.libc_has_function (function_c99_misc, intype))
519 break;
520 if (outprec < TYPE_PRECISION (integer_type_node)
521 || (outprec == TYPE_PRECISION (integer_type_node)
522 && !TYPE_UNSIGNED (type)))
523 fn = mathfn_built_in (s_intype, fn: BUILT_IN_ICEIL);
524 else if (outprec == TYPE_PRECISION (long_integer_type_node)
525 && !TYPE_UNSIGNED (type))
526 fn = mathfn_built_in (s_intype, fn: BUILT_IN_LCEIL);
527 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
528 && !TYPE_UNSIGNED (type))
529 fn = mathfn_built_in (s_intype, fn: BUILT_IN_LLCEIL);
530 break;
531
532 CASE_FLT_FN (BUILT_IN_FLOOR):
533 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
534 /* Only convert in ISO C99 mode. */
535 if (!targetm.libc_has_function (function_c99_misc, intype))
536 break;
537 if (outprec < TYPE_PRECISION (integer_type_node)
538 || (outprec == TYPE_PRECISION (integer_type_node)
539 && !TYPE_UNSIGNED (type)))
540 fn = mathfn_built_in (s_intype, fn: BUILT_IN_IFLOOR);
541 else if (outprec == TYPE_PRECISION (long_integer_type_node)
542 && !TYPE_UNSIGNED (type))
543 fn = mathfn_built_in (s_intype, fn: BUILT_IN_LFLOOR);
544 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
545 && !TYPE_UNSIGNED (type))
546 fn = mathfn_built_in (s_intype, fn: BUILT_IN_LLFLOOR);
547 break;
548
549 CASE_FLT_FN (BUILT_IN_ROUND):
550 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
551 /* Only convert in ISO C99 mode and with -fno-math-errno. */
552 if (!targetm.libc_has_function (function_c99_misc, intype)
553 || flag_errno_math)
554 break;
555 if (outprec < TYPE_PRECISION (integer_type_node)
556 || (outprec == TYPE_PRECISION (integer_type_node)
557 && !TYPE_UNSIGNED (type)))
558 fn = mathfn_built_in (s_intype, fn: BUILT_IN_IROUND);
559 else if (outprec == TYPE_PRECISION (long_integer_type_node)
560 && !TYPE_UNSIGNED (type))
561 fn = mathfn_built_in (s_intype, fn: BUILT_IN_LROUND);
562 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
563 && !TYPE_UNSIGNED (type))
564 fn = mathfn_built_in (s_intype, fn: BUILT_IN_LLROUND);
565 break;
566
567 CASE_FLT_FN (BUILT_IN_NEARBYINT):
568 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
569 /* Only convert nearbyint* if we can ignore math exceptions. */
570 if (flag_trapping_math)
571 break;
572 gcc_fallthrough ();
573 CASE_FLT_FN (BUILT_IN_RINT):
574 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
575 /* Only convert in ISO C99 mode and with -fno-math-errno. */
576 if (!targetm.libc_has_function (function_c99_misc, intype)
577 || flag_errno_math)
578 break;
579 if (outprec < TYPE_PRECISION (integer_type_node)
580 || (outprec == TYPE_PRECISION (integer_type_node)
581 && !TYPE_UNSIGNED (type)))
582 fn = mathfn_built_in (s_intype, fn: BUILT_IN_IRINT);
583 else if (outprec == TYPE_PRECISION (long_integer_type_node)
584 && !TYPE_UNSIGNED (type))
585 fn = mathfn_built_in (s_intype, fn: BUILT_IN_LRINT);
586 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
587 && !TYPE_UNSIGNED (type))
588 fn = mathfn_built_in (s_intype, fn: BUILT_IN_LLRINT);
589 break;
590
591 CASE_FLT_FN (BUILT_IN_TRUNC):
592 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
593 if (call_expr_nargs (s_expr) != 1
594 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0)))
595 || (!flag_fp_int_builtin_inexact && flag_trapping_math))
596 break;
597 return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0),
598 dofold);
599
600 default:
601 break;
602 }
603
604 if (fn
605 && call_expr_nargs (s_expr) == 1
606 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
607 {
608 tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
609 return convert_to_integer_1 (type, expr: newexpr, dofold);
610 }
611 }
612
613 /* Convert (int)logb(d) -> ilogb(d). */
614 if (optimize
615 && flag_unsafe_math_optimizations
616 && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
617 && integer_type_node
618 && (outprec > TYPE_PRECISION (integer_type_node)
619 || (outprec == TYPE_PRECISION (integer_type_node)
620 && !TYPE_UNSIGNED (type))))
621 {
622 tree s_expr = strip_float_extensions (expr);
623 tree s_intype = TREE_TYPE (s_expr);
624 const enum built_in_function fcode = builtin_mathfn_code (s_expr);
625 tree fn = 0;
626
627 switch (fcode)
628 {
629 CASE_FLT_FN (BUILT_IN_LOGB):
630 fn = mathfn_built_in (s_intype, fn: BUILT_IN_ILOGB);
631 break;
632
633 default:
634 break;
635 }
636
637 if (fn
638 && call_expr_nargs (s_expr) == 1
639 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
640 {
641 tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
642 return convert_to_integer_1 (type, expr: newexpr, dofold);
643 }
644 }
645
646 switch (TREE_CODE (intype))
647 {
648 case POINTER_TYPE:
649 case REFERENCE_TYPE:
650 if (integer_zerop (expr)
651 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (expr)))
652 return build_int_cst (type, 0);
653
654 /* Convert to an unsigned integer of the correct width first, and from
655 there widen/truncate to the required type. Some targets support the
656 coexistence of multiple valid pointer sizes, so fetch the one we need
657 from the type. */
658 if (!dofold)
659 return build1 (CONVERT_EXPR, type, expr);
660 expr = fold_build1 (CONVERT_EXPR,
661 lang_hooks.types.type_for_size
662 (TYPE_PRECISION (intype), 0),
663 expr);
664 return fold_convert (type, expr);
665
666 case INTEGER_TYPE:
667 case ENUMERAL_TYPE:
668 case BOOLEAN_TYPE:
669 case OFFSET_TYPE:
670 case BITINT_TYPE:
671 /* If this is a logical operation, which just returns 0 or 1, we can
672 change the type of the expression. */
673
674 if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
675 {
676 expr = copy_node (expr);
677 TREE_TYPE (expr) = type;
678 return expr;
679 }
680
681 /* If we are widening the type, put in an explicit conversion.
682 Similarly if we are not changing the width. After this, we know
683 we are truncating EXPR. */
684
685 else if (outprec >= inprec)
686 {
687 enum tree_code code;
688
689 /* If the precision of the EXPR's type is K bits and the
690 destination mode has more bits, and the sign is changing,
691 it is not safe to use a NOP_EXPR. For example, suppose
692 that EXPR's type is a 3-bit unsigned integer type, the
693 TYPE is a 3-bit signed integer type, and the machine mode
694 for the types is 8-bit QImode. In that case, the
695 conversion necessitates an explicit sign-extension. In
696 the signed-to-unsigned case the high-order bits have to
697 be cleared. */
698 if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
699 && !type_has_mode_precision_p (TREE_TYPE (expr)))
700 code = CONVERT_EXPR;
701 else
702 code = NOP_EXPR;
703
704 return maybe_fold_build1_loc (dofold, loc, code, type, expr);
705 }
706
707 /* If TYPE is an enumeral type or a type with a precision less
708 than the number of bits in its mode, do the conversion to the
709 type corresponding to its mode, then do a nop conversion
710 to TYPE. */
711 else if (TREE_CODE (type) == ENUMERAL_TYPE
712 || (TREE_CODE (type) != BITINT_TYPE
713 && maybe_ne (a: outprec,
714 b: GET_MODE_PRECISION (TYPE_MODE (type)))))
715 {
716 expr
717 = convert_to_integer_1 (type: lang_hooks.types.type_for_mode
718 (TYPE_MODE (type), TYPE_UNSIGNED (type)),
719 expr, dofold);
720 return maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr);
721 }
722
723 /* Here detect when we can distribute the truncation down past some
724 arithmetic. For example, if adding two longs and converting to an
725 int, we can equally well convert both to ints and then add.
726 For the operations handled here, such truncation distribution
727 is always safe.
728 It is desirable in these cases:
729 1) when truncating down to full-word from a larger size
730 2) when truncating takes no work.
731 3) when at least one operand of the arithmetic has been extended
732 (as by C's default conversions). In this case we need two conversions
733 if we do the arithmetic as already requested, so we might as well
734 truncate both and then combine. Perhaps that way we need only one.
735
736 Note that in general we cannot do the arithmetic in a type
737 shorter than the desired result of conversion, even if the operands
738 are both extended from a shorter type, because they might overflow
739 if combined in that type. The exceptions to this--the times when
740 two narrow values can be combined in their narrow type even to
741 make a wider result--are handled by "shorten" in build_binary_op. */
742
743 if (dofold)
744 switch (ex_form)
745 {
746 case RSHIFT_EXPR:
747 /* We can pass truncation down through right shifting
748 when the shift count is a nonpositive constant. */
749 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
750 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
751 goto trunc1;
752 break;
753
754 case LSHIFT_EXPR:
755 /* We can pass truncation down through left shifting
756 when the shift count is a nonnegative constant and
757 the target type is unsigned. */
758 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
759 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
760 && TYPE_UNSIGNED (type)
761 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
762 {
763 /* If shift count is less than the width of the truncated type,
764 really shift. */
765 if (wi::to_widest (TREE_OPERAND (expr, 1))
766 < TYPE_PRECISION (type))
767 /* In this case, shifting is like multiplication. */
768 goto trunc1;
769 else
770 {
771 /* If it is >= that width, result is zero.
772 Handling this with trunc1 would give the wrong result:
773 (int) ((long long) a << 32) is well defined (as 0)
774 but (int) a << 32 is undefined and would get a
775 warning. */
776
777 tree t = build_int_cst (type, 0);
778
779 /* If the original expression had side-effects, we must
780 preserve it. */
781 if (TREE_SIDE_EFFECTS (expr))
782 return build2 (COMPOUND_EXPR, type, expr, t);
783 else
784 return t;
785 }
786 }
787 break;
788
789 case TRUNC_DIV_EXPR:
790 {
791 tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), NULL_TREE);
792 tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), NULL_TREE);
793
794 /* Don't distribute unless the output precision is at least as
795 big as the actual inputs and it has the same signedness. */
796 if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
797 && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
798 /* If signedness of arg0 and arg1 don't match,
799 we can't necessarily find a type to compare them in. */
800 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
801 == TYPE_UNSIGNED (TREE_TYPE (arg1)))
802 /* Do not change the sign of the division. */
803 && (TYPE_UNSIGNED (TREE_TYPE (expr))
804 == TYPE_UNSIGNED (TREE_TYPE (arg0)))
805 /* Either require unsigned division or a division by
806 a constant that is not -1. */
807 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
808 || (TREE_CODE (arg1) == INTEGER_CST
809 && !integer_all_onesp (arg1))))
810 {
811 tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
812 expr, inprec, outprec, dofold);
813 if (tem)
814 return tem;
815 }
816 break;
817 }
818
819 case MAX_EXPR:
820 case MIN_EXPR:
821 case MULT_EXPR:
822 {
823 tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
824 tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);
825
826 /* Don't distribute unless the output precision is at least as
827 big as the actual inputs. Otherwise, the comparison of the
828 truncated values will be wrong. */
829 if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
830 && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
831 /* If signedness of arg0 and arg1 don't match,
832 we can't necessarily find a type to compare them in. */
833 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
834 == TYPE_UNSIGNED (TREE_TYPE (arg1))))
835 goto trunc1;
836 break;
837 }
838
839 case PLUS_EXPR:
840 case MINUS_EXPR:
841 case BIT_AND_EXPR:
842 case BIT_IOR_EXPR:
843 case BIT_XOR_EXPR:
844 trunc1:
845 {
846 tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
847 tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);
848
849 /* Do not try to narrow operands of pointer subtraction;
850 that will interfere with other folding. */
851 if (ex_form == MINUS_EXPR
852 && CONVERT_EXPR_P (arg0)
853 && CONVERT_EXPR_P (arg1)
854 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
855 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
856 break;
857
858 tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
859 expr, inprec, outprec, dofold);
860 if (tem)
861 return tem;
862 }
863 break;
864
865 case NEGATE_EXPR:
866 /* Using unsigned arithmetic for signed types may hide overflow
867 bugs. */
868 if (!TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (expr, 0)))
869 && sanitize_flags_p (flag: SANITIZE_SI_OVERFLOW))
870 break;
871 /* Fall through. */
872 case BIT_NOT_EXPR:
873 /* This is not correct for ABS_EXPR,
874 since we must test the sign before truncation. */
875 {
876 /* Do the arithmetic in type TYPEX,
877 then convert result to TYPE. */
878 tree typex = type;
879
880 /* Can't do arithmetic in enumeral types
881 so use an integer type that will hold the values. */
882 if (TREE_CODE (typex) == ENUMERAL_TYPE)
883 typex
884 = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
885 TYPE_UNSIGNED (typex));
886
887 if (!TYPE_UNSIGNED (typex))
888 typex = unsigned_type_for (typex);
889 return convert (type,
890 fold_build1 (ex_form, typex,
891 convert (typex,
892 TREE_OPERAND (expr, 0))));
893 }
894
895 CASE_CONVERT:
896 {
897 tree argtype = TREE_TYPE (TREE_OPERAND (expr, 0));
898 /* Don't introduce a "can't convert between vector values
899 of different size" error. */
900 if (TREE_CODE (argtype) == VECTOR_TYPE
901 && maybe_ne (a: GET_MODE_SIZE (TYPE_MODE (argtype)),
902 b: GET_MODE_SIZE (TYPE_MODE (type))))
903 break;
904 }
905 /* If truncating after truncating, might as well do all at once.
906 If truncating after extending, we may get rid of wasted work. */
907 return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));
908
909 case COND_EXPR:
910 /* It is sometimes worthwhile to push the narrowing down through
911 the conditional and never loses. A COND_EXPR may have a throw
912 as one operand, which then has void type. Just leave void
913 operands as they are. */
914 return
915 fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
916 VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
917 ? TREE_OPERAND (expr, 1)
918 : convert (type, TREE_OPERAND (expr, 1)),
919 VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
920 ? TREE_OPERAND (expr, 2)
921 : convert (type, TREE_OPERAND (expr, 2)));
922
923 default:
924 break;
925 }
926
927 /* When parsing long initializers, we might end up with a lot of casts.
928 Shortcut this. */
929 if (TREE_CODE (tree_strip_any_location_wrapper (expr)) == INTEGER_CST)
930 return fold_convert (type, expr);
931 return build1 (CONVERT_EXPR, type, expr);
932
933 case REAL_TYPE:
934 if (sanitize_flags_p (flag: SANITIZE_FLOAT_CAST)
935 && current_function_decl != NULL_TREE)
936 {
937 expr = save_expr (expr);
938 tree check = ubsan_instrument_float_cast (loc, type, expr);
939 expr = build1 (FIX_TRUNC_EXPR, type, expr);
940 if (check == NULL_TREE)
941 return expr;
942 return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
943 TREE_TYPE (expr), check, expr);
944 }
945 else
946 return build1 (FIX_TRUNC_EXPR, type, expr);
947
948 case FIXED_POINT_TYPE:
949 return build1 (FIXED_CONVERT_EXPR, type, expr);
950
951 case COMPLEX_TYPE:
952 expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
953 TREE_TYPE (TREE_TYPE (expr)), expr);
954 return convert (type, expr);
955
956 case VECTOR_TYPE:
957 if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
958 {
959 error ("cannot convert a vector of type %qT"
960 " to type %qT which has different size",
961 TREE_TYPE (expr), type);
962 return error_mark_node;
963 }
964 return build1 (VIEW_CONVERT_EXPR, type, expr);
965
966 default:
967 error ("aggregate value used where an integer was expected");
968 return error_mark_node;
969 }
970}
971
972/* Convert EXPR to some integer (or enum) type TYPE.
973
974 EXPR must be pointer, integer, discrete (enum, char, or bool), float,
975 fixed-point or vector; in other cases error is called.
976
977 The result of this is always supposed to be a newly created tree node
978 not in use in any existing structure. */
979
980tree
981convert_to_integer (tree type, tree expr)
982{
983 return convert_to_integer_1 (type, expr, dofold: true);
984}
985
986/* A wrapper around convert_to_complex_1 that only folds the
987 expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P. */
988
989tree
990convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
991{
992 tree result
993 = convert_to_integer_1 (type, expr,
994 dofold: dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
995 return preserve_any_location_wrapper (result, orig_expr: expr);
996}
997
998/* Convert EXPR to the complex type TYPE in the usual ways. If FOLD_P is
999 true, try to fold the expression. */
1000
1001static tree
1002convert_to_complex_1 (tree type, tree expr, bool fold_p)
1003{
1004 location_t loc = EXPR_LOCATION (expr);
1005 tree subtype = TREE_TYPE (type);
1006
1007 switch (TREE_CODE (TREE_TYPE (expr)))
1008 {
1009 case REAL_TYPE:
1010 case FIXED_POINT_TYPE:
1011 case INTEGER_TYPE:
1012 case ENUMERAL_TYPE:
1013 case BOOLEAN_TYPE:
1014 case BITINT_TYPE:
1015 {
1016 tree real = convert (subtype, expr);
1017 tree imag = convert (subtype, integer_zero_node);
1018 if (error_operand_p (t: real) || error_operand_p (t: imag))
1019 return error_mark_node;
1020 return build2 (COMPLEX_EXPR, type, real, imag);
1021 }
1022
1023 case COMPLEX_TYPE:
1024 {
1025 tree elt_type = TREE_TYPE (TREE_TYPE (expr));
1026
1027 if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
1028 return expr;
1029 else if (TREE_CODE (expr) == COMPOUND_EXPR)
1030 {
1031 tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
1032 fold_p);
1033 if (t == TREE_OPERAND (expr, 1))
1034 return expr;
1035 return build2_loc (EXPR_LOCATION (expr), code: COMPOUND_EXPR,
1036 TREE_TYPE (t), TREE_OPERAND (expr, 0), arg1: t);
1037 }
1038 else if (TREE_CODE (expr) == COMPLEX_EXPR)
1039 return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1040 convert (subtype,
1041 TREE_OPERAND (expr, 0)),
1042 convert (subtype,
1043 TREE_OPERAND (expr, 1)));
1044 else
1045 {
1046 expr = save_expr (expr);
1047 tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
1048 TREE_TYPE (TREE_TYPE (expr)),
1049 expr);
1050 tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
1051 TREE_TYPE (TREE_TYPE (expr)),
1052 expr);
1053 return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1054 convert (subtype, realp),
1055 convert (subtype, imagp));
1056 }
1057 }
1058
1059 case POINTER_TYPE:
1060 case REFERENCE_TYPE:
1061 error ("pointer value used where a complex was expected");
1062 return error_mark_node;
1063
1064 default:
1065 error ("aggregate value used where a complex was expected");
1066 return error_mark_node;
1067 }
1068}
1069
1070/* A wrapper around convert_to_complex_1 that always folds the
1071 expression. */
1072
1073tree
1074convert_to_complex (tree type, tree expr)
1075{
1076 return convert_to_complex_1 (type, expr, fold_p: true);
1077}
1078
1079/* A wrapper around convert_to_complex_1 that only folds the
1080 expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P. */
1081
1082tree
1083convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
1084{
1085 tree result
1086 = convert_to_complex_1 (type, expr,
1087 fold_p: dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
1088 return preserve_any_location_wrapper (result, orig_expr: expr);
1089}
1090
1091/* Convert EXPR to the vector type TYPE in the usual ways. */
1092
1093tree
1094convert_to_vector (tree type, tree expr)
1095{
1096 switch (TREE_CODE (TREE_TYPE (expr)))
1097 {
1098 case INTEGER_TYPE:
1099 case VECTOR_TYPE:
1100 if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
1101 {
1102 error ("cannot convert a value of type %qT"
1103 " to vector type %qT which has different size",
1104 TREE_TYPE (expr), type);
1105 return error_mark_node;
1106 }
1107 return build1 (VIEW_CONVERT_EXPR, type, expr);
1108
1109 default:
1110 error ("cannot convert value to a vector");
1111 return error_mark_node;
1112 }
1113}
1114
1115/* Convert EXPR to some fixed-point type TYPE.
1116
1117 EXPR must be fixed-point, float, integer, or enumeral;
1118 in other cases error is called. */
1119
1120tree
1121convert_to_fixed (tree type, tree expr)
1122{
1123 if (integer_zerop (expr))
1124 {
1125 tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
1126 return fixed_zero_node;
1127 }
1128 else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
1129 {
1130 tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
1131 return fixed_one_node;
1132 }
1133
1134 switch (TREE_CODE (TREE_TYPE (expr)))
1135 {
1136 case FIXED_POINT_TYPE:
1137 case INTEGER_TYPE:
1138 case ENUMERAL_TYPE:
1139 case BOOLEAN_TYPE:
1140 case REAL_TYPE:
1141 return build1 (FIXED_CONVERT_EXPR, type, expr);
1142
1143 case COMPLEX_TYPE:
1144 return convert (type,
1145 fold_build1 (REALPART_EXPR,
1146 TREE_TYPE (TREE_TYPE (expr)), expr));
1147
1148 default:
1149 error ("aggregate value used where a fixed-point was expected");
1150 return error_mark_node;
1151 }
1152}
1153
1154#if CHECKING_P
1155
1156namespace selftest {
1157
1158/* Selftests for conversions. */
1159
1160static void
1161test_convert_to_integer_maybe_fold (tree orig_type, tree new_type)
1162{
1163 /* Calling convert_to_integer_maybe_fold on an INTEGER_CST. */
1164
1165 tree orig_cst = build_int_cst (orig_type, 42);
1166
1167 /* Verify that convert_to_integer_maybe_fold on a constant returns a new
1168 constant of the new type, unless the types are the same, in which
1169 case verify it's a no-op. */
1170 {
1171 tree result = convert_to_integer_maybe_fold (type: new_type,
1172 expr: orig_cst, dofold: false);
1173 if (orig_type != new_type)
1174 {
1175 ASSERT_EQ (TREE_TYPE (result), new_type);
1176 ASSERT_EQ (TREE_CODE (result), INTEGER_CST);
1177 }
1178 else
1179 ASSERT_EQ (result, orig_cst);
1180 }
1181
1182 /* Calling convert_to_integer_maybe_fold on a location wrapper around
1183 an INTEGER_CST.
1184
1185 Verify that convert_to_integer_maybe_fold on a location wrapper
1186 around a constant returns a new location wrapper around an equivalent
1187 constant, both of the new type, unless the types are the same,
1188 in which case the original wrapper should be returned. */
1189 {
1190 const location_t loc = BUILTINS_LOCATION;
1191 tree wrapped_orig_cst = maybe_wrap_with_location (orig_cst, loc);
1192 tree result
1193 = convert_to_integer_maybe_fold (type: new_type, expr: wrapped_orig_cst, dofold: false);
1194 ASSERT_EQ (TREE_TYPE (result), new_type);
1195 ASSERT_EQ (EXPR_LOCATION (result), loc);
1196 ASSERT_TRUE (location_wrapper_p (result));
1197 ASSERT_EQ (TREE_TYPE (TREE_OPERAND (result, 0)), new_type);
1198 ASSERT_EQ (TREE_CODE (TREE_OPERAND (result, 0)), INTEGER_CST);
1199
1200 if (orig_type == new_type)
1201 ASSERT_EQ (result, wrapped_orig_cst);
1202 }
1203}
1204
1205/* Verify that convert_to_integer_maybe_fold preserves locations. */
1206
1207static void
1208test_convert_to_integer_maybe_fold ()
1209{
1210 /* char -> long. */
1211 test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);
1212
1213 /* char -> char. */
1214 test_convert_to_integer_maybe_fold (char_type_node, char_type_node);
1215
1216 /* long -> char. */
1217 test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);
1218
1219 /* long -> long. */
1220 test_convert_to_integer_maybe_fold (long_integer_type_node,
1221 long_integer_type_node);
1222}
1223
1224/* Run all of the selftests within this file. */
1225
1226void
1227convert_cc_tests ()
1228{
1229 test_convert_to_integer_maybe_fold ();
1230}
1231
1232} // namespace selftest
1233
1234#endif /* CHECKING_P */
1235

/* source code of gcc/convert.cc */