1/* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2024 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28/* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43#define INCLUDE_ALGORITHM
44#include "config.h"
45#include "system.h"
46#include "coretypes.h"
47#include "backend.h"
48#include "target.h"
49#include "rtl.h"
50#include "tree.h"
51#include "gimple.h"
52#include "predict.h"
53#include "memmodel.h"
54#include "tm_p.h"
55#include "tree-ssa-operands.h"
56#include "optabs-query.h"
57#include "cgraph.h"
58#include "diagnostic-core.h"
59#include "flags.h"
60#include "alias.h"
61#include "fold-const.h"
62#include "fold-const-call.h"
63#include "stor-layout.h"
64#include "calls.h"
65#include "tree-iterator.h"
66#include "expr.h"
67#include "intl.h"
68#include "langhooks.h"
69#include "tree-eh.h"
70#include "gimplify.h"
71#include "tree-dfa.h"
72#include "builtins.h"
73#include "generic-match.h"
74#include "gimple-iterator.h"
75#include "gimple-fold.h"
76#include "tree-into-ssa.h"
77#include "md5.h"
78#include "case-cfn-macros.h"
79#include "stringpool.h"
80#include "tree-vrp.h"
81#include "tree-ssanames.h"
82#include "selftest.h"
83#include "stringpool.h"
84#include "attribs.h"
85#include "tree-vector-builder.h"
86#include "vec-perm-indices.h"
87#include "asan.h"
88#include "gimple-range.h"
89
90/* Nonzero if we are folding constants inside an initializer or a C++
91 manifestly-constant-evaluated context; zero otherwise.
92 Should be used when folding in an initializer enables additional
93 optimizations. */
94int folding_initializer = 0;
95
96/* Nonzero if we are folding in a C++ manifestly-constant-evaluated context; zero
97 otherwise.
98 Should be used when certain constructs shouldn't be optimized
99 during folding in that context. */
100bool folding_cxx_constexpr = false;
101
102/* The following constants represent a bit-based encoding of GCC's
103 comparison operators. This encoding simplifies transformations
104 on relational comparison operators, such as AND and OR. */
105enum comparison_code {
106 COMPCODE_FALSE = 0,
107 COMPCODE_LT = 1,
108 COMPCODE_EQ = 2,
109 COMPCODE_LE = 3,
110 COMPCODE_GT = 4,
111 COMPCODE_LTGT = 5,
112 COMPCODE_GE = 6,
113 COMPCODE_ORD = 7,
114 COMPCODE_UNORD = 8,
115 COMPCODE_UNLT = 9,
116 COMPCODE_UNEQ = 10,
117 COMPCODE_UNLE = 11,
118 COMPCODE_UNGT = 12,
119 COMPCODE_NE = 13,
120 COMPCODE_UNGE = 14,
121 COMPCODE_TRUE = 15
122};
123
124static bool negate_expr_p (tree);
125static tree negate_expr (tree);
126static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
127static enum comparison_code comparison_to_compcode (enum tree_code);
128static enum tree_code compcode_to_comparison (enum comparison_code);
129static bool twoval_comparison_p (tree, tree *, tree *);
130static tree eval_subst (location_t, tree, tree, tree, tree, tree);
131static tree optimize_bit_field_compare (location_t, enum tree_code,
132 tree, tree, tree);
133static bool simple_operand_p (const_tree);
134static tree range_binop (enum tree_code, tree, tree, int, tree, int);
135static tree range_predecessor (tree);
136static tree range_successor (tree);
137static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
138static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
139 tree, tree, tree, tree);
140static tree unextend (tree, int, int, tree);
141static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
142static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
143static tree fold_binary_op_with_conditional_arg (location_t,
144 enum tree_code, tree,
145 tree, tree,
146 tree, tree, int);
147static tree fold_negate_const (tree, tree);
148static tree fold_not_const (const_tree, tree);
149static tree fold_relational_const (enum tree_code, tree, tree, tree);
150static tree fold_convert_const (enum tree_code, tree, tree);
151static tree fold_view_convert_expr (tree, tree);
152static tree fold_negate_expr (location_t, tree);
153
154/* This is a helper function to detect min/max for some operands of COND_EXPR.
155 The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3". */
156tree_code
157minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
158{
159 enum tree_code code = ERROR_MARK;
160
161 if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
162 return ERROR_MARK;
163
164 if (!operand_equal_p (exp0, exp2))
165 return ERROR_MARK;
166
167 if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
168 {
169 if (wi::to_widest (t: exp1) == (wi::to_widest (t: exp3) - 1))
170 {
171 /* X <= Y - 1 equals to X < Y. */
172 if (cmp == LE_EXPR)
173 code = LT_EXPR;
174 /* X > Y - 1 equals to X >= Y. */
175 if (cmp == GT_EXPR)
176 code = GE_EXPR;
177 /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
178 if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
179 {
180 value_range r;
181 get_range_query (cfun)->range_of_expr (r, expr: exp0);
182 if (r.undefined_p ())
183 r.set_varying (TREE_TYPE (exp0));
184
185 widest_int min = widest_int::from (x: r.lower_bound (),
186 TYPE_SIGN (TREE_TYPE (exp0)));
187 if (min == wi::to_widest (t: exp1))
188 code = MAX_EXPR;
189 }
190 }
191 if (wi::to_widest (t: exp1) == (wi::to_widest (t: exp3) + 1))
192 {
193 /* X < Y + 1 equals to X <= Y. */
194 if (cmp == LT_EXPR)
195 code = LE_EXPR;
196 /* X >= Y + 1 equals to X > Y. */
197 if (cmp == GE_EXPR)
198 code = GT_EXPR;
199 /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MAX_RANGE<a>-1, a> */
200 if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
201 {
202 value_range r;
203 get_range_query (cfun)->range_of_expr (r, expr: exp0);
204 if (r.undefined_p ())
205 r.set_varying (TREE_TYPE (exp0));
206
207 widest_int max = widest_int::from (x: r.upper_bound (),
208 TYPE_SIGN (TREE_TYPE (exp0)));
209 if (max == wi::to_widest (t: exp1))
210 code = MIN_EXPR;
211 }
212 }
213 }
214 if (code != ERROR_MARK
215 || operand_equal_p (exp1, exp3))
216 {
217 if (cmp == LT_EXPR || cmp == LE_EXPR)
218 code = MIN_EXPR;
219 if (cmp == GT_EXPR || cmp == GE_EXPR)
220 code = MAX_EXPR;
221 }
222 return code;
223}
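
/* Illustrative examples of the detection above (not part of the original
   sources): for "(x < y) ? x : y" this returns MIN_EXPR, and the
   off-by-one form "(x <= 6) ? x : 7" is likewise recognized as a MIN_EXPR
   once the LE comparison is rewritten into LT.  The NE_EXPR cases use the
   value range of EXP0, so "x != INT_MIN ? x : INT_MIN + 1" yields
   MAX_EXPR.  */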
224
225/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
226 Otherwise, return LOC. */
227
228static location_t
229expr_location_or (tree t, location_t loc)
230{
231 location_t tloc = EXPR_LOCATION (t);
232 return tloc == UNKNOWN_LOCATION ? loc : tloc;
233}
234
235/* Similar to protected_set_expr_location, but never modify X in place;
236 if the location can and needs to be set, unshare X first. */
237
238tree
239protected_set_expr_location_unshare (tree x, location_t loc)
240{
241 if (CAN_HAVE_LOCATION_P (x)
242 && EXPR_LOCATION (x) != loc
243 && !(TREE_CODE (x) == SAVE_EXPR
244 || TREE_CODE (x) == TARGET_EXPR
245 || TREE_CODE (x) == BIND_EXPR))
246 {
247 x = copy_node (x);
248 SET_EXPR_LOCATION (x, loc);
249 }
250 return x;
251}
252
253/* If ARG2 divides ARG1 with zero remainder, carries out the exact
254 division and returns the quotient. Otherwise returns
255 NULL_TREE. */
256
257tree
258div_if_zero_remainder (const_tree arg1, const_tree arg2)
259{
260 widest_int quo;
261
262 if (wi::multiple_of_p (x: wi::to_widest (t: arg1), y: wi::to_widest (t: arg2),
263 sgn: SIGNED, res: &quo))
264 return wide_int_to_tree (TREE_TYPE (arg1), cst: quo);
265
266 return NULL_TREE;
267}
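
/* For example, with ARG1 == 12 and ARG2 == 4 this returns the INTEGER_CST 3,
   while with ARG1 == 13 and ARG2 == 4 it returns NULL_TREE because the
   division is not exact.  */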
268
269/* This is nonzero if we should defer warnings about undefined
270 overflow. This facility exists because these warnings are a
271 special case. The code to estimate loop iterations does not want
272 to issue any warnings, since it works with expressions which do not
273 occur in user code. Various bits of cleanup code call fold(), but
274 only use the result if it has certain characteristics (e.g., is a
275 constant); that code only wants to issue a warning if the result is
276 used. */
277
278static int fold_deferring_overflow_warnings;
279
280/* If a warning about undefined overflow is deferred, this is the
281 warning. Note that this may cause us to turn two warnings into
282 one, but that is fine since it is sufficient to only give one
283 warning per expression. */
284
285static const char* fold_deferred_overflow_warning;
286
287/* If a warning about undefined overflow is deferred, this is the
288 level at which the warning should be emitted. */
289
290static enum warn_strict_overflow_code fold_deferred_overflow_code;
291
292/* Start deferring overflow warnings. We could use a stack here to
293 permit nested calls, but at present it is not necessary. */
294
295void
296fold_defer_overflow_warnings (void)
297{
298 ++fold_deferring_overflow_warnings;
299}
300
301/* Stop deferring overflow warnings. If there is a pending warning,
302 and ISSUE is true, then issue the warning if appropriate. STMT is
303 the statement with which the warning should be associated (used for
304 location information); STMT may be NULL. CODE is the level of the
305 warning--a warn_strict_overflow_code value. This function will use
306 the smaller of CODE and the deferred code when deciding whether to
307 issue the warning. CODE may be zero to mean to always use the
308 deferred code. */
309
310void
311fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
312{
313 const char *warnmsg;
314 location_t locus;
315
316 gcc_assert (fold_deferring_overflow_warnings > 0);
317 --fold_deferring_overflow_warnings;
318 if (fold_deferring_overflow_warnings > 0)
319 {
320 if (fold_deferred_overflow_warning != NULL
321 && code != 0
322 && code < (int) fold_deferred_overflow_code)
323 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
324 return;
325 }
326
327 warnmsg = fold_deferred_overflow_warning;
328 fold_deferred_overflow_warning = NULL;
329
330 if (!issue || warnmsg == NULL)
331 return;
332
333 if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
334 return;
335
336 /* Use the smallest code level when deciding to issue the
337 warning. */
338 if (code == 0 || code > (int) fold_deferred_overflow_code)
339 code = fold_deferred_overflow_code;
340
341 if (!issue_strict_overflow_warning (code))
342 return;
343
344 if (stmt == NULL)
345 locus = input_location;
346 else
347 locus = gimple_location (g: stmt);
348 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
349}
350
351/* Stop deferring overflow warnings, ignoring any deferred
352 warnings. */
353
354void
355fold_undefer_and_ignore_overflow_warnings (void)
356{
357 fold_undefer_overflow_warnings (issue: false, NULL, code: 0);
358}
359
360/* Whether we are deferring overflow warnings. */
361
362bool
363fold_deferring_overflow_warnings_p (void)
364{
365 return fold_deferring_overflow_warnings > 0;
366}
367
368/* This is called when we fold something based on the fact that signed
369 overflow is undefined. */
370
371void
372fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
373{
374 if (fold_deferring_overflow_warnings > 0)
375 {
376 if (fold_deferred_overflow_warning == NULL
377 || wc < fold_deferred_overflow_code)
378 {
379 fold_deferred_overflow_warning = gmsgid;
380 fold_deferred_overflow_code = wc;
381 }
382 }
383 else if (issue_strict_overflow_warning (wc))
384 warning (OPT_Wstrict_overflow, gmsgid);
385}
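
/* A minimal usage sketch of the deferral machinery above (illustrative
   only; RESULT_IS_USED stands for whatever condition the caller applies):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     fold_undefer_overflow_warnings (RESULT_IS_USED, stmt, 0);

   Passing 0 as CODE means the deferred warning level is used as-is.  */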
386
387/* Return true if the built-in mathematical function specified by FN
388 is odd, i.e. -f(x) == f(-x). */
389
390bool
391negate_mathfn_p (combined_fn fn)
392{
393 switch (fn)
394 {
395 CASE_CFN_ASIN:
396 CASE_CFN_ASIN_FN:
397 CASE_CFN_ASINH:
398 CASE_CFN_ASINH_FN:
399 CASE_CFN_ATAN:
400 CASE_CFN_ATAN_FN:
401 CASE_CFN_ATANH:
402 CASE_CFN_ATANH_FN:
403 CASE_CFN_CASIN:
404 CASE_CFN_CASIN_FN:
405 CASE_CFN_CASINH:
406 CASE_CFN_CASINH_FN:
407 CASE_CFN_CATAN:
408 CASE_CFN_CATAN_FN:
409 CASE_CFN_CATANH:
410 CASE_CFN_CATANH_FN:
411 CASE_CFN_CBRT:
412 CASE_CFN_CBRT_FN:
413 CASE_CFN_CPROJ:
414 CASE_CFN_CPROJ_FN:
415 CASE_CFN_CSIN:
416 CASE_CFN_CSIN_FN:
417 CASE_CFN_CSINH:
418 CASE_CFN_CSINH_FN:
419 CASE_CFN_CTAN:
420 CASE_CFN_CTAN_FN:
421 CASE_CFN_CTANH:
422 CASE_CFN_CTANH_FN:
423 CASE_CFN_ERF:
424 CASE_CFN_ERF_FN:
425 CASE_CFN_LLROUND:
426 CASE_CFN_LLROUND_FN:
427 CASE_CFN_LROUND:
428 CASE_CFN_LROUND_FN:
429 CASE_CFN_ROUND:
430 CASE_CFN_ROUNDEVEN:
431 CASE_CFN_ROUNDEVEN_FN:
432 CASE_CFN_SIN:
433 CASE_CFN_SIN_FN:
434 CASE_CFN_SINH:
435 CASE_CFN_SINH_FN:
436 CASE_CFN_TAN:
437 CASE_CFN_TAN_FN:
438 CASE_CFN_TANH:
439 CASE_CFN_TANH_FN:
440 CASE_CFN_TRUNC:
441 CASE_CFN_TRUNC_FN:
442 return true;
443
444 CASE_CFN_LLRINT:
445 CASE_CFN_LLRINT_FN:
446 CASE_CFN_LRINT:
447 CASE_CFN_LRINT_FN:
448 CASE_CFN_NEARBYINT:
449 CASE_CFN_NEARBYINT_FN:
450 CASE_CFN_RINT:
451 CASE_CFN_RINT_FN:
452 return !flag_rounding_math;
453
454 default:
455 break;
456 }
457 return false;
458}
459
460/* Check whether we may negate an integer constant T without causing
461 overflow. */
462
463bool
464may_negate_without_overflow_p (const_tree t)
465{
466 tree type;
467
468 gcc_assert (TREE_CODE (t) == INTEGER_CST);
469
470 type = TREE_TYPE (t);
471 if (TYPE_UNSIGNED (type))
472 return false;
473
474 return !wi::only_sign_bit_p (wi::to_wide (t));
475}
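
/* For instance, for a 32-bit signed T only INT_MIN (the value whose
   representation is just the sign bit) makes this return false; constants
   of unsigned type always return false here and are instead accepted
   directly by negate_expr_p below.  */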
476
477/* Determine whether an expression T can be cheaply negated using
478 the function negate_expr without introducing undefined overflow. */
479
480static bool
481negate_expr_p (tree t)
482{
483 tree type;
484
485 if (t == 0)
486 return false;
487
488 type = TREE_TYPE (t);
489
490 STRIP_SIGN_NOPS (t);
491 switch (TREE_CODE (t))
492 {
493 case INTEGER_CST:
494 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
495 return true;
496
497 /* Check that -CST will not overflow type. */
498 return may_negate_without_overflow_p (t);
499 case BIT_NOT_EXPR:
500 return (INTEGRAL_TYPE_P (type)
501 && TYPE_OVERFLOW_WRAPS (type));
502
503 case FIXED_CST:
504 return true;
505
506 case NEGATE_EXPR:
507 return !TYPE_OVERFLOW_SANITIZED (type);
508
509 case REAL_CST:
510 /* We want to canonicalize to positive real constants. Pretend
511 that only negative ones can be easily negated. */
512 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
513
514 case COMPLEX_CST:
515 return negate_expr_p (TREE_REALPART (t))
516 && negate_expr_p (TREE_IMAGPART (t));
517
518 case VECTOR_CST:
519 {
520 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
521 return true;
522
523 /* Steps don't prevent negation. */
524 unsigned int count = vector_cst_encoded_nelts (t);
525 for (unsigned int i = 0; i < count; ++i)
526 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
527 return false;
528
529 return true;
530 }
531
532 case COMPLEX_EXPR:
533 return negate_expr_p (TREE_OPERAND (t, 0))
534 && negate_expr_p (TREE_OPERAND (t, 1));
535
536 case CONJ_EXPR:
537 return negate_expr_p (TREE_OPERAND (t, 0));
538
539 case PLUS_EXPR:
540 if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
541 || HONOR_SIGNED_ZEROS (type)
542 || (ANY_INTEGRAL_TYPE_P (type)
543 && ! TYPE_OVERFLOW_WRAPS (type)))
544 return false;
545 /* -(A + B) -> (-B) - A. */
546 if (negate_expr_p (TREE_OPERAND (t, 1)))
547 return true;
548 /* -(A + B) -> (-A) - B. */
549 return negate_expr_p (TREE_OPERAND (t, 0));
550
551 case MINUS_EXPR:
552 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
553 return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
554 && !HONOR_SIGNED_ZEROS (type)
555 && (! ANY_INTEGRAL_TYPE_P (type)
556 || TYPE_OVERFLOW_WRAPS (type));
557
558 case MULT_EXPR:
559 if (TYPE_UNSIGNED (type))
560 break;
561 /* INT_MIN/n * n doesn't overflow, but negating one of its operands does
562 if n is a (negative) power of two. */
563 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
564 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
565 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
566 && (wi::popcount
567 (wi::abs (x: wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
568 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
569 && (wi::popcount
570 (wi::abs (x: wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
571 break;
572
573 /* Fall through. */
574
575 case RDIV_EXPR:
576 if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
577 return negate_expr_p (TREE_OPERAND (t, 1))
578 || negate_expr_p (TREE_OPERAND (t, 0));
579 break;
580
581 case TRUNC_DIV_EXPR:
582 case ROUND_DIV_EXPR:
583 case EXACT_DIV_EXPR:
584 if (TYPE_UNSIGNED (type))
585 break;
586 /* In general we can't negate A in A / B, because if A is INT_MIN and
587 B is not 1 we change the sign of the result. */
588 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
589 && negate_expr_p (TREE_OPERAND (t, 0)))
590 return true;
591 /* In general we can't negate B in A / B, because if A is INT_MIN and
592 B is 1, we may turn this into INT_MIN / -1 which is undefined
593 and actually traps on some architectures. */
594 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
595 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
596 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
597 && ! integer_onep (TREE_OPERAND (t, 1))))
598 return negate_expr_p (TREE_OPERAND (t, 1));
599 break;
600
601 case NOP_EXPR:
602 /* Negate -((double)float) as (double)(-float). */
603 if (SCALAR_FLOAT_TYPE_P (type))
604 {
605 tree tem = strip_float_extensions (t);
606 if (tem != t)
607 return negate_expr_p (t: tem);
608 }
609 break;
610
611 case CALL_EXPR:
612 /* Negate -f(x) as f(-x). */
613 if (negate_mathfn_p (fn: get_call_combined_fn (t)))
614 return negate_expr_p (CALL_EXPR_ARG (t, 0));
615 break;
616
617 case RSHIFT_EXPR:
618 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
619 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
620 {
621 tree op1 = TREE_OPERAND (t, 1);
622 if (wi::to_wide (t: op1) == element_precision (type) - 1)
623 return true;
624 }
625 break;
626
627 default:
628 break;
629 }
630 return false;
631}
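
/* For example, negate_expr_p is true for "x - y" in a wrapping integer
   type (the negation is simply "y - x") and for a REAL_CST that is already
   negative, but false for "x / y" in a signed non-wrapping type where Y is
   not known to differ from 1, since negating Y could create INT_MIN / -1.  */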
632
633/* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
634 simplification is possible.
635 If negate_expr_p would return true for T, NULL_TREE will never be
636 returned. */
637
638static tree
639fold_negate_expr_1 (location_t loc, tree t)
640{
641 tree type = TREE_TYPE (t);
642 tree tem;
643
644 switch (TREE_CODE (t))
645 {
646 /* Convert - (~A) to A + 1. */
647 case BIT_NOT_EXPR:
648 if (INTEGRAL_TYPE_P (type))
649 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
650 build_one_cst (type));
651 break;
652
653 case INTEGER_CST:
654 tem = fold_negate_const (t, type);
655 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
656 || (ANY_INTEGRAL_TYPE_P (type)
657 && !TYPE_OVERFLOW_TRAPS (type)
658 && TYPE_OVERFLOW_WRAPS (type))
659 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
660 return tem;
661 break;
662
663 case POLY_INT_CST:
664 case REAL_CST:
665 case FIXED_CST:
666 tem = fold_negate_const (t, type);
667 return tem;
668
669 case COMPLEX_CST:
670 {
671 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
672 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
673 if (rpart && ipart)
674 return build_complex (type, rpart, ipart);
675 }
676 break;
677
678 case VECTOR_CST:
679 {
680 tree_vector_builder elts;
681 elts.new_unary_operation (shape: type, vec: t, allow_stepped_p: true);
682 unsigned int count = elts.encoded_nelts ();
683 for (unsigned int i = 0; i < count; ++i)
684 {
685 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
686 if (elt == NULL_TREE)
687 return NULL_TREE;
688 elts.quick_push (obj: elt);
689 }
690
691 return elts.build ();
692 }
693
694 case COMPLEX_EXPR:
695 if (negate_expr_p (t))
696 return fold_build2_loc (loc, COMPLEX_EXPR, type,
697 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
698 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
699 break;
700
701 case CONJ_EXPR:
702 if (negate_expr_p (t))
703 return fold_build1_loc (loc, CONJ_EXPR, type,
704 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
705 break;
706
707 case NEGATE_EXPR:
708 if (!TYPE_OVERFLOW_SANITIZED (type))
709 return TREE_OPERAND (t, 0);
710 break;
711
712 case PLUS_EXPR:
713 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
714 && !HONOR_SIGNED_ZEROS (type))
715 {
716 /* -(A + B) -> (-B) - A. */
717 if (negate_expr_p (TREE_OPERAND (t, 1)))
718 {
719 tem = negate_expr (TREE_OPERAND (t, 1));
720 return fold_build2_loc (loc, MINUS_EXPR, type,
721 tem, TREE_OPERAND (t, 0));
722 }
723
724 /* -(A + B) -> (-A) - B. */
725 if (negate_expr_p (TREE_OPERAND (t, 0)))
726 {
727 tem = negate_expr (TREE_OPERAND (t, 0));
728 return fold_build2_loc (loc, MINUS_EXPR, type,
729 tem, TREE_OPERAND (t, 1));
730 }
731 }
732 break;
733
734 case MINUS_EXPR:
735 /* - (A - B) -> B - A */
736 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
737 && !HONOR_SIGNED_ZEROS (type))
738 return fold_build2_loc (loc, MINUS_EXPR, type,
739 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
740 break;
741
742 case MULT_EXPR:
743 if (TYPE_UNSIGNED (type))
744 break;
745
746 /* Fall through. */
747
748 case RDIV_EXPR:
749 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
750 {
751 tem = TREE_OPERAND (t, 1);
752 if (negate_expr_p (t: tem))
753 return fold_build2_loc (loc, TREE_CODE (t), type,
754 TREE_OPERAND (t, 0), negate_expr (tem));
755 tem = TREE_OPERAND (t, 0);
756 if (negate_expr_p (t: tem))
757 return fold_build2_loc (loc, TREE_CODE (t), type,
758 negate_expr (tem), TREE_OPERAND (t, 1));
759 }
760 break;
761
762 case TRUNC_DIV_EXPR:
763 case ROUND_DIV_EXPR:
764 case EXACT_DIV_EXPR:
765 if (TYPE_UNSIGNED (type))
766 break;
767 /* In general we can't negate A in A / B, because if A is INT_MIN and
768 B is not 1 we change the sign of the result. */
769 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
770 && negate_expr_p (TREE_OPERAND (t, 0)))
771 return fold_build2_loc (loc, TREE_CODE (t), type,
772 negate_expr (TREE_OPERAND (t, 0)),
773 TREE_OPERAND (t, 1));
774 /* In general we can't negate B in A / B, because if A is INT_MIN and
775 B is 1, we may turn this into INT_MIN / -1 which is undefined
776 and actually traps on some architectures. */
777 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
778 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
779 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
780 && ! integer_onep (TREE_OPERAND (t, 1))))
781 && negate_expr_p (TREE_OPERAND (t, 1)))
782 return fold_build2_loc (loc, TREE_CODE (t), type,
783 TREE_OPERAND (t, 0),
784 negate_expr (TREE_OPERAND (t, 1)));
785 break;
786
787 case NOP_EXPR:
788 /* Convert -((double)float) into (double)(-float). */
789 if (SCALAR_FLOAT_TYPE_P (type))
790 {
791 tem = strip_float_extensions (t);
792 if (tem != t && negate_expr_p (t: tem))
793 return fold_convert_loc (loc, type, negate_expr (tem));
794 }
795 break;
796
797 case CALL_EXPR:
798 /* Negate -f(x) as f(-x). */
799 if (negate_mathfn_p (fn: get_call_combined_fn (t))
800 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
801 {
802 tree fndecl, arg;
803
804 fndecl = get_callee_fndecl (t);
805 arg = negate_expr (CALL_EXPR_ARG (t, 0));
806 return build_call_expr_loc (loc, fndecl, 1, arg);
807 }
808 break;
809
810 case RSHIFT_EXPR:
811 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
812 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
813 {
814 tree op1 = TREE_OPERAND (t, 1);
815 if (wi::to_wide (t: op1) == element_precision (type) - 1)
816 {
817 tree ntype = TYPE_UNSIGNED (type)
818 ? signed_type_for (type)
819 : unsigned_type_for (type);
820 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
821 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
822 return fold_convert_loc (loc, type, temp);
823 }
824 }
825 break;
826
827 default:
828 break;
829 }
830
831 return NULL_TREE;
832}
833
834/* A wrapper for fold_negate_expr_1. */
835
836static tree
837fold_negate_expr (location_t loc, tree t)
838{
839 tree type = TREE_TYPE (t);
840 STRIP_SIGN_NOPS (t);
841 tree tem = fold_negate_expr_1 (loc, t);
842 if (tem == NULL_TREE)
843 return NULL_TREE;
844 return fold_convert_loc (loc, type, tem);
845}
846
847/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
848 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
849 return NULL_TREE. */
850
851static tree
852negate_expr (tree t)
853{
854 tree type, tem;
855 location_t loc;
856
857 if (t == NULL_TREE)
858 return NULL_TREE;
859
860 loc = EXPR_LOCATION (t);
861 type = TREE_TYPE (t);
862 STRIP_SIGN_NOPS (t);
863
864 tem = fold_negate_expr (loc, t);
865 if (!tem)
866 tem = build1_loc (loc, code: NEGATE_EXPR, TREE_TYPE (t), arg1: t);
867 return fold_convert_loc (loc, type, tem);
868}
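
/* Example: negate_expr on the INTEGER_CST 5 folds directly to the
   INTEGER_CST -5, whereas negate_expr on a plain SSA_NAME or VAR_DECL,
   which fold_negate_expr cannot simplify, wraps it in a new NEGATE_EXPR.  */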
869
870/* Split a tree IN into constant, literal and variable parts that could be
871 combined with CODE to make IN. "constant" means an expression with
872 TREE_CONSTANT but that isn't an actual constant. CODE must be a
873 commutative arithmetic operation. Store the constant part into *CONP,
874 the literal in *LITP and return the variable part. If a part isn't
875 present, set it to null. If the tree does not decompose in this way,
876 return the entire tree as the variable part and the other parts as null.
877
878 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
879 case, we negate an operand that was subtracted. Except if it is a
880 literal for which we use *MINUS_LITP instead.
881
882 If NEGATE_P is true, we are negating all of IN, again except a literal
883 for which we use *MINUS_LITP instead. If a variable part is of pointer
884 type, it is negated after converting to TYPE. This prevents us from
885 generating an illegal MINUS pointer expression. LOC is the location of
886 the converted variable part.
887
888 If IN is itself a literal or constant, return it as appropriate.
889
890 Note that we do not guarantee that any of the three values will be the
891 same type as IN, but they will have the same signedness and mode. */
892
893static tree
894split_tree (tree in, tree type, enum tree_code code,
895 tree *minus_varp, tree *conp, tree *minus_conp,
896 tree *litp, tree *minus_litp, int negate_p)
897{
898 tree var = 0;
899 *minus_varp = 0;
900 *conp = 0;
901 *minus_conp = 0;
902 *litp = 0;
903 *minus_litp = 0;
904
905 /* Strip any conversions that don't change the machine mode or signedness. */
906 STRIP_SIGN_NOPS (in);
907
908 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
909 || TREE_CODE (in) == FIXED_CST)
910 *litp = in;
911 else if (TREE_CODE (in) == code
912 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
913 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
914 /* We can associate addition and subtraction together (even
915 though the C standard doesn't say so) for integers because
916 the value is not affected. For reals, the value might be
917 affected, so we can't. */
918 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
919 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
920 || (code == MINUS_EXPR
921 && (TREE_CODE (in) == PLUS_EXPR
922 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
923 {
924 tree op0 = TREE_OPERAND (in, 0);
925 tree op1 = TREE_OPERAND (in, 1);
926 bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
927 bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;
928
929 /* First see if either of the operands is a literal, then a constant. */
930 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
931 || TREE_CODE (op0) == FIXED_CST)
932 *litp = op0, op0 = 0;
933 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
934 || TREE_CODE (op1) == FIXED_CST)
935 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
936
937 if (op0 != 0 && TREE_CONSTANT (op0))
938 *conp = op0, op0 = 0;
939 else if (op1 != 0 && TREE_CONSTANT (op1))
940 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
941
942 /* If we haven't dealt with either operand, this is not a case we can
943 decompose. Otherwise, VAR is either of the ones remaining, if any. */
944 if (op0 != 0 && op1 != 0)
945 var = in;
946 else if (op0 != 0)
947 var = op0;
948 else
949 var = op1, neg_var_p = neg1_p;
950
951 /* Now do any needed negations. */
952 if (neg_litp_p)
953 *minus_litp = *litp, *litp = 0;
954 if (neg_conp_p && *conp)
955 *minus_conp = *conp, *conp = 0;
956 if (neg_var_p && var)
957 *minus_varp = var, var = 0;
958 }
959 else if (TREE_CONSTANT (in))
960 *conp = in;
961 else if (TREE_CODE (in) == BIT_NOT_EXPR
962 && code == PLUS_EXPR)
963 {
964 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
965 when IN is constant. */
966 *litp = build_minus_one_cst (type);
967 *minus_varp = TREE_OPERAND (in, 0);
968 }
969 else
970 var = in;
971
972 if (negate_p)
973 {
974 if (*litp)
975 *minus_litp = *litp, *litp = 0;
976 else if (*minus_litp)
977 *litp = *minus_litp, *minus_litp = 0;
978 if (*conp)
979 *minus_conp = *conp, *conp = 0;
980 else if (*minus_conp)
981 *conp = *minus_conp, *minus_conp = 0;
982 if (var)
983 *minus_varp = var, var = 0;
984 else if (*minus_varp)
985 var = *minus_varp, *minus_varp = 0;
986 }
987
988 if (*litp
989 && TREE_OVERFLOW_P (*litp))
990 *litp = drop_tree_overflow (*litp);
991 if (*minus_litp
992 && TREE_OVERFLOW_P (*minus_litp))
993 *minus_litp = drop_tree_overflow (*minus_litp);
994
995 return var;
996}
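
/* Illustrative decompositions: splitting "a + 5" with CODE == PLUS_EXPR
   yields the variable part "a" and *LITP == 5, while splitting "x - c"
   (where c is TREE_CONSTANT but not a literal) yields "x" and
   *MINUS_CONP == c; all other output parts are set to null.  */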
997
998/* Re-associate trees split by the above function. T1 and T2 are
999 either expressions to associate or null. Return the new
1000 expression, if any. LOC is the location of the new expression. If
1001 we build an operation, do it in TYPE and with CODE. */
1002
1003static tree
1004associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1005{
1006 if (t1 == 0)
1007 {
1008 gcc_assert (t2 == 0 || code != MINUS_EXPR);
1009 return t2;
1010 }
1011 else if (t2 == 0)
1012 return t1;
1013
1014 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1015 try to fold this since we will have infinite recursion. But do
1016 deal with any NEGATE_EXPRs. */
1017 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1018 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
1019 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1020 {
1021 if (code == PLUS_EXPR)
1022 {
1023 if (TREE_CODE (t1) == NEGATE_EXPR)
1024 return build2_loc (loc, code: MINUS_EXPR, type,
1025 arg0: fold_convert_loc (loc, type, t2),
1026 arg1: fold_convert_loc (loc, type,
1027 TREE_OPERAND (t1, 0)));
1028 else if (TREE_CODE (t2) == NEGATE_EXPR)
1029 return build2_loc (loc, code: MINUS_EXPR, type,
1030 arg0: fold_convert_loc (loc, type, t1),
1031 arg1: fold_convert_loc (loc, type,
1032 TREE_OPERAND (t2, 0)));
1033 else if (integer_zerop (t2))
1034 return fold_convert_loc (loc, type, t1);
1035 }
1036 else if (code == MINUS_EXPR)
1037 {
1038 if (integer_zerop (t2))
1039 return fold_convert_loc (loc, type, t1);
1040 }
1041
1042 return build2_loc (loc, code, type, arg0: fold_convert_loc (loc, type, t1),
1043 arg1: fold_convert_loc (loc, type, t2));
1044 }
1045
1046 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1047 fold_convert_loc (loc, type, t2));
1048}
1049
1050/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1051 for use in int_const_binop, size_binop and size_diffop. */
1052
1053static bool
1054int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1055{
1056 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
1057 return false;
1058 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
1059 return false;
1060
1061 switch (code)
1062 {
1063 case LSHIFT_EXPR:
1064 case RSHIFT_EXPR:
1065 case LROTATE_EXPR:
1066 case RROTATE_EXPR:
1067 return true;
1068
1069 default:
1070 break;
1071 }
1072
1073 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1074 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1075 && TYPE_MODE (type1) == TYPE_MODE (type2);
1076}
1077
1078/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
1079 a new constant in RES. Return FALSE if we don't know how to
1080 evaluate CODE at compile-time. */
1081
1082bool
1083wide_int_binop (wide_int &res,
1084 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
1085 signop sign, wi::overflow_type *overflow)
1086{
1087 wide_int tmp;
1088 *overflow = wi::OVF_NONE;
1089 switch (code)
1090 {
1091 case BIT_IOR_EXPR:
1092 res = wi::bit_or (x: arg1, y: arg2);
1093 break;
1094
1095 case BIT_XOR_EXPR:
1096 res = wi::bit_xor (x: arg1, y: arg2);
1097 break;
1098
1099 case BIT_AND_EXPR:
1100 res = wi::bit_and (x: arg1, y: arg2);
1101 break;
1102
1103 case LSHIFT_EXPR:
1104 if (wi::neg_p (x: arg2))
1105 return false;
1106 res = wi::lshift (x: arg1, y: arg2);
1107 break;
1108
1109 case RSHIFT_EXPR:
1110 if (wi::neg_p (x: arg2))
1111 return false;
1112 /* It's unclear from the C standard whether shifts can overflow.
1113 The following code ignores overflow; perhaps a C standard
1114 interpretation ruling is needed. */
1115 res = wi::rshift (x: arg1, y: arg2, sgn: sign);
1116 break;
1117
1118 case RROTATE_EXPR:
1119 case LROTATE_EXPR:
1120 if (wi::neg_p (x: arg2))
1121 {
1122 tmp = -arg2;
1123 if (code == RROTATE_EXPR)
1124 code = LROTATE_EXPR;
1125 else
1126 code = RROTATE_EXPR;
1127 }
1128 else
1129 tmp = arg2;
1130
1131 if (code == RROTATE_EXPR)
1132 res = wi::rrotate (x: arg1, y: tmp);
1133 else
1134 res = wi::lrotate (x: arg1, y: tmp);
1135 break;
1136
1137 case PLUS_EXPR:
1138 res = wi::add (x: arg1, y: arg2, sgn: sign, overflow);
1139 break;
1140
1141 case MINUS_EXPR:
1142 res = wi::sub (x: arg1, y: arg2, sgn: sign, overflow);
1143 break;
1144
1145 case MULT_EXPR:
1146 res = wi::mul (x: arg1, y: arg2, sgn: sign, overflow);
1147 break;
1148
1149 case MULT_HIGHPART_EXPR:
1150 res = wi::mul_high (x: arg1, y: arg2, sgn: sign);
1151 break;
1152
1153 case TRUNC_DIV_EXPR:
1154 case EXACT_DIV_EXPR:
1155 if (arg2 == 0)
1156 return false;
1157 res = wi::div_trunc (x: arg1, y: arg2, sgn: sign, overflow);
1158 break;
1159
1160 case FLOOR_DIV_EXPR:
1161 if (arg2 == 0)
1162 return false;
1163 res = wi::div_floor (x: arg1, y: arg2, sgn: sign, overflow);
1164 break;
1165
1166 case CEIL_DIV_EXPR:
1167 if (arg2 == 0)
1168 return false;
1169 res = wi::div_ceil (x: arg1, y: arg2, sgn: sign, overflow);
1170 break;
1171
1172 case ROUND_DIV_EXPR:
1173 if (arg2 == 0)
1174 return false;
1175 res = wi::div_round (x: arg1, y: arg2, sgn: sign, overflow);
1176 break;
1177
1178 case TRUNC_MOD_EXPR:
1179 if (arg2 == 0)
1180 return false;
1181 res = wi::mod_trunc (x: arg1, y: arg2, sgn: sign, overflow);
1182 break;
1183
1184 case FLOOR_MOD_EXPR:
1185 if (arg2 == 0)
1186 return false;
1187 res = wi::mod_floor (x: arg1, y: arg2, sgn: sign, overflow);
1188 break;
1189
1190 case CEIL_MOD_EXPR:
1191 if (arg2 == 0)
1192 return false;
1193 res = wi::mod_ceil (x: arg1, y: arg2, sgn: sign, overflow);
1194 break;
1195
1196 case ROUND_MOD_EXPR:
1197 if (arg2 == 0)
1198 return false;
1199 res = wi::mod_round (x: arg1, y: arg2, sgn: sign, overflow);
1200 break;
1201
1202 case MIN_EXPR:
1203 res = wi::min (x: arg1, y: arg2, sgn: sign);
1204 break;
1205
1206 case MAX_EXPR:
1207 res = wi::max (x: arg1, y: arg2, sgn: sign);
1208 break;
1209
1210 default:
1211 return false;
1212 }
1213 return true;
1214}
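
/* For example, wide_int_binop fails (returns false) for TRUNC_DIV_EXPR
   when ARG2 is zero and for a shift by a negative amount, while PLUS_EXPR
   always succeeds and reports any wrap-around through *OVERFLOW.  */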
1215
1216/* Return true if we know which of ARG1 and ARG2 is smaller or equal,
1217 and set the minimum value in RES. */
1218bool
1219can_min_p (const_tree arg1, const_tree arg2, poly_wide_int &res)
1220{
1221 if (known_le (wi::to_poly_widest (arg1), wi::to_poly_widest (arg2)))
1222 {
1223 res = wi::to_poly_wide (t: arg1);
1224 return true;
1225 }
1226 else if (known_le (wi::to_poly_widest (arg2), wi::to_poly_widest (arg1)))
1227 {
1228 res = wi::to_poly_wide (t: arg2);
1229 return true;
1230 }
1231
1232 return false;
1233}
1234
1235/* Combine two poly_ints ARG1 and ARG2 under operation CODE to
1236 produce a new constant in RES. Return FALSE if we don't know how
1237 to evaluate CODE at compile-time. */
1238
1239static bool
1240poly_int_binop (poly_wide_int &res, enum tree_code code,
1241 const_tree arg1, const_tree arg2,
1242 signop sign, wi::overflow_type *overflow)
1243{
1244 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1245 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1246 switch (code)
1247 {
1248 case PLUS_EXPR:
1249 res = wi::add (a: wi::to_poly_wide (t: arg1),
1250 b: wi::to_poly_wide (t: arg2), sgn: sign, overflow);
1251 break;
1252
1253 case MINUS_EXPR:
1254 res = wi::sub (a: wi::to_poly_wide (t: arg1),
1255 b: wi::to_poly_wide (t: arg2), sgn: sign, overflow);
1256 break;
1257
1258 case MULT_EXPR:
1259 if (TREE_CODE (arg2) == INTEGER_CST)
1260 res = wi::mul (a: wi::to_poly_wide (t: arg1),
1261 b: wi::to_wide (t: arg2), sgn: sign, overflow);
1262 else if (TREE_CODE (arg1) == INTEGER_CST)
1263 res = wi::mul (a: wi::to_poly_wide (t: arg2),
1264 b: wi::to_wide (t: arg1), sgn: sign, overflow);
1265 else
1266 return false;
1267 break;
1268
1269 case LSHIFT_EXPR:
1270 if (TREE_CODE (arg2) == INTEGER_CST)
1271 res = wi::to_poly_wide (t: arg1) << wi::to_wide (t: arg2);
1272 else
1273 return false;
1274 break;
1275
1276 case BIT_IOR_EXPR:
1277 if (TREE_CODE (arg2) != INTEGER_CST
1278 || !can_ior_p (a: wi::to_poly_wide (t: arg1), b: wi::to_wide (t: arg2),
1279 result: &res))
1280 return false;
1281 break;
1282
1283 case MIN_EXPR:
1284 if (!can_min_p (arg1, arg2, res))
1285 return false;
1286 break;
1287
1288 default:
1289 return false;
1290 }
1291 return true;
1292}
1293
1294/* Combine two integer constants ARG1 and ARG2 under operation CODE to
1295 produce a new constant. Return NULL_TREE if we don't know how to
1296 evaluate CODE at compile-time. */
1297
1298tree
1299int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1300 int overflowable)
1301{
1302 poly_wide_int poly_res;
1303 tree type = TREE_TYPE (arg1);
1304 signop sign = TYPE_SIGN (type);
1305 wi::overflow_type overflow = wi::OVF_NONE;
1306
1307 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1308 {
1309 wide_int warg1 = wi::to_wide (t: arg1), res;
1310 wide_int warg2 = wi::to_wide (t: arg2, TYPE_PRECISION (type));
1311 if (!wide_int_binop (res, code, arg1: warg1, arg2: warg2, sign, overflow: &overflow))
1312 return NULL_TREE;
1313 poly_res = res;
1314 }
1315 else if (!poly_int_tree_p (t: arg1)
1316 || !poly_int_tree_p (t: arg2)
1317 || !poly_int_binop (res&: poly_res, code, arg1, arg2, sign, overflow: &overflow))
1318 return NULL_TREE;
1319 return force_fit_type (type, poly_res, overflowable,
1320 (((sign == SIGNED || overflowable == -1)
1321 && overflow)
1322 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1323}
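
/* A small illustrative use of int_const_binop: given
   a = build_int_cst (integer_type_node, 7) and
   b = build_int_cst (integer_type_node, 3),
   int_const_binop (TRUNC_DIV_EXPR, a, b) returns the INTEGER_CST 2 and
   int_const_binop (TRUNC_DIV_EXPR, a, integer_zero_node) returns
   NULL_TREE.  */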
1324
1325/* Return true if binary operation OP distributes over addition in operand
1326 OPNO, with the other operand being held constant. OPNO counts from 1. */
1327
1328static bool
1329distributes_over_addition_p (tree_code op, int opno)
1330{
1331 switch (op)
1332 {
1333 case PLUS_EXPR:
1334 case MINUS_EXPR:
1335 case MULT_EXPR:
1336 return true;
1337
1338 case LSHIFT_EXPR:
1339 return opno == 1;
1340
1341 default:
1342 return false;
1343 }
1344}
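
/* For example, (a + b) << c equals (a << c) + (b << c) in the modular
   arithmetic used for vector element constants, so LSHIFT_EXPR distributes
   over addition in its first operand only, hence the opno == 1 check.  */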
1345
1346/* OP is the INDEXth operand to CODE (counting from zero) and OTHER_OP
1347 is the other operand. Try to use the value of OP to simplify the
1348 operation in one step, without having to process individual elements. */
1349static tree
1350simplify_const_binop (tree_code code, tree op, tree other_op,
1351 int index ATTRIBUTE_UNUSED)
1352{
1353 /* AND, IOR and XOR with a zero operand can be simplified directly. */
1354 if (TREE_CODE (op) == VECTOR_CST && TREE_CODE (other_op) == VECTOR_CST)
1355 {
1356 if (integer_zerop (other_op))
1357 {
1358 if (code == BIT_IOR_EXPR || code == BIT_XOR_EXPR)
1359 return op;
1360 else if (code == BIT_AND_EXPR)
1361 return other_op;
1362 }
1363 }
1364
1365 return NULL_TREE;
1366}
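
/* E.g. a VECTOR_CST ORed or XORed with an all-zeros VECTOR_CST simplifies
   to the first operand, and one ANDed with it simplifies to the zero
   vector, without inspecting the individual elements.  */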
1367
1368
1369/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1370 constant. We assume ARG1 and ARG2 have the same data type, or at least
1371 are the same kind of constant and the same machine mode. Return zero if
1372 combining the constants is not allowed in the current operating mode. */
1373
1374static tree
1375const_binop (enum tree_code code, tree arg1, tree arg2)
1376{
1377 /* Sanity check for the recursive cases. */
1378 if (!arg1 || !arg2)
1379 return NULL_TREE;
1380
1381 STRIP_NOPS (arg1);
1382 STRIP_NOPS (arg2);
1383
1384 if (poly_int_tree_p (t: arg1) && poly_int_tree_p (t: arg2))
1385 {
1386 if (code == POINTER_PLUS_EXPR)
1387 return int_const_binop (code: PLUS_EXPR,
1388 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1389
1390 return int_const_binop (code, arg1, arg2);
1391 }
1392
1393 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1394 {
1395 machine_mode mode;
1396 REAL_VALUE_TYPE d1;
1397 REAL_VALUE_TYPE d2;
1398 REAL_VALUE_TYPE value;
1399 REAL_VALUE_TYPE result;
1400 bool inexact;
1401 tree t, type;
1402
1403 /* The following codes are handled by real_arithmetic. */
1404 switch (code)
1405 {
1406 case PLUS_EXPR:
1407 case MINUS_EXPR:
1408 case MULT_EXPR:
1409 case RDIV_EXPR:
1410 case MIN_EXPR:
1411 case MAX_EXPR:
1412 break;
1413
1414 default:
1415 return NULL_TREE;
1416 }
1417
1418 d1 = TREE_REAL_CST (arg1);
1419 d2 = TREE_REAL_CST (arg2);
1420
1421 type = TREE_TYPE (arg1);
1422 mode = TYPE_MODE (type);
1423
1424 /* Don't perform operation if we honor signaling NaNs and
1425 either operand is a signaling NaN. */
1426 if (HONOR_SNANS (mode)
1427 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1428 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1429 return NULL_TREE;
1430
1431 /* Don't perform operation if it would raise a division
1432 by zero exception. */
1433 if (code == RDIV_EXPR
1434 && real_equal (&d2, &dconst0)
1435 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1436 return NULL_TREE;
1437
1438 /* If either operand is a NaN, just return it. Otherwise, set up
1439 for floating-point trap; we return an overflow. */
1440 if (REAL_VALUE_ISNAN (d1))
1441 {
1442 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1443 is off. */
1444 d1.signalling = 0;
1445 t = build_real (type, d1);
1446 return t;
1447 }
1448 else if (REAL_VALUE_ISNAN (d2))
1449 {
1450 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1451 is off. */
1452 d2.signalling = 0;
1453 t = build_real (type, d2);
1454 return t;
1455 }
1456
1457 inexact = real_arithmetic (&value, code, &d1, &d2);
1458 real_convert (&result, mode, &value);
1459
1460 /* Don't constant fold this floating point operation if
1461 both operands are not NaN but the result is NaN, and
1462 flag_trapping_math is set. Such operations should raise an
1463 invalid operation exception. */
1464 if (flag_trapping_math
1465 && MODE_HAS_NANS (mode)
1466 && REAL_VALUE_ISNAN (result)
1467 && !REAL_VALUE_ISNAN (d1)
1468 && !REAL_VALUE_ISNAN (d2))
1469 return NULL_TREE;
1470
1471 /* Don't constant fold this floating point operation if
1472 the result has overflowed and flag_trapping_math is set. */
1473 if (flag_trapping_math
1474 && MODE_HAS_INFINITIES (mode)
1475 && REAL_VALUE_ISINF (result)
1476 && !REAL_VALUE_ISINF (d1)
1477 && !REAL_VALUE_ISINF (d2))
1478 return NULL_TREE;
1479
1480 /* Don't constant fold this floating point operation if the
1481 result may depend upon the run-time rounding mode and
1482 flag_rounding_math is set, or if GCC's software emulation
1483 is unable to accurately represent the result. */
1484 if ((flag_rounding_math
1485 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1486 && (inexact || !real_identical (&result, &value)))
1487 return NULL_TREE;
1488
1489 t = build_real (type, result);
1490
1491 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1492 return t;
1493 }
1494
1495 if (TREE_CODE (arg1) == FIXED_CST)
1496 {
1497 FIXED_VALUE_TYPE f1;
1498 FIXED_VALUE_TYPE f2;
1499 FIXED_VALUE_TYPE result;
1500 tree t, type;
1501 bool sat_p;
1502 bool overflow_p;
1503
1504 /* The following codes are handled by fixed_arithmetic. */
1505 switch (code)
1506 {
1507 case PLUS_EXPR:
1508 case MINUS_EXPR:
1509 case MULT_EXPR:
1510 case TRUNC_DIV_EXPR:
1511 if (TREE_CODE (arg2) != FIXED_CST)
1512 return NULL_TREE;
1513 f2 = TREE_FIXED_CST (arg2);
1514 break;
1515
1516 case LSHIFT_EXPR:
1517 case RSHIFT_EXPR:
1518 {
1519 if (TREE_CODE (arg2) != INTEGER_CST)
1520 return NULL_TREE;
1521 wi::tree_to_wide_ref w2 = wi::to_wide (t: arg2);
1522 f2.data.high = w2.elt (i: 1);
1523 f2.data.low = w2.ulow ();
1524 f2.mode = SImode;
1525 }
1526 break;
1527
1528 default:
1529 return NULL_TREE;
1530 }
1531
1532 f1 = TREE_FIXED_CST (arg1);
1533 type = TREE_TYPE (arg1);
1534 sat_p = TYPE_SATURATING (type);
1535 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1536 t = build_fixed (type, result);
1537 /* Propagate overflow flags. */
1538 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1539 TREE_OVERFLOW (t) = 1;
1540 return t;
1541 }
1542
1543 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1544 {
1545 tree type = TREE_TYPE (arg1);
1546 tree r1 = TREE_REALPART (arg1);
1547 tree i1 = TREE_IMAGPART (arg1);
1548 tree r2 = TREE_REALPART (arg2);
1549 tree i2 = TREE_IMAGPART (arg2);
1550 tree real, imag;
1551
1552 switch (code)
1553 {
1554 case PLUS_EXPR:
1555 case MINUS_EXPR:
1556 real = const_binop (code, arg1: r1, arg2: r2);
1557 imag = const_binop (code, arg1: i1, arg2: i2);
1558 break;
1559
1560 case MULT_EXPR:
1561 if (COMPLEX_FLOAT_TYPE_P (type))
1562 return do_mpc_arg2 (arg1, arg2, type,
1563 /* do_nonfinite= */ folding_initializer,
1564 mpc_mul);
1565
1566 real = const_binop (code: MINUS_EXPR,
1567 arg1: const_binop (code: MULT_EXPR, arg1: r1, arg2: r2),
1568 arg2: const_binop (code: MULT_EXPR, arg1: i1, arg2: i2));
1569 imag = const_binop (code: PLUS_EXPR,
1570 arg1: const_binop (code: MULT_EXPR, arg1: r1, arg2: i2),
1571 arg2: const_binop (code: MULT_EXPR, arg1: i1, arg2: r2));
1572 break;
1573
1574 case RDIV_EXPR:
1575 if (COMPLEX_FLOAT_TYPE_P (type))
1576 return do_mpc_arg2 (arg1, arg2, type,
1577 /* do_nonfinite= */ folding_initializer,
1578 mpc_div);
1579 /* Fallthru. */
1580 case TRUNC_DIV_EXPR:
1581 case CEIL_DIV_EXPR:
1582 case FLOOR_DIV_EXPR:
1583 case ROUND_DIV_EXPR:
1584 if (flag_complex_method == 0)
1585 {
1586 /* Keep this algorithm in sync with
1587 tree-complex.cc:expand_complex_div_straight().
1588
1589 Expand complex division to scalars, straightforward algorithm.
1590 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1591 t = br*br + bi*bi
1592 */
1593 tree magsquared
1594 = const_binop (code: PLUS_EXPR,
1595 arg1: const_binop (code: MULT_EXPR, arg1: r2, arg2: r2),
1596 arg2: const_binop (code: MULT_EXPR, arg1: i2, arg2: i2));
1597 tree t1
1598 = const_binop (code: PLUS_EXPR,
1599 arg1: const_binop (code: MULT_EXPR, arg1: r1, arg2: r2),
1600 arg2: const_binop (code: MULT_EXPR, arg1: i1, arg2: i2));
1601 tree t2
1602 = const_binop (code: MINUS_EXPR,
1603 arg1: const_binop (code: MULT_EXPR, arg1: i1, arg2: r2),
1604 arg2: const_binop (code: MULT_EXPR, arg1: r1, arg2: i2));
1605
1606 real = const_binop (code, arg1: t1, arg2: magsquared);
1607 imag = const_binop (code, arg1: t2, arg2: magsquared);
1608 }
1609 else
1610 {
1611 /* Keep this algorithm in sync with
1612 tree-complex.cc:expand_complex_div_wide().
1613
1614 Expand complex division to scalars, modified algorithm to minimize
1615 overflow with wide input ranges. */
1616 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1617 fold_abs_const (r2, TREE_TYPE (type)),
1618 fold_abs_const (i2, TREE_TYPE (type)));
1619
1620 if (integer_nonzerop (compare))
1621 {
1622 /* In the TRUE branch, we compute
1623 ratio = br/bi;
1624 div = (br * ratio) + bi;
1625 tr = (ar * ratio) + ai;
1626 ti = (ai * ratio) - ar;
1627 tr = tr / div;
1628 ti = ti / div; */
1629 tree ratio = const_binop (code, arg1: r2, arg2: i2);
1630 tree div = const_binop (code: PLUS_EXPR, arg1: i2,
1631 arg2: const_binop (code: MULT_EXPR, arg1: r2, arg2: ratio));
1632 real = const_binop (code: MULT_EXPR, arg1: r1, arg2: ratio);
1633 real = const_binop (code: PLUS_EXPR, arg1: real, arg2: i1);
1634 real = const_binop (code, arg1: real, arg2: div);
1635
1636 imag = const_binop (code: MULT_EXPR, arg1: i1, arg2: ratio);
1637 imag = const_binop (code: MINUS_EXPR, arg1: imag, arg2: r1);
1638 imag = const_binop (code, arg1: imag, arg2: div);
1639 }
1640 else
1641 {
1642 /* In the FALSE branch, we compute
1643 ratio = d/c;
1644 divisor = (d * ratio) + c;
1645 tr = (b * ratio) + a;
1646 ti = b - (a * ratio);
1647 tr = tr / div;
1648 ti = ti / div; */
1649 tree ratio = const_binop (code, arg1: i2, arg2: r2);
1650 tree div = const_binop (code: PLUS_EXPR, arg1: r2,
1651 arg2: const_binop (code: MULT_EXPR, arg1: i2, arg2: ratio));
1652
1653 real = const_binop (code: MULT_EXPR, arg1: i1, arg2: ratio);
1654 real = const_binop (code: PLUS_EXPR, arg1: real, arg2: r1);
1655 real = const_binop (code, arg1: real, arg2: div);
1656
1657 imag = const_binop (code: MULT_EXPR, arg1: r1, arg2: ratio);
1658 imag = const_binop (code: MINUS_EXPR, arg1: i1, arg2: imag);
1659 imag = const_binop (code, arg1: imag, arg2: div);
1660 }
1661 }
1662 break;
1663
1664 default:
1665 return NULL_TREE;
1666 }
1667
1668 if (real && imag)
1669 return build_complex (type, real, imag);
1670 }
1671
1672 tree simplified;
1673 if ((simplified = simplify_const_binop (code, op: arg1, other_op: arg2, index: 0)))
1674 return simplified;
1675
1676 if (commutative_tree_code (code)
1677 && (simplified = simplify_const_binop (code, op: arg2, other_op: arg1, index: 1)))
1678 return simplified;
1679
1680 if (TREE_CODE (arg1) == VECTOR_CST
1681 && TREE_CODE (arg2) == VECTOR_CST
1682 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1683 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1684 {
1685 tree type = TREE_TYPE (arg1);
1686 bool step_ok_p;
1687 if (VECTOR_CST_STEPPED_P (arg1)
1688 && VECTOR_CST_STEPPED_P (arg2))
1689 /* We can operate directly on the encoding if:
1690
1691 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1692 implies
1693 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1694
1695 Addition and subtraction are the supported operators
1696 for which this is true. */
1697 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1698 else if (VECTOR_CST_STEPPED_P (arg1))
1699 /* We can operate directly on stepped encodings if:
1700
1701 a3 - a2 == a2 - a1
1702 implies:
1703 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1704
1705 which is true if (x -> x op c) distributes over addition. */
1706 step_ok_p = distributes_over_addition_p (op: code, opno: 1);
1707 else
1708 /* Similarly in reverse. */
1709 step_ok_p = distributes_over_addition_p (op: code, opno: 2);
1710 tree_vector_builder elts;
1711 if (!elts.new_binary_operation (shape: type, vec1: arg1, vec2: arg2, allow_stepped_p: step_ok_p))
1712 return NULL_TREE;
1713 unsigned int count = elts.encoded_nelts ();
1714 for (unsigned int i = 0; i < count; ++i)
1715 {
1716 tree elem1 = VECTOR_CST_ELT (arg1, i);
1717 tree elem2 = VECTOR_CST_ELT (arg2, i);
1718
1719 tree elt = const_binop (code, arg1: elem1, arg2: elem2);
1720
1721 /* It is possible that const_binop cannot handle the given
1722 code and returns NULL_TREE. */
1723 if (elt == NULL_TREE)
1724 return NULL_TREE;
1725 elts.quick_push (obj: elt);
1726 }
1727
1728 return elts.build ();
1729 }
1730
1731 /* Shifts allow a scalar offset for a vector. */
1732 if (TREE_CODE (arg1) == VECTOR_CST
1733 && TREE_CODE (arg2) == INTEGER_CST)
1734 {
1735 tree type = TREE_TYPE (arg1);
1736 bool step_ok_p = distributes_over_addition_p (op: code, opno: 1);
1737 tree_vector_builder elts;
1738 if (!elts.new_unary_operation (shape: type, vec: arg1, allow_stepped_p: step_ok_p))
1739 return NULL_TREE;
1740 unsigned int count = elts.encoded_nelts ();
1741 for (unsigned int i = 0; i < count; ++i)
1742 {
1743 tree elem1 = VECTOR_CST_ELT (arg1, i);
1744
1745 tree elt = const_binop (code, arg1: elem1, arg2);
1746
1747 /* It is possible that const_binop cannot handle the given
1748 code and return NULL_TREE. */
1749 if (elt == NULL_TREE)
1750 return NULL_TREE;
1751 elts.quick_push (obj: elt);
1752 }
1753
1754 return elts.build ();
1755 }
1756 return NULL_TREE;
1757}
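
/* For example, PLUS_EXPR on two REAL_CSTs is evaluated through
   real_arithmetic above, PLUS_EXPR on two VECTOR_CSTs is folded
   element-wise (or directly on the encoding when both are stepped), and a
   vector shifted by a scalar INTEGER_CST is handled by the final loop.  */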
1758
1759/* Overload that adds a TYPE parameter to be able to dispatch
1760 to fold_relational_const. */
1761
1762tree
1763const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1764{
1765 if (TREE_CODE_CLASS (code) == tcc_comparison)
1766 return fold_relational_const (code, type, arg1, arg2);
1767
1768 /* ??? Until we make the const_binop worker take the type of the
1769 result as argument put those cases that need it here. */
1770 switch (code)
1771 {
1772 case VEC_SERIES_EXPR:
1773 if (CONSTANT_CLASS_P (arg1)
1774 && CONSTANT_CLASS_P (arg2))
1775 return build_vec_series (type, arg1, arg2);
1776 return NULL_TREE;
1777
1778 case COMPLEX_EXPR:
1779 if ((TREE_CODE (arg1) == REAL_CST
1780 && TREE_CODE (arg2) == REAL_CST)
1781 || (TREE_CODE (arg1) == INTEGER_CST
1782 && TREE_CODE (arg2) == INTEGER_CST))
1783 return build_complex (type, arg1, arg2);
1784 return NULL_TREE;
1785
1786 case POINTER_DIFF_EXPR:
1787 if (poly_int_tree_p (t: arg1) && poly_int_tree_p (t: arg2))
1788 {
1789 poly_offset_int res = (wi::to_poly_offset (t: arg1)
1790 - wi::to_poly_offset (t: arg2));
1791 return force_fit_type (type, res, 1,
1792 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1793 }
1794 return NULL_TREE;
1795
1796 case VEC_PACK_TRUNC_EXPR:
1797 case VEC_PACK_FIX_TRUNC_EXPR:
1798 case VEC_PACK_FLOAT_EXPR:
1799 {
1800 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1801
1802 if (TREE_CODE (arg1) != VECTOR_CST
1803 || TREE_CODE (arg2) != VECTOR_CST)
1804 return NULL_TREE;
1805
1806 if (!VECTOR_CST_NELTS (arg1).is_constant (const_value: &in_nelts))
1807 return NULL_TREE;
1808
1809 out_nelts = in_nelts * 2;
1810 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1811 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1812
1813 tree_vector_builder elts (type, out_nelts, 1);
1814 for (i = 0; i < out_nelts; i++)
1815 {
1816 tree elt = (i < in_nelts
1817 ? VECTOR_CST_ELT (arg1, i)
1818 : VECTOR_CST_ELT (arg2, i - in_nelts));
1819 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1820 ? NOP_EXPR
1821 : code == VEC_PACK_FLOAT_EXPR
1822 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1823 TREE_TYPE (type), elt);
1824 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1825 return NULL_TREE;
1826 elts.quick_push (obj: elt);
1827 }
1828
1829 return elts.build ();
1830 }
1831
1832 case VEC_WIDEN_MULT_LO_EXPR:
1833 case VEC_WIDEN_MULT_HI_EXPR:
1834 case VEC_WIDEN_MULT_EVEN_EXPR:
1835 case VEC_WIDEN_MULT_ODD_EXPR:
1836 {
1837 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1838
1839 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1840 return NULL_TREE;
1841
1842 if (!VECTOR_CST_NELTS (arg1).is_constant (const_value: &in_nelts))
1843 return NULL_TREE;
1844 out_nelts = in_nelts / 2;
1845 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1846 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1847
1848 if (code == VEC_WIDEN_MULT_LO_EXPR)
1849 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1850 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1851 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1852 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1853 scale = 1, ofs = 0;
1854 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1855 scale = 1, ofs = 1;
1856
1857 tree_vector_builder elts (type, out_nelts, 1);
1858 for (out = 0; out < out_nelts; out++)
1859 {
1860 unsigned int in = (out << scale) + ofs;
1861 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1862 VECTOR_CST_ELT (arg1, in));
1863 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1864 VECTOR_CST_ELT (arg2, in));
1865
1866 if (t1 == NULL_TREE || t2 == NULL_TREE)
1867 return NULL_TREE;
1868 tree elt = const_binop (code: MULT_EXPR, arg1: t1, arg2: t2);
1869 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1870 return NULL_TREE;
1871            elts.quick_push (elt);
1872 }
1873
1874 return elts.build ();
1875 }
1876
1877 default:;
1878 }
1879
1880 if (TREE_CODE_CLASS (code) != tcc_binary)
1881 return NULL_TREE;
1882
1883 /* Make sure type and arg0 have the same saturating flag. */
1884 gcc_checking_assert (TYPE_SATURATING (type)
1885 == TYPE_SATURATING (TREE_TYPE (arg1)));
1886
1887 return const_binop (code, arg1, arg2);
1888}
1889
1890/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1891   Return NULL_TREE if computing the constant is not possible.  */
1892
1893tree
1894const_unop (enum tree_code code, tree type, tree arg0)
1895{
1896 /* Don't perform the operation, other than NEGATE and ABS, if
1897 flag_signaling_nans is on and the operand is a signaling NaN. */
1898 if (TREE_CODE (arg0) == REAL_CST
1899 && HONOR_SNANS (arg0)
1900 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1901 && code != NEGATE_EXPR
1902 && code != ABS_EXPR
1903 && code != ABSU_EXPR)
1904 return NULL_TREE;
1905
1906 switch (code)
1907 {
1908 CASE_CONVERT:
1909 case FLOAT_EXPR:
1910 case FIX_TRUNC_EXPR:
1911 case FIXED_CONVERT_EXPR:
1912 return fold_convert_const (code, type, arg0);
1913
1914 case ADDR_SPACE_CONVERT_EXPR:
1915 /* If the source address is 0, and the source address space
1916 cannot have a valid object at 0, fold to dest type null. */
1917 if (integer_zerop (arg0)
1918 && !(targetm.addr_space.zero_address_valid
1919 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1920 return fold_convert_const (code, type, arg0);
1921 break;
1922
1923 case VIEW_CONVERT_EXPR:
1924 return fold_view_convert_expr (type, arg0);
1925
1926 case NEGATE_EXPR:
1927 {
1928 /* Can't call fold_negate_const directly here as that doesn't
1929 handle all cases and we might not be able to negate some
1930 constants. */
1931        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1932 if (tem && CONSTANT_CLASS_P (tem))
1933 return tem;
1934 break;
1935 }
1936
1937 case ABS_EXPR:
1938 case ABSU_EXPR:
1939 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1940 return fold_abs_const (arg0, type);
1941 break;
1942
1943 case CONJ_EXPR:
1944 if (TREE_CODE (arg0) == COMPLEX_CST)
1945 {
1946 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1947 TREE_TYPE (type));
1948 return build_complex (type, TREE_REALPART (arg0), ipart);
1949 }
1950 break;
1951
1952 case BIT_NOT_EXPR:
1953 if (TREE_CODE (arg0) == INTEGER_CST)
1954 return fold_not_const (arg0, type);
1955 else if (POLY_INT_CST_P (arg0))
1956        return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1957 /* Perform BIT_NOT_EXPR on each element individually. */
1958 else if (TREE_CODE (arg0) == VECTOR_CST)
1959 {
1960 tree elem;
1961
1962 /* This can cope with stepped encodings because ~x == -1 - x. */
1963 tree_vector_builder elements;
1964          elements.new_unary_operation (type, arg0, true);
1965 unsigned int i, count = elements.encoded_nelts ();
1966 for (i = 0; i < count; ++i)
1967 {
1968 elem = VECTOR_CST_ELT (arg0, i);
1969              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1970 if (elem == NULL_TREE)
1971 break;
1972              elements.quick_push (elem);
1973 }
1974 if (i == count)
1975 return elements.build ();
1976 }
1977 break;
1978
1979 case TRUTH_NOT_EXPR:
1980 if (TREE_CODE (arg0) == INTEGER_CST)
1981 return constant_boolean_node (integer_zerop (arg0), type);
1982 break;
1983
1984 case REALPART_EXPR:
1985 if (TREE_CODE (arg0) == COMPLEX_CST)
1986 return fold_convert (type, TREE_REALPART (arg0));
1987 break;
1988
1989 case IMAGPART_EXPR:
1990 if (TREE_CODE (arg0) == COMPLEX_CST)
1991 return fold_convert (type, TREE_IMAGPART (arg0));
1992 break;
1993
1994 case VEC_UNPACK_LO_EXPR:
1995 case VEC_UNPACK_HI_EXPR:
1996 case VEC_UNPACK_FLOAT_LO_EXPR:
1997 case VEC_UNPACK_FLOAT_HI_EXPR:
1998 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1999 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
2000 {
2001 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
2002 enum tree_code subcode;
2003
2004 if (TREE_CODE (arg0) != VECTOR_CST)
2005 return NULL_TREE;
2006
2007        if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
2008 return NULL_TREE;
2009 out_nelts = in_nelts / 2;
2010 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
2011
2012 unsigned int offset = 0;
2013 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
2014 || code == VEC_UNPACK_FLOAT_LO_EXPR
2015 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
2016 offset = out_nelts;
2017
2018 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
2019 subcode = NOP_EXPR;
2020 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
2021 || code == VEC_UNPACK_FLOAT_HI_EXPR)
2022 subcode = FLOAT_EXPR;
2023 else
2024 subcode = FIX_TRUNC_EXPR;
2025
2026 tree_vector_builder elts (type, out_nelts, 1);
2027 for (i = 0; i < out_nelts; i++)
2028 {
2029 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
2030 VECTOR_CST_ELT (arg0, i + offset));
2031 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
2032 return NULL_TREE;
2033            elts.quick_push (elt);
2034 }
2035
2036 return elts.build ();
2037 }
2038
2039 case VEC_DUPLICATE_EXPR:
2040 if (CONSTANT_CLASS_P (arg0))
2041 return build_vector_from_val (type, arg0);
2042 return NULL_TREE;
2043
2044 default:
2045 break;
2046 }
2047
2048 return NULL_TREE;
2049}
2050
2051/* Create a sizetype INT_CST node with NUMBER sign extended. KIND
2052 indicates which particular sizetype to create. */
2053
2054tree
2055size_int_kind (poly_int64 number, enum size_type_kind kind)
2056{
2057 return build_int_cst (sizetype_tab[(int) kind], number);
2058}
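
/* For illustration (assuming the usual wrapper macros from tree.h):
   size_int (4) expands to size_int_kind (4, stk_sizetype) and yields a
   sizetype INTEGER_CST, while bitsize_int (32) expands to
   size_int_kind (32, stk_bitsizetype) and yields a bitsizetype one.  */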
2059
2060/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
2061   is a tree code.  The type of the result is taken from the operands.
2062   Both must be equivalent integer types, ala int_binop_types_match_p.
2063   If the operands are constant, so is the result.  */
2064
2065tree
2066size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2067{
2068 tree type = TREE_TYPE (arg0);
2069
2070 if (arg0 == error_mark_node || arg1 == error_mark_node)
2071 return error_mark_node;
2072
2073 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2074 TREE_TYPE (arg1)));
2075
2076 /* Handle the special case of two poly_int constants faster. */
2077  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2078 {
2079 /* And some specific cases even faster than that. */
2080 if (code == PLUS_EXPR)
2081 {
2082 if (integer_zerop (arg0)
2083 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2084 return arg1;
2085 if (integer_zerop (arg1)
2086 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2087 return arg0;
2088 }
2089 else if (code == MINUS_EXPR)
2090 {
2091 if (integer_zerop (arg1)
2092 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2093 return arg0;
2094 }
2095 else if (code == MULT_EXPR)
2096 {
2097 if (integer_onep (arg0)
2098 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2099 return arg1;
2100 }
2101
2102 /* Handle general case of two integer constants. For sizetype
2103 constant calculations we always want to know about overflow,
2104 even in the unsigned case. */
2105      tree res = int_const_binop (code, arg0, arg1, -1);
2106 if (res != NULL_TREE)
2107 return res;
2108 }
2109
2110 return fold_build2_loc (loc, code, type, arg0, arg1);
2111}
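
/* Illustrative use (size_binop is assumed to be the UNKNOWN_LOCATION
   wrapper around size_binop_loc):

     tree off = size_binop (PLUS_EXPR, TYPE_SIZE_UNIT (rec), size_int (8));

   folds to a plain INTEGER_CST when TYPE_SIZE_UNIT (rec) is constant,
   and otherwise builds the PLUS_EXPR via fold_build2_loc.  */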
2112
2113/* Given two values, either both of sizetype or both of bitsizetype,
2114 compute the difference between the two values. Return the value
2115 in signed type corresponding to the type of the operands. */
2116
2117tree
2118size_diffop_loc (location_t loc, tree arg0, tree arg1)
2119{
2120 tree type = TREE_TYPE (arg0);
2121 tree ctype;
2122
2123 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2124 TREE_TYPE (arg1)));
2125
2126 /* If the type is already signed, just do the simple thing. */
2127 if (!TYPE_UNSIGNED (type))
2128    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2129
2130 if (type == sizetype)
2131 ctype = ssizetype;
2132 else if (type == bitsizetype)
2133 ctype = sbitsizetype;
2134 else
2135 ctype = signed_type_for (type);
2136
2137 /* If either operand is not a constant, do the conversions to the signed
2138 type and subtract. The hardware will do the right thing with any
2139 overflow in the subtraction. */
2140 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2141    return size_binop_loc (loc, MINUS_EXPR,
2142                           fold_convert_loc (loc, ctype, arg0),
2143                           fold_convert_loc (loc, ctype, arg1));
2144
2145 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2146 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2147 overflow) and negate (which can't either). Special-case a result
2148 of zero while we're here. */
2149 if (tree_int_cst_equal (arg0, arg1))
2150 return build_int_cst (ctype, 0);
2151  else if (tree_int_cst_lt (arg1, arg0))
2152    return fold_convert_loc (loc, ctype,
2153                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2154  else
2155    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2156                           fold_convert_loc (loc, ctype,
2157                                             size_binop_loc (loc,
2158                                                             MINUS_EXPR,
2159                                                             arg1, arg0)));
2160}
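
/* Worked example: with ARG0 and ARG1 both sizetype (unsigned),
   size_diffop_loc on the constants 2 and 5 takes the last branch above
   and computes 0 - (ssizetype) (5 - 2), i.e. ssizetype -3; the
   intermediate subtraction is always done in the order that cannot
   wrap.  */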
2161
2162/* A subroutine of fold_convert_const handling conversions of an
2163 INTEGER_CST to another integer type. */
2164
2165static tree
2166fold_convert_const_int_from_int (tree type, const_tree arg1)
2167{
2168  /* Given an integer constant, make new constant with new type,
2169     appropriately sign-extended or truncated.  Use widest_int
2170     so that any extension is done according to ARG1's type.  */
2171 tree arg1_type = TREE_TYPE (arg1);
2172 unsigned prec = MAX (TYPE_PRECISION (arg1_type), TYPE_PRECISION (type));
2173  return force_fit_type (type, wide_int::from (wi::to_wide (arg1), prec,
2174 TYPE_SIGN (arg1_type)),
2175 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2176 TREE_OVERFLOW (arg1));
2177}
2178
2179/* A subroutine of fold_convert_const handling conversions of a REAL_CST
2180   to an integer type.  */
2181
2182static tree
2183fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2184{
2185 bool overflow = false;
2186 tree t;
2187
2188 /* The following code implements the floating point to integer
2189 conversion rules required by the Java Language Specification,
2190 that IEEE NaNs are mapped to zero and values that overflow
2191 the target precision saturate, i.e. values greater than
2192 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2193 are mapped to INT_MIN. These semantics are allowed by the
2194 C and C++ standards that simply state that the behavior of
2195 FP-to-integer conversion is unspecified upon overflow. */
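
  /* Illustrative folds under these rules: (int) __builtin_nan ("") becomes
     0, and (int) 1.0e30 becomes INT_MAX; in both cases TREE_OVERFLOW is
     set on the result so callers can diagnose the overflow.  */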
2196
2197 wide_int val;
2198 REAL_VALUE_TYPE r;
2199 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2200
2201 switch (code)
2202 {
2203 case FIX_TRUNC_EXPR:
2204 real_trunc (&r, VOIDmode, &x);
2205 break;
2206
2207 default:
2208 gcc_unreachable ();
2209 }
2210
2211 /* If R is NaN, return zero and show we have an overflow. */
2212 if (REAL_VALUE_ISNAN (r))
2213 {
2214 overflow = true;
2215 val = wi::zero (TYPE_PRECISION (type));
2216 }
2217
2218 /* See if R is less than the lower bound or greater than the
2219 upper bound. */
2220
2221 if (! overflow)
2222 {
2223 tree lt = TYPE_MIN_VALUE (type);
2224 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2225 if (real_less (&r, &l))
2226 {
2227 overflow = true;
2228          val = wi::to_wide (lt);
2229 }
2230 }
2231
2232 if (! overflow)
2233 {
2234 tree ut = TYPE_MAX_VALUE (type);
2235 if (ut)
2236 {
2237 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2238 if (real_less (&u, &r))
2239 {
2240 overflow = true;
2241              val = wi::to_wide (ut);
2242 }
2243 }
2244 }
2245
2246 if (! overflow)
2247 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2248
2249 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2250 return t;
2251}
2252
2253/* A subroutine of fold_convert_const handling conversions of a
2254 FIXED_CST to an integer type. */
2255
2256static tree
2257fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2258{
2259 tree t;
2260 double_int temp, temp_trunc;
2261 scalar_mode mode;
2262
2263 /* Right shift FIXED_CST to temp by fbit. */
2264 temp = TREE_FIXED_CST (arg1).data;
2265 mode = TREE_FIXED_CST (arg1).mode;
2266 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2267 {
2268 temp = temp.rshift (GET_MODE_FBIT (mode),
2269 HOST_BITS_PER_DOUBLE_INT,
2270 SIGNED_FIXED_POINT_MODE_P (mode));
2271
2272 /* Left shift temp to temp_trunc by fbit. */
2273 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2274 HOST_BITS_PER_DOUBLE_INT,
2275 SIGNED_FIXED_POINT_MODE_P (mode));
2276 }
2277 else
2278 {
2279 temp = double_int_zero;
2280 temp_trunc = double_int_zero;
2281 }
2282
2283  /* If FIXED_CST is negative, we need to round the value toward 0:
2284     if the fractional bits are not zero, add 1 to TEMP.  */
2285 if (SIGNED_FIXED_POINT_MODE_P (mode)
2286 && temp_trunc.is_negative ()
2287 && TREE_FIXED_CST (arg1).data != temp_trunc)
2288 temp += double_int_one;
2289
2290 /* Given a fixed-point constant, make new constant with new type,
2291 appropriately sign-extended or truncated. */
2292 t = force_fit_type (type, temp, -1,
2293 (temp.is_negative ()
2294 && (TYPE_UNSIGNED (type)
2295 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2296 | TREE_OVERFLOW (arg1));
2297
2298 return t;
2299}
2300
2301/* A subroutine of fold_convert_const handling conversions of a REAL_CST
2302   to another floating point type.  */
2303
2304static tree
2305fold_convert_const_real_from_real (tree type, const_tree arg1)
2306{
2307 REAL_VALUE_TYPE value;
2308 tree t;
2309
2310  /* If the underlying modes are the same, simply treat it as a
2311     copy and rebuild the result with TREE_REAL_CST information and
2312     the given type.  */
2313 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2314 {
2315 t = build_real (type, TREE_REAL_CST (arg1));
2316 return t;
2317 }
2318
2319 /* Don't perform the operation if flag_signaling_nans is on
2320 and the operand is a signaling NaN. */
2321 if (HONOR_SNANS (arg1)
2322 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2323 return NULL_TREE;
2324
2325 /* With flag_rounding_math we should respect the current rounding mode
2326 unless the conversion is exact. */
2327 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2328 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2329 return NULL_TREE;
2330
2331 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2332 t = build_real (type, value);
2333
2334 /* If converting an infinity or NAN to a representation that doesn't
2335 have one, set the overflow bit so that we can produce some kind of
2336 error message at the appropriate point if necessary. It's not the
2337 most user-friendly message, but it's better than nothing. */
2338 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2339 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2340 TREE_OVERFLOW (t) = 1;
2341 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2342 && !MODE_HAS_NANS (TYPE_MODE (type)))
2343 TREE_OVERFLOW (t) = 1;
2344  /* Regular overflow: the conversion produced an infinity in a mode
2345     that can't represent infinities.  */
2346 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2347 && REAL_VALUE_ISINF (value)
2348 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2349 TREE_OVERFLOW (t) = 1;
2350 else
2351 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2352 return t;
2353}
2354
2355/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2356   to a floating point type.  */
2357
2358static tree
2359fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2360{
2361 REAL_VALUE_TYPE value;
2362 tree t;
2363
2364 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2365 &TREE_FIXED_CST (arg1));
2366 t = build_real (type, value);
2367
2368 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2369 return t;
2370}
2371
2372/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2373   to another fixed-point type.  */
2374
2375static tree
2376fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2377{
2378 FIXED_VALUE_TYPE value;
2379 tree t;
2380 bool overflow_p;
2381
2382 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2383 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2384 t = build_fixed (type, value);
2385
2386 /* Propagate overflow flags. */
2387 if (overflow_p | TREE_OVERFLOW (arg1))
2388 TREE_OVERFLOW (t) = 1;
2389 return t;
2390}
2391
2392/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2393   to a fixed-point type.  */
2394
2395static tree
2396fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2397{
2398 FIXED_VALUE_TYPE value;
2399 tree t;
2400 bool overflow_p;
2401 double_int di;
2402
2403 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2404
2405 di.low = TREE_INT_CST_ELT (arg1, 0);
2406 if (TREE_INT_CST_NUNITS (arg1) == 1)
2407 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2408 else
2409 di.high = TREE_INT_CST_ELT (arg1, 1);
2410
2411 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2412 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2413 TYPE_SATURATING (type));
2414 t = build_fixed (type, value);
2415
2416 /* Propagate overflow flags. */
2417 if (overflow_p | TREE_OVERFLOW (arg1))
2418 TREE_OVERFLOW (t) = 1;
2419 return t;
2420}
2421
2422/* A subroutine of fold_convert_const handling conversions of a REAL_CST
2423   to a fixed-point type.  */
2424
2425static tree
2426fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2427{
2428 FIXED_VALUE_TYPE value;
2429 tree t;
2430 bool overflow_p;
2431
2432 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2433 &TREE_REAL_CST (arg1),
2434 TYPE_SATURATING (type));
2435 t = build_fixed (type, value);
2436
2437 /* Propagate overflow flags. */
2438 if (overflow_p | TREE_OVERFLOW (arg1))
2439 TREE_OVERFLOW (t) = 1;
2440 return t;
2441}
2442
2443/* Attempt to fold type conversion operation CODE of expression ARG1 to
2444 type TYPE. If no simplification can be done return NULL_TREE. */
2445
2446static tree
2447fold_convert_const (enum tree_code code, tree type, tree arg1)
2448{
2449 tree arg_type = TREE_TYPE (arg1);
2450 if (arg_type == type)
2451 return arg1;
2452
2453 /* We can't widen types, since the runtime value could overflow the
2454 original type before being extended to the new type. */
2455 if (POLY_INT_CST_P (arg1)
2456 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2457 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2458 return build_poly_int_cst (type,
2459                               poly_wide_int::from (poly_int_cst_value (arg1),
2460 TYPE_PRECISION (type),
2461 TYPE_SIGN (arg_type)));
2462
2463 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2464 || TREE_CODE (type) == OFFSET_TYPE)
2465 {
2466 if (TREE_CODE (arg1) == INTEGER_CST)
2467 return fold_convert_const_int_from_int (type, arg1);
2468 else if (TREE_CODE (arg1) == REAL_CST)
2469 return fold_convert_const_int_from_real (code, type, arg1);
2470 else if (TREE_CODE (arg1) == FIXED_CST)
2471 return fold_convert_const_int_from_fixed (type, arg1);
2472 }
2473 else if (SCALAR_FLOAT_TYPE_P (type))
2474 {
2475 if (TREE_CODE (arg1) == INTEGER_CST)
2476 {
2477 tree res = build_real_from_int_cst (type, arg1);
2478 /* Avoid the folding if flag_rounding_math is on and the
2479 conversion is not exact. */
2480 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2481 {
2482 bool fail = false;
2483 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2484 TYPE_PRECISION (TREE_TYPE (arg1)));
2485              if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2486 return NULL_TREE;
2487 }
2488 return res;
2489 }
2490 else if (TREE_CODE (arg1) == REAL_CST)
2491 return fold_convert_const_real_from_real (type, arg1);
2492 else if (TREE_CODE (arg1) == FIXED_CST)
2493 return fold_convert_const_real_from_fixed (type, arg1);
2494 }
2495 else if (FIXED_POINT_TYPE_P (type))
2496 {
2497 if (TREE_CODE (arg1) == FIXED_CST)
2498 return fold_convert_const_fixed_from_fixed (type, arg1);
2499 else if (TREE_CODE (arg1) == INTEGER_CST)
2500 return fold_convert_const_fixed_from_int (type, arg1);
2501 else if (TREE_CODE (arg1) == REAL_CST)
2502 return fold_convert_const_fixed_from_real (type, arg1);
2503 }
2504 else if (VECTOR_TYPE_P (type))
2505 {
2506 if (TREE_CODE (arg1) == VECTOR_CST
2507 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2508 {
2509 tree elttype = TREE_TYPE (type);
2510 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2511 /* We can't handle steps directly when extending, since the
2512 values need to wrap at the original precision first. */
2513 bool step_ok_p
2514 = (INTEGRAL_TYPE_P (elttype)
2515 && INTEGRAL_TYPE_P (arg1_elttype)
2516 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2517 tree_vector_builder v;
2518          if (!v.new_unary_operation (type, arg1, step_ok_p))
2519 return NULL_TREE;
2520 unsigned int len = v.encoded_nelts ();
2521 for (unsigned int i = 0; i < len; ++i)
2522 {
2523 tree elt = VECTOR_CST_ELT (arg1, i);
2524              tree cvt = fold_convert_const (code, elttype, elt);
2525 if (cvt == NULL_TREE)
2526 return NULL_TREE;
2527              v.quick_push (cvt);
2528 }
2529 return v.build ();
2530 }
2531 }
2532 return NULL_TREE;
2533}
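
/* For example, fold_convert_const (FLOAT_EXPR, double_type_node, <3>)
   yields the REAL_CST 3.0, while a request it cannot handle (say a
   VECTOR_CST whose element count does not match TYPE) simply returns
   NULL_TREE and leaves the folding to the caller.  */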
2534
2535/* Construct a vector of zero elements of vector type TYPE. */
2536
2537static tree
2538build_zero_vector (tree type)
2539{
2540 tree t;
2541
2542  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2543 return build_vector_from_val (type, t);
2544}
2545
2546/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2547
2548bool
2549fold_convertible_p (const_tree type, const_tree arg)
2550{
2551 const_tree orig = TREE_TYPE (arg);
2552
2553 if (type == orig)
2554 return true;
2555
2556 if (TREE_CODE (arg) == ERROR_MARK
2557 || TREE_CODE (type) == ERROR_MARK
2558 || TREE_CODE (orig) == ERROR_MARK)
2559 return false;
2560
2561 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2562 return true;
2563
2564 switch (TREE_CODE (type))
2565 {
2566 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2567 case POINTER_TYPE: case REFERENCE_TYPE:
2568 case OFFSET_TYPE:
2569 return (INTEGRAL_TYPE_P (orig)
2570 || (POINTER_TYPE_P (orig)
2571 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2572 || TREE_CODE (orig) == OFFSET_TYPE);
2573
2574 case REAL_TYPE:
2575 case FIXED_POINT_TYPE:
2576 case VOID_TYPE:
2577 return TREE_CODE (type) == TREE_CODE (orig);
2578
2579 case VECTOR_TYPE:
2580 return (VECTOR_TYPE_P (orig)
2581 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2582 TYPE_VECTOR_SUBPARTS (orig))
2583 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2584
2585 default:
2586 return false;
2587 }
2588}
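
/* For example, a conversion between two integer types is always a NOP
   for this predicate, whereas int -> double is not (the REAL_TYPE case
   requires both types to have the same tree code), and a pointer may
   only be converted to an integer of no greater precision.  */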
2589
2590/* Convert expression ARG to type TYPE. Used by the middle-end for
2591 simple conversions in preference to calling the front-end's convert. */
2592
2593tree
2594fold_convert_loc (location_t loc, tree type, tree arg)
2595{
2596 tree orig = TREE_TYPE (arg);
2597 tree tem;
2598
2599 if (type == orig)
2600 return arg;
2601
2602 if (TREE_CODE (arg) == ERROR_MARK
2603 || TREE_CODE (type) == ERROR_MARK
2604 || TREE_CODE (orig) == ERROR_MARK)
2605 return error_mark_node;
2606
2607 switch (TREE_CODE (type))
2608 {
2609 case POINTER_TYPE:
2610 case REFERENCE_TYPE:
2611 /* Handle conversions between pointers to different address spaces. */
2612 if (POINTER_TYPE_P (orig)
2613 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2614 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2615 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2616 /* fall through */
2617
2618 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2619 case OFFSET_TYPE: case BITINT_TYPE:
2620 if (TREE_CODE (arg) == INTEGER_CST)
2621 {
2622          tem = fold_convert_const (NOP_EXPR, type, arg);
2623 if (tem != NULL_TREE)
2624 return tem;
2625 }
2626 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2627 || TREE_CODE (orig) == OFFSET_TYPE)
2628 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2629 if (TREE_CODE (orig) == COMPLEX_TYPE)
2630 return fold_convert_loc (loc, type,
2631                                 fold_build1_loc (loc, REALPART_EXPR,
2632 TREE_TYPE (orig), arg));
2633 gcc_assert (VECTOR_TYPE_P (orig)
2634 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2636
2637 case REAL_TYPE:
2638 if (TREE_CODE (arg) == INTEGER_CST)
2639 {
2640          tem = fold_convert_const (FLOAT_EXPR, type, arg);
2641 if (tem != NULL_TREE)
2642 return tem;
2643 }
2644 else if (TREE_CODE (arg) == REAL_CST)
2645 {
2646          tem = fold_convert_const (NOP_EXPR, type, arg);
2647 if (tem != NULL_TREE)
2648 return tem;
2649 }
2650 else if (TREE_CODE (arg) == FIXED_CST)
2651 {
2652          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2653 if (tem != NULL_TREE)
2654 return tem;
2655 }
2656
2657 switch (TREE_CODE (orig))
2658 {
2659 case INTEGER_TYPE: case BITINT_TYPE:
2660 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2661 case POINTER_TYPE: case REFERENCE_TYPE:
2662 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2663
2664 case REAL_TYPE:
2665 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2666
2667 case FIXED_POINT_TYPE:
2668 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2669
2670 case COMPLEX_TYPE:
2671 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2672          return fold_convert_loc (loc, type, tem);
2673
2674 default:
2675 gcc_unreachable ();
2676 }
2677
2678 case FIXED_POINT_TYPE:
2679 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2680 || TREE_CODE (arg) == REAL_CST)
2681 {
2682          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2683 if (tem != NULL_TREE)
2684 goto fold_convert_exit;
2685 }
2686
2687 switch (TREE_CODE (orig))
2688 {
2689 case FIXED_POINT_TYPE:
2690 case INTEGER_TYPE:
2691 case ENUMERAL_TYPE:
2692 case BOOLEAN_TYPE:
2693 case REAL_TYPE:
2694 case BITINT_TYPE:
2695 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2696
2697 case COMPLEX_TYPE:
2698 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2699          return fold_convert_loc (loc, type, tem);
2700
2701 default:
2702 gcc_unreachable ();
2703 }
2704
2705 case COMPLEX_TYPE:
2706 switch (TREE_CODE (orig))
2707 {
2708 case INTEGER_TYPE: case BITINT_TYPE:
2709 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2710 case POINTER_TYPE: case REFERENCE_TYPE:
2711 case REAL_TYPE:
2712 case FIXED_POINT_TYPE:
2713 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2714 fold_convert_loc (loc, TREE_TYPE (type), arg),
2715 fold_convert_loc (loc, TREE_TYPE (type),
2716 integer_zero_node));
2717 case COMPLEX_TYPE:
2718 {
2719 tree rpart, ipart;
2720
2721 if (TREE_CODE (arg) == COMPLEX_EXPR)
2722 {
2723 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2724 TREE_OPERAND (arg, 0));
2725 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2726 TREE_OPERAND (arg, 1));
2727 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2728 }
2729
2730 arg = save_expr (arg);
2731 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2732 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2733          rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2734          ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2735 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2736 }
2737
2738 default:
2739 gcc_unreachable ();
2740 }
2741
2742 case VECTOR_TYPE:
2743 if (integer_zerop (arg))
2744 return build_zero_vector (type);
2745 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2746 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2747 || VECTOR_TYPE_P (orig));
2748 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2749
2750 case VOID_TYPE:
2751 tem = fold_ignored_result (arg);
2752 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2753
2754 default:
2755 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2756 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2757 gcc_unreachable ();
2758 }
2759 fold_convert_exit:
2760  tem = protected_set_expr_location_unshare (tem, loc);
2761 return tem;
2762}
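
/* For example, converting a COMPLEX_TYPE value to a REAL_TYPE drops the
   imaginary part via REALPART_EXPR, and converting the INTEGER_CST 3 to
   double folds directly to the REAL_CST 3.0 through fold_convert_const
   above.  */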
2763
2764/* Return false if expr can be assumed not to be an lvalue, true
2765 otherwise. */
2766
2767static bool
2768maybe_lvalue_p (const_tree x)
2769{
2770 /* We only need to wrap lvalue tree codes. */
2771 switch (TREE_CODE (x))
2772 {
2773 case VAR_DECL:
2774 case PARM_DECL:
2775 case RESULT_DECL:
2776 case LABEL_DECL:
2777 case FUNCTION_DECL:
2778 case SSA_NAME:
2779 case COMPOUND_LITERAL_EXPR:
2780
2781 case COMPONENT_REF:
2782 case MEM_REF:
2783 case INDIRECT_REF:
2784 case ARRAY_REF:
2785 case ARRAY_RANGE_REF:
2786 case BIT_FIELD_REF:
2787 case OBJ_TYPE_REF:
2788
2789 case REALPART_EXPR:
2790 case IMAGPART_EXPR:
2791 case PREINCREMENT_EXPR:
2792 case PREDECREMENT_EXPR:
2793 case SAVE_EXPR:
2794 case TRY_CATCH_EXPR:
2795 case WITH_CLEANUP_EXPR:
2796 case COMPOUND_EXPR:
2797 case MODIFY_EXPR:
2798 case TARGET_EXPR:
2799 case COND_EXPR:
2800 case BIND_EXPR:
2801 case VIEW_CONVERT_EXPR:
2802 break;
2803
2804 default:
2805 /* Assume the worst for front-end tree codes. */
2806 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2807 break;
2808 return false;
2809 }
2810
2811 return true;
2812}
2813
2814/* Return an expr equal to X but certainly not valid as an lvalue. */
2815
2816tree
2817non_lvalue_loc (location_t loc, tree x)
2818{
2819 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2820 us. */
2821 if (in_gimple_form)
2822 return x;
2823
2824 if (! maybe_lvalue_p (x))
2825 return x;
2826  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2827}
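
/* For example, when folding a COMPOUND_EXPR whose first operand has no
   side effects down to its second operand X, wrapping the result as
   NON_LVALUE_EXPR <X> keeps the folded expression from being used as an
   assignment target; in GIMPLE the wrapper is meaningless, so the
   operand is returned unchanged.  */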
2828
2829/* Given a tree comparison code, return the code that is the logical inverse.
2830 It is generally not safe to do this for floating-point comparisons, except
2831 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2832 ERROR_MARK in this case. */
2833
2834enum tree_code
2835invert_tree_comparison (enum tree_code code, bool honor_nans)
2836{
2837 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2838 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2839 return ERROR_MARK;
2840
2841 switch (code)
2842 {
2843 case EQ_EXPR:
2844 return NE_EXPR;
2845 case NE_EXPR:
2846 return EQ_EXPR;
2847 case GT_EXPR:
2848 return honor_nans ? UNLE_EXPR : LE_EXPR;
2849 case GE_EXPR:
2850 return honor_nans ? UNLT_EXPR : LT_EXPR;
2851 case LT_EXPR:
2852 return honor_nans ? UNGE_EXPR : GE_EXPR;
2853 case LE_EXPR:
2854 return honor_nans ? UNGT_EXPR : GT_EXPR;
2855 case LTGT_EXPR:
2856 return UNEQ_EXPR;
2857 case UNEQ_EXPR:
2858 return LTGT_EXPR;
2859 case UNGT_EXPR:
2860 return LE_EXPR;
2861 case UNGE_EXPR:
2862 return LT_EXPR;
2863 case UNLT_EXPR:
2864 return GE_EXPR;
2865 case UNLE_EXPR:
2866 return GT_EXPR;
2867 case ORDERED_EXPR:
2868 return UNORDERED_EXPR;
2869 case UNORDERED_EXPR:
2870 return ORDERED_EXPR;
2871 default:
2872 gcc_unreachable ();
2873 }
2874}
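
/* For example, the inverse of a < b is a >= b only when NaNs cannot
   occur; with NaNs honored the inverse is the unordered-or-greater-equal
   test (UNGE_EXPR), and with -ftrapping-math in effect the inversion is
   refused (ERROR_MARK) because it would change which operand values
   raise an invalid-operation exception.  */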
2875
2876/* Similar, but return the comparison that results if the operands are
2877 swapped. This is safe for floating-point. */
2878
2879enum tree_code
2880swap_tree_comparison (enum tree_code code)
2881{
2882 switch (code)
2883 {
2884 case EQ_EXPR:
2885 case NE_EXPR:
2886 case ORDERED_EXPR:
2887 case UNORDERED_EXPR:
2888 case LTGT_EXPR:
2889 case UNEQ_EXPR:
2890 return code;
2891 case GT_EXPR:
2892 return LT_EXPR;
2893 case GE_EXPR:
2894 return LE_EXPR;
2895 case LT_EXPR:
2896 return GT_EXPR;
2897 case LE_EXPR:
2898 return GE_EXPR;
2899 case UNGT_EXPR:
2900 return UNLT_EXPR;
2901 case UNGE_EXPR:
2902 return UNLE_EXPR;
2903 case UNLT_EXPR:
2904 return UNGT_EXPR;
2905 case UNLE_EXPR:
2906 return UNGE_EXPR;
2907 default:
2908 gcc_unreachable ();
2909 }
2910}
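
/* For example, swap_tree_comparison (LT_EXPR) is GT_EXPR: a < b and
   b > a are the same test with the operands exchanged, which is what
   operand_equal_p below relies on when matching two comparisons that
   differ only in operand order.  */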
2911
2912
2913/* Convert a comparison tree code from an enum tree_code representation
2914 into a compcode bit-based encoding. This function is the inverse of
2915 compcode_to_comparison. */
2916
2917static enum comparison_code
2918comparison_to_compcode (enum tree_code code)
2919{
2920 switch (code)
2921 {
2922 case LT_EXPR:
2923 return COMPCODE_LT;
2924 case EQ_EXPR:
2925 return COMPCODE_EQ;
2926 case LE_EXPR:
2927 return COMPCODE_LE;
2928 case GT_EXPR:
2929 return COMPCODE_GT;
2930 case NE_EXPR:
2931 return COMPCODE_NE;
2932 case GE_EXPR:
2933 return COMPCODE_GE;
2934 case ORDERED_EXPR:
2935 return COMPCODE_ORD;
2936 case UNORDERED_EXPR:
2937 return COMPCODE_UNORD;
2938 case UNLT_EXPR:
2939 return COMPCODE_UNLT;
2940 case UNEQ_EXPR:
2941 return COMPCODE_UNEQ;
2942 case UNLE_EXPR:
2943 return COMPCODE_UNLE;
2944 case UNGT_EXPR:
2945 return COMPCODE_UNGT;
2946 case LTGT_EXPR:
2947 return COMPCODE_LTGT;
2948 case UNGE_EXPR:
2949 return COMPCODE_UNGE;
2950 default:
2951 gcc_unreachable ();
2952 }
2953}
2954
2955/* Convert a compcode bit-based encoding of a comparison operator back
2956 to GCC's enum tree_code representation. This function is the
2957 inverse of comparison_to_compcode. */
2958
2959static enum tree_code
2960compcode_to_comparison (enum comparison_code code)
2961{
2962 switch (code)
2963 {
2964 case COMPCODE_LT:
2965 return LT_EXPR;
2966 case COMPCODE_EQ:
2967 return EQ_EXPR;
2968 case COMPCODE_LE:
2969 return LE_EXPR;
2970 case COMPCODE_GT:
2971 return GT_EXPR;
2972 case COMPCODE_NE:
2973 return NE_EXPR;
2974 case COMPCODE_GE:
2975 return GE_EXPR;
2976 case COMPCODE_ORD:
2977 return ORDERED_EXPR;
2978 case COMPCODE_UNORD:
2979 return UNORDERED_EXPR;
2980 case COMPCODE_UNLT:
2981 return UNLT_EXPR;
2982 case COMPCODE_UNEQ:
2983 return UNEQ_EXPR;
2984 case COMPCODE_UNLE:
2985 return UNLE_EXPR;
2986 case COMPCODE_UNGT:
2987 return UNGT_EXPR;
2988 case COMPCODE_LTGT:
2989 return LTGT_EXPR;
2990 case COMPCODE_UNGE:
2991 return UNGE_EXPR;
2992 default:
2993 gcc_unreachable ();
2994 }
2995}
2996
2997/* Return true if COND1 tests the opposite condition of COND2. */
2998
2999bool
3000inverse_conditions_p (const_tree cond1, const_tree cond2)
3001{
3002 return (COMPARISON_CLASS_P (cond1)
3003 && COMPARISON_CLASS_P (cond2)
3004 && (invert_tree_comparison
3005 (TREE_CODE (cond1),
3006               HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
3007          && operand_equal_p (TREE_OPERAND (cond1, 0),
3008                              TREE_OPERAND (cond2, 0), 0)
3009          && operand_equal_p (TREE_OPERAND (cond1, 1),
3010                              TREE_OPERAND (cond2, 1), 0));
3011}
3012
3013/* Return a tree for the comparison which is the combination of
3014 doing the AND or OR (depending on CODE) of the two operations LCODE
3015 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3016 the possibility of trapping if the mode has NaNs, and return NULL_TREE
3017 if this makes the transformation invalid. */
3018
3019tree
3020combine_comparisons (location_t loc,
3021 enum tree_code code, enum tree_code lcode,
3022 enum tree_code rcode, tree truth_type,
3023 tree ll_arg, tree lr_arg)
3024{
3025 bool honor_nans = HONOR_NANS (ll_arg);
3026  enum comparison_code lcompcode = comparison_to_compcode (lcode);
3027  enum comparison_code rcompcode = comparison_to_compcode (rcode);
3028 int compcode;
3029
3030 switch (code)
3031 {
3032 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3033 compcode = lcompcode & rcompcode;
3034 break;
3035
3036 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3037 compcode = lcompcode | rcompcode;
3038 break;
3039
3040 default:
3041 return NULL_TREE;
3042 }
3043
3044 if (!honor_nans)
3045 {
3046 /* Eliminate unordered comparisons, as well as LTGT and ORD
3047 which are not used unless the mode has NaNs. */
3048 compcode &= ~COMPCODE_UNORD;
3049 if (compcode == COMPCODE_LTGT)
3050 compcode = COMPCODE_NE;
3051 else if (compcode == COMPCODE_ORD)
3052 compcode = COMPCODE_TRUE;
3053 }
3054 else if (flag_trapping_math)
3055 {
3056 /* Check that the original operation and the optimized ones will trap
3057 under the same condition. */
3058 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3059 && (lcompcode != COMPCODE_EQ)
3060 && (lcompcode != COMPCODE_ORD);
3061 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3062 && (rcompcode != COMPCODE_EQ)
3063 && (rcompcode != COMPCODE_ORD);
3064 bool trap = (compcode & COMPCODE_UNORD) == 0
3065 && (compcode != COMPCODE_EQ)
3066 && (compcode != COMPCODE_ORD);
3067
3068 /* In a short-circuited boolean expression the LHS might be
3069 such that the RHS, if evaluated, will never trap. For
3070 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3071 if neither x nor y is NaN. (This is a mixed blessing: for
3072 example, the expression above will never trap, hence
3073 optimizing it to x < y would be invalid). */
3074 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3075 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3076 rtrap = false;
3077
3078 /* If the comparison was short-circuited, and only the RHS
3079 trapped, we may now generate a spurious trap. */
3080 if (rtrap && !ltrap
3081 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3082 return NULL_TREE;
3083
3084 /* If we changed the conditions that cause a trap, we lose. */
3085 if ((ltrap || rtrap) != trap)
3086 return NULL_TREE;
3087 }
3088
3089 if (compcode == COMPCODE_TRUE)
3090 return constant_boolean_node (true, truth_type);
3091 else if (compcode == COMPCODE_FALSE)
3092 return constant_boolean_node (false, truth_type);
3093 else
3094 {
3095 enum tree_code tcode;
3096
3097      tcode = compcode_to_comparison ((enum comparison_code) compcode);
3098 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3099 }
3100}
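
/* Worked example: combining a < b with a == b under TRUTH_ORIF_EXPR
   ORs the two compcodes into the one for <=, so the result is the
   single comparison a <= b; combining a < b with a > b yields the
   compcode for LTGT, which is further simplified to NE_EXPR when NaNs
   need not be honored.  */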
3101
3102/* Return nonzero if two operands (typically of the same tree node)
3103 are necessarily equal. FLAGS modifies behavior as follows:
3104
3105 If OEP_ONLY_CONST is set, only return nonzero for constants.
3106 This function tests whether the operands are indistinguishable;
3107 it does not test whether they are equal using C's == operation.
3108 The distinction is important for IEEE floating point, because
3109 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3110 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3111
3112 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3113 even though it may hold multiple values during a function.
3114 This is because a GCC tree node guarantees that nothing else is
3115 executed between the evaluation of its "operands" (which may often
3116 be evaluated in arbitrary order). Hence if the operands themselves
3117 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3118 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3119 unset means assuming isochronic (or instantaneous) tree equivalence.
3120 Unless comparing arbitrary expression trees, such as from different
3121 statements, this flag can usually be left unset.
3122
3123 If OEP_PURE_SAME is set, then pure functions with identical arguments
3124 are considered the same. It is used when the caller has other ways
3125 to ensure that global memory is unchanged in between.
3126
3127 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3128 not values of expressions.
3129
3130 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3131 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3132
3133 If OEP_BITWISE is set, then require the values to be bitwise identical
3134 rather than simply numerically equal. Do not take advantage of things
3135 like math-related flags or undefined behavior; only return true for
3136 values that are provably bitwise identical in all circumstances.
3137
3138 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3139   any operand with side effect.  This is unnecessarily conservative in the
3140 case we know that arg0 and arg1 are in disjoint code paths (such as in
3141 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3142 addresses with TREE_CONSTANT flag set so we know that &var == &var
3143 even if var is volatile. */
3144
3145bool
3146operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3147 unsigned int flags)
3148{
3149 bool r;
3150  if (verify_hash_value (arg0, arg1, flags, &r))
3151 return r;
3152
3153 STRIP_ANY_LOCATION_WRAPPER (arg0);
3154 STRIP_ANY_LOCATION_WRAPPER (arg1);
3155
3156 /* If either is ERROR_MARK, they aren't equal. */
3157 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3158 || TREE_TYPE (arg0) == error_mark_node
3159 || TREE_TYPE (arg1) == error_mark_node)
3160 return false;
3161
3162  /* Similarly, if either does not have a type (like a template id),
3163 they aren't equal. */
3164 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3165 return false;
3166
3167 /* Bitwise identity makes no sense if the values have different layouts. */
3168 if ((flags & OEP_BITWISE)
3169 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3170 return false;
3171
3172 /* We cannot consider pointers to different address space equal. */
3173 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3174 && POINTER_TYPE_P (TREE_TYPE (arg1))
3175 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3176 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3177 return false;
3178
3179 /* Check equality of integer constants before bailing out due to
3180 precision differences. */
3181 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3182 {
3183 /* Address of INTEGER_CST is not defined; check that we did not forget
3184 to drop the OEP_ADDRESS_OF flags. */
3185 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3186 return tree_int_cst_equal (arg0, arg1);
3187 }
3188
3189 if (!(flags & OEP_ADDRESS_OF))
3190 {
3191 /* If both types don't have the same signedness, then we can't consider
3192 them equal. We must check this before the STRIP_NOPS calls
3193 because they may change the signedness of the arguments. As pointers
3194 strictly don't have a signedness, require either two pointers or
3195 two non-pointers as well. */
3196 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3197 || POINTER_TYPE_P (TREE_TYPE (arg0))
3198 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3199 return false;
3200
3201 /* If both types don't have the same precision, then it is not safe
3202 to strip NOPs. */
3203 if (element_precision (TREE_TYPE (arg0))
3204 != element_precision (TREE_TYPE (arg1)))
3205 return false;
3206
3207 STRIP_NOPS (arg0);
3208 STRIP_NOPS (arg1);
3209 }
3210#if 0
3211  /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable the
3212 sanity check once the issue is solved. */
3213 else
3214 /* Addresses of conversions and SSA_NAMEs (and many other things)
3215 are not defined. Check that we did not forget to drop the
3216 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3217 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3218 && TREE_CODE (arg0) != SSA_NAME);
3219#endif
3220
3221 /* In case both args are comparisons but with different comparison
3222 code, try to swap the comparison operands of one arg to produce
3223 a match and compare that variant. */
3224 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3225 && COMPARISON_CLASS_P (arg0)
3226 && COMPARISON_CLASS_P (arg1))
3227 {
3228 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3229
3230 if (TREE_CODE (arg0) == swap_code)
3231 return operand_equal_p (TREE_OPERAND (arg0, 0),
3232 TREE_OPERAND (arg1, 1), flags)
3233 && operand_equal_p (TREE_OPERAND (arg0, 1),
3234 TREE_OPERAND (arg1, 0), flags);
3235 }
3236
3237 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3238 {
3239 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3240 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3241 ;
3242 else if (flags & OEP_ADDRESS_OF)
3243 {
3244 /* If we are interested in comparing addresses ignore
3245 MEM_REF wrappings of the base that can appear just for
3246 TBAA reasons. */
3247 if (TREE_CODE (arg0) == MEM_REF
3248 && DECL_P (arg1)
3249 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3250 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3251 && integer_zerop (TREE_OPERAND (arg0, 1)))
3252 return true;
3253 else if (TREE_CODE (arg1) == MEM_REF
3254 && DECL_P (arg0)
3255 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3256 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3257 && integer_zerop (TREE_OPERAND (arg1, 1)))
3258 return true;
3259 return false;
3260 }
3261 else
3262 return false;
3263 }
3264
3265  /* When not checking addresses, this is needed for conversions and for
3266 COMPONENT_REF. Might as well play it safe and always test this. */
3267 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3268 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3269 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3270 && !(flags & OEP_ADDRESS_OF)))
3271 return false;
3272
3273 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3274 We don't care about side effects in that case because the SAVE_EXPR
3275 takes care of that for us. In all other cases, two expressions are
3276 equal if they have no side effects. If we have two identical
3277 expressions with side effects that should be treated the same due
3278 to the only side effects being identical SAVE_EXPR's, that will
3279 be detected in the recursive calls below.
3280 If we are taking an invariant address of two identical objects
3281 they are necessarily equal as well. */
3282 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3283 && (TREE_CODE (arg0) == SAVE_EXPR
3284 || (flags & OEP_MATCH_SIDE_EFFECTS)
3285 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3286 return true;
3287
3288 /* Next handle constant cases, those for which we can return 1 even
3289 if ONLY_CONST is set. */
3290 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3291 switch (TREE_CODE (arg0))
3292 {
3293 case INTEGER_CST:
3294 return tree_int_cst_equal (arg0, arg1);
3295
3296 case FIXED_CST:
3297 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3298 TREE_FIXED_CST (arg1));
3299
3300 case REAL_CST:
3301 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3302 return true;
3303
3304 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3305 {
3306 /* If we do not distinguish between signed and unsigned zero,
3307 consider them equal. */
3308 if (real_zerop (arg0) && real_zerop (arg1))
3309 return true;
3310 }
3311 return false;
3312
3313 case VECTOR_CST:
3314 {
3315 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3316 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3317 return false;
3318
3319 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3320 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3321 return false;
3322
3323        unsigned int count = vector_cst_encoded_nelts (arg0);
3324 for (unsigned int i = 0; i < count; ++i)
3325 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3326 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3327 return false;
3328 return true;
3329 }
3330
3331 case COMPLEX_CST:
3332 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3333 flags)
3334 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3335 flags));
3336
3337 case STRING_CST:
3338 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3339 && ! memcmp (TREE_STRING_POINTER (arg0),
3340 TREE_STRING_POINTER (arg1),
3341 TREE_STRING_LENGTH (arg0)));
3342
3343 case ADDR_EXPR:
3344 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3345 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3346                                  flags | OEP_ADDRESS_OF
3347 | OEP_MATCH_SIDE_EFFECTS);
3348 case CONSTRUCTOR:
3349 {
3350 /* In GIMPLE empty constructors are allowed in initializers of
3351 aggregates. */
3352 if (!CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1))
3353 return true;
3354
3355 /* See sem_variable::equals in ipa-icf for a similar approach. */
3356 tree typ0 = TREE_TYPE (arg0);
3357 tree typ1 = TREE_TYPE (arg1);
3358
3359 if (TREE_CODE (typ0) != TREE_CODE (typ1))
3360 return false;
3361 else if (TREE_CODE (typ0) == ARRAY_TYPE)
3362 {
3363 /* For arrays, check that the sizes all match. */
3364 const HOST_WIDE_INT siz0 = int_size_in_bytes (typ0);
3365 if (TYPE_MODE (typ0) != TYPE_MODE (typ1)
3366 || siz0 < 0
3367 || siz0 != int_size_in_bytes (typ1))
3368 return false;
3369 }
3370        else if (!types_compatible_p (typ0, typ1))
3371 return false;
3372
3373 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3374 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3375        if (vec_safe_length (v0) != vec_safe_length (v1))
3376 return false;
3377
3378 /* Address of CONSTRUCTOR is defined in GENERIC to mean the value
3379 of the CONSTRUCTOR referenced indirectly. */
3380 flags &= ~OEP_ADDRESS_OF;
3381
3382        for (unsigned idx = 0; idx < vec_safe_length (v0); ++idx)
3383 {
3384 constructor_elt *c0 = &(*v0)[idx];
3385 constructor_elt *c1 = &(*v1)[idx];
3386
3387 /* Check that the values are the same... */
3388 if (c0->value != c1->value
3389                && !operand_equal_p (c0->value, c1->value, flags))
3390 return false;
3391
3392 /* ... and that they apply to the same field! */
3393 if (c0->index != c1->index
3394 && (TREE_CODE (typ0) == ARRAY_TYPE
3395                    ? !operand_equal_p (c0->index, c1->index, flags)
3396 : !operand_equal_p (DECL_FIELD_OFFSET (c0->index),
3397 DECL_FIELD_OFFSET (c1->index),
3398 flags)
3399 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (c0->index),
3400 DECL_FIELD_BIT_OFFSET (c1->index),
3401 flags)))
3402 return false;
3403 }
3404
3405 return true;
3406 }
3407
3408 default:
3409 break;
3410 }
3411
3412 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3413 two instances of undefined behavior will give identical results. */
3414 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3415 return false;
3416
3417/* Define macros to test an operand from arg0 and arg1 for equality and a
3418 variant that allows null and views null as being different from any
3419   non-null value.  In the latter case, if either is null, both must be;
3420   otherwise, do the normal comparison.  */
3421#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3422 TREE_OPERAND (arg1, N), flags)
3423
3424#define OP_SAME_WITH_NULL(N) \
3425 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3426 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3427
3428 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3429 {
3430 case tcc_unary:
3431 /* Two conversions are equal only if signedness and modes match. */
3432 switch (TREE_CODE (arg0))
3433 {
3434 CASE_CONVERT:
3435 case FIX_TRUNC_EXPR:
3436 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3437 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3438 return false;
3439 break;
3440 default:
3441 break;
3442 }
3443
3444 return OP_SAME (0);
3445
3446
3447 case tcc_comparison:
3448 case tcc_binary:
3449 if (OP_SAME (0) && OP_SAME (1))
3450 return true;
3451
3452 /* For commutative ops, allow the other order. */
3453 return (commutative_tree_code (TREE_CODE (arg0))
3454 && operand_equal_p (TREE_OPERAND (arg0, 0),
3455 TREE_OPERAND (arg1, 1), flags)
3456 && operand_equal_p (TREE_OPERAND (arg0, 1),
3457 TREE_OPERAND (arg1, 0), flags));
3458
3459 case tcc_reference:
3460 /* If either of the pointer (or reference) expressions we are
3461 dereferencing contain a side effect, these cannot be equal,
3462 but their addresses can be. */
3463 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3464 && (TREE_SIDE_EFFECTS (arg0)
3465 || TREE_SIDE_EFFECTS (arg1)))
3466 return false;
3467
3468 switch (TREE_CODE (arg0))
3469 {
3470 case INDIRECT_REF:
3471 if (!(flags & OEP_ADDRESS_OF))
3472 {
3473 if (TYPE_ALIGN (TREE_TYPE (arg0))
3474 != TYPE_ALIGN (TREE_TYPE (arg1)))
3475 return false;
3476 /* Verify that the access types are compatible. */
3477 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3478 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3479 return false;
3480 }
3481 flags &= ~OEP_ADDRESS_OF;
3482 return OP_SAME (0);
3483
3484 case IMAGPART_EXPR:
3485 /* Require the same offset. */
3486 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3487 TYPE_SIZE (TREE_TYPE (arg1)),
3488                                flags & ~OEP_ADDRESS_OF))
3489 return false;
3490
3491 /* Fallthru. */
3492 case REALPART_EXPR:
3493 case VIEW_CONVERT_EXPR:
3494 return OP_SAME (0);
3495
3496 case TARGET_MEM_REF:
3497 case MEM_REF:
3498 if (!(flags & OEP_ADDRESS_OF))
3499 {
3500 /* Require equal access sizes */
3501 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3502 && (!TYPE_SIZE (TREE_TYPE (arg0))
3503 || !TYPE_SIZE (TREE_TYPE (arg1))
3504 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3505 TYPE_SIZE (TREE_TYPE (arg1)),
3506 flags)))
3507 return false;
3508 /* Verify that access happens in similar types. */
3509 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3510 return false;
3511 /* Verify that accesses are TBAA compatible. */
3512 if (!alias_ptr_types_compatible_p
3513 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3514 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3515 || (MR_DEPENDENCE_CLIQUE (arg0)
3516 != MR_DEPENDENCE_CLIQUE (arg1))
3517 || (MR_DEPENDENCE_BASE (arg0)
3518 != MR_DEPENDENCE_BASE (arg1)))
3519 return false;
3520 /* Verify that alignment is compatible. */
3521 if (TYPE_ALIGN (TREE_TYPE (arg0))
3522 != TYPE_ALIGN (TREE_TYPE (arg1)))
3523 return false;
3524 }
3525 flags &= ~OEP_ADDRESS_OF;
3526 return (OP_SAME (0) && OP_SAME (1)
3527 /* TARGET_MEM_REF require equal extra operands. */
3528 && (TREE_CODE (arg0) != TARGET_MEM_REF
3529 || (OP_SAME_WITH_NULL (2)
3530 && OP_SAME_WITH_NULL (3)
3531 && OP_SAME_WITH_NULL (4))));
3532
3533 case ARRAY_REF:
3534 case ARRAY_RANGE_REF:
3535 if (!OP_SAME (0))
3536 return false;
3537 flags &= ~OEP_ADDRESS_OF;
3538          /* If the array index is constant, compare it by value first, as the
3539             indices may have different types but the same value.  */
3540 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3541 TREE_OPERAND (arg1, 1))
3542 || OP_SAME (1))
3543 && OP_SAME_WITH_NULL (2)
3544 && OP_SAME_WITH_NULL (3)
3545 /* Compare low bound and element size as with OEP_ADDRESS_OF
3546 we have to account for the offset of the ref. */
3547 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3548 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3549                     || (operand_equal_p (array_ref_low_bound
3550                                            (CONST_CAST_TREE (arg0)),
3551                                          array_ref_low_bound
3552                                            (CONST_CAST_TREE (arg1)), flags)
3553                         && operand_equal_p (array_ref_element_size
3554                                               (CONST_CAST_TREE (arg0)),
3555                                             array_ref_element_size
3556                                               (CONST_CAST_TREE (arg1)),
3557 flags))));
3558
3559 case COMPONENT_REF:
3560 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3561 may be NULL when we're called to compare MEM_EXPRs. */
3562 if (!OP_SAME_WITH_NULL (0))
3563 return false;
3564 {
3565 bool compare_address = flags & OEP_ADDRESS_OF;
3566
3567          /* Most of the time we only need to compare FIELD_DECLs for equality.
3568             However, when determining an address, look at the actual offsets:
3569             these may match for unions and unshared record types.  */
3570 flags &= ~OEP_ADDRESS_OF;
3571 if (!OP_SAME (1))
3572 {
3573 if (compare_address
3574 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3575 {
3576 tree field0 = TREE_OPERAND (arg0, 1);
3577 tree field1 = TREE_OPERAND (arg1, 1);
3578
3579 /* Non-FIELD_DECL operands can appear in C++ templates. */
3580 if (TREE_CODE (field0) != FIELD_DECL
3581 || TREE_CODE (field1) != FIELD_DECL
3582 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3583 DECL_FIELD_OFFSET (field1), flags)
3584 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3585 DECL_FIELD_BIT_OFFSET (field1),
3586 flags))
3587 return false;
3588 }
3589 else
3590 return false;
3591 }
3592 }
3593 return OP_SAME_WITH_NULL (2);
3594
3595 case BIT_FIELD_REF:
3596 if (!OP_SAME (0))
3597 return false;
3598 flags &= ~OEP_ADDRESS_OF;
3599 return OP_SAME (1) && OP_SAME (2);
3600
3601 default:
3602 return false;
3603 }
3604
3605 case tcc_expression:
3606 switch (TREE_CODE (arg0))
3607 {
3608 case ADDR_EXPR:
3609 /* Be sure we pass right ADDRESS_OF flag. */
3610 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3611 return operand_equal_p (TREE_OPERAND (arg0, 0),
3612 TREE_OPERAND (arg1, 0),
3613                                flags | OEP_ADDRESS_OF);
3614
3615 case TRUTH_NOT_EXPR:
3616 return OP_SAME (0);
3617
3618 case TRUTH_ANDIF_EXPR:
3619 case TRUTH_ORIF_EXPR:
3620 return OP_SAME (0) && OP_SAME (1);
3621
3622 case WIDEN_MULT_PLUS_EXPR:
3623 case WIDEN_MULT_MINUS_EXPR:
3624 if (!OP_SAME (2))
3625 return false;
3626          /* The multiplication operands are commutative.  */
3627 /* FALLTHRU */
3628
3629 case TRUTH_AND_EXPR:
3630 case TRUTH_OR_EXPR:
3631 case TRUTH_XOR_EXPR:
3632 if (OP_SAME (0) && OP_SAME (1))
3633 return true;
3634
3635 /* Otherwise take into account this is a commutative operation. */
3636 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3637 TREE_OPERAND (arg1, 1), flags)
3638 && operand_equal_p (TREE_OPERAND (arg0, 1),
3639 TREE_OPERAND (arg1, 0), flags));
3640
3641 case COND_EXPR:
3642 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3643 return false;
3644 flags &= ~OEP_ADDRESS_OF;
3645 return OP_SAME (0);
3646
3647 case BIT_INSERT_EXPR:
3648	  /* BIT_INSERT_EXPR has an implicit operand, the type precision
3649	     of op1.  Check to make sure the precisions are the same.  */
3650 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3651 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3652 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3653 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3654 return false;
3655 /* FALLTHRU */
3656
3657 case VEC_COND_EXPR:
3658 case DOT_PROD_EXPR:
3659 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3660
3661 case MODIFY_EXPR:
3662 case INIT_EXPR:
3663 case COMPOUND_EXPR:
3664 case PREDECREMENT_EXPR:
3665 case PREINCREMENT_EXPR:
3666 case POSTDECREMENT_EXPR:
3667 case POSTINCREMENT_EXPR:
3668 if (flags & OEP_LEXICOGRAPHIC)
3669 return OP_SAME (0) && OP_SAME (1);
3670 return false;
3671
3672 case CLEANUP_POINT_EXPR:
3673 case EXPR_STMT:
3674 case SAVE_EXPR:
3675 if (flags & OEP_LEXICOGRAPHIC)
3676 return OP_SAME (0);
3677 return false;
3678
3679 case OBJ_TYPE_REF:
3680 /* Virtual table reference. */
3681 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3682 OBJ_TYPE_REF_EXPR (arg1), flags))
3683 return false;
3684 flags &= ~OEP_ADDRESS_OF;
3685 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3686 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3687 return false;
3688 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3689 OBJ_TYPE_REF_OBJECT (arg1), flags))
3690 return false;
3691 if (virtual_method_call_p (arg0))
3692 {
3693 if (!virtual_method_call_p (arg1))
3694 return false;
3695 return types_same_for_odr (type1: obj_type_ref_class (ref: arg0),
3696 type2: obj_type_ref_class (ref: arg1));
3697 }
3698 return false;
3699
3700 default:
3701 return false;
3702 }
3703
3704 case tcc_vl_exp:
3705 switch (TREE_CODE (arg0))
3706 {
3707 case CALL_EXPR:
3708 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3709 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3710	    /* If the CALL_EXPRs are not both internal or both normal function
3711	       calls, then they are not equal.  */
3712 return false;
3713 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3714 {
3715 /* If the CALL_EXPRs call different internal functions, then they
3716 are not equal. */
3717 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3718 return false;
3719 }
3720 else
3721 {
3722 /* If the CALL_EXPRs call different functions, then they are not
3723 equal. */
3724 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3725 flags))
3726 return false;
3727 }
3728
3729 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3730 {
3731 unsigned int cef = call_expr_flags (arg0);
3732 if (flags & OEP_PURE_SAME)
3733 cef &= ECF_CONST | ECF_PURE;
3734 else
3735 cef &= ECF_CONST;
3736 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3737 return false;
3738 }
3739
3740 /* Now see if all the arguments are the same. */
3741 {
3742 const_call_expr_arg_iterator iter0, iter1;
3743 const_tree a0, a1;
3744 for (a0 = first_const_call_expr_arg (exp: arg0, iter: &iter0),
3745 a1 = first_const_call_expr_arg (exp: arg1, iter: &iter1);
3746 a0 && a1;
3747 a0 = next_const_call_expr_arg (iter: &iter0),
3748 a1 = next_const_call_expr_arg (iter: &iter1))
3749 if (! operand_equal_p (arg0: a0, arg1: a1, flags))
3750 return false;
3751
3752 /* If we get here and both argument lists are exhausted
3753 then the CALL_EXPRs are equal. */
3754 return ! (a0 || a1);
3755 }
3756 default:
3757 return false;
3758 }
3759
3760 case tcc_declaration:
3761 /* Consider __builtin_sqrt equal to sqrt. */
3762 if (TREE_CODE (arg0) == FUNCTION_DECL)
3763 return (fndecl_built_in_p (node: arg0) && fndecl_built_in_p (node: arg1)
3764 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3765 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3766 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3767
3768 if (DECL_P (arg0)
3769 && (flags & OEP_DECL_NAME)
3770 && (flags & OEP_LEXICOGRAPHIC))
3771 {
3772 /* Consider decls with the same name equal. The caller needs
3773 to make sure they refer to the same entity (such as a function
3774 formal parameter). */
3775 tree a0name = DECL_NAME (arg0);
3776 tree a1name = DECL_NAME (arg1);
3777 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3778 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3779 return a0ns && a1ns && strcmp (s1: a0ns, s2: a1ns) == 0;
3780 }
3781 return false;
3782
3783 case tcc_exceptional:
3784 if (TREE_CODE (arg0) == CONSTRUCTOR)
3785 {
3786 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3787 return false;
3788
3789 /* In GIMPLE constructors are used only to build vectors from
3790 elements. Individual elements in the constructor must be
3791 indexed in increasing order and form an initial sequence.
3792
3793 We make no effort to compare nonconstant ones in GENERIC. */
3794 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3795 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3796 return false;
3797
3798	  /* Be sure that the vectors constructed have the same representation.
3799	     So far we have only tested that element precision and modes match.
3800	     Vectors may be BLKmode, so also check that the number of
3801	     parts matches.  */
3802 if (maybe_ne (a: TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3803 b: TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3804 return false;
3805
3806 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3807 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3808 unsigned int len = vec_safe_length (v: v0);
3809
3810 if (len != vec_safe_length (v: v1))
3811 return false;
3812
3813 for (unsigned int i = 0; i < len; i++)
3814 {
3815 constructor_elt *c0 = &(*v0)[i];
3816 constructor_elt *c1 = &(*v1)[i];
3817
3818 if (!operand_equal_p (arg0: c0->value, arg1: c1->value, flags)
3819 /* In GIMPLE the indexes can be either NULL or matching i.
3820 Double check this so we won't get false
3821 positives for GENERIC. */
3822 || (c0->index
3823 && (TREE_CODE (c0->index) != INTEGER_CST
3824 || compare_tree_int (c0->index, i)))
3825 || (c1->index
3826 && (TREE_CODE (c1->index) != INTEGER_CST
3827 || compare_tree_int (c1->index, i))))
3828 return false;
3829 }
3830 return true;
3831 }
3832 else if (TREE_CODE (arg0) == STATEMENT_LIST
3833 && (flags & OEP_LEXICOGRAPHIC))
3834 {
3835 /* Compare the STATEMENT_LISTs. */
3836 tree_stmt_iterator tsi1, tsi2;
3837 tree body1 = CONST_CAST_TREE (arg0);
3838 tree body2 = CONST_CAST_TREE (arg1);
3839 for (tsi1 = tsi_start (t: body1), tsi2 = tsi_start (t: body2); ;
3840 tsi_next (i: &tsi1), tsi_next (i: &tsi2))
3841 {
3842 /* The lists don't have the same number of statements. */
3843 if (tsi_end_p (i: tsi1) ^ tsi_end_p (i: tsi2))
3844 return false;
3845 if (tsi_end_p (i: tsi1) && tsi_end_p (i: tsi2))
3846 return true;
3847 if (!operand_equal_p (arg0: tsi_stmt (i: tsi1), arg1: tsi_stmt (i: tsi2),
3848 flags: flags & (OEP_LEXICOGRAPHIC
3849 | OEP_NO_HASH_CHECK)))
3850 return false;
3851 }
3852 }
3853 return false;
3854
3855 case tcc_statement:
3856 switch (TREE_CODE (arg0))
3857 {
3858 case RETURN_EXPR:
3859 if (flags & OEP_LEXICOGRAPHIC)
3860 return OP_SAME_WITH_NULL (0);
3861 return false;
3862 case DEBUG_BEGIN_STMT:
3863 if (flags & OEP_LEXICOGRAPHIC)
3864 return true;
3865 return false;
3866 default:
3867 return false;
3868 }
3869
3870 default:
3871 return false;
3872 }
3873
3874#undef OP_SAME
3875#undef OP_SAME_WITH_NULL
3876}
3877
3878/* Generate a hash value for an expression. This can be used iteratively
3879 by passing a previous result as the HSTATE argument. */
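/* A rough usage sketch (hypothetical caller, using the inchash::add_expr
   wrapper defined further below, which forwards to this function):

     inchash::hash h;
     inchash::add_expr (op0, h, 0);
     inchash::add_expr (op1, h, 0);
     hashval_t val = h.end ();

   OP0 and OP1 are placeholder trees.  Trees that compare equal under
   operand_equal_p with the same FLAGS are meant to feed the same data
   into HSTATE and thus produce the same final hash.  */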
3880
3881void
3882operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3883 unsigned int flags)
3884{
3885 int i;
3886 enum tree_code code;
3887 enum tree_code_class tclass;
3888
3889 if (t == NULL_TREE || t == error_mark_node)
3890 {
3891 hstate.merge_hash (other: 0);
3892 return;
3893 }
3894
3895 STRIP_ANY_LOCATION_WRAPPER (t);
3896
3897 if (!(flags & OEP_ADDRESS_OF))
3898 STRIP_NOPS (t);
3899
3900 code = TREE_CODE (t);
3901
3902 switch (code)
3903 {
3904 /* Alas, constants aren't shared, so we can't rely on pointer
3905 identity. */
3906 case VOID_CST:
3907 hstate.merge_hash (other: 0);
3908 return;
3909 case INTEGER_CST:
3910 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3911 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3912 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3913 return;
3914 case REAL_CST:
3915 {
3916 unsigned int val2;
3917 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3918 val2 = rvc_zero;
3919 else
3920 val2 = real_hash (TREE_REAL_CST_PTR (t));
3921 hstate.merge_hash (other: val2);
3922 return;
3923 }
3924 case FIXED_CST:
3925 {
3926 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3927 hstate.merge_hash (other: val2);
3928 return;
3929 }
3930 case STRING_CST:
3931 hstate.add (data: (const void *) TREE_STRING_POINTER (t),
3932 TREE_STRING_LENGTH (t));
3933 return;
3934 case COMPLEX_CST:
3935 hash_operand (TREE_REALPART (t), hstate, flags);
3936 hash_operand (TREE_IMAGPART (t), hstate, flags);
3937 return;
3938 case VECTOR_CST:
3939 {
3940 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3941 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3942 unsigned int count = vector_cst_encoded_nelts (t);
3943 for (unsigned int i = 0; i < count; ++i)
3944 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3945 return;
3946 }
3947 case SSA_NAME:
3948 /* We can just compare by pointer. */
3949 hstate.add_hwi (SSA_NAME_VERSION (t));
3950 return;
3951 case PLACEHOLDER_EXPR:
3952 /* The node itself doesn't matter. */
3953 return;
3954 case BLOCK:
3955 case OMP_CLAUSE:
3956 /* Ignore. */
3957 return;
3958 case TREE_LIST:
3959 /* A list of expressions, for a CALL_EXPR or as the elements of a
3960 VECTOR_CST. */
3961 for (; t; t = TREE_CHAIN (t))
3962 hash_operand (TREE_VALUE (t), hstate, flags);
3963 return;
3964 case CONSTRUCTOR:
3965 {
3966 unsigned HOST_WIDE_INT idx;
3967 tree field, value;
3968 flags &= ~OEP_ADDRESS_OF;
3969 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3970 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3971 {
3972 /* In GIMPLE the indexes can be either NULL or matching i. */
3973 if (field == NULL_TREE)
3974 field = bitsize_int (idx);
3975 if (TREE_CODE (field) == FIELD_DECL)
3976 {
3977 hash_operand (DECL_FIELD_OFFSET (field), hstate, flags);
3978 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, flags);
3979 }
3980 else
3981 hash_operand (t: field, hstate, flags);
3982 hash_operand (t: value, hstate, flags);
3983 }
3984 return;
3985 }
3986 case STATEMENT_LIST:
3987 {
3988 tree_stmt_iterator i;
3989 for (i = tsi_start (CONST_CAST_TREE (t));
3990 !tsi_end_p (i); tsi_next (i: &i))
3991 hash_operand (t: tsi_stmt (i), hstate, flags);
3992 return;
3993 }
3994 case TREE_VEC:
3995 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3996 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3997 return;
3998 case IDENTIFIER_NODE:
3999 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
4000 return;
4001 case FUNCTION_DECL:
4002 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
4003 Otherwise nodes that compare equal according to operand_equal_p might
4004 get different hash codes. However, don't do this for machine specific
4005 or front end builtins, since the function code is overloaded in those
4006 cases. */
4007 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
4008 && builtin_decl_explicit_p (fncode: DECL_FUNCTION_CODE (decl: t)))
4009 {
4010 t = builtin_decl_explicit (fncode: DECL_FUNCTION_CODE (decl: t));
4011 code = TREE_CODE (t);
4012 }
4013 /* FALL THROUGH */
4014 default:
4015 if (POLY_INT_CST_P (t))
4016 {
4017 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
4018 hstate.add_wide_int (x: wi::to_wide (POLY_INT_CST_COEFF (t, i)));
4019 return;
4020 }
4021 tclass = TREE_CODE_CLASS (code);
4022
4023 if (tclass == tcc_declaration)
4024 {
4025	  /* DECLs have a unique ID.  */
4026 hstate.add_hwi (DECL_UID (t));
4027 }
4028 else if (tclass == tcc_comparison && !commutative_tree_code (code))
4029 {
4030 /* For comparisons that can be swapped, use the lower
4031 tree code. */
4032 enum tree_code ccode = swap_tree_comparison (code);
4033 if (code < ccode)
4034 ccode = code;
4035 hstate.add_object (obj&: ccode);
4036 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
4037 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
4038 }
4039 else if (CONVERT_EXPR_CODE_P (code))
4040 {
4041 /* NOP_EXPR and CONVERT_EXPR are considered equal by
4042 operand_equal_p. */
4043 enum tree_code ccode = NOP_EXPR;
4044 hstate.add_object (obj&: ccode);
4045
4046 /* Don't hash the type, that can lead to having nodes which
4047 compare equal according to operand_equal_p, but which
4048 have different hash codes. Make sure to include signedness
4049 in the hash computation. */
4050 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4051 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4052 }
4053 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
4054 else if (code == MEM_REF
4055 && (flags & OEP_ADDRESS_OF) != 0
4056 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
4057 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
4058 && integer_zerop (TREE_OPERAND (t, 1)))
4059 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
4060 hstate, flags);
4061 /* Don't ICE on FE specific trees, or their arguments etc.
4062 during operand_equal_p hash verification. */
4063 else if (!IS_EXPR_CODE_CLASS (tclass))
4064 gcc_assert (flags & OEP_HASH_CHECK);
4065 else
4066 {
4067 unsigned int sflags = flags;
4068
4069 hstate.add_object (obj&: code);
4070
4071 switch (code)
4072 {
4073 case ADDR_EXPR:
4074 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
4075 flags |= OEP_ADDRESS_OF;
4076 sflags = flags;
4077 break;
4078
4079 case INDIRECT_REF:
4080 case MEM_REF:
4081 case TARGET_MEM_REF:
4082 flags &= ~OEP_ADDRESS_OF;
4083 sflags = flags;
4084 break;
4085
4086 case COMPONENT_REF:
4087 if (sflags & OEP_ADDRESS_OF)
4088 {
4089 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4090 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
4091 hstate, flags: flags & ~OEP_ADDRESS_OF);
4092 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
4093 hstate, flags: flags & ~OEP_ADDRESS_OF);
4094 return;
4095 }
4096 break;
4097 case ARRAY_REF:
4098 case ARRAY_RANGE_REF:
4099 case BIT_FIELD_REF:
4100 sflags &= ~OEP_ADDRESS_OF;
4101 break;
4102
4103 case COND_EXPR:
4104 flags &= ~OEP_ADDRESS_OF;
4105 break;
4106
4107 case WIDEN_MULT_PLUS_EXPR:
4108 case WIDEN_MULT_MINUS_EXPR:
4109 {
4110 /* The multiplication operands are commutative. */
4111 inchash::hash one, two;
4112 hash_operand (TREE_OPERAND (t, 0), hstate&: one, flags);
4113 hash_operand (TREE_OPERAND (t, 1), hstate&: two, flags);
4114 hstate.add_commutative (a&: one, b&: two);
4115 hash_operand (TREE_OPERAND (t, 2), hstate&: two, flags);
4116 return;
4117 }
4118
4119 case CALL_EXPR:
4120 if (CALL_EXPR_FN (t) == NULL_TREE)
4121 hstate.add_int (CALL_EXPR_IFN (t));
4122 break;
4123
4124 case TARGET_EXPR:
4125 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4126	       Usually different TARGET_EXPRs should just use
4127 different temporaries in their slots. */
4128 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4129 return;
4130
4131 case OBJ_TYPE_REF:
4132 /* Virtual table reference. */
4133 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4134 flags &= ~OEP_ADDRESS_OF;
4135 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4136 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4137 if (!virtual_method_call_p (t))
4138 return;
4139 if (tree c = obj_type_ref_class (ref: t))
4140 {
4141 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4142 /* We compute mangled names only when free_lang_data is run.
4143 In that case we can hash precisely. */
4144 if (TREE_CODE (c) == TYPE_DECL
4145 && DECL_ASSEMBLER_NAME_SET_P (c))
4146 hstate.add_object
4147 (IDENTIFIER_HASH_VALUE
4148 (DECL_ASSEMBLER_NAME (c)));
4149 }
4150 return;
4151 default:
4152 break;
4153 }
4154
4155 /* Don't hash the type, that can lead to having nodes which
4156 compare equal according to operand_equal_p, but which
4157 have different hash codes. */
4158 if (code == NON_LVALUE_EXPR)
4159 {
4160	      /* Make sure to include signedness in the hash computation.  */
4161 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4162 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4163 }
4164
4165 else if (commutative_tree_code (code))
4166 {
4167 /* It's a commutative expression. We want to hash it the same
4168 however it appears. We do this by first hashing both operands
4169 and then rehashing based on the order of their independent
4170 hashes. */
4171 inchash::hash one, two;
4172 hash_operand (TREE_OPERAND (t, 0), hstate&: one, flags);
4173 hash_operand (TREE_OPERAND (t, 1), hstate&: two, flags);
4174 hstate.add_commutative (a&: one, b&: two);
4175 }
4176 else
4177 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4178 hash_operand (TREE_OPERAND (t, i), hstate,
4179 flags: i == 0 ? flags : sflags);
4180 }
4181 return;
4182 }
4183}
4184
4185bool
4186operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4187 unsigned int flags, bool *ret)
4188{
4189 /* When checking and unless comparing DECL names, verify that if
4190 the outermost operand_equal_p call returns non-zero then ARG0
4191 and ARG1 have the same hash value. */
4192 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4193 {
4194 if (operand_equal_p (arg0, arg1, flags: flags | OEP_NO_HASH_CHECK))
4195 {
4196 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4197 {
4198 inchash::hash hstate0 (0), hstate1 (0);
4199 hash_operand (t: arg0, hstate&: hstate0, flags: flags | OEP_HASH_CHECK);
4200 hash_operand (t: arg1, hstate&: hstate1, flags: flags | OEP_HASH_CHECK);
4201 hashval_t h0 = hstate0.end ();
4202 hashval_t h1 = hstate1.end ();
4203 gcc_assert (h0 == h1);
4204 }
4205 *ret = true;
4206 }
4207 else
4208 *ret = false;
4209
4210 return true;
4211 }
4212
4213 return false;
4214}
4215
4216
4217static operand_compare default_compare_instance;
4218
4219 /* Convenience wrapper around the operand_compare class, because usually we
4220    do not need to play with the valueizer.  */
4221
4222bool
4223operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4224{
4225 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4226}
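/* A hedged usage sketch of the wrapper above (T1 and T2 are placeholder
   trees built elsewhere):

     if (operand_equal_p (t1, t2, 0))
       ;	the two expressions compute the same value

     if (operand_equal_p (t1, t2, OEP_ADDRESS_OF))
       ;	the two references agree as addresses; properties that only
		matter for the value of the access are ignored

   The remaining OEP_* flags adjust the comparison as described in the
   cases above.  */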
4227
4228namespace inchash
4229{
4230
4231/* Generate a hash value for an expression. This can be used iteratively
4232 by passing a previous result as the HSTATE argument.
4233
4234 This function is intended to produce the same hash for expressions which
4235 would compare equal using operand_equal_p. */
4236void
4237add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4238{
4239 default_compare_instance.hash_operand (t, hstate, flags);
4240}
4241
4242}
4243
4244/* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4245 with a different signedness or a narrower precision. */
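/* For instance (an illustrative case only): with C a variable of type
   char, ARG0 being the tree for C and ARG1 the tree for (int) C
   satisfies this predicate, because ARG1 is just a widening conversion
   of ARG0; likewise a sign-changing conversion stripped by STRIP_NOPS
   does not break the match.  */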
4246
4247static bool
4248operand_equal_for_comparison_p (tree arg0, tree arg1)
4249{
4250 if (operand_equal_p (arg0, arg1, flags: 0))
4251 return true;
4252
4253 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4254 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4255 return false;
4256
4257 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4258 and see if the inner values are the same. This removes any
4259 signedness comparison, which doesn't matter here. */
4260 tree op0 = arg0;
4261 tree op1 = arg1;
4262 STRIP_NOPS (op0);
4263 STRIP_NOPS (op1);
4264 if (operand_equal_p (arg0: op0, arg1: op1, flags: 0))
4265 return true;
4266
4267 /* Discard a single widening conversion from ARG1 and see if the inner
4268 value is the same as ARG0. */
4269 if (CONVERT_EXPR_P (arg1)
4270 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4271 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4272 < TYPE_PRECISION (TREE_TYPE (arg1))
4273 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), flags: 0))
4274 return true;
4275
4276 return false;
4277}
4278
4279/* See if ARG is an expression that is either a comparison or is performing
4280 arithmetic on comparisons. The comparisons must only be comparing
4281 two different values, which will be stored in *CVAL1 and *CVAL2; if
4282 they are nonzero it means that some operands have already been found.
4283 No variables may be used anywhere else in the expression except in the
4284 comparisons.
4285
4286    If this is true, return true.  Otherwise, return false.  */
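/* Two illustrative source-level shapes (A, B and C are variables):

     a < b || a == b	qualifies; *CVAL1 = a, *CVAL2 = b
     a < b || a == c	does not qualify; a third value C appears  */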
4287
4288static bool
4289twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4290{
4291 enum tree_code code = TREE_CODE (arg);
4292 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4293
4294 /* We can handle some of the tcc_expression cases here. */
4295 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4296 tclass = tcc_unary;
4297 else if (tclass == tcc_expression
4298 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4299 || code == COMPOUND_EXPR))
4300 tclass = tcc_binary;
4301
4302 switch (tclass)
4303 {
4304 case tcc_unary:
4305 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4306
4307 case tcc_binary:
4308 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4309 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4310
4311 case tcc_constant:
4312 return true;
4313
4314 case tcc_expression:
4315 if (code == COND_EXPR)
4316 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4317 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4318 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4319 return false;
4320
4321 case tcc_comparison:
4322 /* First see if we can handle the first operand, then the second. For
4323 the second operand, we know *CVAL1 can't be zero. It must be that
4324 one side of the comparison is each of the values; test for the
4325 case where this isn't true by failing if the two operands
4326 are the same. */
4327
4328 if (operand_equal_p (TREE_OPERAND (arg, 0),
4329 TREE_OPERAND (arg, 1), flags: 0))
4330 return false;
4331
4332 if (*cval1 == 0)
4333 *cval1 = TREE_OPERAND (arg, 0);
4334 else if (operand_equal_p (arg0: *cval1, TREE_OPERAND (arg, 0), flags: 0))
4335 ;
4336 else if (*cval2 == 0)
4337 *cval2 = TREE_OPERAND (arg, 0);
4338 else if (operand_equal_p (arg0: *cval2, TREE_OPERAND (arg, 0), flags: 0))
4339 ;
4340 else
4341 return false;
4342
4343 if (operand_equal_p (arg0: *cval1, TREE_OPERAND (arg, 1), flags: 0))
4344 ;
4345 else if (*cval2 == 0)
4346 *cval2 = TREE_OPERAND (arg, 1);
4347 else if (operand_equal_p (arg0: *cval2, TREE_OPERAND (arg, 1), flags: 0))
4348 ;
4349 else
4350 return false;
4351
4352 return true;
4353
4354 default:
4355 return false;
4356 }
4357}
4358
4359/* ARG is a tree that is known to contain just arithmetic operations and
4360 comparisons. Evaluate the operations in the tree substituting NEW0 for
4361 any occurrence of OLD0 as an operand of a comparison and likewise for
4362 NEW1 and OLD1. */
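/* As an illustration (source-level view, hypothetical operands):
   substituting OLD0 = a, NEW0 = 1 and OLD1 = b, NEW1 = 0 into the tree
   for

     a < b && a == b

   rebuilds it as 1 < 0 && 1 == 0, which the fold_build* calls below
   then reduce to false.  */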
4363
4364static tree
4365eval_subst (location_t loc, tree arg, tree old0, tree new0,
4366 tree old1, tree new1)
4367{
4368 tree type = TREE_TYPE (arg);
4369 enum tree_code code = TREE_CODE (arg);
4370 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4371
4372 /* We can handle some of the tcc_expression cases here. */
4373 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4374 tclass = tcc_unary;
4375 else if (tclass == tcc_expression
4376 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4377 tclass = tcc_binary;
4378
4379 switch (tclass)
4380 {
4381 case tcc_unary:
4382 return fold_build1_loc (loc, code, type,
4383 eval_subst (loc, TREE_OPERAND (arg, 0),
4384 old0, new0, old1, new1));
4385
4386 case tcc_binary:
4387 return fold_build2_loc (loc, code, type,
4388 eval_subst (loc, TREE_OPERAND (arg, 0),
4389 old0, new0, old1, new1),
4390 eval_subst (loc, TREE_OPERAND (arg, 1),
4391 old0, new0, old1, new1));
4392
4393 case tcc_expression:
4394 switch (code)
4395 {
4396 case SAVE_EXPR:
4397 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4398 old1, new1);
4399
4400 case COMPOUND_EXPR:
4401 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4402 old1, new1);
4403
4404 case COND_EXPR:
4405 return fold_build3_loc (loc, code, type,
4406 eval_subst (loc, TREE_OPERAND (arg, 0),
4407 old0, new0, old1, new1),
4408 eval_subst (loc, TREE_OPERAND (arg, 1),
4409 old0, new0, old1, new1),
4410 eval_subst (loc, TREE_OPERAND (arg, 2),
4411 old0, new0, old1, new1));
4412 default:
4413 break;
4414 }
4415 /* Fall through - ??? */
4416
4417 case tcc_comparison:
4418 {
4419 tree arg0 = TREE_OPERAND (arg, 0);
4420 tree arg1 = TREE_OPERAND (arg, 1);
4421
4422 /* We need to check both for exact equality and tree equality. The
4423 former will be true if the operand has a side-effect. In that
4424 case, we know the operand occurred exactly once. */
4425
4426 if (arg0 == old0 || operand_equal_p (arg0, arg1: old0, flags: 0))
4427 arg0 = new0;
4428 else if (arg0 == old1 || operand_equal_p (arg0, arg1: old1, flags: 0))
4429 arg0 = new1;
4430
4431 if (arg1 == old0 || operand_equal_p (arg0: arg1, arg1: old0, flags: 0))
4432 arg1 = new0;
4433 else if (arg1 == old1 || operand_equal_p (arg0: arg1, arg1: old1, flags: 0))
4434 arg1 = new1;
4435
4436 return fold_build2_loc (loc, code, type, arg0, arg1);
4437 }
4438
4439 default:
4440 return arg;
4441 }
4442}
4443
4444/* Return a tree for the case when the result of an expression is RESULT
4445 converted to TYPE and OMITTED was previously an operand of the expression
4446 but is now not needed (e.g., we folded OMITTED * 0).
4447
4448 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4449 the conversion of RESULT to TYPE. */
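/* A small illustration: folding f () * 0, where f () has side effects,
   would call this with RESULT = 0 and OMITTED = f (), yielding roughly
   the COMPOUND_EXPR

     f (), 0

   so that f () is still evaluated but the whole expression has the
   value 0.  */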
4450
4451tree
4452omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4453{
4454 tree t = fold_convert_loc (loc, type, arg: result);
4455
4456 /* If the resulting operand is an empty statement, just return the omitted
4457      statement cast to void.  */
4458 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4459 return build1_loc (loc, code: NOP_EXPR, void_type_node,
4460 arg1: fold_ignored_result (omitted));
4461
4462 if (TREE_SIDE_EFFECTS (omitted))
4463 return build2_loc (loc, code: COMPOUND_EXPR, type,
4464 arg0: fold_ignored_result (omitted), arg1: t);
4465
4466 return non_lvalue_loc (loc, x: t);
4467}
4468
4469/* Return a tree for the case when the result of an expression is RESULT
4470 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4471 of the expression but are now not needed.
4472
4473 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4474 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4475 evaluated before OMITTED2. Otherwise, if neither has side effects,
4476 just do the conversion of RESULT to TYPE. */
4477
4478tree
4479omit_two_operands_loc (location_t loc, tree type, tree result,
4480 tree omitted1, tree omitted2)
4481{
4482 tree t = fold_convert_loc (loc, type, arg: result);
4483
4484 if (TREE_SIDE_EFFECTS (omitted2))
4485 t = build2_loc (loc, code: COMPOUND_EXPR, type, arg0: omitted2, arg1: t);
4486 if (TREE_SIDE_EFFECTS (omitted1))
4487 t = build2_loc (loc, code: COMPOUND_EXPR, type, arg0: omitted1, arg1: t);
4488
4489 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, x: t) : t;
4490}
4491
4492
4493/* Return a simplified tree node for the truth-negation of ARG. This
4494 never alters ARG itself. We assume that ARG is an operation that
4495 returns a truth value (0 or 1).
4496
4497 FIXME: one would think we would fold the result, but it causes
4498 problems with the dominator optimizer. */
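/* A few representative cases (illustrative only):

     !(a < b)	  becomes a >= b for integer operands; for floating
		  point with -ftrapping-math and a comparison other
		  than ==, != or the (un)ordered tests, NULL_TREE is
		  returned so the negation stays explicit;
     !(a && b)	  becomes !a || !b;
     !(x ? a : b) becomes x ? !a : !b.  */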
4499
4500static tree
4501fold_truth_not_expr (location_t loc, tree arg)
4502{
4503 tree type = TREE_TYPE (arg);
4504 enum tree_code code = TREE_CODE (arg);
4505 location_t loc1, loc2;
4506
4507 /* If this is a comparison, we can simply invert it, except for
4508 floating-point non-equality comparisons, in which case we just
4509 enclose a TRUTH_NOT_EXPR around what we have. */
4510
4511 if (TREE_CODE_CLASS (code) == tcc_comparison)
4512 {
4513 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4514 if (FLOAT_TYPE_P (op_type)
4515 && flag_trapping_math
4516 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4517 && code != NE_EXPR && code != EQ_EXPR)
4518 return NULL_TREE;
4519
4520 code = invert_tree_comparison (code, honor_nans: HONOR_NANS (op_type));
4521 if (code == ERROR_MARK)
4522 return NULL_TREE;
4523
4524 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4525 TREE_OPERAND (arg, 1));
4526 copy_warning (ret, arg);
4527 return ret;
4528 }
4529
4530 switch (code)
4531 {
4532 case INTEGER_CST:
4533 return constant_boolean_node (integer_zerop (arg), type);
4534
4535 case TRUTH_AND_EXPR:
4536 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4537 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4538 return build2_loc (loc, code: TRUTH_OR_EXPR, type,
4539 arg0: invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4540 arg1: invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4541
4542 case TRUTH_OR_EXPR:
4543 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4544 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4545 return build2_loc (loc, code: TRUTH_AND_EXPR, type,
4546 arg0: invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4547 arg1: invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4548
4549 case TRUTH_XOR_EXPR:
4550 /* Here we can invert either operand. We invert the first operand
4551 unless the second operand is a TRUTH_NOT_EXPR in which case our
4552 result is the XOR of the first operand with the inside of the
4553 negation of the second operand. */
4554
4555 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4556 return build2_loc (loc, code: TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4557 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4558 else
4559 return build2_loc (loc, code: TRUTH_XOR_EXPR, type,
4560 arg0: invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4561 TREE_OPERAND (arg, 1));
4562
4563 case TRUTH_ANDIF_EXPR:
4564 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4565 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4566 return build2_loc (loc, code: TRUTH_ORIF_EXPR, type,
4567 arg0: invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4568 arg1: invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4569
4570 case TRUTH_ORIF_EXPR:
4571 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4572 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4573 return build2_loc (loc, code: TRUTH_ANDIF_EXPR, type,
4574 arg0: invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4575 arg1: invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4576
4577 case TRUTH_NOT_EXPR:
4578 return TREE_OPERAND (arg, 0);
4579
4580 case COND_EXPR:
4581 {
4582 tree arg1 = TREE_OPERAND (arg, 1);
4583 tree arg2 = TREE_OPERAND (arg, 2);
4584
4585 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4586 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4587
4588 /* A COND_EXPR may have a throw as one operand, which
4589 then has void type. Just leave void operands
4590 as they are. */
4591 return build3_loc (loc, code: COND_EXPR, type, TREE_OPERAND (arg, 0),
4592 VOID_TYPE_P (TREE_TYPE (arg1))
4593 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4594 VOID_TYPE_P (TREE_TYPE (arg2))
4595 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4596 }
4597
4598 case COMPOUND_EXPR:
4599 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4600 return build2_loc (loc, code: COMPOUND_EXPR, type,
4601 TREE_OPERAND (arg, 0),
4602 arg1: invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4603
4604 case NON_LVALUE_EXPR:
4605 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4606 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4607
4608 CASE_CONVERT:
4609 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4610 return build1_loc (loc, code: TRUTH_NOT_EXPR, type, arg1: arg);
4611
4612 /* fall through */
4613
4614 case FLOAT_EXPR:
4615 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4616 return build1_loc (loc, TREE_CODE (arg), type,
4617 arg1: invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4618
4619 case BIT_AND_EXPR:
4620 if (!integer_onep (TREE_OPERAND (arg, 1)))
4621 return NULL_TREE;
4622 return build2_loc (loc, code: EQ_EXPR, type, arg0: arg, arg1: build_int_cst (type, 0));
4623
4624 case SAVE_EXPR:
4625 return build1_loc (loc, code: TRUTH_NOT_EXPR, type, arg1: arg);
4626
4627 case CLEANUP_POINT_EXPR:
4628 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4629 return build1_loc (loc, code: CLEANUP_POINT_EXPR, type,
4630 arg1: invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4631
4632 default:
4633 return NULL_TREE;
4634 }
4635}
4636
4637/* Fold the truth-negation of ARG. This never alters ARG itself. We
4638 assume that ARG is an operation that returns a truth value (0 or 1
4639 for scalars, 0 or -1 for vectors). Return the folded expression if
4640 folding is successful. Otherwise, return NULL_TREE. */
4641
4642static tree
4643fold_invert_truthvalue (location_t loc, tree arg)
4644{
4645 tree type = TREE_TYPE (arg);
4646 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4647 ? BIT_NOT_EXPR
4648 : TRUTH_NOT_EXPR,
4649 type, arg);
4650}
4651
4652/* Return a simplified tree node for the truth-negation of ARG. This
4653 never alters ARG itself. We assume that ARG is an operation that
4654 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4655
4656tree
4657invert_truthvalue_loc (location_t loc, tree arg)
4658{
4659 if (TREE_CODE (arg) == ERROR_MARK)
4660 return arg;
4661
4662 tree type = TREE_TYPE (arg);
4663 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4664 ? BIT_NOT_EXPR
4665 : TRUTH_NOT_EXPR,
4666 type, arg);
4667}
4668
4669/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4670 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4671 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4672 is the original memory reference used to preserve the alias set of
4673 the access. */
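/* A sketch of the result (the numbers are hypothetical): asking for 8
   bits starting at bit 16 of INNER with an unsigned TYPE yields roughly

     BIT_FIELD_REF <inner, 8, 16>

   possibly wrapped in a conversion when TYPE does not have exactly the
   requested precision and signedness, and reduced to a plain conversion
   of INNER when the whole object is requested at bit position 0.  */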
4674
4675static tree
4676make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4677 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4678 int unsignedp, int reversep)
4679{
4680 tree result, bftype;
4681
4682 /* Attempt not to lose the access path if possible. */
4683 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4684 {
4685 tree ninner = TREE_OPERAND (orig_inner, 0);
4686 machine_mode nmode;
4687 poly_int64 nbitsize, nbitpos;
4688 tree noffset;
4689 int nunsignedp, nreversep, nvolatilep = 0;
4690 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4691 &noffset, &nmode, &nunsignedp,
4692 &nreversep, &nvolatilep);
4693 if (base == inner
4694 && noffset == NULL_TREE
4695 && known_subrange_p (pos1: bitpos, size1: bitsize, pos2: nbitpos, size2: nbitsize)
4696 && !reversep
4697 && !nreversep
4698 && !nvolatilep)
4699 {
4700 inner = ninner;
4701 bitpos -= nbitpos;
4702 }
4703 }
4704
4705 alias_set_type iset = get_alias_set (orig_inner);
4706 if (iset == 0 && get_alias_set (inner) != iset)
4707 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4708 build_fold_addr_expr (inner),
4709 build_int_cst (ptr_type_node, 0));
4710
4711 if (known_eq (bitpos, 0) && !reversep)
4712 {
4713 tree size = TYPE_SIZE (TREE_TYPE (inner));
4714 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4715 || POINTER_TYPE_P (TREE_TYPE (inner)))
4716 && tree_fits_shwi_p (size)
4717 && tree_to_shwi (size) == bitsize)
4718 return fold_convert_loc (loc, type, arg: inner);
4719 }
4720
4721 bftype = type;
4722 if (TYPE_PRECISION (bftype) != bitsize
4723 || TYPE_UNSIGNED (bftype) == !unsignedp)
4724 bftype = build_nonstandard_integer_type (bitsize, 0);
4725
4726 result = build3_loc (loc, code: BIT_FIELD_REF, type: bftype, arg0: inner,
4727 bitsize_int (bitsize), bitsize_int (bitpos));
4728 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4729
4730 if (bftype != type)
4731 result = fold_convert_loc (loc, type, arg: result);
4732
4733 return result;
4734}
4735
4736/* Optimize a bit-field compare.
4737
4738 There are two cases: First is a compare against a constant and the
4739 second is a comparison of two items where the fields are at the same
4740 bit position relative to the start of a chunk (byte, halfword, word)
4741 large enough to contain it. In these cases we can avoid the shift
4742 implicit in bitfield extractions.
4743
4744 For constants, we emit a compare of the shifted constant with the
4745 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4746 compared. For two fields at the same position, we do the ANDs with the
4747 similar mask and compare the result of the ANDs.
4748
4749 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4750 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4751 are the left and right operands of the comparison, respectively.
4752
4753 If the optimization described above can be done, we return the resulting
4754 tree. Otherwise we return zero. */
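/* A sketch of the constant case (the layout is hypothetical and assumes
   a little-endian target where the byte containing the field is the
   chosen mode):

     struct s { unsigned a : 3; unsigned b : 5; } x;
     ...
     x.b == 7

   can be rewritten along the lines of

     (BYTE_CONTAINING_B & 0xf8) == (7 << 3)

   where BYTE_CONTAINING_B stands for the BIT_FIELD_REF of the byte that
   holds the field, i.e. one masked load and one compare, with no
   explicit extraction of the bit-field.  */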
4755
4756static tree
4757optimize_bit_field_compare (location_t loc, enum tree_code code,
4758 tree compare_type, tree lhs, tree rhs)
4759{
4760 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4761 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4762 tree type = TREE_TYPE (lhs);
4763 tree unsigned_type;
4764 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4765 machine_mode lmode, rmode;
4766 scalar_int_mode nmode;
4767 int lunsignedp, runsignedp;
4768 int lreversep, rreversep;
4769 int lvolatilep = 0, rvolatilep = 0;
4770 tree linner, rinner = NULL_TREE;
4771 tree mask;
4772 tree offset;
4773
4774 /* Get all the information about the extractions being done. If the bit size
4775 is the same as the size of the underlying object, we aren't doing an
4776 extraction at all and so can do nothing. We also don't want to
4777 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4778 then will no longer be able to replace it. */
4779 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4780 &lunsignedp, &lreversep, &lvolatilep);
4781 if (linner == lhs
4782 || !known_size_p (a: plbitsize)
4783 || !plbitsize.is_constant (const_value: &lbitsize)
4784 || !plbitpos.is_constant (const_value: &lbitpos)
4785 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4786 || offset != 0
4787 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4788 || lvolatilep)
4789 return 0;
4790
4791 if (const_p)
4792 rreversep = lreversep;
4793 else
4794 {
4795 /* If this is not a constant, we can only do something if bit positions,
4796 sizes, signedness and storage order are the same. */
4797 rinner
4798 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4799 &runsignedp, &rreversep, &rvolatilep);
4800
4801 if (rinner == rhs
4802 || maybe_ne (a: lbitpos, b: rbitpos)
4803 || maybe_ne (a: lbitsize, b: rbitsize)
4804 || lunsignedp != runsignedp
4805 || lreversep != rreversep
4806 || offset != 0
4807 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4808 || rvolatilep)
4809 return 0;
4810 }
4811
4812 /* Honor the C++ memory model and mimic what RTL expansion does. */
4813 poly_uint64 bitstart = 0;
4814 poly_uint64 bitend = 0;
4815 if (TREE_CODE (lhs) == COMPONENT_REF)
4816 {
4817 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4818 if (!plbitpos.is_constant (const_value: &lbitpos) || offset != NULL_TREE)
4819 return 0;
4820 }
4821
4822 /* See if we can find a mode to refer to this field. We should be able to,
4823 but fail if we can't. */
4824 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4825 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4826 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4827 TYPE_ALIGN (TREE_TYPE (rinner))),
4828 BITS_PER_WORD, false, &nmode))
4829 return 0;
4830
4831 /* Set signed and unsigned types of the precision of this mode for the
4832 shifts below. */
4833 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4834
4835 /* Compute the bit position and size for the new reference and our offset
4836 within it. If the new reference is the same size as the original, we
4837 won't optimize anything, so return zero. */
4838 nbitsize = GET_MODE_BITSIZE (mode: nmode);
4839 nbitpos = lbitpos & ~ (nbitsize - 1);
4840 lbitpos -= nbitpos;
4841 if (nbitsize == lbitsize)
4842 return 0;
4843
4844 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4845 lbitpos = nbitsize - lbitsize - lbitpos;
4846
4847 /* Make the mask to be used against the extracted field. */
4848 mask = build_int_cst_type (unsigned_type, -1);
4849 mask = const_binop (code: LSHIFT_EXPR, arg1: mask, size_int (nbitsize - lbitsize));
4850 mask = const_binop (code: RSHIFT_EXPR, arg1: mask,
4851 size_int (nbitsize - lbitsize - lbitpos));
4852
4853 if (! const_p)
4854 {
4855 if (nbitpos < 0)
4856 return 0;
4857
4858 /* If not comparing with constant, just rework the comparison
4859 and return. */
4860 tree t1 = make_bit_field_ref (loc, inner: linner, orig_inner: lhs, type: unsigned_type,
4861 bitsize: nbitsize, bitpos: nbitpos, unsignedp: 1, reversep: lreversep);
4862 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4863 tree t2 = make_bit_field_ref (loc, inner: rinner, orig_inner: rhs, type: unsigned_type,
4864 bitsize: nbitsize, bitpos: nbitpos, unsignedp: 1, reversep: rreversep);
4865 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4866 return fold_build2_loc (loc, code, compare_type, t1, t2);
4867 }
4868
4869 /* Otherwise, we are handling the constant case. See if the constant is too
4870 big for the field. Warn and return a tree for 0 (false) if so. We do
4871 this not only for its own sake, but to avoid having to test for this
4872 error case below. If we didn't, we might generate wrong code.
4873
4874 For unsigned fields, the constant shifted right by the field length should
4875 be all zero. For signed fields, the high-order bits should agree with
4876 the sign bit. */
4877
4878 if (lunsignedp)
4879 {
4880 if (wi::lrshift (x: wi::to_wide (t: rhs), y: lbitsize) != 0)
4881 {
4882 warning (0, "comparison is always %d due to width of bit-field",
4883 code == NE_EXPR);
4884 return constant_boolean_node (code == NE_EXPR, compare_type);
4885 }
4886 }
4887 else
4888 {
4889 wide_int tem = wi::arshift (x: wi::to_wide (t: rhs), y: lbitsize - 1);
4890 if (tem != 0 && tem != -1)
4891 {
4892 warning (0, "comparison is always %d due to width of bit-field",
4893 code == NE_EXPR);
4894 return constant_boolean_node (code == NE_EXPR, compare_type);
4895 }
4896 }
4897
4898 if (nbitpos < 0)
4899 return 0;
4900
4901 /* Single-bit compares should always be against zero. */
4902 if (lbitsize == 1 && ! integer_zerop (rhs))
4903 {
4904 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4905 rhs = build_int_cst (type, 0);
4906 }
4907
4908 /* Make a new bitfield reference, shift the constant over the
4909 appropriate number of bits and mask it with the computed mask
4910 (in case this was a signed field). If we changed it, make a new one. */
4911 lhs = make_bit_field_ref (loc, inner: linner, orig_inner: lhs, type: unsigned_type,
4912 bitsize: nbitsize, bitpos: nbitpos, unsignedp: 1, reversep: lreversep);
4913
4914 rhs = const_binop (code: BIT_AND_EXPR,
4915 arg1: const_binop (code: LSHIFT_EXPR,
4916 arg1: fold_convert_loc (loc, type: unsigned_type, arg: rhs),
4917 size_int (lbitpos)),
4918 arg2: mask);
4919
4920 lhs = build2_loc (loc, code, type: compare_type,
4921 arg0: build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), arg1: rhs);
4922 return lhs;
4923}
4924
4925/* Subroutine for fold_truth_andor_1: decode a field reference.
4926
4927 If EXP is a comparison reference, we return the innermost reference.
4928
4929 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4930 set to the starting bit number.
4931
4932 If the innermost field can be completely contained in a mode-sized
4933 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4934
4935    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4936 otherwise it is not changed.
4937
4938 *PUNSIGNEDP is set to the signedness of the field.
4939
4940 *PREVERSEP is set to the storage order of the field.
4941
4942 *PMASK is set to the mask used. This is either contained in a
4943 BIT_AND_EXPR or derived from the width of the field.
4944
4945 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4946
4947 Return 0 if this is not a component reference or is one that we can't
4948 do anything with. */
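/* An illustrative call (the field and mask are made up): for EXP being
   the tree for

     (unsigned char) (x.b & 0x1f)

   with B an 8-bit integral field of X, the function returns the
   innermost reference for X, sets *PBITSIZE and *PBITPOS to the size
   and position of B, sets *PAND_MASK to 0x1f and computes *PMASK as the
   field-width mask combined with it.  */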
4949
4950static tree
4951decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4952 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4953 int *punsignedp, int *preversep, int *pvolatilep,
4954 tree *pmask, tree *pand_mask)
4955{
4956 tree exp = *exp_;
4957 tree outer_type = 0;
4958 tree and_mask = 0;
4959 tree mask, inner, offset;
4960 tree unsigned_type;
4961 unsigned int precision;
4962
4963 /* All the optimizations using this function assume integer fields.
4964 There are problems with FP fields since the type_for_size call
4965 below can fail for, e.g., XFmode. */
4966 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4967 return NULL_TREE;
4968
4969 /* We are interested in the bare arrangement of bits, so strip everything
4970 that doesn't affect the machine mode. However, record the type of the
4971 outermost expression if it may matter below. */
4972 if (CONVERT_EXPR_P (exp)
4973 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4974 outer_type = TREE_TYPE (exp);
4975 STRIP_NOPS (exp);
4976
4977 if (TREE_CODE (exp) == BIT_AND_EXPR)
4978 {
4979 and_mask = TREE_OPERAND (exp, 1);
4980 exp = TREE_OPERAND (exp, 0);
4981 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4982 if (TREE_CODE (and_mask) != INTEGER_CST)
4983 return NULL_TREE;
4984 }
4985
4986 poly_int64 poly_bitsize, poly_bitpos;
4987 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4988 pmode, punsignedp, preversep, pvolatilep);
4989 if ((inner == exp && and_mask == 0)
4990 || !poly_bitsize.is_constant (const_value: pbitsize)
4991 || !poly_bitpos.is_constant (const_value: pbitpos)
4992 || *pbitsize < 0
4993 || offset != 0
4994 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4995 /* Reject out-of-bound accesses (PR79731). */
4996 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4997 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4998 *pbitpos + *pbitsize) < 0))
4999 return NULL_TREE;
5000
5001 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
5002 if (unsigned_type == NULL_TREE)
5003 return NULL_TREE;
5004
5005 *exp_ = exp;
5006
5007 /* If the number of bits in the reference is the same as the bitsize of
5008 the outer type, then the outer type gives the signedness. Otherwise
5009 (in case of a small bitfield) the signedness is unchanged. */
5010 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
5011 *punsignedp = TYPE_UNSIGNED (outer_type);
5012
5013 /* Compute the mask to access the bitfield. */
5014 precision = TYPE_PRECISION (unsigned_type);
5015
5016 mask = build_int_cst_type (unsigned_type, -1);
5017
5018 mask = const_binop (code: LSHIFT_EXPR, arg1: mask, size_int (precision - *pbitsize));
5019 mask = const_binop (code: RSHIFT_EXPR, arg1: mask, size_int (precision - *pbitsize));
5020
5021 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
5022 if (and_mask != 0)
5023 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
5024 fold_convert_loc (loc, type: unsigned_type, arg: and_mask), mask);
5025
5026 *pmask = mask;
5027 *pand_mask = and_mask;
5028 return inner;
5029}
5030
5031/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
5032    bit positions and the type of MASK is signed.  */
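/* For example, with SIZE == 3 this returns true for a MASK of value 7
   (0b111) in a signed type of sufficient precision, and false for a
   value such as 6 or for any MASK of unsigned type.  */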
5033
5034static bool
5035all_ones_mask_p (const_tree mask, unsigned int size)
5036{
5037 tree type = TREE_TYPE (mask);
5038 unsigned int precision = TYPE_PRECISION (type);
5039
5040 /* If this function returns true when the type of the mask is
5041 UNSIGNED, then there will be errors. In particular see
5042 gcc.c-torture/execute/990326-1.c. There does not appear to be
5043 any documentation paper trail as to why this is so. But the pre
5044 wide-int worked with that restriction and it has been preserved
5045 here. */
5046 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
5047 return false;
5048
5049 return wi::mask (width: size, negate_p: false, precision) == wi::to_wide (t: mask);
5050}
5051
5052 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
5053 represents the sign bit of EXP's type. If EXP represents a sign
5054 or zero extension, also test VAL against the unextended type.
5055 The return value is the (sub)expression whose sign bit is VAL,
5056 or NULL_TREE otherwise. */
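/* Roughly: for a 32-bit signed EXP, VAL must be the constant with only
   bit 31 set; if EXP is a NOP_EXPR extension of a 16-bit value, a VAL
   with only bit 15 set is also accepted and the narrower operand is
   returned.  */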
5057
5058tree
5059sign_bit_p (tree exp, const_tree val)
5060{
5061 int width;
5062 tree t;
5063
5064 /* Tree EXP must have an integral type. */
5065 t = TREE_TYPE (exp);
5066 if (! INTEGRAL_TYPE_P (t))
5067 return NULL_TREE;
5068
5069 /* Tree VAL must be an integer constant. */
5070 if (TREE_CODE (val) != INTEGER_CST
5071 || TREE_OVERFLOW (val))
5072 return NULL_TREE;
5073
5074 width = TYPE_PRECISION (t);
5075 if (wi::only_sign_bit_p (wi::to_wide (t: val), width))
5076 return exp;
5077
5078 /* Handle extension from a narrower type. */
5079 if (TREE_CODE (exp) == NOP_EXPR
5080 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
5081 return sign_bit_p (TREE_OPERAND (exp, 0), val);
5082
5083 return NULL_TREE;
5084}
5085
5086/* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
5087 operand is simple enough to be evaluated unconditionally. */
5088
5089static bool
5090simple_operand_p (const_tree exp)
5091{
5092 /* Strip any conversions that don't change the machine mode. */
5093 STRIP_NOPS (exp);
5094
5095 return (CONSTANT_CLASS_P (exp)
5096 || TREE_CODE (exp) == SSA_NAME
5097 || (DECL_P (exp)
5098 && ! TREE_ADDRESSABLE (exp)
5099 && ! TREE_THIS_VOLATILE (exp)
5100 && ! DECL_NONLOCAL (exp)
5101 /* Don't regard global variables as simple. They may be
5102 allocated in ways unknown to the compiler (shared memory,
5103 #pragma weak, etc). */
5104 && ! TREE_PUBLIC (exp)
5105 && ! DECL_EXTERNAL (exp)
5106 /* Weakrefs are not safe to be read, since they can be NULL.
5107 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
5108 have DECL_WEAK flag set. */
5109 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
5110 /* Loading a static variable is unduly expensive, but global
5111 registers aren't expensive. */
5112 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
5113}
5114
5115/* Determine if an operand is simple enough to be evaluated unconditionally.
5116 In addition to simple_operand_p, we assume that comparisons, conversions,
5117 and logic-not operations are simple, if their operands are simple, too. */
5118
5119bool
5120simple_condition_p (tree exp)
5121{
5122 enum tree_code code;
5123
5124 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (expr: exp))
5125 return false;
5126
5127 while (CONVERT_EXPR_P (exp))
5128 exp = TREE_OPERAND (exp, 0);
5129
5130 code = TREE_CODE (exp);
5131
5132 if (TREE_CODE_CLASS (code) == tcc_comparison)
5133 return (simple_operand_p (TREE_OPERAND (exp, 0))
5134 && simple_operand_p (TREE_OPERAND (exp, 1)));
5135
5136 if (code == TRUTH_NOT_EXPR)
5137 return simple_condition_p (TREE_OPERAND (exp, 0));
5138
5139 return simple_operand_p (exp);
5140}
5141
5142
5143/* The following functions are subroutines to fold_range_test and allow it to
5144 try to change a logical combination of comparisons into a range test.
5145
5146 For example, both
5147 X == 2 || X == 3 || X == 4 || X == 5
5148 and
5149 X >= 2 && X <= 5
5150 are converted to
5151 (unsigned) (X - 2) <= 3
5152
5153 We describe each set of comparisons as being either inside or outside
5154 a range, using a variable named like IN_P, and then describe the
5155 range with a lower and upper bound. If one of the bounds is omitted,
5156 it represents either the highest or lowest value of the type.
5157
5158 In the comments below, we represent a range by two numbers in brackets
5159 preceded by a "+" to designate being inside that range, or a "-" to
5160 designate being outside that range, so the condition can be inverted by
5161 flipping the prefix. An omitted bound is represented by a "-". For
5162 example, "- [-, 10]" means being outside the range starting at the lowest
5163 possible value and ending at 10, in other words, being greater than 10.
5164 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5165 always false.
5166
5167 We set up things so that the missing bounds are handled in a consistent
5168 manner so neither a missing bound nor "true" and "false" need to be
5169 handled using a special case. */
5170
5171/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5172 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5173 and UPPER1_P are nonzero if the respective argument is an upper bound
5174 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5175 must be specified for a comparison. ARG1 will be converted to ARG0's
5176 type if both are specified. */
5177
5178static tree
5179range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5180 tree arg1, int upper1_p)
5181{
5182 tree tem;
5183 int result;
5184 int sgn0, sgn1;
5185
5186 /* If neither arg represents infinity, do the normal operation.
5187 Else, if not a comparison, return infinity. Else handle the special
5188 comparison rules. Note that most of the cases below won't occur, but
5189 are handled for consistency. */
5190
5191 if (arg0 != 0 && arg1 != 0)
5192 {
5193 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5194 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5195 STRIP_NOPS (tem);
5196 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5197 }
5198
5199 if (TREE_CODE_CLASS (code) != tcc_comparison)
5200 return 0;
5201
5202 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5203 for neither. In real maths, we cannot assume open ended ranges are
5204 the same. But, this is computer arithmetic, where numbers are finite.
5205 We can therefore make the transformation of any unbounded range with
5206 the value Z, Z being greater than any representable number. This permits
5207 us to treat unbounded ranges as equal. */
5208 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5209 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5210 switch (code)
5211 {
5212 case EQ_EXPR:
5213 result = sgn0 == sgn1;
5214 break;
5215 case NE_EXPR:
5216 result = sgn0 != sgn1;
5217 break;
5218 case LT_EXPR:
5219 result = sgn0 < sgn1;
5220 break;
5221 case LE_EXPR:
5222 result = sgn0 <= sgn1;
5223 break;
5224 case GT_EXPR:
5225 result = sgn0 > sgn1;
5226 break;
5227 case GE_EXPR:
5228 result = sgn0 >= sgn1;
5229 break;
5230 default:
5231 gcc_unreachable ();
5232 }
5233
5234 return constant_boolean_node (result, type);
5235}
5236
5237/* Helper routine for make_range. Perform one step for it, return
5238 new expression if the loop should continue or NULL_TREE if it should
5239 stop. */
5240
5241tree
5242make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5243 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5244 bool *strict_overflow_p)
5245{
5246 tree arg0_type = TREE_TYPE (arg0);
5247 tree n_low, n_high, low = *p_low, high = *p_high;
5248 int in_p = *p_in_p, n_in_p;
5249
5250 switch (code)
5251 {
5252 case TRUTH_NOT_EXPR:
5253 /* We can only do something if the range is testing for zero. */
5254 if (low == NULL_TREE || high == NULL_TREE
5255 || ! integer_zerop (low) || ! integer_zerop (high))
5256 return NULL_TREE;
5257 *p_in_p = ! in_p;
5258 return arg0;
5259
5260 case EQ_EXPR: case NE_EXPR:
5261 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5262 /* We can only do something if the range is testing for zero
5263 and if the second operand is an integer constant. Note that
5264 saying something is "in" the range we make is done by
5265 complementing IN_P since it will set in the initial case of
5266 being not equal to zero; "out" is leaving it alone. */
5267 if (low == NULL_TREE || high == NULL_TREE
5268 || ! integer_zerop (low) || ! integer_zerop (high)
5269 || TREE_CODE (arg1) != INTEGER_CST)
5270 return NULL_TREE;
5271
5272 switch (code)
5273 {
5274 case NE_EXPR: /* - [c, c] */
5275 low = high = arg1;
5276 break;
5277 case EQ_EXPR: /* + [c, c] */
5278 in_p = ! in_p, low = high = arg1;
5279 break;
5280 case GT_EXPR: /* - [-, c] */
5281 low = 0, high = arg1;
5282 break;
5283 case GE_EXPR: /* + [c, -] */
5284 in_p = ! in_p, low = arg1, high = 0;
5285 break;
5286 case LT_EXPR: /* - [c, -] */
5287 low = arg1, high = 0;
5288 break;
5289 case LE_EXPR: /* + [-, c] */
5290 in_p = ! in_p, low = 0, high = arg1;
5291 break;
5292 default:
5293 gcc_unreachable ();
5294 }
5295
5296 /* If this is an unsigned comparison, we also know that EXP is
5297 greater than or equal to zero. We base the range tests we make
5298 on that fact, so we record it here so we can parse existing
5299 range tests. We test arg0_type since often the return type
5300 of, e.g. EQ_EXPR, is boolean. */
5301 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5302 {
5303 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5304 in_p, low, high, 1,
5305 build_int_cst (arg0_type, 0),
5306 NULL_TREE))
5307 return NULL_TREE;
5308
5309 in_p = n_in_p, low = n_low, high = n_high;
5310
5311 /* If the high bound is missing, but we have a nonzero low
5312 bound, reverse the range so it goes from zero to the low bound
5313 minus 1. */
5314 if (high == 0 && low && ! integer_zerop (low))
5315 {
5316 in_p = ! in_p;
5317 high = range_binop (code: MINUS_EXPR, NULL_TREE, arg0: low, upper0_p: 0,
5318 arg1: build_int_cst (TREE_TYPE (low), 1), upper1_p: 0);
5319 low = build_int_cst (arg0_type, 0);
5320 }
5321 }
5322
5323 *p_low = low;
5324 *p_high = high;
5325 *p_in_p = in_p;
5326 return arg0;
5327
5328 case NEGATE_EXPR:
5329 /* If flag_wrapv is set and ARG0_TYPE is signed, make sure LOW and HIGH
5330 are non-NULL, so that the normalize step below will do the right thing. */
5331 if (!TYPE_UNSIGNED (arg0_type)
5332 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5333 {
5334 if (low == NULL_TREE)
5335 low = TYPE_MIN_VALUE (arg0_type);
5336 if (high == NULL_TREE)
5337 high = TYPE_MAX_VALUE (arg0_type);
5338 }
5339
5340 /* (-x) IN [a,b] -> x in [-b, -a] */
5341 n_low = range_binop (code: MINUS_EXPR, type: exp_type,
5342 arg0: build_int_cst (exp_type, 0),
5343 upper0_p: 0, arg1: high, upper1_p: 1);
5344 n_high = range_binop (code: MINUS_EXPR, type: exp_type,
5345 arg0: build_int_cst (exp_type, 0),
5346 upper0_p: 0, arg1: low, upper1_p: 0);
5347 if (n_high != 0 && TREE_OVERFLOW (n_high))
5348 return NULL_TREE;
5349 goto normalize;
5350
5351 case BIT_NOT_EXPR:
5352 /* ~ X -> -X - 1 */
5353 return build2_loc (loc, code: MINUS_EXPR, type: exp_type, arg0: negate_expr (t: arg0),
5354 arg1: build_int_cst (exp_type, 1));
5355
5356 case PLUS_EXPR:
5357 case MINUS_EXPR:
5358 if (TREE_CODE (arg1) != INTEGER_CST)
5359 return NULL_TREE;
5360
5361 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5362 move a constant to the other side. */
5363 if (!TYPE_UNSIGNED (arg0_type)
5364 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5365 return NULL_TREE;
5366
5367 /* If EXP is signed, any overflow in the computation is undefined,
5368 so we don't worry about it so long as our computations on
5369 the bounds don't overflow. For unsigned, overflow is defined
5370 and this is exactly the right thing. */
5371 n_low = range_binop (code: code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5372 type: arg0_type, arg0: low, upper0_p: 0, arg1, upper1_p: 0);
5373 n_high = range_binop (code: code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5374 type: arg0_type, arg0: high, upper0_p: 1, arg1, upper1_p: 0);
5375 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5376 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5377 return NULL_TREE;
5378
5379 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5380 *strict_overflow_p = true;
5381
5382 normalize:
5383 /* Check for an unsigned range which has wrapped around the maximum
5384 value thus making n_high < n_low, and normalize it. */
5385 if (n_low && n_high && tree_int_cst_lt (t1: n_high, t2: n_low))
5386 {
5387 low = range_binop (code: PLUS_EXPR, type: arg0_type, arg0: n_high, upper0_p: 0,
5388 arg1: build_int_cst (TREE_TYPE (n_high), 1), upper1_p: 0);
5389 high = range_binop (code: MINUS_EXPR, type: arg0_type, arg0: n_low, upper0_p: 0,
5390 arg1: build_int_cst (TREE_TYPE (n_low), 1), upper1_p: 0);
5391
5392 /* If the range is of the form +/- [ x+1, x ], we won't
5393 be able to normalize it. But then, it represents the
5394 whole range or the empty set, so make it
5395 +/- [ -, - ]. */
5396 if (tree_int_cst_equal (n_low, low)
5397 && tree_int_cst_equal (n_high, high))
5398 low = high = 0;
5399 else
5400 in_p = ! in_p;
5401 }
5402 else
5403 low = n_low, high = n_high;
5404
5405 *p_low = low;
5406 *p_high = high;
5407 *p_in_p = in_p;
5408 return arg0;
5409
5410 CASE_CONVERT:
5411 case NON_LVALUE_EXPR:
5412 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5413 return NULL_TREE;
5414
5415 if (! INTEGRAL_TYPE_P (arg0_type)
5416 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5417 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5418 return NULL_TREE;
5419
5420 n_low = low, n_high = high;
5421
5422 if (n_low != 0)
5423 n_low = fold_convert_loc (loc, type: arg0_type, arg: n_low);
5424
5425 if (n_high != 0)
5426 n_high = fold_convert_loc (loc, type: arg0_type, arg: n_high);
5427
5428 /* If we're converting ARG0 from an unsigned type to EXP's
5429 signed type, we will be doing the comparison as unsigned.
5430 The tests above have already verified that LOW and HIGH
5431 are both positive.
5432
5433 So we have to ensure that we will handle large unsigned
5434 values the same way that the current signed bounds treat
5435 negative values. */
5436
5437 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5438 {
5439 tree high_positive;
5440 tree equiv_type;
5441 /* For fixed-point modes, we need to pass the saturating flag
5442 as the 2nd parameter. */
5443 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5444 equiv_type
5445 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5446 TYPE_SATURATING (arg0_type));
5447 else if (TREE_CODE (arg0_type) == BITINT_TYPE)
5448 equiv_type = arg0_type;
5449 else
5450 equiv_type
5451 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5452
5453 /* A range without an upper bound is, naturally, unbounded.
5454 Since convert would have cropped a very large value, use
5455 the max value for the destination type. */
5456 high_positive
5457 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5458 : TYPE_MAX_VALUE (arg0_type);
5459
5460 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5461 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5462 fold_convert_loc (loc, type: arg0_type,
5463 arg: high_positive),
5464 build_int_cst (arg0_type, 1));
5465
5466 /* If the low bound is specified, "and" the range with the
5467 range for which the original unsigned value will be
5468 positive. */
5469 if (low != 0)
5470 {
5471 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5472 1, fold_convert_loc (loc, type: arg0_type,
5473 integer_zero_node),
5474 high_positive))
5475 return NULL_TREE;
5476
5477 in_p = (n_in_p == in_p);
5478 }
5479 else
5480 {
5481 /* Otherwise, "or" the range with the range of the input
5482 that will be interpreted as negative. */
5483 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5484 1, fold_convert_loc (loc, type: arg0_type,
5485 integer_zero_node),
5486 high_positive))
5487 return NULL_TREE;
5488
5489 in_p = (in_p != n_in_p);
5490 }
5491 }
5492
5493 /* Otherwise, if we are converting ARG0 from a signed type to EXP's
5494 unsigned type, we will do the comparison as signed. If
5495 high is non-NULL, we punt above if it doesn't fit in the signed
5496 type, so if we get through here, +[-, high] or +[low, high] are
5497 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5498 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5499 high is not, the +[low, -] range is equivalent to union of
5500 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5501 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5502 low being 0, which should be treated as [-, -]. */
5503 else if (TYPE_UNSIGNED (exp_type)
5504 && !TYPE_UNSIGNED (arg0_type)
5505 && low
5506 && !high)
5507 {
5508 if (integer_zerop (low))
5509 n_low = NULL_TREE;
5510 else
5511 {
5512 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5513 n_low, build_int_cst (arg0_type, -1));
5514 n_low = build_zero_cst (arg0_type);
5515 in_p = !in_p;
5516 }
5517 }
5518
5519 *p_low = n_low;
5520 *p_high = n_high;
5521 *p_in_p = in_p;
5522 return arg0;
5523
5524 default:
5525 return NULL_TREE;
5526 }
5527}
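
/* A small worked example of one step, assuming a wrapping unsigned
   ARG0_TYPE for concreteness: if the range so far says X - 5 lies in
   [0, 9], the MINUS_EXPR case adds the constant back to both bounds,
   giving X in [5, 14]; had that addition wrapped past the type maximum
   so that n_high < n_low, the "normalize" code above would flip IN_P
   and use the complementary range instead.  */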
5528
5529/* Given EXP, a logical expression, set the range it is testing into
5530 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5531 actually being tested. *PLOW and *PHIGH will be made of the same
5532 type as the returned expression. If EXP is not a comparison, we
5533 will most likely not be returning a useful value and range. Set
5534 *STRICT_OVERFLOW_P to true if the return value is only valid
5535 because signed overflow is undefined; otherwise, do not change
5536 *STRICT_OVERFLOW_P. */
5537
5538tree
5539make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5540 bool *strict_overflow_p)
5541{
5542 enum tree_code code;
5543 tree arg0, arg1 = NULL_TREE;
5544 tree exp_type, nexp;
5545 int in_p;
5546 tree low, high;
5547 location_t loc = EXPR_LOCATION (exp);
5548
5549 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5550 and see if we can refine the range. Some of the cases below may not
5551 happen, but it doesn't seem worth worrying about this. We keep
5552 iterating as long as make_range_step refines the range; when it
5553 returns NULL_TREE we stop. */
5554
5555 in_p = 0;
5556 low = high = build_int_cst (TREE_TYPE (exp), 0);
5557
5558 while (1)
5559 {
5560 code = TREE_CODE (exp);
5561 exp_type = TREE_TYPE (exp);
5562 arg0 = NULL_TREE;
5563
5564 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5565 {
5566 if (TREE_OPERAND_LENGTH (exp) > 0)
5567 arg0 = TREE_OPERAND (exp, 0);
5568 if (TREE_CODE_CLASS (code) == tcc_binary
5569 || TREE_CODE_CLASS (code) == tcc_comparison
5570 || (TREE_CODE_CLASS (code) == tcc_expression
5571 && TREE_OPERAND_LENGTH (exp) > 1))
5572 arg1 = TREE_OPERAND (exp, 1);
5573 }
5574 if (arg0 == NULL_TREE)
5575 break;
5576
5577 nexp = make_range_step (loc, code, arg0, arg1, exp_type, p_low: &low,
5578 p_high: &high, p_in_p: &in_p, strict_overflow_p);
5579 if (nexp == NULL_TREE)
5580 break;
5581 exp = nexp;
5582 }
5583
5584 /* If EXP is a constant, we can evaluate whether this is true or false. */
5585 if (TREE_CODE (exp) == INTEGER_CST)
5586 {
5587 in_p = in_p == (integer_onep (range_binop (code: GE_EXPR, integer_type_node,
5588 arg0: exp, upper0_p: 0, arg1: low, upper1_p: 0))
5589 && integer_onep (range_binop (code: LE_EXPR, integer_type_node,
5590 arg0: exp, upper0_p: 1, arg1: high, upper1_p: 1)));
5591 low = high = 0;
5592 exp = 0;
5593 }
5594
5595 *pin_p = in_p, *plow = low, *phigh = high;
5596 return exp;
5597}
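
/* A rough example of the loop above, assuming C has an unsigned integer
   type: for EXP = (c < 10) the comparison step records "- [10, -]" for
   C, the unsignedness handling in make_range_step then intersects that
   with [0, -], and make_range returns C with *PIN_P == 1 and the range
   [0, 9].  */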
5598
5599 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5600 a bitwise check, i.e. when
5601 LOW == 0xXX...X00...0
5602 HIGH == 0xXX...X11...1
5603 Return the corresponding mask in MASK and the stem in VALUE. */
5604
5605static bool
5606maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5607 tree *value)
5608{
5609 if (TREE_CODE (low) != INTEGER_CST
5610 || TREE_CODE (high) != INTEGER_CST)
5611 return false;
5612
5613 unsigned prec = TYPE_PRECISION (type);
5614 wide_int lo = wi::to_wide (t: low, prec);
5615 wide_int hi = wi::to_wide (t: high, prec);
5616
5617 wide_int end_mask = lo ^ hi;
5618 if ((end_mask & (end_mask + 1)) != 0
5619 || (lo & end_mask) != 0)
5620 return false;
5621
5622 wide_int stem_mask = ~end_mask;
5623 wide_int stem = lo & stem_mask;
5624 if (stem != (hi & stem_mask))
5625 return false;
5626
5627 *mask = wide_int_to_tree (type, cst: stem_mask);
5628 *value = wide_int_to_tree (type, cst: stem);
5629
5630 return true;
5631}
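
/* Worked example, with values picked purely for illustration:
   LOW == 0x30 and HIGH == 0x3f differ only in their trailing bits, so
   end_mask is 0x0f, the stem mask is ~0x0f and the stem is 0x30, and
   the range check can be done as (x & ~0x0f) == 0x30.  A pair such as
   LOW == 0x2f, HIGH == 0x30 is rejected because the low bound has bits
   set inside end_mask.  */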
5632
5633/* Helper routine for build_range_check and match.pd. Return the type to
5634 perform the check or NULL if it shouldn't be optimized. */
5635
5636tree
5637range_check_type (tree etype)
5638{
5639 /* First make sure that arithmetic in this type is valid, then make sure
5640 that it wraps around. */
5641 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5642 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5643
5644 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5645 {
5646 tree utype, minv, maxv;
5647
5648 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5649 for the type in question, as we rely on this here. */
5650 utype = unsigned_type_for (etype);
5651 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5652 maxv = range_binop (code: PLUS_EXPR, NULL_TREE, arg0: maxv, upper0_p: 1,
5653 arg1: build_int_cst (TREE_TYPE (maxv), 1), upper1_p: 1);
5654 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5655
5656 if (integer_zerop (range_binop (code: NE_EXPR, integer_type_node,
5657 arg0: minv, upper0_p: 1, arg1: maxv, upper1_p: 1)))
5658 etype = utype;
5659 else
5660 return NULL_TREE;
5661 }
5662 else if (POINTER_TYPE_P (etype)
5663 || TREE_CODE (etype) == OFFSET_TYPE
5664 /* Right now all BITINT_TYPEs satisfy
5665 (unsigned) max + 1 == (unsigned) min, so no need to verify
5666 that like for INTEGER_TYPEs. */
5667 || TREE_CODE (etype) == BITINT_TYPE)
5668 etype = unsigned_type_for (etype);
5669 return etype;
5670}
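
/* A sketch of the usual outcomes, not an exhaustive list: for a signed
   32-bit int this returns the corresponding unsigned type, since
   (unsigned) INT_MAX + 1 == (unsigned) INT_MIN holds there; a
   BOOLEAN_TYPE or ENUMERAL_TYPE is first replaced by the unsigned
   integer type of the same precision; pointer, offset and bit-precise
   integer types simply go through unsigned_type_for.  */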
5671
5672/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5673 type, TYPE, return an expression to test if EXP is in (or out of, depending
5674 on IN_P) the range. Return 0 if the test couldn't be created. */
5675
5676tree
5677build_range_check (location_t loc, tree type, tree exp, int in_p,
5678 tree low, tree high)
5679{
5680 tree etype = TREE_TYPE (exp), mask, value;
5681
5682 /* Disable this optimization for function pointer expressions
5683 on targets that require function pointer canonicalization. */
5684 if (targetm.have_canonicalize_funcptr_for_compare ()
5685 && POINTER_TYPE_P (etype)
5686 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5687 return NULL_TREE;
5688
5689 if (! in_p)
5690 {
5691 value = build_range_check (loc, type, exp, in_p: 1, low, high);
5692 if (value != 0)
5693 return invert_truthvalue_loc (loc, arg: value);
5694
5695 return 0;
5696 }
5697
5698 if (low == 0 && high == 0)
5699 return omit_one_operand_loc (loc, type, result: build_int_cst (type, 1), omitted: exp);
5700
5701 if (low == 0)
5702 return fold_build2_loc (loc, LE_EXPR, type, exp,
5703 fold_convert_loc (loc, type: etype, arg: high));
5704
5705 if (high == 0)
5706 return fold_build2_loc (loc, GE_EXPR, type, exp,
5707 fold_convert_loc (loc, type: etype, arg: low));
5708
5709 if (operand_equal_p (arg0: low, arg1: high, flags: 0))
5710 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5711 fold_convert_loc (loc, type: etype, arg: low));
5712
5713 if (TREE_CODE (exp) == BIT_AND_EXPR
5714 && maskable_range_p (low, high, type: etype, mask: &mask, value: &value))
5715 return fold_build2_loc (loc, EQ_EXPR, type,
5716 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5717 exp, mask),
5718 value);
5719
5720 if (integer_zerop (low))
5721 {
5722 if (! TYPE_UNSIGNED (etype))
5723 {
5724 etype = unsigned_type_for (etype);
5725 high = fold_convert_loc (loc, type: etype, arg: high);
5726 exp = fold_convert_loc (loc, type: etype, arg: exp);
5727 }
5728 return build_range_check (loc, type, exp, in_p: 1, low: 0, high);
5729 }
5730
5731 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5732 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5733 {
5734 int prec = TYPE_PRECISION (etype);
5735
5736 if (wi::mask <widest_int> (width: prec - 1, negate_p: false) == wi::to_widest (t: high))
5737 {
5738 if (TYPE_UNSIGNED (etype))
5739 {
5740 tree signed_etype = signed_type_for (etype);
5741 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5742 etype
5743 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5744 else
5745 etype = signed_etype;
5746 exp = fold_convert_loc (loc, type: etype, arg: exp);
5747 }
5748 return fold_build2_loc (loc, GT_EXPR, type, exp,
5749 build_int_cst (etype, 0));
5750 }
5751 }
5752
5753 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5754 This requires wrap-around arithmetic for the type of the expression. */
5755 etype = range_check_type (etype);
5756 if (etype == NULL_TREE)
5757 return NULL_TREE;
5758
5759 high = fold_convert_loc (loc, type: etype, arg: high);
5760 low = fold_convert_loc (loc, type: etype, arg: low);
5761 exp = fold_convert_loc (loc, type: etype, arg: exp);
5762
5763 value = const_binop (code: MINUS_EXPR, arg1: high, arg2: low);
5764
5765 if (value != 0 && !TREE_OVERFLOW (value))
5766 return build_range_check (loc, type,
5767 exp: fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5768 in_p: 1, low: build_int_cst (etype, 0), high: value);
5769
5770 return 0;
5771}
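
/* A worked instance of the final transformation above: for the test
   c >= 10 && c <= 20 on an int, LOW and HIGH are 10 and 20, VALUE
   becomes 10, and the recursive call produces the single comparison
   (unsigned int) c - 10 <= 10, assuming range_check_type picks the
   unsigned variant of the type as it normally does for int.  */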
5772
5773/* Return the predecessor of VAL in its type, handling the infinite case. */
5774
5775static tree
5776range_predecessor (tree val)
5777{
5778 tree type = TREE_TYPE (val);
5779
5780 if (INTEGRAL_TYPE_P (type)
5781 && operand_equal_p (arg0: val, TYPE_MIN_VALUE (type), flags: 0))
5782 return 0;
5783 else
5784 return range_binop (code: MINUS_EXPR, NULL_TREE, arg0: val, upper0_p: 0,
5785 arg1: build_int_cst (TREE_TYPE (val), 1), upper1_p: 0);
5786}
5787
5788/* Return the successor of VAL in its type, handling the infinite case. */
5789
5790static tree
5791range_successor (tree val)
5792{
5793 tree type = TREE_TYPE (val);
5794
5795 if (INTEGRAL_TYPE_P (type)
5796 && operand_equal_p (arg0: val, TYPE_MAX_VALUE (type), flags: 0))
5797 return 0;
5798 else
5799 return range_binop (code: PLUS_EXPR, NULL_TREE, arg0: val, upper0_p: 0,
5800 arg1: build_int_cst (TREE_TYPE (val), 1), upper1_p: 0);
5801}
5802
5803/* Given two ranges, see if we can merge them into one. Return 1 if we
5804 can, 0 if we can't. Set the output range into the specified parameters. */
5805
5806bool
5807merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5808 tree high0, int in1_p, tree low1, tree high1)
5809{
5810 bool no_overlap;
5811 int subset;
5812 int temp;
5813 tree tem;
5814 int in_p;
5815 tree low, high;
5816 int lowequal = ((low0 == 0 && low1 == 0)
5817 || integer_onep (range_binop (code: EQ_EXPR, integer_type_node,
5818 arg0: low0, upper0_p: 0, arg1: low1, upper1_p: 0)));
5819 int highequal = ((high0 == 0 && high1 == 0)
5820 || integer_onep (range_binop (code: EQ_EXPR, integer_type_node,
5821 arg0: high0, upper0_p: 1, arg1: high1, upper1_p: 1)));
5822
5823 /* Make range 0 be the range that starts first, or ends last if they
5824 start at the same value. Swap them if it isn't. */
5825 if (integer_onep (range_binop (code: GT_EXPR, integer_type_node,
5826 arg0: low0, upper0_p: 0, arg1: low1, upper1_p: 0))
5827 || (lowequal
5828 && integer_onep (range_binop (code: GT_EXPR, integer_type_node,
5829 arg0: high1, upper0_p: 1, arg1: high0, upper1_p: 1))))
5830 {
5831 temp = in0_p, in0_p = in1_p, in1_p = temp;
5832 tem = low0, low0 = low1, low1 = tem;
5833 tem = high0, high0 = high1, high1 = tem;
5834 }
5835
5836 /* If the second range is != high1 where high1 is the maximum value
5837 of its type, first try merging with the < high1 range. */
5838 if (low1
5839 && high1
5840 && TREE_CODE (low1) == INTEGER_CST
5841 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5842 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5843 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5844 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5845 && operand_equal_p (arg0: low1, arg1: high1, flags: 0))
5846 {
5847 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5848 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5849 in1_p: !in1_p, NULL_TREE, high1: range_predecessor (val: low1)))
5850 return true;
5851 /* Similarly, if the second range is != low1 where low1 is the minimum
5852 value of its type, first try merging with the > low1 range. */
5853 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5854 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5855 in1_p: !in1_p, low1: range_successor (val: low1), NULL_TREE))
5856 return true;
5857 }
5858
5859 /* Now flag two cases, whether the ranges are disjoint or whether the
5860 second range is totally subsumed in the first. Note that the tests
5861 below are simplified by the ones above. */
5862 no_overlap = integer_onep (range_binop (code: LT_EXPR, integer_type_node,
5863 arg0: high0, upper0_p: 1, arg1: low1, upper1_p: 0));
5864 subset = integer_onep (range_binop (code: LE_EXPR, integer_type_node,
5865 arg0: high1, upper0_p: 1, arg1: high0, upper1_p: 1));
5866
5867 /* We now have four cases, depending on whether we are including or
5868 excluding the two ranges. */
5869 if (in0_p && in1_p)
5870 {
5871 /* If they don't overlap, the result is false. If the second range
5872 is a subset it is the result. Otherwise, the range is from the start
5873 of the second to the end of the first. */
5874 if (no_overlap)
5875 in_p = 0, low = high = 0;
5876 else if (subset)
5877 in_p = 1, low = low1, high = high1;
5878 else
5879 in_p = 1, low = low1, high = high0;
5880 }
5881
5882 else if (in0_p && ! in1_p)
5883 {
5884 /* If they don't overlap, the result is the first range. If they are
5885 equal, the result is false. If the second range is a subset of the
5886 first, and the ranges begin at the same place, we go from just after
5887 the end of the second range to the end of the first. If the second
5888 range is not a subset of the first, or if it is a subset and both
5889 ranges end at the same place, the range starts at the start of the
5890 first range and ends just before the second range.
5891 Otherwise, we can't describe this as a single range. */
5892 if (no_overlap)
5893 in_p = 1, low = low0, high = high0;
5894 else if (lowequal && highequal)
5895 in_p = 0, low = high = 0;
5896 else if (subset && lowequal)
5897 {
5898 low = range_successor (val: high1);
5899 high = high0;
5900 in_p = 1;
5901 if (low == 0)
5902 {
5903 /* We are in the weird situation where high0 > high1 but
5904 high1 has no successor. Punt. */
5905 return 0;
5906 }
5907 }
5908 else if (! subset || highequal)
5909 {
5910 low = low0;
5911 high = range_predecessor (val: low1);
5912 in_p = 1;
5913 if (high == 0)
5914 {
5915 /* low0 < low1 but low1 has no predecessor. Punt. */
5916 return 0;
5917 }
5918 }
5919 else
5920 return 0;
5921 }
5922
5923 else if (! in0_p && in1_p)
5924 {
5925 /* If they don't overlap, the result is the second range. If the second
5926 is a subset of the first, the result is false. Otherwise,
5927 the range starts just after the first range and ends at the
5928 end of the second. */
5929 if (no_overlap)
5930 in_p = 1, low = low1, high = high1;
5931 else if (subset || highequal)
5932 in_p = 0, low = high = 0;
5933 else
5934 {
5935 low = range_successor (val: high0);
5936 high = high1;
5937 in_p = 1;
5938 if (low == 0)
5939 {
5940 /* high1 > high0 but high0 has no successor. Punt. */
5941 return 0;
5942 }
5943 }
5944 }
5945
5946 else
5947 {
5948 /* The case where we are excluding both ranges. Here the complex case
5949 is if they don't overlap. In that case, the only time we have a
5950 range is if they are adjacent. If the second is a subset of the
5951 first, the result is the first. Otherwise, the range to exclude
5952 starts at the beginning of the first range and ends at the end of the
5953 second. */
5954 if (no_overlap)
5955 {
5956 if (integer_onep (range_binop (code: EQ_EXPR, integer_type_node,
5957 arg0: range_successor (val: high0),
5958 upper0_p: 1, arg1: low1, upper1_p: 0)))
5959 in_p = 0, low = low0, high = high1;
5960 else
5961 {
5962 /* Canonicalize - [min, x] into - [-, x]. */
5963 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5964 switch (TREE_CODE (TREE_TYPE (low0)))
5965 {
5966 case ENUMERAL_TYPE:
5967 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5968 b: GET_MODE_BITSIZE
5969 (TYPE_MODE (TREE_TYPE (low0)))))
5970 break;
5971 /* FALLTHROUGH */
5972 case INTEGER_TYPE:
5973 if (tree_int_cst_equal (low0,
5974 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5975 low0 = 0;
5976 break;
5977 case POINTER_TYPE:
5978 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5979 && integer_zerop (low0))
5980 low0 = 0;
5981 break;
5982 default:
5983 break;
5984 }
5985
5986 /* Canonicalize - [x, max] into - [x, -]. */
5987 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5988 switch (TREE_CODE (TREE_TYPE (high1)))
5989 {
5990 case ENUMERAL_TYPE:
5991 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5992 b: GET_MODE_BITSIZE
5993 (TYPE_MODE (TREE_TYPE (high1)))))
5994 break;
5995 /* FALLTHROUGH */
5996 case INTEGER_TYPE:
5997 if (tree_int_cst_equal (high1,
5998 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5999 high1 = 0;
6000 break;
6001 case POINTER_TYPE:
6002 if (TYPE_UNSIGNED (TREE_TYPE (high1))
6003 && integer_zerop (range_binop (code: PLUS_EXPR, NULL_TREE,
6004 arg0: high1, upper0_p: 1,
6005 arg1: build_int_cst (TREE_TYPE (high1), 1),
6006 upper1_p: 1)))
6007 high1 = 0;
6008 break;
6009 default:
6010 break;
6011 }
6012
6013 /* The ranges might also be adjacent between the maximum and
6014 minimum values of the given type. For
6015 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
6016 return + [x + 1, y - 1]. */
6017 if (low0 == 0 && high1 == 0)
6018 {
6019 low = range_successor (val: high0);
6020 high = range_predecessor (val: low1);
6021 if (low == 0 || high == 0)
6022 return 0;
6023
6024 in_p = 1;
6025 }
6026 else
6027 return 0;
6028 }
6029 }
6030 else if (subset)
6031 in_p = 0, low = low0, high = high0;
6032 else
6033 in_p = 0, low = low0, high = high1;
6034 }
6035
6036 *pin_p = in_p, *plow = low, *phigh = high;
6037 return 1;
6038}
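
/* Two small examples of the merging logic, with + and - marking
   included and excluded ranges as elsewhere in this file: merging
   +[0, 9] with +[5, 15] (both included) yields the intersection
   +[5, 9]; merging -[-, 47] with -[58, -] (both excluded, e.g. from
   c > 47 && c < 58) falls into the "adjacent between the maximum and
   minimum values" case and yields +[48, 57].  */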
6039
6040
6041/* Subroutine of fold, looking inside expressions of the form
6042 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
6043 are the three operands of the COND_EXPR. This function is
6044 being used also to optimize A op B ? C : A, by reversing the
6045 comparison first.
6046
6047 Return a folded expression whose code is not a COND_EXPR
6048 anymore, or NULL_TREE if no folding opportunity is found. */
6049
6050static tree
6051fold_cond_expr_with_comparison (location_t loc, tree type,
6052 enum tree_code comp_code,
6053 tree arg00, tree arg01, tree arg1, tree arg2)
6054{
6055 tree arg1_type = TREE_TYPE (arg1);
6056 tree tem;
6057
6058 STRIP_NOPS (arg1);
6059 STRIP_NOPS (arg2);
6060
6061 /* If we have A op 0 ? A : -A, consider applying the following
6062 transformations:
6063
6064 A == 0? A : -A same as -A
6065 A != 0? A : -A same as A
6066 A >= 0? A : -A same as abs (A)
6067 A > 0? A : -A same as abs (A)
6068 A <= 0? A : -A same as -abs (A)
6069 A < 0? A : -A same as -abs (A)
6070
6071 None of these transformations work for modes with signed
6072 zeros. If A is +/-0, the first two transformations will
6073 change the sign of the result (from +0 to -0, or vice
6074 versa). The last four will fix the sign of the result,
6075 even though the original expressions could be positive or
6076 negative, depending on the sign of A.
6077
6078 Note that all these transformations are correct if A is
6079 NaN, since the two alternatives (A and -A) are also NaNs. */
6080 if (!HONOR_SIGNED_ZEROS (type)
6081 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
6082 ? real_zerop (arg01)
6083 : integer_zerop (arg01))
6084 && ((TREE_CODE (arg2) == NEGATE_EXPR
6085 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, flags: 0))
6086 /* In the case that A is of the form X-Y, '-A' (arg2) may
6087 have already been folded to Y-X, check for that. */
6088 || (TREE_CODE (arg1) == MINUS_EXPR
6089 && TREE_CODE (arg2) == MINUS_EXPR
6090 && operand_equal_p (TREE_OPERAND (arg1, 0),
6091 TREE_OPERAND (arg2, 1), flags: 0)
6092 && operand_equal_p (TREE_OPERAND (arg1, 1),
6093 TREE_OPERAND (arg2, 0), flags: 0))))
6094 switch (comp_code)
6095 {
6096 case EQ_EXPR:
6097 case UNEQ_EXPR:
6098 tem = fold_convert_loc (loc, type: arg1_type, arg: arg1);
6099 return fold_convert_loc (loc, type, arg: negate_expr (t: tem));
6100 case NE_EXPR:
6101 case LTGT_EXPR:
6102 return fold_convert_loc (loc, type, arg: arg1);
6103 case UNGE_EXPR:
6104 case UNGT_EXPR:
6105 if (flag_trapping_math)
6106 break;
6107 /* Fall through. */
6108 case GE_EXPR:
6109 case GT_EXPR:
6110 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6111 break;
6112 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6113 return fold_convert_loc (loc, type, arg: tem);
6114 case UNLE_EXPR:
6115 case UNLT_EXPR:
6116 if (flag_trapping_math)
6117 break;
6118 /* FALLTHRU */
6119 case LE_EXPR:
6120 case LT_EXPR:
6121 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6122 break;
6123 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6124 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
6125 {
6126 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
6127 is not: it invokes UB both in abs and in the negation of it.
6128 So, use ABSU_EXPR instead. */
6129 tree utype = unsigned_type_for (TREE_TYPE (arg1));
6130 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6131 tem = negate_expr (t: tem);
6132 return fold_convert_loc (loc, type, arg: tem);
6133 }
6134 else
6135 {
6136 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6137 return negate_expr (t: fold_convert_loc (loc, type, arg: tem));
6138 }
6139 default:
6140 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6141 break;
6142 }
6143
6144 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6145 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6146 both transformations are correct when A is NaN: A != 0
6147 is then true, and A == 0 is false. */
6148
6149 if (!HONOR_SIGNED_ZEROS (type)
6150 && integer_zerop (arg01) && integer_zerop (arg2))
6151 {
6152 if (comp_code == NE_EXPR)
6153 return fold_convert_loc (loc, type, arg: arg1);
6154 else if (comp_code == EQ_EXPR)
6155 return build_zero_cst (type);
6156 }
6157
6158 /* Try some transformations of A op B ? A : B.
6159
6160 A == B? A : B same as B
6161 A != B? A : B same as A
6162 A >= B? A : B same as max (A, B)
6163 A > B? A : B same as max (B, A)
6164 A <= B? A : B same as min (A, B)
6165 A < B? A : B same as min (B, A)
6166
6167 As above, these transformations don't work in the presence
6168 of signed zeros. For example, if A and B are zeros of
6169 opposite sign, the first two transformations will change
6170 the sign of the result. In the last four, the original
6171 expressions give different results for (A=+0, B=-0) and
6172 (A=-0, B=+0), but the transformed expressions do not.
6173
6174 The first two transformations are correct if either A or B
6175 is a NaN. In the first transformation, the condition will
6176 be false, and B will indeed be chosen. In the case of the
6177 second transformation, the condition A != B will be true,
6178 and A will be chosen.
6179
6180 The conversions to max() and min() are not correct if B is
6181 a number and A is not. The conditions in the original
6182 expressions will be false, so all four give B. The min()
6183 and max() versions would give a NaN instead. */
6184 if (!HONOR_SIGNED_ZEROS (type)
6185 && operand_equal_for_comparison_p (arg0: arg01, arg1: arg2)
6186 /* Avoid these transformations if the COND_EXPR may be used
6187 as an lvalue in the C++ front-end. PR c++/19199. */
6188 && (in_gimple_form
6189 || VECTOR_TYPE_P (type)
6190 || (! lang_GNU_CXX ()
6191 && strcmp (s1: lang_hooks.name, s2: "GNU Objective-C++") != 0)
6192 || ! maybe_lvalue_p (x: arg1)
6193 || ! maybe_lvalue_p (x: arg2)))
6194 {
6195 tree comp_op0 = arg00;
6196 tree comp_op1 = arg01;
6197 tree comp_type = TREE_TYPE (comp_op0);
6198
6199 switch (comp_code)
6200 {
6201 case EQ_EXPR:
6202 return fold_convert_loc (loc, type, arg: arg2);
6203 case NE_EXPR:
6204 return fold_convert_loc (loc, type, arg: arg1);
6205 case LE_EXPR:
6206 case LT_EXPR:
6207 case UNLE_EXPR:
6208 case UNLT_EXPR:
6209 /* In C++ a ?: expression can be an lvalue, so put the
6210 operand which will be used if they are equal first
6211 so that we can convert this back to the
6212 corresponding COND_EXPR. */
6213 if (!HONOR_NANS (arg1))
6214 {
6215 comp_op0 = fold_convert_loc (loc, type: comp_type, arg: comp_op0);
6216 comp_op1 = fold_convert_loc (loc, type: comp_type, arg: comp_op1);
6217 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6218 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6219 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6220 comp_op1, comp_op0);
6221 return fold_convert_loc (loc, type, arg: tem);
6222 }
6223 break;
6224 case GE_EXPR:
6225 case GT_EXPR:
6226 case UNGE_EXPR:
6227 case UNGT_EXPR:
6228 if (!HONOR_NANS (arg1))
6229 {
6230 comp_op0 = fold_convert_loc (loc, type: comp_type, arg: comp_op0);
6231 comp_op1 = fold_convert_loc (loc, type: comp_type, arg: comp_op1);
6232 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6233 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6234 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6235 comp_op1, comp_op0);
6236 return fold_convert_loc (loc, type, arg: tem);
6237 }
6238 break;
6239 case UNEQ_EXPR:
6240 if (!HONOR_NANS (arg1))
6241 return fold_convert_loc (loc, type, arg: arg2);
6242 break;
6243 case LTGT_EXPR:
6244 if (!HONOR_NANS (arg1))
6245 return fold_convert_loc (loc, type, arg: arg1);
6246 break;
6247 default:
6248 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6249 break;
6250 }
6251 }
6252
6253 return NULL_TREE;
6254}
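
/* Typical results of the transformations above, assuming no signed
   zeros or NaNs need to be honored: "x >= 0 ? x : -x" folds to
   ABS_EXPR <x>, "x > y ? x : y" folds to MAX_EXPR <y, x>, and
   "x != 0 ? x : 0" folds to just x.  */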
6255
6256
6257
6258#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6259#define LOGICAL_OP_NON_SHORT_CIRCUIT \
6260 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6261 false) >= 2)
6262#endif
6263
6264 /* OP0 CODE OP1 is some logical combination of boolean tests. See if we
6265 can merge it into some range test. Return the new tree if so. */
6266
6267static tree
6268fold_range_test (location_t loc, enum tree_code code, tree type,
6269 tree op0, tree op1)
6270{
6271 int or_op = (code == TRUTH_ORIF_EXPR
6272 || code == TRUTH_OR_EXPR);
6273 int in0_p, in1_p, in_p;
6274 tree low0, low1, low, high0, high1, high;
6275 bool strict_overflow_p = false;
6276 tree tem, lhs, rhs;
6277 const char * const warnmsg = G_("assuming signed overflow does not occur "
6278 "when simplifying range test");
6279
6280 if (!INTEGRAL_TYPE_P (type))
6281 return 0;
6282
6283 lhs = make_range (exp: op0, pin_p: &in0_p, plow: &low0, phigh: &high0, strict_overflow_p: &strict_overflow_p);
6284 /* If op0 is known true or false and this is a short-circuiting
6285 operation we must not merge with op1 since that makes side-effects
6286 unconditional. So special-case this. */
6287 if (!lhs
6288 && ((code == TRUTH_ORIF_EXPR && in0_p)
6289 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6290 return op0;
6291 rhs = make_range (exp: op1, pin_p: &in1_p, plow: &low1, phigh: &high1, strict_overflow_p: &strict_overflow_p);
6292
6293 /* If this is an OR operation, invert both sides; we will invert
6294 again at the end. */
6295 if (or_op)
6296 in0_p = ! in0_p, in1_p = ! in1_p;
6297
6298 /* If both expressions are the same, if we can merge the ranges, and we
6299 can build the range test, return it or it inverted. If one of the
6300 ranges is always true or always false, consider it to be the same
6301 expression as the other. */
6302 if ((lhs == 0 || rhs == 0 || operand_equal_p (arg0: lhs, arg1: rhs, flags: 0))
6303 && merge_ranges (pin_p: &in_p, plow: &low, phigh: &high, in0_p, low0, high0,
6304 in1_p, low1, high1)
6305 && (tem = (build_range_check (loc, type,
6306 exp: lhs != 0 ? lhs
6307 : rhs != 0 ? rhs : integer_zero_node,
6308 in_p, low, high))) != 0)
6309 {
6310 if (strict_overflow_p)
6311 fold_overflow_warning (gmsgid: warnmsg, wc: WARN_STRICT_OVERFLOW_COMPARISON);
6312 return or_op ? invert_truthvalue_loc (loc, arg: tem) : tem;
6313 }
6314
6315 /* On machines where the branch cost is expensive, if this is a
6316 short-circuited branch and the underlying object on both sides
6317 is the same, make a non-short-circuit operation. */
6318 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6319 if (param_logical_op_non_short_circuit != -1)
6320 logical_op_non_short_circuit
6321 = param_logical_op_non_short_circuit;
6322 if (logical_op_non_short_circuit
6323 && !sanitize_coverage_p ()
6324 && lhs != 0 && rhs != 0
6325 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6326 && operand_equal_p (arg0: lhs, arg1: rhs, flags: 0))
6327 {
6328 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6329 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6330 which cases we can't do this. */
6331 if (simple_operand_p (exp: lhs))
6332 return build2_loc (loc, code: code == TRUTH_ANDIF_EXPR
6333 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6334 type, arg0: op0, arg1: op1);
6335
6336 else if (!lang_hooks.decls.global_bindings_p ()
6337 && !CONTAINS_PLACEHOLDER_P (lhs))
6338 {
6339 tree common = save_expr (lhs);
6340
6341 if ((lhs = build_range_check (loc, type, exp: common,
6342 in_p: or_op ? ! in0_p : in0_p,
6343 low: low0, high: high0)) != 0
6344 && (rhs = build_range_check (loc, type, exp: common,
6345 in_p: or_op ? ! in1_p : in1_p,
6346 low: low1, high: high1)) != 0)
6347 {
6348 if (strict_overflow_p)
6349 fold_overflow_warning (gmsgid: warnmsg,
6350 wc: WARN_STRICT_OVERFLOW_COMPARISON);
6351 return build2_loc (loc, code: code == TRUTH_ANDIF_EXPR
6352 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6353 type, arg0: lhs, arg1: rhs);
6354 }
6355 }
6356 }
6357
6358 return 0;
6359}
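
/* End-to-end sketch of the classic case: for ch >= '0' && ch <= '9'
   the two make_range calls give +[48, -] and +[-, 57], merge_ranges
   intersects them into +[48, 57], and build_range_check then emits
   roughly (unsigned) ch - 48 <= 9.  */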
6360
6361/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6362 bit value. Arrange things so the extra bits will be set to zero if and
6363 only if C is sign-extended to its full width. If MASK is nonzero,
6364 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6365
6366static tree
6367unextend (tree c, int p, int unsignedp, tree mask)
6368{
6369 tree type = TREE_TYPE (c);
6370 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6371 tree temp;
6372
6373 if (p == modesize || unsignedp)
6374 return c;
6375
6376 /* We work by getting just the sign bit into the low-order bit, then
6377 into the high-order bit, then sign-extend. We then XOR that value
6378 with C. */
6379 temp = build_int_cst (TREE_TYPE (c),
6380 wi::extract_uhwi (x: wi::to_wide (t: c), bitpos: p - 1, width: 1));
6381
6382 /* We must use a signed type in order to get an arithmetic right shift.
6383 However, we must also avoid introducing accidental overflows, so that
6384 a subsequent call to integer_zerop will work. Hence we must
6385 do the type conversion here. At this point, the constant is either
6386 zero or one, and the conversion to a signed type can never overflow.
6387 We could get an overflow if this conversion is done anywhere else. */
6388 if (TYPE_UNSIGNED (type))
6389 temp = fold_convert (signed_type_for (type), temp);
6390
6391 temp = const_binop (code: LSHIFT_EXPR, arg1: temp, size_int (modesize - 1));
6392 temp = const_binop (code: RSHIFT_EXPR, arg1: temp, size_int (modesize - p - 1));
6393 if (mask != 0)
6394 temp = const_binop (code: BIT_AND_EXPR, arg1: temp,
6395 fold_convert (TREE_TYPE (c), mask));
6396 /* If necessary, convert the type back to match the type of C. */
6397 if (TYPE_UNSIGNED (type))
6398 temp = fold_convert (type, temp);
6399
6400 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6401}
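
/* Worked example, assuming a 32-bit mode, P == 8, UNSIGNEDP clear and
   no MASK: the extracted sign bit of C is shifted up to bit 31 and
   arithmetically back down to bit 8, giving 0xffffff00 when bit 7 of C
   is set.  XORing that with C maps a zero-extended 0x000000ff to
   0xffffffff and an already sign-extended 0xffffffff to 0x000000ff, so
   the extra bits end up zero exactly when C was sign-extended to its
   full width.  */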
6402
6403/* For an expression that has the form
6404 (A && B) || ~B
6405 or
6406 (A || B) && ~B,
6407 we can drop one of the inner expressions and simplify to
6408 A || ~B
6409 or
6410 A && ~B
6411 LOC is the location of the resulting expression. OP is the inner
6412 logical operation; the left-hand side in the examples above, while CMPOP
6413 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6414 removing a condition that guards another, as in
6415 (A != NULL && A->...) || A == NULL
6416 which we must not transform. If RHS_ONLY is true, only eliminate the
6417 right-most operand of the inner logical operation. */
6418
6419static tree
6420merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6421 bool rhs_only)
6422{
6423 enum tree_code code = TREE_CODE (cmpop);
6424 enum tree_code truthop_code = TREE_CODE (op);
6425 tree lhs = TREE_OPERAND (op, 0);
6426 tree rhs = TREE_OPERAND (op, 1);
6427 tree orig_lhs = lhs, orig_rhs = rhs;
6428 enum tree_code rhs_code = TREE_CODE (rhs);
6429 enum tree_code lhs_code = TREE_CODE (lhs);
6430 enum tree_code inv_code;
6431
6432 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6433 return NULL_TREE;
6434
6435 if (TREE_CODE_CLASS (code) != tcc_comparison)
6436 return NULL_TREE;
6437
6438 tree type = TREE_TYPE (TREE_OPERAND (cmpop, 0));
6439
6440 if (rhs_code == truthop_code)
6441 {
6442 tree newrhs = merge_truthop_with_opposite_arm (loc, op: rhs, cmpop, rhs_only);
6443 if (newrhs != NULL_TREE)
6444 {
6445 rhs = newrhs;
6446 rhs_code = TREE_CODE (rhs);
6447 }
6448 }
6449 if (lhs_code == truthop_code && !rhs_only)
6450 {
6451 tree newlhs = merge_truthop_with_opposite_arm (loc, op: lhs, cmpop, rhs_only: false);
6452 if (newlhs != NULL_TREE)
6453 {
6454 lhs = newlhs;
6455 lhs_code = TREE_CODE (lhs);
6456 }
6457 }
6458
6459 inv_code = invert_tree_comparison (code, honor_nans: HONOR_NANS (type));
6460 if (inv_code == rhs_code
6461 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), flags: 0)
6462 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), flags: 0))
6463 return lhs;
6464 if (!rhs_only && inv_code == lhs_code
6465 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), flags: 0)
6466 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), flags: 0))
6467 return rhs;
6468 if (rhs != orig_rhs || lhs != orig_lhs)
6469 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6470 lhs, rhs);
6471 return NULL_TREE;
6472}
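
/* For instance, with OP = (a == 1 && b == 2) and CMPOP = (b != 2),
   CMPOP's inverse b == 2 matches the rhs of OP, so the rhs is dropped
   and a == 1 is returned; the caller can then simplify
   (a == 1 && b == 2) || b != 2 down to a == 1 || b != 2, matching the
   pattern described above.  */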
6473
6474/* Find ways of folding logical expressions of LHS and RHS:
6475 Try to merge two comparisons to the same innermost item.
6476 Look for range tests like "ch >= '0' && ch <= '9'".
6477 Look for combinations of simple terms on machines with expensive branches
6478 and evaluate the RHS unconditionally.
6479
6480 For example, if we have p->a == 2 && p->b == 4 and we can make an
6481 object large enough to span both A and B, we can do this with a comparison
6482 against the object ANDed with the a mask.
6483
6484 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6485 operations to do this with one comparison.
6486
6487 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6488 function and the one above.
6489
6490 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6491 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6492
6493 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
6494 two operands.
6495
6496 We return the simplified tree or 0 if no optimization is possible. */
6497
6498static tree
6499fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6500 tree lhs, tree rhs)
6501{
6502 /* If this is the "or" of two comparisons, we can do something if
6503 the comparisons are NE_EXPR. If this is the "and", we can do something
6504 if the comparisons are EQ_EXPR. I.e.,
6505 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6506
6507 WANTED_CODE is this operation code. For single bit fields, we can
6508 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6509 comparison for one-bit fields. */
6510
6511 enum tree_code wanted_code;
6512 enum tree_code lcode, rcode;
6513 tree ll_arg, lr_arg, rl_arg, rr_arg;
6514 tree ll_inner, lr_inner, rl_inner, rr_inner;
6515 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6516 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6517 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6518 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6519 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6520 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6521 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6522 scalar_int_mode lnmode, rnmode;
6523 tree ll_mask, lr_mask, rl_mask, rr_mask;
6524 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6525 tree l_const, r_const;
6526 tree lntype, rntype, result;
6527 HOST_WIDE_INT first_bit, end_bit;
6528 int volatilep;
6529
6530 /* Start by getting the comparison codes. Fail if anything is volatile.
6531 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6532 it were surrounded with a NE_EXPR. */
6533
6534 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6535 return 0;
6536
6537 lcode = TREE_CODE (lhs);
6538 rcode = TREE_CODE (rhs);
6539
6540 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6541 {
6542 lhs = build2 (NE_EXPR, truth_type, lhs,
6543 build_int_cst (TREE_TYPE (lhs), 0));
6544 lcode = NE_EXPR;
6545 }
6546
6547 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6548 {
6549 rhs = build2 (NE_EXPR, truth_type, rhs,
6550 build_int_cst (TREE_TYPE (rhs), 0));
6551 rcode = NE_EXPR;
6552 }
6553
6554 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6555 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6556 return 0;
6557
6558 ll_arg = TREE_OPERAND (lhs, 0);
6559 lr_arg = TREE_OPERAND (lhs, 1);
6560 rl_arg = TREE_OPERAND (rhs, 0);
6561 rr_arg = TREE_OPERAND (rhs, 1);
6562
6563 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6564 if (simple_operand_p (exp: ll_arg)
6565 && simple_operand_p (exp: lr_arg))
6566 {
6567 if (operand_equal_p (arg0: ll_arg, arg1: rl_arg, flags: 0)
6568 && operand_equal_p (arg0: lr_arg, arg1: rr_arg, flags: 0))
6569 {
6570 result = combine_comparisons (loc, code, lcode, rcode,
6571 truth_type, ll_arg, lr_arg);
6572 if (result)
6573 return result;
6574 }
6575 else if (operand_equal_p (arg0: ll_arg, arg1: rr_arg, flags: 0)
6576 && operand_equal_p (arg0: lr_arg, arg1: rl_arg, flags: 0))
6577 {
6578 result = combine_comparisons (loc, code, lcode,
6579 rcode: swap_tree_comparison (code: rcode),
6580 truth_type, ll_arg, lr_arg);
6581 if (result)
6582 return result;
6583 }
6584 }
6585
6586 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6587 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6588
6589 /* If the RHS can be evaluated unconditionally and its operands are
6590 simple, it wins to evaluate the RHS unconditionally on machines
6591 with expensive branches. In this case, this isn't a comparison
6592 that can be merged. */
6593
6594 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6595 false) >= 2
6596 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6597 && simple_operand_p (exp: rl_arg)
6598 && simple_operand_p (exp: rr_arg))
6599 {
6600 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6601 if (code == TRUTH_OR_EXPR
6602 && lcode == NE_EXPR && integer_zerop (lr_arg)
6603 && rcode == NE_EXPR && integer_zerop (rr_arg)
6604 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6605 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6606 return build2_loc (loc, code: NE_EXPR, type: truth_type,
6607 arg0: build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6608 ll_arg, rl_arg),
6609 arg1: build_int_cst (TREE_TYPE (ll_arg), 0));
6610
6611 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6612 if (code == TRUTH_AND_EXPR
6613 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6614 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6615 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6616 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6617 return build2_loc (loc, code: EQ_EXPR, type: truth_type,
6618 arg0: build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6619 ll_arg, rl_arg),
6620 arg1: build_int_cst (TREE_TYPE (ll_arg), 0));
6621 }
6622
6623 /* See if the comparisons can be merged. Then get all the parameters for
6624 each side. */
6625
6626 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6627 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6628 return 0;
6629
6630 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6631 volatilep = 0;
6632 ll_inner = decode_field_reference (loc, exp_: &ll_arg,
6633 pbitsize: &ll_bitsize, pbitpos: &ll_bitpos, pmode: &ll_mode,
6634 punsignedp: &ll_unsignedp, preversep: &ll_reversep, pvolatilep: &volatilep,
6635 pmask: &ll_mask, pand_mask: &ll_and_mask);
6636 lr_inner = decode_field_reference (loc, exp_: &lr_arg,
6637 pbitsize: &lr_bitsize, pbitpos: &lr_bitpos, pmode: &lr_mode,
6638 punsignedp: &lr_unsignedp, preversep: &lr_reversep, pvolatilep: &volatilep,
6639 pmask: &lr_mask, pand_mask: &lr_and_mask);
6640 rl_inner = decode_field_reference (loc, exp_: &rl_arg,
6641 pbitsize: &rl_bitsize, pbitpos: &rl_bitpos, pmode: &rl_mode,
6642 punsignedp: &rl_unsignedp, preversep: &rl_reversep, pvolatilep: &volatilep,
6643 pmask: &rl_mask, pand_mask: &rl_and_mask);
6644 rr_inner = decode_field_reference (loc, exp_: &rr_arg,
6645 pbitsize: &rr_bitsize, pbitpos: &rr_bitpos, pmode: &rr_mode,
6646 punsignedp: &rr_unsignedp, preversep: &rr_reversep, pvolatilep: &volatilep,
6647 pmask: &rr_mask, pand_mask: &rr_and_mask);
6648
6649 /* The inner operation on the lhs of each comparison must be the
6650 same if we are to be able to do anything.
6651 Then see if we have constants. If not, the same must be true for
6652 the rhs's. */
6653 if (volatilep
6654 || ll_reversep != rl_reversep
6655 || ll_inner == 0 || rl_inner == 0
6656 || ! operand_equal_p (arg0: ll_inner, arg1: rl_inner, flags: 0))
6657 return 0;
6658
6659 if (TREE_CODE (lr_arg) == INTEGER_CST
6660 && TREE_CODE (rr_arg) == INTEGER_CST)
6661 {
6662 l_const = lr_arg, r_const = rr_arg;
6663 lr_reversep = ll_reversep;
6664 }
6665 else if (lr_reversep != rr_reversep
6666 || lr_inner == 0 || rr_inner == 0
6667 || ! operand_equal_p (arg0: lr_inner, arg1: rr_inner, flags: 0))
6668 return 0;
6669 else
6670 l_const = r_const = 0;
6671
6672 /* If either comparison code is not correct for our logical operation,
6673 fail. However, we can convert a one-bit comparison against zero into
6674 the opposite comparison against that bit being set in the field. */
6675
6676 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6677 if (lcode != wanted_code)
6678 {
6679 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6680 {
6681 /* Make the left operand unsigned, since we are only interested
6682 in the value of one bit. Otherwise we are doing the wrong
6683 thing below. */
6684 ll_unsignedp = 1;
6685 l_const = ll_mask;
6686 }
6687 else
6688 return 0;
6689 }
6690
6691 /* This is analogous to the code for l_const above. */
6692 if (rcode != wanted_code)
6693 {
6694 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6695 {
6696 rl_unsignedp = 1;
6697 r_const = rl_mask;
6698 }
6699 else
6700 return 0;
6701 }
6702
6703 /* See if we can find a mode that contains both fields being compared on
6704 the left. If we can't, fail. Otherwise, update all constants and masks
6705 to be relative to a field of that size. */
6706 first_bit = MIN (ll_bitpos, rl_bitpos);
6707 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6708 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6709 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6710 volatilep, &lnmode))
6711 return 0;
6712
6713 lnbitsize = GET_MODE_BITSIZE (mode: lnmode);
6714 lnbitpos = first_bit & ~ (lnbitsize - 1);
6715 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6716 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6717
6718 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6719 {
6720 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6721 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6722 }
6723
6724 ll_mask = const_binop (code: LSHIFT_EXPR, arg1: fold_convert_loc (loc, type: lntype, arg: ll_mask),
6725 size_int (xll_bitpos));
6726 rl_mask = const_binop (code: LSHIFT_EXPR, arg1: fold_convert_loc (loc, type: lntype, arg: rl_mask),
6727 size_int (xrl_bitpos));
6728 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6729 return 0;
6730
6731 if (l_const)
6732 {
6733 l_const = fold_convert_loc (loc, type: lntype, arg: l_const);
6734 l_const = unextend (c: l_const, p: ll_bitsize, unsignedp: ll_unsignedp, mask: ll_and_mask);
6735 l_const = const_binop (code: LSHIFT_EXPR, arg1: l_const, size_int (xll_bitpos));
6736 if (l_const == NULL_TREE)
6737 return 0;
6738 if (! integer_zerop (const_binop (code: BIT_AND_EXPR, arg1: l_const,
6739 arg2: fold_build1_loc (loc, BIT_NOT_EXPR,
6740 lntype, ll_mask))))
6741 {
6742 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6743
6744 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6745 }
6746 }
6747 if (r_const)
6748 {
6749 r_const = fold_convert_loc (loc, type: lntype, arg: r_const);
6750 r_const = unextend (c: r_const, p: rl_bitsize, unsignedp: rl_unsignedp, mask: rl_and_mask);
6751 r_const = const_binop (code: LSHIFT_EXPR, arg1: r_const, size_int (xrl_bitpos));
6752 if (r_const == NULL_TREE)
6753 return 0;
6754 if (! integer_zerop (const_binop (code: BIT_AND_EXPR, arg1: r_const,
6755 arg2: fold_build1_loc (loc, BIT_NOT_EXPR,
6756 lntype, rl_mask))))
6757 {
6758 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6759
6760 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6761 }
6762 }
6763
6764 /* If the right sides are not constant, do the same for them. Also,
6765 disallow this optimization if a size, signedness or storage order
6766 mismatch occurs between the left and right sides. */
6767 if (l_const == 0)
6768 {
6769 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6770 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6771 || ll_reversep != lr_reversep
6772 /* Make sure the two fields on the right
6773 correspond to the left without being swapped. */
6774 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6775 return 0;
6776
6777 first_bit = MIN (lr_bitpos, rr_bitpos);
6778 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6779 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6780 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6781 volatilep, &rnmode))
6782 return 0;
6783
6784 rnbitsize = GET_MODE_BITSIZE (mode: rnmode);
6785 rnbitpos = first_bit & ~ (rnbitsize - 1);
6786 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6787 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6788
6789 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6790 {
6791 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6792 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6793 }
6794
6795 lr_mask = const_binop (code: LSHIFT_EXPR, arg1: fold_convert_loc (loc,
6796 type: rntype, arg: lr_mask),
6797 size_int (xlr_bitpos));
6798 rr_mask = const_binop (code: LSHIFT_EXPR, arg1: fold_convert_loc (loc,
6799 type: rntype, arg: rr_mask),
6800 size_int (xrr_bitpos));
6801 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6802 return 0;
6803
6804 /* Make a mask that corresponds to both fields being compared.
6805 Do this for both items being compared. If the operands are the
6806 same size and the bits being compared are in the same position
6807 then we can do this by masking both and comparing the masked
6808 results. */
6809 ll_mask = const_binop (code: BIT_IOR_EXPR, arg1: ll_mask, arg2: rl_mask);
6810 lr_mask = const_binop (code: BIT_IOR_EXPR, arg1: lr_mask, arg2: rr_mask);
6811 if (lnbitsize == rnbitsize
6812 && xll_bitpos == xlr_bitpos
6813 && lnbitpos >= 0
6814 && rnbitpos >= 0)
6815 {
6816 lhs = make_bit_field_ref (loc, inner: ll_inner, orig_inner: ll_arg,
6817 type: lntype, bitsize: lnbitsize, bitpos: lnbitpos,
6818 unsignedp: ll_unsignedp || rl_unsignedp, reversep: ll_reversep);
6819 if (! all_ones_mask_p (mask: ll_mask, size: lnbitsize))
6820 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6821
6822 rhs = make_bit_field_ref (loc, inner: lr_inner, orig_inner: lr_arg,
6823 type: rntype, bitsize: rnbitsize, bitpos: rnbitpos,
6824 unsignedp: lr_unsignedp || rr_unsignedp, reversep: lr_reversep);
6825 if (! all_ones_mask_p (mask: lr_mask, size: rnbitsize))
6826 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6827
6828 return build2_loc (loc, code: wanted_code, type: truth_type, arg0: lhs, arg1: rhs);
6829 }
6830
6831 /* There is still another way we can do something: If both pairs of
6832 fields being compared are adjacent, we may be able to make a wider
6833 field containing them both.
6834
6835 Note that we still must mask the lhs/rhs expressions. Furthermore,
6836 the mask must be shifted to account for the shift done by
6837 make_bit_field_ref. */
6838 if (((ll_bitsize + ll_bitpos == rl_bitpos
6839 && lr_bitsize + lr_bitpos == rr_bitpos)
6840 || (ll_bitpos == rl_bitpos + rl_bitsize
6841 && lr_bitpos == rr_bitpos + rr_bitsize))
6842 && ll_bitpos >= 0
6843 && rl_bitpos >= 0
6844 && lr_bitpos >= 0
6845 && rr_bitpos >= 0)
6846 {
6847 tree type;
6848
6849 lhs = make_bit_field_ref (loc, inner: ll_inner, orig_inner: ll_arg, type: lntype,
6850 bitsize: ll_bitsize + rl_bitsize,
6851 MIN (ll_bitpos, rl_bitpos),
6852 unsignedp: ll_unsignedp, reversep: ll_reversep);
6853 rhs = make_bit_field_ref (loc, inner: lr_inner, orig_inner: lr_arg, type: rntype,
6854 bitsize: lr_bitsize + rr_bitsize,
6855 MIN (lr_bitpos, rr_bitpos),
6856 unsignedp: lr_unsignedp, reversep: lr_reversep);
6857
6858 ll_mask = const_binop (code: RSHIFT_EXPR, arg1: ll_mask,
6859 size_int (MIN (xll_bitpos, xrl_bitpos)));
6860 lr_mask = const_binop (code: RSHIFT_EXPR, arg1: lr_mask,
6861 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6862 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6863 return 0;
6864
6865 /* Convert to the smaller type before masking out unwanted bits. */
6866 type = lntype;
6867 if (lntype != rntype)
6868 {
6869 if (lnbitsize > rnbitsize)
6870 {
6871 lhs = fold_convert_loc (loc, type: rntype, arg: lhs);
6872 ll_mask = fold_convert_loc (loc, type: rntype, arg: ll_mask);
6873 type = rntype;
6874 }
6875 else if (lnbitsize < rnbitsize)
6876 {
6877 rhs = fold_convert_loc (loc, type: lntype, arg: rhs);
6878 lr_mask = fold_convert_loc (loc, type: lntype, arg: lr_mask);
6879 type = lntype;
6880 }
6881 }
6882
6883 if (! all_ones_mask_p (mask: ll_mask, size: ll_bitsize + rl_bitsize))
6884 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6885
6886 if (! all_ones_mask_p (mask: lr_mask, size: lr_bitsize + rr_bitsize))
6887 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6888
6889 return build2_loc (loc, code: wanted_code, type: truth_type, arg0: lhs, arg1: rhs);
6890 }
6891
6892 return 0;
6893 }
6894
6895 /* Handle the case of comparisons with constants. If there is something in
6896 common between the masks, those bits of the constants must be the same.
6897 If not, the condition is always false. Test for this to avoid generating
6898 incorrect code below. */
6899 result = const_binop (code: BIT_AND_EXPR, arg1: ll_mask, arg2: rl_mask);
6900 if (! integer_zerop (result)
6901 && simple_cst_equal (const_binop (code: BIT_AND_EXPR, arg1: result, arg2: l_const),
6902 const_binop (code: BIT_AND_EXPR, arg1: result, arg2: r_const)) != 1)
6903 {
6904 if (wanted_code == NE_EXPR)
6905 {
6906 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6907 return constant_boolean_node (true, truth_type);
6908 }
6909 else
6910 {
6911 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6912 return constant_boolean_node (false, truth_type);
6913 }
6914 }
6915
6916 if (lnbitpos < 0)
6917 return 0;
6918
6919 /* Construct the expression we will return. First get the component
6920 reference we will make. Unless the mask is all ones the width of
6921 that field, perform the mask operation. Then compare with the
6922 merged constant. */
6923 result = make_bit_field_ref (loc, inner: ll_inner, orig_inner: ll_arg,
6924 type: lntype, bitsize: lnbitsize, bitpos: lnbitpos,
6925 unsignedp: ll_unsignedp || rl_unsignedp, reversep: ll_reversep);
6926
6927 ll_mask = const_binop (code: BIT_IOR_EXPR, arg1: ll_mask, arg2: rl_mask);
6928 if (! all_ones_mask_p (mask: ll_mask, size: lnbitsize))
6929 result = build2_loc (loc, code: BIT_AND_EXPR, type: lntype, arg0: result, arg1: ll_mask);
6930
6931 return build2_loc (loc, code: wanted_code, type: truth_type, arg0: result,
6932 arg1: const_binop (code: BIT_IOR_EXPR, arg1: l_const, arg2: r_const));
6933}
6934
6935/* T is an integer expression that is being multiplied by, divided by, or
6936 reduced modulo a constant C (CODE says which operation and what kind of
6937 division or modulus). See if we can eliminate that operation by folding
6938 it with other operations already in T. WIDE_TYPE, if non-null, is a type that
6939 should be used for the computation if wider than our type.
6940
6941 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6942 (X * 2) + (Y * 4). We must, however, be assured that either the original
6943 expression would not overflow or that overflow is undefined for the type
6944 in the language in question.
6945
6946 If we return a non-null expression, it is an equivalent form of the
6947 original computation, but need not be in the original type.
6948
6949 We set *STRICT_OVERFLOW_P to true if the return value depends on
6950 signed overflow being undefined. Otherwise we do not change
6951 *STRICT_OVERFLOW_P. */
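/* For example, for a signed type where overflow is undefined, (X * 4) / 4
   can be simplified to X by the code below; because that relies on the
   multiplication not overflowing, *STRICT_OVERFLOW_P is set to true.  */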
6952
6953static tree
6954extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6955 bool *strict_overflow_p)
6956{
6957 /* To avoid exponential search depth, refuse to allow recursion past
6958 three levels. Beyond that (1) it's highly unlikely that we'll find
6959 something interesting and (2) we've probably processed it before
6960 when we built the inner expression. */
6961
6962 static int depth;
6963 tree ret;
6964
6965 if (depth > 3)
6966 return NULL;
6967
6968 depth++;
6969 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6970 depth--;
6971
6972 return ret;
6973}
6974
6975static tree
6976extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6977 bool *strict_overflow_p)
6978{
6979 tree type = TREE_TYPE (t);
6980 enum tree_code tcode = TREE_CODE (t);
6981 tree ctype = type;
6982 if (wide_type)
6983 {
6984 if (TREE_CODE (type) == BITINT_TYPE
6985 || TREE_CODE (wide_type) == BITINT_TYPE)
6986 {
6987 if (TYPE_PRECISION (wide_type) > TYPE_PRECISION (type))
6988 ctype = wide_type;
6989 }
6990 else if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6991 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6992 ctype = wide_type;
6993 }
6994 tree t1, t2;
6995 bool same_p = tcode == code;
6996 tree op0 = NULL_TREE, op1 = NULL_TREE;
6997 bool sub_strict_overflow_p;
6998
6999 /* Don't deal with constants of zero here; they confuse the code below. */
7000 if (integer_zerop (c))
7001 return NULL_TREE;
7002
7003 if (TREE_CODE_CLASS (tcode) == tcc_unary)
7004 op0 = TREE_OPERAND (t, 0);
7005
7006 if (TREE_CODE_CLASS (tcode) == tcc_binary)
7007 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
7008
7009 /* Note that we need not handle conditional operations here since fold
7010 already handles those cases. So just do arithmetic here. */
7011 switch (tcode)
7012 {
7013 case INTEGER_CST:
7014 /* For a constant, we can always simplify if we are a multiply
7015 or (for divide and modulus) if it is a multiple of our constant. */
7016 if (code == MULT_EXPR
7017 || wi::multiple_of_p (x: wi::to_wide (t), y: wi::to_wide (t: c),
7018 TYPE_SIGN (type)))
7019 {
7020 tree tem = const_binop (code, fold_convert (ctype, t),
7021 fold_convert (ctype, c));
7022 /* If the multiplication overflowed, we lost information on it.
7023 See PR68142 and PR69845. */
7024 if (TREE_OVERFLOW (tem))
7025 return NULL_TREE;
7026 return tem;
7027 }
7028 break;
7029
7030 CASE_CONVERT: case NON_LVALUE_EXPR:
7031 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
7032 break;
7033 /* If op0 is an expression ... */
7034 if ((COMPARISON_CLASS_P (op0)
7035 || UNARY_CLASS_P (op0)
7036 || BINARY_CLASS_P (op0)
7037 || VL_EXP_CLASS_P (op0)
7038 || EXPRESSION_CLASS_P (op0))
7039 /* ... and has wrapping overflow, and its type is smaller
7040 than ctype, then we cannot pass through as widening. */
7041 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
7042 && (TYPE_PRECISION (ctype)
7043 > TYPE_PRECISION (TREE_TYPE (op0))))
7044 /* ... or this is a truncation (t is narrower than op0),
7045 then we cannot pass through this narrowing. */
7046 || (TYPE_PRECISION (type)
7047 < TYPE_PRECISION (TREE_TYPE (op0)))
7048 /* ... or signedness changes for division or modulus,
7049 then we cannot pass through this conversion. */
7050 || (code != MULT_EXPR
7051 && (TYPE_UNSIGNED (ctype)
7052 != TYPE_UNSIGNED (TREE_TYPE (op0))))
7053 /* ... or has undefined overflow while the converted to
7054 type has not, we cannot do the operation in the inner type
7055 as that would introduce undefined overflow. */
7056 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
7057 && !TYPE_OVERFLOW_UNDEFINED (type))))
7058 break;
7059
7060 /* Pass the constant down and see if we can make a simplification. If
7061 we can, replace this expression with the inner simplification for
7062 possible later conversion to our or some other type. */
7063 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
7064 && TREE_CODE (t2) == INTEGER_CST
7065 && !TREE_OVERFLOW (t2)
7066 && (t1 = extract_muldiv (t: op0, c: t2, code,
7067 wide_type: code == MULT_EXPR ? ctype : NULL_TREE,
7068 strict_overflow_p)) != 0)
7069 return t1;
7070 break;
7071
7072 case ABS_EXPR:
7073 /* If widening the type changes it from signed to unsigned, then we
7074 must avoid building ABS_EXPR itself as unsigned. */
7075 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
7076 {
7077 tree cstype = (*signed_type_for) (ctype);
7078 if ((t1 = extract_muldiv (t: op0, c, code, wide_type: cstype, strict_overflow_p))
7079 != 0)
7080 {
7081 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
7082 return fold_convert (ctype, t1);
7083 }
7084 break;
7085 }
7086 /* If the constant is negative, we cannot simplify this. */
7087 if (tree_int_cst_sgn (c) == -1)
7088 break;
7089 /* FALLTHROUGH */
7090 case NEGATE_EXPR:
7091 /* For division and modulus, type can't be unsigned, as e.g.
7092 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
7093 For signed types, even with wrapping overflow, this is fine. */
7094 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
7095 break;
7096 if ((t1 = extract_muldiv (t: op0, c, code, wide_type, strict_overflow_p))
7097 != 0)
7098 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
7099 break;
7100
7101 case MIN_EXPR: case MAX_EXPR:
7102 /* If widening the type changes the signedness, then we can't perform
7103 this optimization as that changes the result. */
7104 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
7105 break;
7106
7107 /* Punt for multiplication altogether.
7108 MAX (1U + INT_MAX, 1U) * 2U is not equivalent to
7109 MAX ((1U + INT_MAX) * 2U, 1U * 2U), the former is
7110 0U, the latter is 2U.
7111 MAX (INT_MIN / 2, 0) * -2 is not equivalent to
7112 MIN (INT_MIN / 2 * -2, 0 * -2), the former is
7113 well defined 0, the latter invokes UB.
7114 MAX (INT_MIN / 2, 5) * 5 is not equivalent to
7115 MAX (INT_MIN / 2 * 5, 5 * 5), the former is
7116 well defined 25, the latter invokes UB. */
7117 if (code == MULT_EXPR)
7118 break;
7119 /* For division/modulo, punt on c being -1 for MAX, as
7120 MAX (INT_MIN, 0) / -1 is not equivalent to
7121 MIN (INT_MIN / -1, 0 / -1), the former is well defined
7122 0, the latter invokes UB (or for -fwrapv is INT_MIN).
7123 MIN (INT_MIN, 0) / -1 already invokes UB, so the
7124 transformation won't make it worse. */
7125 else if (tcode == MAX_EXPR && integer_minus_onep (c))
7126 break;
7127
7128 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
7129 sub_strict_overflow_p = false;
7130 if ((t1 = extract_muldiv (t: op0, c, code, wide_type,
7131 strict_overflow_p: &sub_strict_overflow_p)) != 0
7132 && (t2 = extract_muldiv (t: op1, c, code, wide_type,
7133 strict_overflow_p: &sub_strict_overflow_p)) != 0)
7134 {
7135 if (tree_int_cst_sgn (c) < 0)
7136 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
7137 if (sub_strict_overflow_p)
7138 *strict_overflow_p = true;
7139 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7140 fold_convert (ctype, t2));
7141 }
7142 break;
7143
7144 case LSHIFT_EXPR: case RSHIFT_EXPR:
7145 /* If the second operand is constant, this is a multiplication
7146 or floor division, by a power of two, so we can treat it that
7147 way unless the multiplier or divisor overflows. Signed
7148 left-shift overflow is implementation-defined rather than
7149 undefined in C90, so do not convert signed left shift into
7150 multiplication. */
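      /* For instance, for unsigned X, X << 3 is handled as X * 8, and
	 X >> 3 as a floor division of X by 8, before recursing.  */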
7151 if (TREE_CODE (op1) == INTEGER_CST
7152 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
7153 /* const_binop may not detect overflow correctly,
7154 so check for it explicitly here. */
7155 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
7156 y: wi::to_wide (t: op1))
7157 && (t1 = fold_convert (ctype,
7158 const_binop (LSHIFT_EXPR, size_one_node,
7159 op1))) != 0
7160 && !TREE_OVERFLOW (t1))
7161 return extract_muldiv (t: build2 (tcode == LSHIFT_EXPR
7162 ? MULT_EXPR : FLOOR_DIV_EXPR,
7163 ctype,
7164 fold_convert (ctype, op0),
7165 t1),
7166 c, code, wide_type, strict_overflow_p);
7167 break;
7168
7169 case PLUS_EXPR: case MINUS_EXPR:
7170 /* See if we can eliminate the operation on both sides. If we can, we
7171 can return a new PLUS or MINUS. If we can't, the only remaining
7172 cases where we can do anything are if the second operand is a
7173 constant. */
7174 sub_strict_overflow_p = false;
7175 t1 = extract_muldiv (t: op0, c, code, wide_type, strict_overflow_p: &sub_strict_overflow_p);
7176 t2 = extract_muldiv (t: op1, c, code, wide_type, strict_overflow_p: &sub_strict_overflow_p);
7177 if (t1 != 0 && t2 != 0
7178 && TYPE_OVERFLOW_WRAPS (ctype)
7179 && (code == MULT_EXPR
7180 /* If not multiplication, we can only do this if both operands
7181 are divisible by c. */
7182 || (multiple_of_p (ctype, op0, c)
7183 && multiple_of_p (ctype, op1, c))))
7184 {
7185 if (sub_strict_overflow_p)
7186 *strict_overflow_p = true;
7187 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7188 fold_convert (ctype, t2));
7189 }
7190
7191 /* If this was a subtraction, negate OP1 and set it to be an addition.
7192 This simplifies the logic below. */
7193 if (tcode == MINUS_EXPR)
7194 {
7195 tcode = PLUS_EXPR, op1 = negate_expr (t: op1);
7196 /* If OP1 was not easily negatable, the constant may be OP0. */
7197 if (TREE_CODE (op0) == INTEGER_CST)
7198 {
7199 std::swap (a&: op0, b&: op1);
7200 std::swap (a&: t1, b&: t2);
7201 }
7202 }
7203
7204 if (TREE_CODE (op1) != INTEGER_CST)
7205 break;
7206
7207 /* If either OP1 or C are negative, this optimization is not safe for
7208 some of the division and remainder types while for others we need
7209 to change the code. */
7210 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7211 {
7212 if (code == CEIL_DIV_EXPR)
7213 code = FLOOR_DIV_EXPR;
7214 else if (code == FLOOR_DIV_EXPR)
7215 code = CEIL_DIV_EXPR;
7216 else if (code != MULT_EXPR
7217 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7218 break;
7219 }
7220
7221 /* If it's a multiply or a division/modulus operation of a multiple
7222 of our constant, do the operation and verify it doesn't overflow. */
7223 if (code == MULT_EXPR
7224 || wi::multiple_of_p (x: wi::to_wide (t: op1), y: wi::to_wide (t: c),
7225 TYPE_SIGN (type)))
7226 {
7227 op1 = const_binop (code, fold_convert (ctype, op1),
7228 fold_convert (ctype, c));
7229 /* We allow the constant to overflow with wrapping semantics. */
7230 if (op1 == 0
7231 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7232 break;
7233 }
7234 else
7235 break;
7236
7237 /* If we have an unsigned type, we cannot widen the operation since it
7238 will change the result if the original computation overflowed. */
7239 if (TYPE_UNSIGNED (ctype) && ctype != type)
7240 break;
7241
7242 /* The last case is if we are a multiply. In that case, we can
7243 apply the distributive law to commute the multiply and addition
7244 if the multiplication of the constants doesn't overflow
7245 and overflow is defined. With undefined overflow
7246 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7247 But fold_plusminus_mult_expr would factor back any power-of-two
7248 value so do not distribute in the first place in this case. */
7249 if (code == MULT_EXPR
7250 && TYPE_OVERFLOW_WRAPS (ctype)
7251 && !(tree_fits_shwi_p (c) && pow2p_hwi (x: absu_hwi (x: tree_to_shwi (c)))))
7252 return fold_build2 (tcode, ctype,
7253 fold_build2 (code, ctype,
7254 fold_convert (ctype, op0),
7255 fold_convert (ctype, c)),
7256 op1);
7257
7258 break;
7259
7260 case MULT_EXPR:
7261 /* We have a special case here if we are doing something like
7262 (C * 8) % 4 since we know that's zero. */
7263 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7264 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7265 /* If the multiplication can overflow we cannot optimize this. */
7266 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7267 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7268 && wi::multiple_of_p (x: wi::to_wide (t: op1), y: wi::to_wide (t: c),
7269 TYPE_SIGN (type)))
7270 {
7271 *strict_overflow_p = true;
7272 return omit_one_operand (type, integer_zero_node, op0);
7273 }
7274
7275 /* ... fall through ... */
7276
7277 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7278 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7279 /* If we can extract our operation from the LHS, do so and return a
7280 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7281 do something only if the second operand is a constant. */
7282 if (same_p
7283 && TYPE_OVERFLOW_WRAPS (ctype)
7284 && (t1 = extract_muldiv (t: op0, c, code, wide_type,
7285 strict_overflow_p)) != 0)
7286 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7287 fold_convert (ctype, op1));
7288 else if (tcode == MULT_EXPR && code == MULT_EXPR
7289 && TYPE_OVERFLOW_WRAPS (ctype)
7290 && (t1 = extract_muldiv (t: op1, c, code, wide_type,
7291 strict_overflow_p)) != 0)
7292 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7293 fold_convert (ctype, t1));
7294 else if (TREE_CODE (op1) != INTEGER_CST)
7295 return 0;
7296
7297 /* If these are the same operation types, we can associate them
7298 assuming no overflow. */
7299 if (tcode == code)
7300 {
7301 bool overflow_p = false;
7302 wi::overflow_type overflow_mul;
7303 signop sign = TYPE_SIGN (ctype);
7304 unsigned prec = TYPE_PRECISION (ctype);
7305 wide_int mul = wi::mul (x: wi::to_wide (t: op1, prec),
7306 y: wi::to_wide (t: c, prec),
7307 sgn: sign, overflow: &overflow_mul);
7308 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7309 if (overflow_mul
7310 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7311 overflow_p = true;
7312 if (!overflow_p)
7313 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7314 wide_int_to_tree (ctype, mul));
7315 }
7316
7317 /* If these operations "cancel" each other, we have the main
7318 optimizations of this pass, which occur when either constant is a
7319 multiple of the other, in which case we replace this with either an
7320 operation of CODE or TCODE.
7321
7322 If we have an unsigned type, we cannot do this since it will change
7323 the result if the original computation overflowed. */
7324 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7325 && !TYPE_OVERFLOW_SANITIZED (ctype)
7326 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7327 || (tcode == MULT_EXPR
7328 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7329 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7330 && code != MULT_EXPR)))
7331 {
7332 if (wi::multiple_of_p (x: wi::to_wide (t: op1), y: wi::to_wide (t: c),
7333 TYPE_SIGN (type)))
7334 {
7335 *strict_overflow_p = true;
7336 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7337 fold_convert (ctype,
7338 const_binop (TRUNC_DIV_EXPR,
7339 op1, c)));
7340 }
7341 else if (wi::multiple_of_p (x: wi::to_wide (t: c), y: wi::to_wide (t: op1),
7342 TYPE_SIGN (type)))
7343 {
7344 *strict_overflow_p = true;
7345 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7346 fold_convert (ctype,
7347 const_binop (TRUNC_DIV_EXPR,
7348 c, op1)));
7349 }
7350 }
7351 break;
7352
7353 default:
7354 break;
7355 }
7356
7357 return 0;
7358}
7359
7360/* Return a node which has the indicated constant VALUE (either 0 or
7361 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7362 and is of the indicated TYPE. */
7363
7364tree
7365constant_boolean_node (bool value, tree type)
7366{
7367 if (type == integer_type_node)
7368 return value ? integer_one_node : integer_zero_node;
7369 else if (type == boolean_type_node)
7370 return value ? boolean_true_node : boolean_false_node;
7371 else if (VECTOR_TYPE_P (type))
7372 return build_vector_from_val (type,
7373 build_int_cst (TREE_TYPE (type),
7374 value ? -1 : 0));
7375 else
7376 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7377}
7378
7379
7380/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7381 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7382 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7383 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7384 COND is the first argument to CODE; otherwise (as in the example
7385 given here), it is the second argument. TYPE is the type of the
7386 original expression. Return NULL_TREE if no simplification is
7387 possible. */
7388
7389static tree
7390fold_binary_op_with_conditional_arg (location_t loc,
7391 enum tree_code code,
7392 tree type, tree op0, tree op1,
7393 tree cond, tree arg, int cond_first_p)
7394{
7395 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7396 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7397 tree test, true_value, false_value;
7398 tree lhs = NULL_TREE;
7399 tree rhs = NULL_TREE;
7400 enum tree_code cond_code = COND_EXPR;
7401
7402 /* Do not move possibly trapping operations into the conditional as this
7403 pessimizes code and causes gimplification issues when applied late. */
7404 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7405 ANY_INTEGRAL_TYPE_P (type)
7406 && TYPE_OVERFLOW_TRAPS (type), op1))
7407 return NULL_TREE;
7408
7409 if (TREE_CODE (cond) == COND_EXPR
7410 || TREE_CODE (cond) == VEC_COND_EXPR)
7411 {
7412 test = TREE_OPERAND (cond, 0);
7413 true_value = TREE_OPERAND (cond, 1);
7414 false_value = TREE_OPERAND (cond, 2);
7415 /* If this operand is an expression that throws (and hence has void
7416 type), then it does not make sense to try to perform a logical or
7417 arithmetic operation involving it. */
7418 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7419 lhs = true_value;
7420 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7421 rhs = false_value;
7422 }
7423 else if (!(TREE_CODE (type) != VECTOR_TYPE
7424 && VECTOR_TYPE_P (TREE_TYPE (cond))))
7425 {
7426 tree testtype = TREE_TYPE (cond);
7427 test = cond;
7428 true_value = constant_boolean_node (value: true, type: testtype);
7429 false_value = constant_boolean_node (value: false, type: testtype);
7430 }
7431 else
7432 /* Detect the case of mixing vector and scalar types - bail out. */
7433 return NULL_TREE;
7434
7435 if (VECTOR_TYPE_P (TREE_TYPE (test)))
7436 cond_code = VEC_COND_EXPR;
7437
7438 /* This transformation is only worthwhile if we don't have to wrap ARG
7439 in a SAVE_EXPR and the operation can be simplified without recursing
7440 on at least one of the branches once it's pushed inside the COND_EXPR. */
7441 if (!TREE_CONSTANT (arg)
7442 && (TREE_SIDE_EFFECTS (arg)
7443 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7444 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7445 return NULL_TREE;
7446
7447 arg = fold_convert_loc (loc, type: arg_type, arg);
7448 if (lhs == 0)
7449 {
7450 true_value = fold_convert_loc (loc, type: cond_type, arg: true_value);
7451 if (cond_first_p)
7452 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7453 else
7454 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7455 }
7456 if (rhs == 0)
7457 {
7458 false_value = fold_convert_loc (loc, type: cond_type, arg: false_value);
7459 if (cond_first_p)
7460 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7461 else
7462 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7463 }
7464
7465 /* Check that we have simplified at least one of the branches. */
7466 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7467 return NULL_TREE;
7468
7469 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7470}
7471
7472
7473/* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7474
7475 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7476 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7477 if ARG - ZERO_ARG is the same as ARG.
7478
7479 If ARG is NULL, check for any value of type TYPE.
7480
7481 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7482 and finite. The problematic cases are when X is zero, and its mode
7483 has signed zeros. In the case of rounding towards -infinity,
7484 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7485 modes, X + 0 is not the same as X because -0 + 0 is 0. */
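/* For example, with default rounding and signed zeros honored, X - 0.0 can
   still be folded to X, while X + 0.0 can only be folded when X is known
   not to be -0.0 (since -0.0 + 0.0 is +0.0).  */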
7486
7487bool
7488fold_real_zero_addition_p (const_tree type, const_tree arg,
7489 const_tree zero_arg, int negate)
7490{
7491 if (!real_zerop (zero_arg))
7492 return false;
7493
7494 /* Don't allow the fold with -fsignaling-nans. */
7495 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7496 return false;
7497
7498 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7499 if (!HONOR_SIGNED_ZEROS (type))
7500 return true;
7501
7502 /* There is no case that is safe for all rounding modes. */
7503 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7504 return false;
7505
7506 /* In a vector or complex, we would need to check the sign of all zeros. */
7507 if (TREE_CODE (zero_arg) == VECTOR_CST)
7508 zero_arg = uniform_vector_p (zero_arg);
7509 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7510 return false;
7511
7512 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7513 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7514 negate = !negate;
7515
7516 /* The mode has signed zeros, and we have to honor their sign.
7517 In this situation, there are only two cases we can return true for.
7518 (i) X - 0 is the same as X with default rounding.
7519 (ii) X + 0 is X when X can't possibly be -0.0. */
7520 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7521}
7522
7523/* Subroutine of match.pd that optimizes comparisons of a division by
7524 a nonzero integer constant against an integer constant, i.e.
7525 X/C1 op C2.
7526
7527 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7528 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
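/* For example, for unsigned X, X / 4 == 3 holds exactly when X is in
   [12, 15], so *LO is set to 12 and *HI to 15.  */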
7529
7530enum tree_code
7531fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7532 tree *hi, bool *neg_overflow)
7533{
7534 tree prod, tmp, type = TREE_TYPE (c1);
7535 signop sign = TYPE_SIGN (type);
7536 wi::overflow_type overflow;
7537
7538 /* We have to do this the hard way to detect unsigned overflow.
7539 prod = int_const_binop (MULT_EXPR, c1, c2); */
7540 wide_int val = wi::mul (x: wi::to_wide (t: c1), y: wi::to_wide (t: c2), sgn: sign, overflow: &overflow);
7541 prod = force_fit_type (type, val, -1, overflow);
7542 *neg_overflow = false;
7543
7544 if (sign == UNSIGNED)
7545 {
7546 tmp = int_const_binop (code: MINUS_EXPR, arg1: c1, arg2: build_int_cst (type, 1));
7547 *lo = prod;
7548
7549 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7550 val = wi::add (x: wi::to_wide (t: prod), y: wi::to_wide (t: tmp), sgn: sign, overflow: &overflow);
7551 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7552 }
7553 else if (tree_int_cst_sgn (c1) >= 0)
7554 {
7555 tmp = int_const_binop (code: MINUS_EXPR, arg1: c1, arg2: build_int_cst (type, 1));
7556 switch (tree_int_cst_sgn (c2))
7557 {
7558 case -1:
7559 *neg_overflow = true;
7560 *lo = int_const_binop (code: MINUS_EXPR, arg1: prod, arg2: tmp);
7561 *hi = prod;
7562 break;
7563
7564 case 0:
7565 *lo = fold_negate_const (tmp, type);
7566 *hi = tmp;
7567 break;
7568
7569 case 1:
7570 *hi = int_const_binop (code: PLUS_EXPR, arg1: prod, arg2: tmp);
7571 *lo = prod;
7572 break;
7573
7574 default:
7575 gcc_unreachable ();
7576 }
7577 }
7578 else
7579 {
7580 /* A negative divisor reverses the relational operators. */
7581 code = swap_tree_comparison (code);
7582
7583 tmp = int_const_binop (code: PLUS_EXPR, arg1: c1, arg2: build_int_cst (type, 1));
7584 switch (tree_int_cst_sgn (c2))
7585 {
7586 case -1:
7587 *hi = int_const_binop (code: MINUS_EXPR, arg1: prod, arg2: tmp);
7588 *lo = prod;
7589 break;
7590
7591 case 0:
7592 *hi = fold_negate_const (tmp, type);
7593 *lo = tmp;
7594 break;
7595
7596 case 1:
7597 *neg_overflow = true;
7598 *lo = int_const_binop (code: PLUS_EXPR, arg1: prod, arg2: tmp);
7599 *hi = prod;
7600 break;
7601
7602 default:
7603 gcc_unreachable ();
7604 }
7605 }
7606
7607 if (code != EQ_EXPR && code != NE_EXPR)
7608 return code;
7609
7610 if (TREE_OVERFLOW (*lo)
7611 || operand_equal_p (arg0: *lo, TYPE_MIN_VALUE (type), flags: 0))
7612 *lo = NULL_TREE;
7613 if (TREE_OVERFLOW (*hi)
7614 || operand_equal_p (arg0: *hi, TYPE_MAX_VALUE (type), flags: 0))
7615 *hi = NULL_TREE;
7616
7617 return code;
7618}
7619
7620/* Test whether it is preferable to swap two operands, ARG0 and
7621 ARG1, for example because ARG0 is an integer constant and ARG1
7622 isn't. */
7623
7624bool
7625tree_swap_operands_p (const_tree arg0, const_tree arg1)
7626{
7627 if (CONSTANT_CLASS_P (arg1))
7628 return false;
7629 if (CONSTANT_CLASS_P (arg0))
7630 return true;
7631
7632 STRIP_NOPS (arg0);
7633 STRIP_NOPS (arg1);
7634
7635 if (TREE_CONSTANT (arg1))
7636 return false;
7637 if (TREE_CONSTANT (arg0))
7638 return true;
7639
7640 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7641 for commutative and comparison operators. Ensuring a canonical
7642 form allows the optimizers to find additional redundancies without
7643 having to explicitly check for both orderings. */
7644 if (TREE_CODE (arg0) == SSA_NAME
7645 && TREE_CODE (arg1) == SSA_NAME
7646 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7647 return true;
7648
7649 /* Put SSA_NAMEs last. */
7650 if (TREE_CODE (arg1) == SSA_NAME)
7651 return false;
7652 if (TREE_CODE (arg0) == SSA_NAME)
7653 return true;
7654
7655 /* Put variables last. */
7656 if (DECL_P (arg1))
7657 return false;
7658 if (DECL_P (arg0))
7659 return true;
7660
7661 return false;
7662}
7663
7664
7665/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7666 means A >= Y && A != MAX, but in this case we know that
7667 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7668
7669static tree
7670fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7671{
7672 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7673
7674 if (TREE_CODE (bound) == LT_EXPR)
7675 a = TREE_OPERAND (bound, 0);
7676 else if (TREE_CODE (bound) == GT_EXPR)
7677 a = TREE_OPERAND (bound, 1);
7678 else
7679 return NULL_TREE;
7680
7681 typea = TREE_TYPE (a);
7682 if (!INTEGRAL_TYPE_P (typea)
7683 && !POINTER_TYPE_P (typea))
7684 return NULL_TREE;
7685
7686 if (TREE_CODE (ineq) == LT_EXPR)
7687 {
7688 a1 = TREE_OPERAND (ineq, 1);
7689 y = TREE_OPERAND (ineq, 0);
7690 }
7691 else if (TREE_CODE (ineq) == GT_EXPR)
7692 {
7693 a1 = TREE_OPERAND (ineq, 0);
7694 y = TREE_OPERAND (ineq, 1);
7695 }
7696 else
7697 return NULL_TREE;
7698
7699 if (TREE_TYPE (a1) != typea)
7700 return NULL_TREE;
7701
7702 if (POINTER_TYPE_P (typea))
7703 {
7704 /* Convert the pointers to signed integers before taking the difference. */
7705 tree ta = fold_convert_loc (loc, ssizetype, arg: a);
7706 tree ta1 = fold_convert_loc (loc, ssizetype, arg: a1);
7707 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7708 }
7709 else
7710 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7711
7712 if (!diff || !integer_onep (diff))
7713 return NULL_TREE;
7714
7715 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7716}
7717
7718/* Fold a sum or difference of at least one multiplication.
7719 Returns the folded tree or NULL if no simplification could be made. */
7720
7721static tree
7722fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7723 tree arg0, tree arg1)
7724{
7725 tree arg00, arg01, arg10, arg11;
7726 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7727
7728 /* (A * C) +- (B * C) -> (A+-B) * C.
7729 (A * C) +- A -> A * (C+-1).
7730 We are most concerned about the case where C is a constant,
7731 but other combinations show up during loop reduction. Since
7732 it is not difficult, try all four possibilities. */
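  /* For instance, (i * 4) + (j * 4) becomes (i + j) * 4, and
     (i * 4) + i becomes i * 5.  */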
7733
7734 if (TREE_CODE (arg0) == MULT_EXPR)
7735 {
7736 arg00 = TREE_OPERAND (arg0, 0);
7737 arg01 = TREE_OPERAND (arg0, 1);
7738 }
7739 else if (TREE_CODE (arg0) == INTEGER_CST)
7740 {
7741 arg00 = build_one_cst (type);
7742 arg01 = arg0;
7743 }
7744 else
7745 {
7746 /* We cannot generate constant 1 for fract. */
7747 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7748 return NULL_TREE;
7749 arg00 = arg0;
7750 arg01 = build_one_cst (type);
7751 }
7752 if (TREE_CODE (arg1) == MULT_EXPR)
7753 {
7754 arg10 = TREE_OPERAND (arg1, 0);
7755 arg11 = TREE_OPERAND (arg1, 1);
7756 }
7757 else if (TREE_CODE (arg1) == INTEGER_CST)
7758 {
7759 arg10 = build_one_cst (type);
7760 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7761 the purpose of this canonicalization. */
7762 if (wi::neg_p (x: wi::to_wide (t: arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7763 && negate_expr_p (t: arg1)
7764 && code == PLUS_EXPR)
7765 {
7766 arg11 = negate_expr (t: arg1);
7767 code = MINUS_EXPR;
7768 }
7769 else
7770 arg11 = arg1;
7771 }
7772 else
7773 {
7774 /* We cannot generate constant 1 for fract. */
7775 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7776 return NULL_TREE;
7777 arg10 = arg1;
7778 arg11 = build_one_cst (type);
7779 }
7780 same = NULL_TREE;
7781
7782 /* Prefer factoring a common non-constant. */
7783 if (operand_equal_p (arg0: arg00, arg1: arg10, flags: 0))
7784 same = arg00, alt0 = arg01, alt1 = arg11;
7785 else if (operand_equal_p (arg0: arg01, arg1: arg11, flags: 0))
7786 same = arg01, alt0 = arg00, alt1 = arg10;
7787 else if (operand_equal_p (arg0: arg00, arg1: arg11, flags: 0))
7788 same = arg00, alt0 = arg01, alt1 = arg10;
7789 else if (operand_equal_p (arg0: arg01, arg1: arg10, flags: 0))
7790 same = arg01, alt0 = arg00, alt1 = arg11;
7791
7792 /* No identical multiplicands; see if we can find a common
7793 power-of-two factor in non-power-of-two multiplies. This
7794 can help in multi-dimensional array access. */
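  /* For instance, i * 12 + j * 4 can become (i * 3 + j) * 4.  */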
7795 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7796 {
7797 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7798 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7799 HOST_WIDE_INT tmp;
7800 bool swap = false;
7801 tree maybe_same;
7802
7803 /* Move min of absolute values to int11. */
7804 if (absu_hwi (x: int01) < absu_hwi (x: int11))
7805 {
7806 tmp = int01, int01 = int11, int11 = tmp;
7807 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7808 maybe_same = arg01;
7809 swap = true;
7810 }
7811 else
7812 maybe_same = arg11;
7813
7814 const unsigned HOST_WIDE_INT factor = absu_hwi (x: int11);
7815 if (factor > 1
7816 && pow2p_hwi (x: factor)
7817 && (int01 & (factor - 1)) == 0
7818 /* The remainder should not be a constant, otherwise we
7819 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7820 increased the number of multiplications necessary. */
7821 && TREE_CODE (arg10) != INTEGER_CST)
7822 {
7823 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7824 build_int_cst (TREE_TYPE (arg00),
7825 int01 / int11));
7826 alt1 = arg10;
7827 same = maybe_same;
7828 if (swap)
7829 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7830 }
7831 }
7832
7833 if (!same)
7834 return NULL_TREE;
7835
7836 if (! ANY_INTEGRAL_TYPE_P (type)
7837 || TYPE_OVERFLOW_WRAPS (type)
7838 /* We are neither factoring zero nor minus one. */
7839 || TREE_CODE (same) == INTEGER_CST)
7840 return fold_build2_loc (loc, MULT_EXPR, type,
7841 fold_build2_loc (loc, code, type,
7842 fold_convert_loc (loc, type, arg: alt0),
7843 fold_convert_loc (loc, type, arg: alt1)),
7844 fold_convert_loc (loc, type, arg: same));
7845
7846 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7847 same may be minus one and thus the multiplication may overflow. Perform
7848 the sum operation in an unsigned type. */
7849 tree utype = unsigned_type_for (type);
7850 tree tem = fold_build2_loc (loc, code, utype,
7851 fold_convert_loc (loc, type: utype, arg: alt0),
7852 fold_convert_loc (loc, type: utype, arg: alt1));
7853 /* If the sum evaluated to a constant that is not -INF, the multiplication
7854 cannot overflow. */
7855 if (TREE_CODE (tem) == INTEGER_CST
7856 && (wi::to_wide (t: tem)
7857 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7858 return fold_build2_loc (loc, MULT_EXPR, type,
7859 fold_convert (type, tem), same);
7860
7861 /* Do not resort to unsigned multiplication because
7862 we lose the no-overflow property of the expression. */
7863 return NULL_TREE;
7864}
7865
7866/* Subroutine of native_encode_expr. Encode the INTEGER_CST
7867 specified by EXPR into the buffer PTR of length LEN bytes.
7868 Return the number of bytes placed in the buffer, or zero
7869 upon failure. */
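/* For example, on a little-endian target with 8-bit bytes, encoding the
   32-bit constant 0x11223344 stores the bytes 0x44 0x33 0x22 0x11.  */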
7870
7871static int
7872native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7873{
7874 tree type = TREE_TYPE (expr);
7875 int total_bytes;
7876 if (TREE_CODE (type) == BITINT_TYPE)
7877 {
7878 struct bitint_info info;
7879 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
7880 gcc_assert (ok);
7881 scalar_int_mode limb_mode = as_a <scalar_int_mode> (m: info.limb_mode);
7882 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (mode: limb_mode))
7883 {
7884 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7885 /* More work is needed when adding _BitInt support to PDP endian
7886 if limb is smaller than word, or if _BitInt limb ordering doesn't
7887 match target endianity here. */
7888 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
7889 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
7890 || (GET_MODE_SIZE (limb_mode)
7891 >= UNITS_PER_WORD)));
7892 }
7893 else
7894 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7895 }
7896 else
7897 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7898 int byte, offset, word, words;
7899 unsigned char value;
7900
7901 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7902 return 0;
7903 if (off == -1)
7904 off = 0;
7905
7906 if (ptr == NULL)
7907 /* Dry run. */
7908 return MIN (len, total_bytes - off);
7909
7910 words = total_bytes / UNITS_PER_WORD;
7911
7912 for (byte = 0; byte < total_bytes; byte++)
7913 {
7914 int bitpos = byte * BITS_PER_UNIT;
7915 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7916 number of bytes. */
7917 value = wi::extract_uhwi (x: wi::to_widest (t: expr), bitpos, BITS_PER_UNIT);
7918
7919 if (total_bytes > UNITS_PER_WORD)
7920 {
7921 word = byte / UNITS_PER_WORD;
7922 if (WORDS_BIG_ENDIAN)
7923 word = (words - 1) - word;
7924 offset = word * UNITS_PER_WORD;
7925 if (BYTES_BIG_ENDIAN)
7926 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7927 else
7928 offset += byte % UNITS_PER_WORD;
7929 }
7930 else
7931 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7932 if (offset >= off && offset - off < len)
7933 ptr[offset - off] = value;
7934 }
7935 return MIN (len, total_bytes - off);
7936}
7937
7938
7939/* Subroutine of native_encode_expr. Encode the FIXED_CST
7940 specified by EXPR into the buffer PTR of length LEN bytes.
7941 Return the number of bytes placed in the buffer, or zero
7942 upon failure. */
7943
7944static int
7945native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7946{
7947 tree type = TREE_TYPE (expr);
7948 scalar_mode mode = SCALAR_TYPE_MODE (type);
7949 int total_bytes = GET_MODE_SIZE (mode);
7950 FIXED_VALUE_TYPE value;
7951 tree i_value, i_type;
7952
7953 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7954 return 0;
7955
7956 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7957
7958 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7959 return 0;
7960
7961 value = TREE_FIXED_CST (expr);
7962 i_value = double_int_to_tree (i_type, value.data);
7963
7964 return native_encode_int (expr: i_value, ptr, len, off);
7965}
7966
7967
7968/* Subroutine of native_encode_expr. Encode the REAL_CST
7969 specified by EXPR into the buffer PTR of length LEN bytes.
7970 Return the number of bytes placed in the buffer, or zero
7971 upon failure. */
7972
7973static int
7974native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7975{
7976 tree type = TREE_TYPE (expr);
7977 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7978 int byte, offset, word, words, bitpos;
7979 unsigned char value;
7980
7981 /* There are always 32 bits in each long, no matter the size of
7982 the host's long. We handle floating point representations with
7983 up to 192 bits. */
7984 long tmp[6];
7985
7986 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7987 return 0;
7988 if (off == -1)
7989 off = 0;
7990
7991 if (ptr == NULL)
7992 /* Dry run. */
7993 return MIN (len, total_bytes - off);
7994
7995 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7996
7997 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7998
7999 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8000 bitpos += BITS_PER_UNIT)
8001 {
8002 byte = (bitpos / BITS_PER_UNIT) & 3;
8003 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
8004
8005 if (UNITS_PER_WORD < 4)
8006 {
8007 word = byte / UNITS_PER_WORD;
8008 if (WORDS_BIG_ENDIAN)
8009 word = (words - 1) - word;
8010 offset = word * UNITS_PER_WORD;
8011 if (BYTES_BIG_ENDIAN)
8012 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8013 else
8014 offset += byte % UNITS_PER_WORD;
8015 }
8016 else
8017 {
8018 offset = byte;
8019 if (BYTES_BIG_ENDIAN)
8020 {
8021 /* Reverse bytes within each long, or within the entire float
8022 if it's smaller than a long (for HFmode). */
8023 offset = MIN (3, total_bytes - 1) - offset;
8024 gcc_assert (offset >= 0);
8025 }
8026 }
8027 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
8028 if (offset >= off
8029 && offset - off < len)
8030 ptr[offset - off] = value;
8031 }
8032 return MIN (len, total_bytes - off);
8033}
8034
8035/* Subroutine of native_encode_expr. Encode the COMPLEX_CST
8036 specified by EXPR into the buffer PTR of length LEN bytes.
8037 Return the number of bytes placed in the buffer, or zero
8038 upon failure. */
8039
8040static int
8041native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
8042{
8043 int rsize, isize;
8044 tree part;
8045
8046 part = TREE_REALPART (expr);
8047 rsize = native_encode_expr (part, ptr, len, off);
8048 if (off == -1 && rsize == 0)
8049 return 0;
8050 part = TREE_IMAGPART (expr);
8051 if (off != -1)
8052 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
8053 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
8054 len - rsize, off);
8055 if (off == -1 && isize != rsize)
8056 return 0;
8057 return rsize + isize;
8058}
8059
8060/* Like native_encode_vector, but only encode the first COUNT elements.
8061 The other arguments are as for native_encode_vector. */
8062
8063static int
8064native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
8065 int off, unsigned HOST_WIDE_INT count)
8066{
8067 tree itype = TREE_TYPE (TREE_TYPE (expr));
8068 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
8069 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
8070 {
8071 /* This is the only case in which elements can be smaller than a byte.
8072 Element 0 is always in the lsb of the containing byte. */
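      /* E.g. with 1-bit elements, element I ends up in bit I % BITS_PER_UNIT
	 of byte I / BITS_PER_UNIT of the output.  */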
8073 unsigned int elt_bits = TYPE_PRECISION (itype);
8074 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
8075 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8076 return 0;
8077
8078 if (off == -1)
8079 off = 0;
8080
8081 /* Zero the buffer and then set bits later where necessary. */
8082 int extract_bytes = MIN (len, total_bytes - off);
8083 if (ptr)
8084 memset (s: ptr, c: 0, n: extract_bytes);
8085
8086 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
8087 unsigned int first_elt = off * elts_per_byte;
8088 unsigned int extract_elts = extract_bytes * elts_per_byte;
8089 for (unsigned int i = 0; i < extract_elts; ++i)
8090 {
8091 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
8092 if (TREE_CODE (elt) != INTEGER_CST)
8093 return 0;
8094
8095 if (ptr && wi::extract_uhwi (x: wi::to_wide (t: elt), bitpos: 0, width: 1))
8096 {
8097 unsigned int bit = i * elt_bits;
8098 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
8099 }
8100 }
8101 return extract_bytes;
8102 }
8103
8104 int offset = 0;
8105 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
8106 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
8107 {
8108 if (off >= size)
8109 {
8110 off -= size;
8111 continue;
8112 }
8113 tree elem = VECTOR_CST_ELT (expr, i);
8114 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
8115 len - offset, off);
8116 if ((off == -1 && res != size) || res == 0)
8117 return 0;
8118 offset += res;
8119 if (offset >= len)
8120 return (off == -1 && i < count - 1) ? 0 : offset;
8121 if (off != -1)
8122 off = 0;
8123 }
8124 return offset;
8125}
8126
8127/* Subroutine of native_encode_expr. Encode the VECTOR_CST
8128 specified by EXPR into the buffer PTR of length LEN bytes.
8129 Return the number of bytes placed in the buffer, or zero
8130 upon failure. */
8131
8132static int
8133native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
8134{
8135 unsigned HOST_WIDE_INT count;
8136 if (!VECTOR_CST_NELTS (expr).is_constant (const_value: &count))
8137 return 0;
8138 return native_encode_vector_part (expr, ptr, len, off, count);
8139}
8140
8141
8142/* Subroutine of native_encode_expr. Encode the STRING_CST
8143 specified by EXPR into the buffer PTR of length LEN bytes.
8144 Return the number of bytes placed in the buffer, or zero
8145 upon failure. */
8146
8147static int
8148native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
8149{
8150 tree type = TREE_TYPE (expr);
8151
8152 /* Wide-char strings are encoded in target byte-order so native
8153 encoding them is trivial. */
8154 if (BITS_PER_UNIT != CHAR_BIT
8155 || TREE_CODE (type) != ARRAY_TYPE
8156 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8157 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
8158 return 0;
8159
8160 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
8161 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8162 return 0;
8163 if (off == -1)
8164 off = 0;
8165 len = MIN (total_bytes - off, len);
8166 if (ptr == NULL)
8167 /* Dry run. */;
8168 else
8169 {
8170 int written = 0;
8171 if (off < TREE_STRING_LENGTH (expr))
8172 {
8173 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8174 memcpy (dest: ptr, TREE_STRING_POINTER (expr) + off, n: written);
8175 }
8176 memset (s: ptr + written, c: 0, n: len - written);
8177 }
8178 return len;
8179}
8180
8181
8182/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8183 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8184 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8185 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8186 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8187 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8188 Return the number of bytes placed in the buffer, or zero upon failure. */
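/* A sketch of a typical calling pattern (EXPR and BUF here are placeholder
   names, not anything defined in this file): first do a dry run with a null
   PTR to see how many bytes are needed, then encode for real:

     unsigned char buf[64];
     int sz = native_encode_expr (expr, NULL, (int) sizeof buf, -1);
     if (sz != 0)
       sz = native_encode_expr (expr, buf, (int) sizeof buf, -1);

   The first call stores nothing and just returns the size; the second call
   fills BUF with the target representation of EXPR.  */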
8189
8190int
8191native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8192{
8193 /* We don't support starting at negative offset and -1 is special. */
8194 if (off < -1)
8195 return 0;
8196
8197 switch (TREE_CODE (expr))
8198 {
8199 case INTEGER_CST:
8200 return native_encode_int (expr, ptr, len, off);
8201
8202 case REAL_CST:
8203 return native_encode_real (expr, ptr, len, off);
8204
8205 case FIXED_CST:
8206 return native_encode_fixed (expr, ptr, len, off);
8207
8208 case COMPLEX_CST:
8209 return native_encode_complex (expr, ptr, len, off);
8210
8211 case VECTOR_CST:
8212 return native_encode_vector (expr, ptr, len, off);
8213
8214 case STRING_CST:
8215 return native_encode_string (expr, ptr, len, off);
8216
8217 default:
8218 return 0;
8219 }
8220}
8221
8222/* Try to find a type whose byte size is smaller than or equal to LEN bytes
8223 and larger than or equal to FIELDSIZE bytes, with underlying mode
8224 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
8225 works in terms of machine modes, we can't just use build_nonstandard_integer_type. */
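/* For example, with FIELDSIZE 3 and LEN 8 this typically yields a 32-bit
   integer type: the narrowest integer mode of at least 3 bytes whose size
   still fits in 8 bytes.  */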
8226
8227tree
8228find_bitfield_repr_type (int fieldsize, int len)
8229{
8230 machine_mode mode;
8231 for (int pass = 0; pass < 2; pass++)
8232 {
8233 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8234 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8235 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8236 && known_eq (GET_MODE_PRECISION (mode),
8237 GET_MODE_BITSIZE (mode))
8238 && known_le (GET_MODE_SIZE (mode), len))
8239 {
8240 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8241 if (ret && TYPE_MODE (ret) == mode)
8242 return ret;
8243 }
8244 }
8245
8246 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8247 if (int_n_enabled_p[i]
8248 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8249 && int_n_trees[i].unsigned_type)
8250 {
8251 tree ret = int_n_trees[i].unsigned_type;
8252 mode = TYPE_MODE (ret);
8253 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8254 && known_eq (GET_MODE_PRECISION (mode),
8255 GET_MODE_BITSIZE (mode))
8256 && known_le (GET_MODE_SIZE (mode), len))
8257 return ret;
8258 }
8259
8260 return NULL_TREE;
8261}
8262
8263/* Similar to native_encode_expr, but also handles CONSTRUCTORs, VCEs,
8264 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (in which case PTR must
8265 be non-NULL and OFF zero), then in addition to filling the bytes pointed
8266 to by PTR with the value, also clear any bits pointed to by MASK that are
8267 known to be initialized; bits corresponding to e.g. uninitialized padding
8268 or uninitialized fields are kept as-is. */
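/* For example, when encoding a CONSTRUCTOR for a struct with padding between
   fields, the MASK bytes covering the padding are left untouched, while the
   bytes covered by initialized fields are cleared in MASK.  */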
8269
8270int
8271native_encode_initializer (tree init, unsigned char *ptr, int len,
8272 int off, unsigned char *mask)
8273{
8274 int r;
8275
8276 /* We don't support starting at negative offset and -1 is special. */
8277 if (off < -1 || init == NULL_TREE)
8278 return 0;
8279
8280 gcc_assert (mask == NULL || (off == 0 && ptr));
8281
8282 STRIP_NOPS (init);
8283 switch (TREE_CODE (init))
8284 {
8285 case VIEW_CONVERT_EXPR:
8286 case NON_LVALUE_EXPR:
8287 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8288 mask);
8289 default:
8290 r = native_encode_expr (expr: init, ptr, len, off);
8291 if (mask)
8292 memset (s: mask, c: 0, n: r);
8293 return r;
8294 case CONSTRUCTOR:
8295 tree type = TREE_TYPE (init);
8296 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8297 if (total_bytes < 0)
8298 return 0;
8299 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8300 return 0;
8301 int o = off == -1 ? 0 : off;
8302 if (TREE_CODE (type) == ARRAY_TYPE)
8303 {
8304 tree min_index;
8305 unsigned HOST_WIDE_INT cnt;
8306 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8307 constructor_elt *ce;
8308
8309 if (!TYPE_DOMAIN (type)
8310 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8311 return 0;
8312
8313 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8314 if (fieldsize <= 0)
8315 return 0;
8316
8317 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8318 if (ptr)
8319 memset (s: ptr, c: '\0', MIN (total_bytes - off, len));
8320
8321 for (cnt = 0; ; cnt++)
8322 {
8323 tree val = NULL_TREE, index = NULL_TREE;
8324 HOST_WIDE_INT pos = curpos, count = 0;
8325 bool full = false;
8326 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), ix: cnt, ptr: &ce))
8327 {
8328 val = ce->value;
8329 index = ce->index;
8330 }
8331 else if (mask == NULL
8332 || CONSTRUCTOR_NO_CLEARING (init)
8333 || curpos >= total_bytes)
8334 break;
8335 else
8336 pos = total_bytes;
8337
8338 if (index && TREE_CODE (index) == RANGE_EXPR)
8339 {
8340 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8341 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8342 return 0;
8343 offset_int wpos
8344 = wi::sext (x: wi::to_offset (TREE_OPERAND (index, 0))
8345 - wi::to_offset (t: min_index),
8346 TYPE_PRECISION (sizetype));
8347 wpos *= fieldsize;
8348 if (!wi::fits_shwi_p (x: wpos))
8349 return 0;
8350 pos = wpos.to_shwi ();
8351 offset_int wcount
8352 = wi::sext (x: wi::to_offset (TREE_OPERAND (index, 1))
8353 - wi::to_offset (TREE_OPERAND (index, 0)),
8354 TYPE_PRECISION (sizetype));
8355 if (!wi::fits_shwi_p (x: wcount))
8356 return 0;
8357 count = wcount.to_shwi ();
8358 }
8359 else if (index)
8360 {
8361 if (TREE_CODE (index) != INTEGER_CST)
8362 return 0;
8363 offset_int wpos
8364 = wi::sext (x: wi::to_offset (t: index)
8365 - wi::to_offset (t: min_index),
8366 TYPE_PRECISION (sizetype));
8367 wpos *= fieldsize;
8368 if (!wi::fits_shwi_p (x: wpos))
8369 return 0;
8370 pos = wpos.to_shwi ();
8371 }
8372
8373 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8374 {
8375 if (valueinit == -1)
8376 {
8377 tree zero = build_zero_cst (TREE_TYPE (type));
8378 r = native_encode_initializer (init: zero, ptr: ptr + curpos,
8379 len: fieldsize, off: 0,
8380 mask: mask + curpos);
8381 if (TREE_CODE (zero) == CONSTRUCTOR)
8382 ggc_free (zero);
8383 if (!r)
8384 return 0;
8385 valueinit = curpos;
8386 curpos += fieldsize;
8387 }
8388 while (curpos != pos)
8389 {
8390 memcpy (dest: ptr + curpos, src: ptr + valueinit, n: fieldsize);
8391 memcpy (dest: mask + curpos, src: mask + valueinit, n: fieldsize);
8392 curpos += fieldsize;
8393 }
8394 }
8395
8396 curpos = pos;
8397 if (val)
8398 do
8399 {
8400 if (off == -1
8401 || (curpos >= off
8402 && (curpos + fieldsize
8403 <= (HOST_WIDE_INT) off + len)))
8404 {
8405 if (full)
8406 {
8407 if (ptr)
8408 memcpy (dest: ptr + (curpos - o), src: ptr + (pos - o),
8409 n: fieldsize);
8410 if (mask)
8411 memcpy (dest: mask + curpos, src: mask + pos, n: fieldsize);
8412 }
8413 else if (!native_encode_initializer (init: val,
8414 ptr: ptr
8415 ? ptr + curpos - o
8416 : NULL,
8417 len: fieldsize,
8418 off: off == -1 ? -1
8419 : 0,
8420 mask: mask
8421 ? mask + curpos
8422 : NULL))
8423 return 0;
8424 else
8425 {
8426 full = true;
8427 pos = curpos;
8428 }
8429 }
8430 else if (curpos + fieldsize > off
8431 && curpos < (HOST_WIDE_INT) off + len)
8432 {
8433 /* Partial overlap. */
8434 unsigned char *p = NULL;
8435 int no = 0;
8436 int l;
8437 gcc_assert (mask == NULL);
8438 if (curpos >= off)
8439 {
8440 if (ptr)
8441 p = ptr + curpos - off;
8442 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8443 fieldsize);
8444 }
8445 else
8446 {
8447 p = ptr;
8448 no = off - curpos;
8449 l = len;
8450 }
8451 if (!native_encode_initializer (init: val, ptr: p, len: l, off: no, NULL))
8452 return 0;
8453 }
8454 curpos += fieldsize;
8455 }
8456 while (count-- != 0);
8457 }
8458 return MIN (total_bytes - off, len);
8459 }
8460 else if (TREE_CODE (type) == RECORD_TYPE
8461 || TREE_CODE (type) == UNION_TYPE)
8462 {
8463 unsigned HOST_WIDE_INT cnt;
8464 constructor_elt *ce;
8465 tree fld_base = TYPE_FIELDS (type);
8466 tree to_free = NULL_TREE;
8467
8468 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8469 if (ptr != NULL)
8470 memset (s: ptr, c: '\0', MIN (total_bytes - o, len));
8471 for (cnt = 0; ; cnt++)
8472 {
8473 tree val = NULL_TREE, field = NULL_TREE;
8474 HOST_WIDE_INT pos = 0, fieldsize;
8475 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8476
8477 if (to_free)
8478 {
8479 ggc_free (to_free);
8480 to_free = NULL_TREE;
8481 }
8482
8483 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), ix: cnt, ptr: &ce))
8484 {
8485 val = ce->value;
8486 field = ce->index;
8487 if (field == NULL_TREE)
8488 return 0;
8489
8490 pos = int_byte_position (field);
8491 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8492 continue;
8493 }
8494 else if (mask == NULL
8495 || CONSTRUCTOR_NO_CLEARING (init))
8496 break;
8497 else
8498 pos = total_bytes;
8499
8500 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8501 {
8502 tree fld;
8503 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8504 {
8505 if (TREE_CODE (fld) != FIELD_DECL)
8506 continue;
8507 if (fld == field)
8508 break;
8509 if (DECL_PADDING_P (fld))
8510 continue;
8511 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8512 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8513 return 0;
8514 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8515 continue;
8516 break;
8517 }
8518 if (fld == NULL_TREE)
8519 {
8520 if (ce == NULL)
8521 break;
8522 return 0;
8523 }
8524 fld_base = DECL_CHAIN (fld);
8525 if (fld != field)
8526 {
8527 cnt--;
8528 field = fld;
8529 pos = int_byte_position (field);
8530 val = build_zero_cst (TREE_TYPE (fld));
8531 if (TREE_CODE (val) == CONSTRUCTOR)
8532 to_free = val;
8533 }
8534 }
8535
8536 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8537 && TYPE_DOMAIN (TREE_TYPE (field))
8538 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8539 {
8540 if (mask || off != -1)
8541 return 0;
8542 if (val == NULL_TREE)
8543 continue;
8544 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8545 return 0;
8546 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8547 if (fieldsize < 0
8548 || (int) fieldsize != fieldsize
8549 || (pos + fieldsize) > INT_MAX)
8550 return 0;
8551 if (pos + fieldsize > total_bytes)
8552 {
8553 if (ptr != NULL && total_bytes < len)
8554 memset (s: ptr + total_bytes, c: '\0',
8555 MIN (pos + fieldsize, len) - total_bytes);
8556 total_bytes = pos + fieldsize;
8557 }
8558 }
8559 else
8560 {
8561 if (DECL_SIZE_UNIT (field) == NULL_TREE
8562 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8563 return 0;
8564 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8565 }
8566 if (fieldsize == 0)
8567 continue;
8568
8569 /* Prepare to deal with integral bit-fields and filter out other
8570 bit-fields that do not start and end on a byte boundary. */
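      /* For instance (an illustrative case): a 9-bit integral bit-field
	 starting at bit 4 of its byte gives bpos = 4, epos = (9 + 4) % 8 = 5
	 and fieldsize = (9 + 4 + 7) / 8 = 2, i.e. it spans two bytes and
	 ends 5 bits into the second one.  */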
8571 if (DECL_BIT_FIELD (field))
8572 {
8573 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8574 return 0;
8575 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8576 if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8577 {
8578 bpos %= BITS_PER_UNIT;
8579 fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8580 epos = fieldsize % BITS_PER_UNIT;
8581 fieldsize += BITS_PER_UNIT - 1;
8582 fieldsize /= BITS_PER_UNIT;
8583 }
8584 else if (bpos % BITS_PER_UNIT
8585 || DECL_SIZE (field) == NULL_TREE
8586 || !tree_fits_shwi_p (DECL_SIZE (field))
8587 || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8588 return 0;
8589 }
8590
8591 if (off != -1 && pos + fieldsize <= off)
8592 continue;
8593
8594 if (val == NULL_TREE)
8595 continue;
8596
8597 if (DECL_BIT_FIELD (field)
8598 && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8599 {
8600 /* FIXME: Handle PDP endian. */
8601 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8602 return 0;
8603
8604 if (TREE_CODE (val) == NON_LVALUE_EXPR)
8605 val = TREE_OPERAND (val, 0);
8606 if (TREE_CODE (val) != INTEGER_CST)
8607 return 0;
8608
8609 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8610 tree repr_type = NULL_TREE;
8611 HOST_WIDE_INT rpos = 0;
8612 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8613 {
8614 rpos = int_byte_position (repr);
8615 repr_type = TREE_TYPE (repr);
8616 }
8617 else
8618 {
8619 repr_type = find_bitfield_repr_type (fieldsize, len);
8620 if (repr_type == NULL_TREE)
8621 return 0;
8622 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8623 gcc_assert (repr_size > 0 && repr_size <= len);
8624 if (pos + repr_size <= o + len)
8625 rpos = pos;
8626 else
8627 {
8628 rpos = o + len - repr_size;
8629 gcc_assert (rpos <= pos);
8630 }
8631 }
8632
8633 if (rpos > pos)
8634 return 0;
8635 wide_int w = wi::to_wide (t: val, TYPE_PRECISION (repr_type));
8636 int diff = (TYPE_PRECISION (repr_type)
8637 - TYPE_PRECISION (TREE_TYPE (field)));
8638 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8639 if (!BYTES_BIG_ENDIAN)
8640 w = wi::lshift (x: w, y: bitoff);
8641 else
8642 w = wi::lshift (x: w, y: diff - bitoff);
8643 val = wide_int_to_tree (type: repr_type, cst: w);
8644
8645 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8646 / BITS_PER_UNIT + 1];
8647 int l = native_encode_int (expr: val, ptr: buf, len: sizeof buf, off: 0);
8648 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8649 return 0;
8650
8651 if (ptr == NULL)
8652 continue;
8653
8654 /* If the bitfield does not start at byte boundary, handle
8655 the partial byte at the start. */
8656 if (bpos
8657 && (off == -1 || (pos >= off && len >= 1)))
8658 {
8659 if (!BYTES_BIG_ENDIAN)
8660 {
8661 int msk = (1 << bpos) - 1;
8662 buf[pos - rpos] &= ~msk;
8663 buf[pos - rpos] |= ptr[pos - o] & msk;
8664 if (mask)
8665 {
8666 if (fieldsize > 1 || epos == 0)
8667 mask[pos] &= msk;
8668 else
8669 mask[pos] &= (msk | ~((1 << epos) - 1));
8670 }
8671 }
8672 else
8673 {
8674 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8675 buf[pos - rpos] &= msk;
8676 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8677 if (mask)
8678 {
8679 if (fieldsize > 1 || epos == 0)
8680 mask[pos] &= ~msk;
8681 else
8682 mask[pos] &= (~msk
8683 | ((1 << (BITS_PER_UNIT - epos))
8684 - 1));
8685 }
8686 }
8687 }
8688 /* If the bitfield does not end at byte boundary, handle
8689 the partial byte at the end. */
8690 if (epos
8691 && (off == -1
8692 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8693 {
8694 if (!BYTES_BIG_ENDIAN)
8695 {
8696 int msk = (1 << epos) - 1;
8697 buf[pos - rpos + fieldsize - 1] &= msk;
8698 buf[pos - rpos + fieldsize - 1]
8699 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8700 if (mask && (fieldsize > 1 || bpos == 0))
8701 mask[pos + fieldsize - 1] &= ~msk;
8702 }
8703 else
8704 {
8705 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8706 buf[pos - rpos + fieldsize - 1] &= ~msk;
8707 buf[pos - rpos + fieldsize - 1]
8708 |= ptr[pos + fieldsize - 1 - o] & msk;
8709 if (mask && (fieldsize > 1 || bpos == 0))
8710 mask[pos + fieldsize - 1] &= msk;
8711 }
8712 }
8713 if (off == -1
8714 || (pos >= off
8715 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8716 {
8717 memcpy (dest: ptr + pos - o, src: buf + (pos - rpos), n: fieldsize);
8718 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8719 memset (s: mask + pos + (bpos != 0), c: 0,
8720 n: fieldsize - (bpos != 0) - (epos != 0));
8721 }
8722 else
8723 {
8724 /* Partial overlap. */
8725 HOST_WIDE_INT fsz = fieldsize;
8726 gcc_assert (mask == NULL);
8727 if (pos < off)
8728 {
8729 fsz -= (off - pos);
8730 pos = off;
8731 }
8732 if (pos + fsz > (HOST_WIDE_INT) off + len)
8733 fsz = (HOST_WIDE_INT) off + len - pos;
8734 memcpy (dest: ptr + pos - off, src: buf + (pos - rpos), n: fsz);
8735 }
8736 continue;
8737 }
8738
8739 if (off == -1
8740 || (pos >= off
8741 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8742 {
8743 int fldsize = fieldsize;
8744 if (off == -1)
8745 {
8746 tree fld = DECL_CHAIN (field);
8747 while (fld)
8748 {
8749 if (TREE_CODE (fld) == FIELD_DECL)
8750 break;
8751 fld = DECL_CHAIN (fld);
8752 }
8753 if (fld == NULL_TREE)
8754 fldsize = len - pos;
8755 }
8756 r = native_encode_initializer (init: val, ptr: ptr ? ptr + pos - o
8757 : NULL,
8758 len: fldsize,
8759 off: off == -1 ? -1 : 0,
8760 mask: mask ? mask + pos : NULL);
8761 if (!r)
8762 return 0;
8763 if (off == -1
8764 && fldsize != fieldsize
8765 && r > fieldsize
8766 && pos + r > total_bytes)
8767 total_bytes = pos + r;
8768 }
8769 else
8770 {
8771 /* Partial overlap. */
8772 unsigned char *p = NULL;
8773 int no = 0;
8774 int l;
8775 gcc_assert (mask == NULL);
8776 if (pos >= off)
8777 {
8778 if (ptr)
8779 p = ptr + pos - off;
8780 l = MIN ((HOST_WIDE_INT) off + len - pos,
8781 fieldsize);
8782 }
8783 else
8784 {
8785 p = ptr;
8786 no = off - pos;
8787 l = len;
8788 }
8789 if (!native_encode_initializer (init: val, ptr: p, len: l, off: no, NULL))
8790 return 0;
8791 }
8792 }
8793 return MIN (total_bytes - off, len);
8794 }
8795 return 0;
8796 }
8797}
8798
8799
8800/* Subroutine of native_interpret_expr. Interpret the contents of
8801 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8802 If the buffer cannot be interpreted, return NULL_TREE. */
8803
8804static tree
8805native_interpret_int (tree type, const unsigned char *ptr, int len)
8806{
8807 int total_bytes;
8808 if (TREE_CODE (type) == BITINT_TYPE)
8809 {
8810 struct bitint_info info;
8811 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
8812 gcc_assert (ok);
8813 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
8814 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
8815 {
8816 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
8817 /* More work is needed when adding _BitInt support for PDP endian
8818 if the limb is smaller than a word, or if the _BitInt limb ordering
8819 doesn't match the target endianness here. */
8820 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
8821 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
8822 || (GET_MODE_SIZE (limb_mode)
8823 >= UNITS_PER_WORD)));
8824 }
8825 else
8826 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8827 }
8828 else
8829 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8830
8831 if (total_bytes > len)
8832 return NULL_TREE;
8833
8834 wide_int result = wi::from_buffer (ptr, total_bytes);
8835
8836 return wide_int_to_tree (type, result);
8837}
8838
8839
8840/* Subroutine of native_interpret_expr. Interpret the contents of
8841 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8842 If the buffer cannot be interpreted, return NULL_TREE. */
8843
8844static tree
8845native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8846{
8847 scalar_mode mode = SCALAR_TYPE_MODE (type);
8848 int total_bytes = GET_MODE_SIZE (mode);
8849 double_int result;
8850 FIXED_VALUE_TYPE fixed_value;
8851
8852 if (total_bytes > len
8853 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8854 return NULL_TREE;
8855
8856 result = double_int::from_buffer (ptr, total_bytes);
8857 fixed_value = fixed_from_double_int (result, mode);
8858
8859 return build_fixed (type, fixed_value);
8860}
8861
8862
8863/* Subroutine of native_interpret_expr. Interpret the contents of
8864 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8865 If the buffer cannot be interpreted, return NULL_TREE. */
8866
8867tree
8868native_interpret_real (tree type, const unsigned char *ptr, int len)
8869{
8870 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8871 int total_bytes = GET_MODE_SIZE (mode);
8872 unsigned char value;
8873 /* There are always 32 bits in each long, no matter the size of
8874 the host's long. We handle floating point representations with
8875 up to 192 bits. */
8876 REAL_VALUE_TYPE r;
8877 long tmp[6];
8878
8879 if (total_bytes > len || total_bytes > 24)
8880 return NULL_TREE;
8881 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8882
8883 memset (tmp, 0, sizeof (tmp));
8884 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8885 bitpos += BITS_PER_UNIT)
8886 {
8887 /* Both OFFSET and BYTE index within a long;
8888 bitpos indexes the whole float. */
8889 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8890 if (UNITS_PER_WORD < 4)
8891 {
8892 int word = byte / UNITS_PER_WORD;
8893 if (WORDS_BIG_ENDIAN)
8894 word = (words - 1) - word;
8895 offset = word * UNITS_PER_WORD;
8896 if (BYTES_BIG_ENDIAN)
8897 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8898 else
8899 offset += byte % UNITS_PER_WORD;
8900 }
8901 else
8902 {
8903 offset = byte;
8904 if (BYTES_BIG_ENDIAN)
8905 {
8906 /* Reverse bytes within each long, or within the entire float
8907 if it's smaller than a long (for HFmode). */
8908 offset = MIN (3, total_bytes - 1) - offset;
8909 gcc_assert (offset >= 0);
8910 }
8911 }
8912 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8913
8914 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8915 }
8916
8917 real_from_target (&r, tmp, mode);
8918 return build_real (type, r);
8919}
8920
8921
8922/* Subroutine of native_interpret_expr. Interpret the contents of
8923 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8924 If the buffer cannot be interpreted, return NULL_TREE. */
8925
8926static tree
8927native_interpret_complex (tree type, const unsigned char *ptr, int len)
8928{
8929 tree etype, rpart, ipart;
8930 int size;
8931
8932 etype = TREE_TYPE (type);
8933 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8934 if (size * 2 > len)
8935 return NULL_TREE;
8936 rpart = native_interpret_expr (etype, ptr, size);
8937 if (!rpart)
8938 return NULL_TREE;
8939 ipart = native_interpret_expr (etype, ptr+size, size);
8940 if (!ipart)
8941 return NULL_TREE;
8942 return build_complex (type, rpart, ipart);
8943}
8944
8945/* Read a vector of type TYPE from the target memory image given by BYTES,
8946 which contains LEN bytes. The vector is known to be encodable using
8947 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8948
8949 Return the vector on success, otherwise return null. */
8950
8951static tree
8952native_interpret_vector_part (tree type, const unsigned char *bytes,
8953 unsigned int len, unsigned int npatterns,
8954 unsigned int nelts_per_pattern)
8955{
8956 tree elt_type = TREE_TYPE (type);
8957 if (VECTOR_BOOLEAN_TYPE_P (type)
8958 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8959 {
8960 /* This is the only case in which elements can be smaller than a byte.
8961 Element 0 is always in the lsb of the containing byte. */
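	 /* For example, with 1-bit elements a byte value of 0x05 encodes the
	    element sequence 1, 0, 1, 0, 0, 0, 0, 0, with element 0 first.  */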
8962 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8963 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8964 return NULL_TREE;
8965
8966 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8967 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8968 {
8969 unsigned int bit_index = i * elt_bits;
8970 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8971 unsigned int lsb = bit_index % BITS_PER_UNIT;
8972 builder.quick_push (bytes[byte_index] & (1 << lsb)
8973 ? build_all_ones_cst (elt_type)
8974 : build_zero_cst (elt_type));
8975 }
8976 return builder.build ();
8977 }
8978
8979 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8980 if (elt_bytes * npatterns * nelts_per_pattern > len)
8981 return NULL_TREE;
8982
8983 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8984 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8985 {
8986 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8987 if (!elt)
8988 return NULL_TREE;
8989 builder.quick_push (elt);
8990 bytes += elt_bytes;
8991 }
8992 return builder.build ();
8993}
8994
8995/* Subroutine of native_interpret_expr. Interpret the contents of
8996 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8997 If the buffer cannot be interpreted, return NULL_TREE. */
8998
8999static tree
9000native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
9001{
9002 unsigned HOST_WIDE_INT size;
9003
9004 if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
9005 || size > len)
9006 return NULL_TREE;
9007
9008 unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
9009 return native_interpret_vector_part (type, ptr, len, count, 1);
9010}
9011
9012
9013/* Subroutine of fold_view_convert_expr. Interpret the contents of
9014 the buffer PTR of length LEN as a constant of type TYPE. For
9015 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
9016 we return a REAL_CST, etc... If the buffer cannot be interpreted,
9017 return NULL_TREE. */
9018
9019tree
9020native_interpret_expr (tree type, const unsigned char *ptr, int len)
9021{
9022 switch (TREE_CODE (type))
9023 {
9024 case INTEGER_TYPE:
9025 case ENUMERAL_TYPE:
9026 case BOOLEAN_TYPE:
9027 case POINTER_TYPE:
9028 case REFERENCE_TYPE:
9029 case OFFSET_TYPE:
9030 case BITINT_TYPE:
9031 return native_interpret_int (type, ptr, len);
9032
9033 case REAL_TYPE:
9034 if (tree ret = native_interpret_real (type, ptr, len))
9035 {
9036 /* For floating point values in composite modes, punt if this
9037 folding doesn't preserve bit representation. As the mode doesn't
9038 have fixed precision while GCC pretends it does, there could be
9039 valid values that GCC can't really represent accurately.
9040 See PR95450. Even for other modes, e.g. x86 XFmode can have some
9041 bit combinations which GCC doesn't preserve. */
9042 unsigned char buf[24 * 2];
9043 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
9044 int total_bytes = GET_MODE_SIZE (mode);
9045 memcpy (buf + 24, ptr, total_bytes);
9046 clear_type_padding_in_mask (type, buf + 24);
9047 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
9048 || memcmp (buf + 24, buf, total_bytes) != 0)
9049 return NULL_TREE;
9050 return ret;
9051 }
9052 return NULL_TREE;
9053
9054 case FIXED_POINT_TYPE:
9055 return native_interpret_fixed (type, ptr, len);
9056
9057 case COMPLEX_TYPE:
9058 return native_interpret_complex (type, ptr, len);
9059
9060 case VECTOR_TYPE:
9061 return native_interpret_vector (type, ptr, len);
9062
9063 default:
9064 return NULL_TREE;
9065 }
9066}
9067
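/* A minimal usage sketch of the interpretation routine above (illustrative
   only; the *_example helper is not part of this file's API): reinterpret
   four bytes of target memory as a 32-bit unsigned integer constant.  On a
   little-endian target the result is 0x12345678.  */

static tree ATTRIBUTE_UNUSED
native_interpret_expr_example (void)
{
  unsigned char bytes[4] = { 0x78, 0x56, 0x34, 0x12 };
  return native_interpret_expr (uint32_type_node, bytes, 4);
}
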
9068/* Returns true if we can interpret the contents of a native encoding
9069 as TYPE. */
9070
9071bool
9072can_native_interpret_type_p (tree type)
9073{
9074 switch (TREE_CODE (type))
9075 {
9076 case INTEGER_TYPE:
9077 case ENUMERAL_TYPE:
9078 case BOOLEAN_TYPE:
9079 case POINTER_TYPE:
9080 case REFERENCE_TYPE:
9081 case FIXED_POINT_TYPE:
9082 case REAL_TYPE:
9083 case COMPLEX_TYPE:
9084 case VECTOR_TYPE:
9085 case OFFSET_TYPE:
9086 return true;
9087 default:
9088 return false;
9089 }
9090}
9091
9092/* Attempt to interpret aggregate of TYPE from bytes encoded in target
9093 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
9094
9095tree
9096native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
9097 int len)
9098{
9099 vec<constructor_elt, va_gc> *elts = NULL;
9100 if (TREE_CODE (type) == ARRAY_TYPE)
9101 {
9102 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
9103 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
9104 return NULL_TREE;
9105
9106 HOST_WIDE_INT cnt = 0;
9107 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
9108 {
9109 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
9110 return NULL_TREE;
9111 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
9112 }
9113 if (eltsz == 0)
9114 cnt = 0;
9115 HOST_WIDE_INT pos = 0;
9116 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
9117 {
9118 tree v = NULL_TREE;
9119 if (pos >= len || pos + eltsz > len)
9120 return NULL_TREE;
9121 if (can_native_interpret_type_p (TREE_TYPE (type)))
9122 {
9123 v = native_interpret_expr (TREE_TYPE (type),
9124 ptr: ptr + off + pos, len: eltsz);
9125 if (v == NULL_TREE)
9126 return NULL_TREE;
9127 }
9128 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
9129 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
9130 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off: off + pos,
9131 len: eltsz);
9132 if (v == NULL_TREE)
9133 return NULL_TREE;
9134 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
9135 }
9136 return build_constructor (type, elts);
9137 }
9138 if (TREE_CODE (type) != RECORD_TYPE)
9139 return NULL_TREE;
9140 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
9141 {
9142 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
9143 || is_empty_type (TREE_TYPE (field)))
9144 continue;
9145 tree fld = field;
9146 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
9147 int diff = 0;
9148 tree v = NULL_TREE;
9149 if (DECL_BIT_FIELD (field))
9150 {
9151 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
9152 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
9153 {
9154 poly_int64 bitoffset;
9155 poly_uint64 field_offset, fld_offset;
9156 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), value: &field_offset)
9157 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), value: &fld_offset))
9158 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
9159 else
9160 bitoffset = 0;
9161 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
9162 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
9163 diff = (TYPE_PRECISION (TREE_TYPE (fld))
9164 - TYPE_PRECISION (TREE_TYPE (field)));
9165 if (!bitoffset.is_constant (const_value: &bitoff)
9166 || bitoff < 0
9167 || bitoff > diff)
9168 return NULL_TREE;
9169 }
9170 else
9171 {
9172 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
9173 return NULL_TREE;
9174 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
9175 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
9176 bpos %= BITS_PER_UNIT;
9177 fieldsize += bpos;
9178 fieldsize += BITS_PER_UNIT - 1;
9179 fieldsize /= BITS_PER_UNIT;
9180 tree repr_type = find_bitfield_repr_type (fieldsize, len);
9181 if (repr_type == NULL_TREE)
9182 return NULL_TREE;
9183 sz = int_size_in_bytes (repr_type);
9184 if (sz < 0 || sz > len)
9185 return NULL_TREE;
9186 pos = int_byte_position (field);
9187 if (pos < 0 || pos > len || pos + fieldsize > len)
9188 return NULL_TREE;
9189 HOST_WIDE_INT rpos;
9190 if (pos + sz <= len)
9191 rpos = pos;
9192 else
9193 {
9194 rpos = len - sz;
9195 gcc_assert (rpos <= pos);
9196 }
9197 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9198 pos = rpos;
9199 diff = (TYPE_PRECISION (repr_type)
9200 - TYPE_PRECISION (TREE_TYPE (field)));
9201 v = native_interpret_expr (type: repr_type, ptr: ptr + off + pos, len: sz);
9202 if (v == NULL_TREE)
9203 return NULL_TREE;
9204 fld = NULL_TREE;
9205 }
9206 }
9207
9208 if (fld)
9209 {
9210 sz = int_size_in_bytes (TREE_TYPE (fld));
9211 if (sz < 0 || sz > len)
9212 return NULL_TREE;
9213 tree byte_pos = byte_position (fld);
9214 if (!tree_fits_shwi_p (byte_pos))
9215 return NULL_TREE;
9216 pos = tree_to_shwi (byte_pos);
9217 if (pos < 0 || pos > len || pos + sz > len)
9218 return NULL_TREE;
9219 }
9220 if (fld == NULL_TREE)
9221 /* Already handled above. */;
9222 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9223 {
9224 v = native_interpret_expr (TREE_TYPE (fld),
9225 ptr: ptr + off + pos, len: sz);
9226 if (v == NULL_TREE)
9227 return NULL_TREE;
9228 }
9229 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9230 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9231 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off: off + pos, len: sz);
9232 if (v == NULL_TREE)
9233 return NULL_TREE;
9234 if (fld != field)
9235 {
9236 if (TREE_CODE (v) != INTEGER_CST)
9237 return NULL_TREE;
9238
9239 /* FIXME: Figure out how to handle PDP endian bitfields. */
9240 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9241 return NULL_TREE;
9242 if (!BYTES_BIG_ENDIAN)
9243 v = wide_int_to_tree (TREE_TYPE (field),
9244 cst: wi::lrshift (x: wi::to_wide (t: v), y: bitoff));
9245 else
9246 v = wide_int_to_tree (TREE_TYPE (field),
9247 cst: wi::lrshift (x: wi::to_wide (t: v),
9248 y: diff - bitoff));
9249 }
9250 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9251 }
9252 return build_constructor (type, elts);
9253}
9254
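/* A minimal usage sketch of the routine above (illustrative only; the
   *_example helper is hypothetical): interpret the first LEN bytes of BUF
   as an object of record or array type TYPE, yielding a CONSTRUCTOR, or
   NULL_TREE on failure.  */

static tree ATTRIBUTE_UNUSED
native_interpret_aggregate_example (tree type, const unsigned char *buf,
				    int len)
{
  if (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != ARRAY_TYPE)
    return NULL_TREE;
  return native_interpret_aggregate (type, buf, 0, len);
}
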
9255/* Routines for manipulation of native_encode_expr encoded data if the encoded
9256 or extracted constant positions and/or sizes aren't byte aligned. */
9257
9258/* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9259 bits between adjacent elements. AMNT should be within
9260 [0, BITS_PER_UNIT).
9261 Example, AMNT = 2:
9262 00011111|11100000 << 2 = 01111111|10000000
9263 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9264
9265void
9266shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9267 unsigned int amnt)
9268{
9269 if (amnt == 0)
9270 return;
9271
9272 unsigned char carry_over = 0U;
9273 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9274 unsigned char clear_mask = (~0U) << amnt;
9275
9276 for (unsigned int i = 0; i < sz; i++)
9277 {
9278 unsigned prev_carry_over = carry_over;
9279 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9280
9281 ptr[i] <<= amnt;
9282 if (i != 0)
9283 {
9284 ptr[i] &= clear_mask;
9285 ptr[i] |= prev_carry_over;
9286 }
9287 }
9288}
9289
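/* A self-contained check of the example in the comment above (illustrative
   only, assuming BITS_PER_UNIT == 8; the *_example helper is hypothetical):
   shifting the two bytes 11100000|00011111 (PTR[0]|PTR[1]) left by 2 bits
   yields 10000000|01111111.  */

static void ATTRIBUTE_UNUSED
shift_bytes_in_array_left_example (void)
{
  unsigned char bytes[2] = { 0xe0, 0x1f };
  shift_bytes_in_array_left (bytes, 2, 2);
  gcc_checking_assert (bytes[0] == 0x80 && bytes[1] == 0x7f);
}
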
9290/* Like shift_bytes_in_array_left but for big-endian.
9291 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9292 bits between adjacent elements. AMNT should be within
9293 [0, BITS_PER_UNIT).
9294 Example, AMNT = 2:
9295 00011111|11100000 >> 2 = 00000111|11111000
9296 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9297
9298void
9299shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9300 unsigned int amnt)
9301{
9302 if (amnt == 0)
9303 return;
9304
9305 unsigned char carry_over = 0U;
9306 unsigned char carry_mask = ~(~0U << amnt);
9307
9308 for (unsigned int i = 0; i < sz; i++)
9309 {
9310 unsigned prev_carry_over = carry_over;
9311 carry_over = ptr[i] & carry_mask;
9312
9313 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9314 ptr[i] >>= amnt;
9315 ptr[i] |= prev_carry_over;
9316 }
9317}
9318
9319/* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9320 directly on the VECTOR_CST encoding, in a way that works for variable-
9321 length vectors. Return the resulting VECTOR_CST on success or null
9322 on failure. */
9323
9324static tree
9325fold_view_convert_vector_encoding (tree type, tree expr)
9326{
9327 tree expr_type = TREE_TYPE (expr);
9328 poly_uint64 type_bits, expr_bits;
9329 if (!poly_int_tree_p (TYPE_SIZE (type), value: &type_bits)
9330 || !poly_int_tree_p (TYPE_SIZE (expr_type), value: &expr_bits))
9331 return NULL_TREE;
9332
9333 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (node: type);
9334 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (node: expr_type);
9335 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9336 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9337
9338 /* We can only preserve the semantics of a stepped pattern if the new
9339 vector element is an integer of the same size. */
9340 if (VECTOR_CST_STEPPED_P (expr)
9341 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9342 return NULL_TREE;
9343
9344 /* The number of bits needed to encode one element from every pattern
9345 of the original vector. */
9346 unsigned int expr_sequence_bits
9347 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9348
9349 /* The number of bits needed to encode one element from every pattern
9350 of the result. */
9351 unsigned int type_sequence_bits
9352 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9353
9354 /* Don't try to read more bytes than are available, which can happen
9355 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9356 The general VIEW_CONVERT handling can cope with that case, so there's
9357 no point complicating things here. */
9358 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9359 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9360 BITS_PER_UNIT);
9361 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9362 if (known_gt (buffer_bits, expr_bits))
9363 return NULL_TREE;
9364
9365 /* Get enough bytes of EXPR to form the new encoding. */
9366 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9367 buffer.quick_grow (len: buffer_bytes);
9368 if (native_encode_vector_part (expr, ptr: buffer.address (), len: buffer_bytes, off: 0,
9369 count: buffer_bits / expr_elt_bits)
9370 != (int) buffer_bytes)
9371 return NULL_TREE;
9372
9373 /* Reencode the bytes as TYPE. */
9374 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9375 return native_interpret_vector_part (type, bytes: &buffer[0], len: buffer.length (),
9376 npatterns: type_npatterns, nelts_per_pattern);
9377}
9378
9379/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9380 TYPE at compile-time. If we're unable to perform the conversion
9381 return NULL_TREE. */
9382
9383static tree
9384fold_view_convert_expr (tree type, tree expr)
9385{
9386 unsigned char buffer[128];
9387 unsigned char *buf;
9388 int len;
9389 HOST_WIDE_INT l;
9390
9391 /* Check that the host and target are sane. */
9392 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9393 return NULL_TREE;
9394
9395 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9396 if (tree res = fold_view_convert_vector_encoding (type, expr))
9397 return res;
9398
9399 l = int_size_in_bytes (type);
9400 if (l > (int) sizeof (buffer)
9401 && l <= WIDE_INT_MAX_PRECISION / BITS_PER_UNIT)
9402 {
9403 buf = XALLOCAVEC (unsigned char, l);
9404 len = l;
9405 }
9406 else
9407 {
9408 buf = buffer;
9409 len = sizeof (buffer);
9410 }
9411 len = native_encode_expr (expr, ptr: buf, len);
9412 if (len == 0)
9413 return NULL_TREE;
9414
9415 return native_interpret_expr (type, ptr: buf, len);
9416}
9417
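/* A minimal usage sketch of fold_view_convert_expr (illustrative only; the
   *_example helper is hypothetical): fold a view-conversion that
   reinterprets the bits of a 32-bit integer constant as a float, returning
   NULL_TREE if the bytes cannot be encoded or reinterpreted.  */

static tree ATTRIBUTE_UNUSED
fold_view_convert_expr_example (tree int_cst)
{
  if (TREE_CODE (int_cst) != INTEGER_CST
      || TYPE_PRECISION (TREE_TYPE (int_cst)) != 32)
    return NULL_TREE;
  return fold_view_convert_expr (float_type_node, int_cst);
}
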
9418/* Build an expression for the address of T. Folds away INDIRECT_REF
9419 to avoid confusing the gimplify process. */
9420
9421tree
9422build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9423{
9424 /* The size of the object is not relevant when talking about its address. */
9425 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9426 t = TREE_OPERAND (t, 0);
9427
9428 if (INDIRECT_REF_P (t))
9429 {
9430 t = TREE_OPERAND (t, 0);
9431
9432 if (TREE_TYPE (t) != ptrtype)
9433 t = build1_loc (loc, code: NOP_EXPR, type: ptrtype, arg1: t);
9434 }
9435 else if (TREE_CODE (t) == MEM_REF
9436 && integer_zerop (TREE_OPERAND (t, 1)))
9437 {
9438 t = TREE_OPERAND (t, 0);
9439
9440 if (TREE_TYPE (t) != ptrtype)
9441 t = fold_convert_loc (loc, type: ptrtype, arg: t);
9442 }
9443 else if (TREE_CODE (t) == MEM_REF
9444 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9445 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9446 TREE_OPERAND (t, 0),
9447 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9448 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9449 {
9450 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9451
9452 if (TREE_TYPE (t) != ptrtype)
9453 t = fold_convert_loc (loc, type: ptrtype, arg: t);
9454 }
9455 else
9456 t = build1_loc (loc, code: ADDR_EXPR, type: ptrtype, arg1: t);
9457
9458 return t;
9459}
9460
9461/* Build an expression for the address of T. */
9462
9463tree
9464build_fold_addr_expr_loc (location_t loc, tree t)
9465{
9466 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9467
9468 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9469}
9470
9471/* Fold a unary expression of code CODE and type TYPE with operand
9472 OP0. Return the folded expression if folding is successful.
9473 Otherwise, return NULL_TREE. */
9474
9475tree
9476fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9477{
9478 tree tem;
9479 tree arg0;
9480 enum tree_code_class kind = TREE_CODE_CLASS (code);
9481
9482 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9483 && TREE_CODE_LENGTH (code) == 1);
9484
9485 arg0 = op0;
9486 if (arg0)
9487 {
9488 if (CONVERT_EXPR_CODE_P (code)
9489 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9490 {
9491 /* Don't use STRIP_NOPS, because signedness of argument type
9492 matters. */
9493 STRIP_SIGN_NOPS (arg0);
9494 }
9495 else
9496 {
9497 /* Strip any conversions that don't change the mode. This
9498 is safe for every expression, except for a comparison
9499 expression because its signedness is derived from its
9500 operands.
9501
9502 Note that this is done as an internal manipulation within
9503 the constant folder, in order to find the simplest
9504 representation of the arguments so that their form can be
9505 studied. In any case, the appropriate type conversions
9506 should be put back in the tree that will get out of the
9507 constant folder. */
9508 STRIP_NOPS (arg0);
9509 }
9510
9511 if (CONSTANT_CLASS_P (arg0))
9512 {
9513 tree tem = const_unop (code, type, arg0);
9514 if (tem)
9515 {
9516 if (TREE_TYPE (tem) != type)
9517 tem = fold_convert_loc (loc, type, arg: tem);
9518 return tem;
9519 }
9520 }
9521 }
9522
9523 tem = generic_simplify (loc, code, type, op0);
9524 if (tem)
9525 return tem;
9526
9527 if (TREE_CODE_CLASS (code) == tcc_unary)
9528 {
9529 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9530 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9531 fold_build1_loc (loc, code, type,
9532 fold_convert_loc (loc, TREE_TYPE (op0),
9533 TREE_OPERAND (arg0, 1))));
9534 else if (TREE_CODE (arg0) == COND_EXPR)
9535 {
9536 tree arg01 = TREE_OPERAND (arg0, 1);
9537 tree arg02 = TREE_OPERAND (arg0, 2);
9538 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9539 arg01 = fold_build1_loc (loc, code, type,
9540 fold_convert_loc (loc,
9541 TREE_TYPE (op0), arg: arg01));
9542 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9543 arg02 = fold_build1_loc (loc, code, type,
9544 fold_convert_loc (loc,
9545 TREE_TYPE (op0), arg: arg02));
9546 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9547 arg01, arg02);
9548
9549 /* If this was a conversion, and all we did was to move it
9550 inside the COND_EXPR, bring it back out. But leave it if
9551 it is a conversion from integer to integer and the
9552 result precision is no wider than a word since such a
9553 conversion is cheap and may be optimized away by combine,
9554 while it couldn't if it were outside the COND_EXPR. Then return
9555 so we don't get into an infinite recursion loop taking the
9556 conversion out and then back in. */
9557
9558 if ((CONVERT_EXPR_CODE_P (code)
9559 || code == NON_LVALUE_EXPR)
9560 && TREE_CODE (tem) == COND_EXPR
9561 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9562 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9563 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9564 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9565 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9566 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9567 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9568 && (INTEGRAL_TYPE_P
9569 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9570 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9571 || flag_syntax_only))
9572 tem = build1_loc (loc, code, type,
9573 arg1: build3 (COND_EXPR,
9574 TREE_TYPE (TREE_OPERAND
9575 (TREE_OPERAND (tem, 1), 0)),
9576 TREE_OPERAND (tem, 0),
9577 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9578 TREE_OPERAND (TREE_OPERAND (tem, 2),
9579 0)));
9580 return tem;
9581 }
9582 }
9583
9584 switch (code)
9585 {
9586 case NON_LVALUE_EXPR:
9587 if (!maybe_lvalue_p (x: op0))
9588 return fold_convert_loc (loc, type, arg: op0);
9589 return NULL_TREE;
9590
9591 CASE_CONVERT:
9592 case FLOAT_EXPR:
9593 case FIX_TRUNC_EXPR:
9594 if (COMPARISON_CLASS_P (op0))
9595 {
9596 /* If we have (type) (a CMP b) and type is an integral type, return
9597 new expression involving the new type. Canonicalize
9598 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9599 non-integral type.
9600 Do not fold the result, as that would not simplify further;
9601 folding it again would also result in infinite recursion. */
9602 if (TREE_CODE (type) == BOOLEAN_TYPE)
9603 return build2_loc (loc, TREE_CODE (op0), type,
9604 TREE_OPERAND (op0, 0),
9605 TREE_OPERAND (op0, 1));
9606 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9607 && TREE_CODE (type) != VECTOR_TYPE)
9608 return build3_loc (loc, code: COND_EXPR, type, arg0: op0,
9609 arg1: constant_boolean_node (value: true, type),
9610 arg2: constant_boolean_node (value: false, type));
9611 }
9612
9613 /* Handle (T *)&A.B.C for A being of type T and B and C
9614 living at offset zero. This occurs frequently in
9615 C++ upcasting and then accessing the base. */
9616 if (TREE_CODE (op0) == ADDR_EXPR
9617 && POINTER_TYPE_P (type)
9618 && handled_component_p (TREE_OPERAND (op0, 0)))
9619 {
9620 poly_int64 bitsize, bitpos;
9621 tree offset;
9622 machine_mode mode;
9623 int unsignedp, reversep, volatilep;
9624 tree base
9625 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9626 &offset, &mode, &unsignedp, &reversep,
9627 &volatilep);
9628 /* If the reference was to a (constant) zero offset, we can use
9629 the address of the base if it has the same base type
9630 as the result type and the pointer type is unqualified. */
9631 if (!offset
9632 && known_eq (bitpos, 0)
9633 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9634 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9635 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9636 return fold_convert_loc (loc, type,
9637 arg: build_fold_addr_expr_loc (loc, t: base));
9638 }
9639
9640 if (TREE_CODE (op0) == MODIFY_EXPR
9641 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9642 /* Detect assigning a bitfield. */
9643 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9644 && DECL_BIT_FIELD
9645 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9646 {
9647 /* Don't leave an assignment inside a conversion
9648 unless assigning a bitfield. */
9649 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9650 /* First do the assignment, then return converted constant. */
9651 tem = build2_loc (loc, code: COMPOUND_EXPR, TREE_TYPE (tem), arg0: op0, arg1: tem);
9652 suppress_warning (tem /* What warning? */);
9653 TREE_USED (tem) = 1;
9654 return tem;
9655 }
9656
9657 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9658 constant (if x has signed type, the sign bit cannot be set
9659 in c). This folds extension into the BIT_AND_EXPR.
9660 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9661 very likely don't have maximal range for their precision and this
9662 transformation effectively doesn't preserve non-maximal ranges. */
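      /* For instance (illustrative only): widening (X & 0x7f), computed in a
	 narrower signed type, to type T can be rewritten as (T) X & 0x7f,
	 since 0x7f cannot have the narrower type's sign bit set.  */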
9663 if (TREE_CODE (type) == INTEGER_TYPE
9664 && TREE_CODE (op0) == BIT_AND_EXPR
9665 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9666 {
9667 tree and_expr = op0;
9668 tree and0 = TREE_OPERAND (and_expr, 0);
9669 tree and1 = TREE_OPERAND (and_expr, 1);
9670 int change = 0;
9671
9672 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9673 || (TYPE_PRECISION (type)
9674 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9675 change = 1;
9676 else if (TYPE_PRECISION (TREE_TYPE (and1))
9677 <= HOST_BITS_PER_WIDE_INT
9678 && tree_fits_uhwi_p (and1))
9679 {
9680 unsigned HOST_WIDE_INT cst;
9681
9682 cst = tree_to_uhwi (and1);
9683 cst &= HOST_WIDE_INT_M1U
9684 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9685 change = (cst == 0);
9686 if (change
9687 && !flag_syntax_only
9688 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9689 == ZERO_EXTEND))
9690 {
9691 tree uns = unsigned_type_for (TREE_TYPE (and0));
9692 and0 = fold_convert_loc (loc, type: uns, arg: and0);
9693 and1 = fold_convert_loc (loc, type: uns, arg: and1);
9694 }
9695 }
9696 if (change)
9697 {
9698 tree and1_type = TREE_TYPE (and1);
9699 unsigned prec = MAX (TYPE_PRECISION (and1_type),
9700 TYPE_PRECISION (type));
9701 tem = force_fit_type (type,
9702 wide_int::from (x: wi::to_wide (t: and1), precision: prec,
9703 TYPE_SIGN (and1_type)),
9704 0, TREE_OVERFLOW (and1));
9705 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9706 fold_convert_loc (loc, type, arg: and0), tem);
9707 }
9708 }
9709
9710 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9711 cast (T1)X will fold away. We assume that this happens when X itself
9712 is a cast. */
9713 if (POINTER_TYPE_P (type)
9714 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9715 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9716 {
9717 tree arg00 = TREE_OPERAND (arg0, 0);
9718 tree arg01 = TREE_OPERAND (arg0, 1);
9719
9720 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9721 when the pointed type needs higher alignment than
9722 the p+ first operand's pointed type. */
9723 if (!in_gimple_form
9724 && sanitize_flags_p (flag: SANITIZE_ALIGNMENT)
9725 && (min_align_of_type (TREE_TYPE (type))
9726 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9727 return NULL_TREE;
9728
9729 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9730 when type is a reference type and arg00's type is not,
9731 because arg00 could be validly nullptr and if arg01 doesn't return,
9732 we don't want false positive binding of reference to nullptr. */
9733 if (TREE_CODE (type) == REFERENCE_TYPE
9734 && !in_gimple_form
9735 && sanitize_flags_p (flag: SANITIZE_NULL)
9736 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9737 return NULL_TREE;
9738
9739 arg00 = fold_convert_loc (loc, type, arg: arg00);
9740 return fold_build_pointer_plus_loc (loc, ptr: arg00, off: arg01);
9741 }
9742
9743 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9744 of the same precision, and X is an integer type not narrower than
9745 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9746 if (INTEGRAL_TYPE_P (type)
9747 && TREE_CODE (op0) == BIT_NOT_EXPR
9748 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9749 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9750 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9751 {
9752 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9753 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9754 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9755 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9756 fold_convert_loc (loc, type, arg: tem));
9757 }
9758
9759 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9760 type of X and Y (integer types only). */
9761 if (INTEGRAL_TYPE_P (type)
9762 && TREE_CODE (op0) == MULT_EXPR
9763 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9764 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9765 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9766 || !sanitize_flags_p (flag: SANITIZE_SI_OVERFLOW)))
9767 {
9768 /* Be careful not to introduce new overflows. */
9769 tree mult_type;
9770 if (TYPE_OVERFLOW_WRAPS (type))
9771 mult_type = type;
9772 else
9773 mult_type = unsigned_type_for (type);
9774
9775 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9776 {
9777 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9778 fold_convert_loc (loc, type: mult_type,
9779 TREE_OPERAND (op0, 0)),
9780 fold_convert_loc (loc, type: mult_type,
9781 TREE_OPERAND (op0, 1)));
9782 return fold_convert_loc (loc, type, arg: tem);
9783 }
9784 }
9785
9786 return NULL_TREE;
9787
9788 case VIEW_CONVERT_EXPR:
9789 if (TREE_CODE (op0) == MEM_REF)
9790 {
9791 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9792 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9793 tem = fold_build2_loc (loc, MEM_REF, type,
9794 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9795 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9796 return tem;
9797 }
9798
9799 return NULL_TREE;
9800
9801 case NEGATE_EXPR:
9802 tem = fold_negate_expr (loc, t: arg0);
9803 if (tem)
9804 return fold_convert_loc (loc, type, arg: tem);
9805 return NULL_TREE;
9806
9807 case ABS_EXPR:
9808 /* Convert fabs((double)float) into (double)fabsf(float). */
9809 if (TREE_CODE (arg0) == NOP_EXPR
9810 && TREE_CODE (type) == REAL_TYPE)
9811 {
9812 tree targ0 = strip_float_extensions (arg0);
9813 if (targ0 != arg0)
9814 return fold_convert_loc (loc, type,
9815 arg: fold_build1_loc (loc, ABS_EXPR,
9816 TREE_TYPE (targ0),
9817 targ0));
9818 }
9819 return NULL_TREE;
9820
9821 case BIT_NOT_EXPR:
9822 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9823 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9824 && (tem = fold_unary_loc (loc, code: BIT_NOT_EXPR, type,
9825 op0: fold_convert_loc (loc, type,
9826 TREE_OPERAND (arg0, 0)))))
9827 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9828 fold_convert_loc (loc, type,
9829 TREE_OPERAND (arg0, 1)));
9830 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9831 && (tem = fold_unary_loc (loc, code: BIT_NOT_EXPR, type,
9832 op0: fold_convert_loc (loc, type,
9833 TREE_OPERAND (arg0, 1)))))
9834 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9835 fold_convert_loc (loc, type,
9836 TREE_OPERAND (arg0, 0)), tem);
9837
9838 return NULL_TREE;
9839
9840 case TRUTH_NOT_EXPR:
9841 /* Note that the operand of this must be an int
9842 and its values must be 0 or 1.
9843 ("true" is a fixed value perhaps depending on the language,
9844 but we don't handle values other than 1 correctly yet.) */
9845 tem = fold_truth_not_expr (loc, arg: arg0);
9846 if (!tem)
9847 return NULL_TREE;
9848 return fold_convert_loc (loc, type, arg: tem);
9849
9850 case INDIRECT_REF:
9851 /* Fold *&X to X if X is an lvalue. */
9852 if (TREE_CODE (op0) == ADDR_EXPR)
9853 {
9854 tree op00 = TREE_OPERAND (op0, 0);
9855 if ((VAR_P (op00)
9856 || TREE_CODE (op00) == PARM_DECL
9857 || TREE_CODE (op00) == RESULT_DECL)
9858 && !TREE_READONLY (op00))
9859 return op00;
9860 }
9861 return NULL_TREE;
9862
9863 default:
9864 return NULL_TREE;
9865 } /* switch (code) */
9866}
9867
9868
9869/* If the operation was a conversion, do _not_ mark a resulting constant
9870 with TREE_OVERFLOW if the original constant was not. These conversions
9871 have implementation defined behavior and retaining the TREE_OVERFLOW
9872 flag here would confuse later passes such as VRP. */
9873tree
9874fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9875 tree type, tree op0)
9876{
9877 tree res = fold_unary_loc (loc, code, type, op0);
9878 if (res
9879 && TREE_CODE (res) == INTEGER_CST
9880 && TREE_CODE (op0) == INTEGER_CST
9881 && CONVERT_EXPR_CODE_P (code))
9882 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9883
9884 return res;
9885}
9886
9887/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9888 operands OP0 and OP1. LOC is the location of the resulting expression.
9889 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
9890 Return the folded expression if folding is successful. Otherwise,
9891 return NULL_TREE. */
9892static tree
9893fold_truth_andor (location_t loc, enum tree_code code, tree type,
9894 tree arg0, tree arg1, tree op0, tree op1)
9895{
9896 tree tem;
9897
9898 /* We only do these simplifications if we are optimizing. */
9899 if (!optimize)
9900 return NULL_TREE;
9901
9902 /* Check for things like (A || B) && (A || C). We can convert this
9903 to A || (B && C). Note that either operator can be any of the four
9904 truth and/or operations and the transformation will still be
9905 valid. Also note that we only care about order for the
9906 ANDIF and ORIF operators. If B contains side effects, this
9907 might change the truth-value of A. */
9908 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9909 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9910 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9911 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9912 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9913 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9914 {
9915 tree a00 = TREE_OPERAND (arg0, 0);
9916 tree a01 = TREE_OPERAND (arg0, 1);
9917 tree a10 = TREE_OPERAND (arg1, 0);
9918 tree a11 = TREE_OPERAND (arg1, 1);
9919 bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9920 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9921 && (code == TRUTH_AND_EXPR
9922 || code == TRUTH_OR_EXPR));
9923
9924 if (operand_equal_p (arg0: a00, arg1: a10, flags: 0))
9925 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9926 fold_build2_loc (loc, code, type, a01, a11));
9927 else if (commutative && operand_equal_p (arg0: a00, arg1: a11, flags: 0))
9928 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9929 fold_build2_loc (loc, code, type, a01, a10));
9930 else if (commutative && operand_equal_p (arg0: a01, arg1: a10, flags: 0))
9931 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9932 fold_build2_loc (loc, code, type, a00, a11));
9933
9934 /* This case is tricky because we must either have commutative
9935 operators or else A10 must not have side-effects. */
9936
9937 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9938 && operand_equal_p (arg0: a01, arg1: a11, flags: 0))
9939 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9940 fold_build2_loc (loc, code, type, a00, a10),
9941 a01);
9942 }
9943
9944 /* See if we can build a range comparison. */
9945 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9946 return tem;
9947
9948 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9949 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9950 {
9951 tem = merge_truthop_with_opposite_arm (loc, op: arg0, cmpop: arg1, rhs_only: true);
9952 if (tem)
9953 return fold_build2_loc (loc, code, type, tem, arg1);
9954 }
9955
9956 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9957 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9958 {
9959 tem = merge_truthop_with_opposite_arm (loc, op: arg1, cmpop: arg0, rhs_only: false);
9960 if (tem)
9961 return fold_build2_loc (loc, code, type, arg0, tem);
9962 }
9963
9964 /* Check for the possibility of merging component references. If our
9965 lhs is another similar operation, try to merge its rhs with our
9966 rhs. Then try to merge our lhs and rhs. */
9967 if (TREE_CODE (arg0) == code
9968 && (tem = fold_truth_andor_1 (loc, code, truth_type: type,
9969 TREE_OPERAND (arg0, 1), rhs: arg1)) != 0)
9970 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9971
9972 if ((tem = fold_truth_andor_1 (loc, code, truth_type: type, lhs: arg0, rhs: arg1)) != 0)
9973 return tem;
9974
9975 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9976 if (param_logical_op_non_short_circuit != -1)
9977 logical_op_non_short_circuit
9978 = param_logical_op_non_short_circuit;
9979 if (logical_op_non_short_circuit
9980 && !sanitize_coverage_p ()
9981 && (code == TRUTH_AND_EXPR
9982 || code == TRUTH_ANDIF_EXPR
9983 || code == TRUTH_OR_EXPR
9984 || code == TRUTH_ORIF_EXPR))
9985 {
9986 enum tree_code ncode, icode;
9987
9988 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9989 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9990 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9991
9992 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9993 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9994 We don't want to pack more than two leaves into a non-IF AND/OR
9995 expression.
9996 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9997 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9998 If the inner right-hand side of the left-hand operand has
9999 side-effects, or isn't simple, then we can't add to it,
10000 as otherwise we might destroy the if-sequence. */
10001 if (TREE_CODE (arg0) == icode
10002 && simple_condition_p (exp: arg1)
10003 /* Needed for sequence points to handle trapping, and
10004 side-effects. */
10005 && simple_condition_p (TREE_OPERAND (arg0, 1)))
10006 {
10007 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
10008 arg1);
10009 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
10010 tem);
10011 }
10012 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
10013 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
10014 else if (TREE_CODE (arg1) == icode
10015 && simple_condition_p (exp: arg0)
10016 /* Needed for sequence points to handle trapping, and
10017 side-effects. */
10018 && simple_condition_p (TREE_OPERAND (arg1, 0)))
10019 {
10020 tem = fold_build2_loc (loc, ncode, type,
10021 arg0, TREE_OPERAND (arg1, 0));
10022 return fold_build2_loc (loc, icode, type, tem,
10023 TREE_OPERAND (arg1, 1));
10024 }
10025 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
10026 into (A OR B).
10027 For sequence point consistency, we need to check for trapping,
10028 and side-effects. */
10029 else if (code == icode && simple_condition_p (exp: arg0)
10030 && simple_condition_p (exp: arg1))
10031 return fold_build2_loc (loc, ncode, type, arg0, arg1);
10032 }
10033
10034 return NULL_TREE;
10035}
10036
10037/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
10038 by changing CODE to reduce the magnitude of constants involved in
10039 ARG0 of the comparison.
10040 Returns a canonicalized comparison tree if a simplification was
10041 possible, otherwise returns NULL_TREE.
10042 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
10043 valid if signed overflow is undefined. */
10044
10045static tree
10046maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
10047 tree arg0, tree arg1,
10048 bool *strict_overflow_p)
10049{
10050 enum tree_code code0 = TREE_CODE (arg0);
10051 tree t, cst0 = NULL_TREE;
10052 int sgn0;
10053
10054 /* Match A +- CST code arg1. We can change this only if overflow
10055 is undefined. */
10056 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10057 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
10058 /* In principle pointers also have undefined overflow behavior,
10059 but that causes problems elsewhere. */
10060 && !POINTER_TYPE_P (TREE_TYPE (arg0))
10061 && (code0 == MINUS_EXPR
10062 || code0 == PLUS_EXPR)
10063 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
10064 return NULL_TREE;
10065
10066 /* Identify the constant in arg0 and its sign. */
10067 cst0 = TREE_OPERAND (arg0, 1);
10068 sgn0 = tree_int_cst_sgn (cst0);
10069
10070 /* Overflowed constants and zero will cause problems. */
10071 if (integer_zerop (cst0)
10072 || TREE_OVERFLOW (cst0))
10073 return NULL_TREE;
10074
10075 /* See if we can reduce the magnitude of the constant in
10076 arg0 by changing the comparison code. */
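  /* For instance, with undefined signed overflow, X - 3 < Y can be
     canonicalized to X - 2 <= Y, reducing the magnitude of the constant
     involved by one.  */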
10077 /* A - CST < arg1 -> A - CST-1 <= arg1. */
10078 if (code == LT_EXPR
10079 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
10080 code = LE_EXPR;
10081 /* A + CST > arg1 -> A + CST-1 >= arg1. */
10082 else if (code == GT_EXPR
10083 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
10084 code = GE_EXPR;
10085 /* A + CST <= arg1 -> A + CST-1 < arg1. */
10086 else if (code == LE_EXPR
10087 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
10088 code = LT_EXPR;
10089 /* A - CST >= arg1 -> A - CST-1 > arg1. */
10090 else if (code == GE_EXPR
10091 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
10092 code = GT_EXPR;
10093 else
10094 return NULL_TREE;
10095 *strict_overflow_p = true;
10096
10097 /* Now build the constant reduced in magnitude. But not if that
10098 would produce one outside of its type's range. */
10099 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
10100 && ((sgn0 == 1
10101 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
10102 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
10103 || (sgn0 == -1
10104 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
10105 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
10106 return NULL_TREE;
10107
10108 t = int_const_binop (code: sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
10109 arg1: cst0, arg2: build_int_cst (TREE_TYPE (cst0), 1));
10110 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
10111 t = fold_convert (TREE_TYPE (arg1), t);
10112
10113 return fold_build2_loc (loc, code, type, t, arg1);
10114}
10115
10116/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
10117 overflow further. Try to decrease the magnitude of constants involved
10118 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
10119 and put sole constants at the second argument position.
10120 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
10121
10122static tree
10123maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
10124 tree arg0, tree arg1)
10125{
10126 tree t;
10127 bool strict_overflow_p;
10128 const char * const warnmsg = G_("assuming signed overflow does not occur "
10129 "when reducing constant in comparison");
10130
10131 /* Try canonicalization by simplifying arg0. */
10132 strict_overflow_p = false;
10133 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
10134 strict_overflow_p: &strict_overflow_p);
10135 if (t)
10136 {
10137 if (strict_overflow_p)
10138 fold_overflow_warning (gmsgid: warnmsg, wc: WARN_STRICT_OVERFLOW_MAGNITUDE);
10139 return t;
10140 }
10141
10142 /* Try canonicalization by simplifying arg1 using the swapped
10143 comparison. */
10144 code = swap_tree_comparison (code);
10145 strict_overflow_p = false;
10146 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0: arg1, arg1: arg0,
10147 strict_overflow_p: &strict_overflow_p);
10148 if (t && strict_overflow_p)
10149 fold_overflow_warning (gmsgid: warnmsg, wc: WARN_STRICT_OVERFLOW_MAGNITUDE);
10150 return t;
10151}
10152
10153/* Return whether BASE + OFFSET + BITPOS may wrap around the address
10154 space. This is used to avoid issuing overflow warnings for
10155 expressions like &p->x which cannot wrap. */
10156
10157static bool
10158pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
10159{
10160 if (!POINTER_TYPE_P (TREE_TYPE (base)))
10161 return true;
10162
10163 if (maybe_lt (a: bitpos, b: 0))
10164 return true;
10165
10166 poly_wide_int wi_offset;
10167 int precision = TYPE_PRECISION (TREE_TYPE (base));
10168 if (offset == NULL_TREE)
10169 wi_offset = wi::zero (precision);
10170 else if (!poly_int_tree_p (t: offset) || TREE_OVERFLOW (offset))
10171 return true;
10172 else
10173 wi_offset = wi::to_poly_wide (t: offset);
10174
10175 wi::overflow_type overflow;
10176 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
10177 precision);
10178 poly_wide_int total = wi::add (a: wi_offset, b: units, sgn: UNSIGNED, overflow: &overflow);
10179 if (overflow)
10180 return true;
10181
10182 poly_uint64 total_hwi, size;
10183 if (!total.to_uhwi (r: &total_hwi)
10184 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
10185 value: &size)
10186 || known_eq (size, 0U))
10187 return true;
10188
10189 if (known_le (total_hwi, size))
10190 return false;
10191
10192 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
10193 array. */
10194 if (TREE_CODE (base) == ADDR_EXPR
10195 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
10196 value: &size)
10197 && maybe_ne (a: size, b: 0U)
10198 && known_le (total_hwi, size))
10199 return false;
10200
10201 return true;
10202}
10203
10204/* Return a positive integer when the symbol DECL is known to have
10205 a nonzero address, zero when it's known not to (e.g., it's a weak
10206 symbol), and a negative integer when the symbol is not yet in the
10207 symbol table and so whether or not its address is zero is unknown.
10208 For function-local objects always return a positive integer. */
10209static int
10210maybe_nonzero_address (tree decl)
10211{
10212 /* Normally, don't do anything for variables and functions before symtab is
10213 built; it is quite possible that DECL will be declared weak later.
10214 But if folding_initializer, we need a constant answer now, so create
10215 the symtab entry and prevent later weak declaration. */
10216 if (DECL_P (decl) && decl_in_symtab_p (decl))
10217 if (struct symtab_node *symbol
10218 = (folding_initializer
10219 ? symtab_node::get_create (node: decl)
10220 : symtab_node::get (decl)))
10221 return symbol->nonzero_address ();
10222
10223 /* Function local objects are never NULL. */
10224 if (DECL_P (decl)
10225 && (DECL_CONTEXT (decl)
10226 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10227 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10228 return 1;
10229
10230 return -1;
10231}
10232
10233/* Subroutine of fold_binary. This routine performs all of the
10234 transformations that are common to the equality/inequality
10235 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10236   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
10237   fold_binary itself should go through fold_binary.  Fold a comparison with
10238 tree code CODE and type TYPE with operands OP0 and OP1. Return
10239 the folded comparison or NULL_TREE. */
10240
10241static tree
10242fold_comparison (location_t loc, enum tree_code code, tree type,
10243 tree op0, tree op1)
10244{
10245 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10246 tree arg0, arg1, tem;
10247
10248 arg0 = op0;
10249 arg1 = op1;
10250
10251 STRIP_SIGN_NOPS (arg0);
10252 STRIP_SIGN_NOPS (arg1);
10253
10254 /* For comparisons of pointers we can decompose it to a compile time
10255 comparison of the base objects and the offsets into the object.
10256 This requires at least one operand being an ADDR_EXPR or a
10257 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
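  /* For instance (illustrative example only), &a.x < &a.y decomposes into
     the common base 'a' plus two constant bit positions and can be decided
     here, whereas &a < &b with unrelated bases cannot.  */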
10258 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10259 && (TREE_CODE (arg0) == ADDR_EXPR
10260 || TREE_CODE (arg1) == ADDR_EXPR
10261 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10262 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10263 {
10264 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10265 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10266 machine_mode mode;
10267 int volatilep, reversep, unsignedp;
10268 bool indirect_base0 = false, indirect_base1 = false;
10269
10270 /* Get base and offset for the access. Strip ADDR_EXPR for
10271 get_inner_reference, but put it back by stripping INDIRECT_REF
10272 off the base object if possible. indirect_baseN will be true
10273 if baseN is not an address but refers to the object itself. */
10274 base0 = arg0;
10275 if (TREE_CODE (arg0) == ADDR_EXPR)
10276 {
10277 base0
10278 = get_inner_reference (TREE_OPERAND (arg0, 0),
10279 &bitsize, &bitpos0, &offset0, &mode,
10280 &unsignedp, &reversep, &volatilep);
10281 if (INDIRECT_REF_P (base0))
10282 base0 = TREE_OPERAND (base0, 0);
10283 else
10284 indirect_base0 = true;
10285 }
10286 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10287 {
10288 base0 = TREE_OPERAND (arg0, 0);
10289 STRIP_SIGN_NOPS (base0);
10290 if (TREE_CODE (base0) == ADDR_EXPR)
10291 {
10292 base0
10293 = get_inner_reference (TREE_OPERAND (base0, 0),
10294 &bitsize, &bitpos0, &offset0, &mode,
10295 &unsignedp, &reversep, &volatilep);
10296 if (INDIRECT_REF_P (base0))
10297 base0 = TREE_OPERAND (base0, 0);
10298 else
10299 indirect_base0 = true;
10300 }
10301 if (offset0 == NULL_TREE || integer_zerop (offset0))
10302 offset0 = TREE_OPERAND (arg0, 1);
10303 else
10304 offset0 = size_binop (PLUS_EXPR, offset0,
10305 TREE_OPERAND (arg0, 1));
10306 if (poly_int_tree_p (t: offset0))
10307 {
10308 poly_offset_int tem = wi::sext (a: wi::to_poly_offset (t: offset0),
10309 TYPE_PRECISION (sizetype));
10310 tem <<= LOG2_BITS_PER_UNIT;
10311 tem += bitpos0;
10312 if (tem.to_shwi (r: &bitpos0))
10313 offset0 = NULL_TREE;
10314 }
10315 }
10316
10317 base1 = arg1;
10318 if (TREE_CODE (arg1) == ADDR_EXPR)
10319 {
10320 base1
10321 = get_inner_reference (TREE_OPERAND (arg1, 0),
10322 &bitsize, &bitpos1, &offset1, &mode,
10323 &unsignedp, &reversep, &volatilep);
10324 if (INDIRECT_REF_P (base1))
10325 base1 = TREE_OPERAND (base1, 0);
10326 else
10327 indirect_base1 = true;
10328 }
10329 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10330 {
10331 base1 = TREE_OPERAND (arg1, 0);
10332 STRIP_SIGN_NOPS (base1);
10333 if (TREE_CODE (base1) == ADDR_EXPR)
10334 {
10335 base1
10336 = get_inner_reference (TREE_OPERAND (base1, 0),
10337 &bitsize, &bitpos1, &offset1, &mode,
10338 &unsignedp, &reversep, &volatilep);
10339 if (INDIRECT_REF_P (base1))
10340 base1 = TREE_OPERAND (base1, 0);
10341 else
10342 indirect_base1 = true;
10343 }
10344 if (offset1 == NULL_TREE || integer_zerop (offset1))
10345 offset1 = TREE_OPERAND (arg1, 1);
10346 else
10347 offset1 = size_binop (PLUS_EXPR, offset1,
10348 TREE_OPERAND (arg1, 1));
10349 if (poly_int_tree_p (t: offset1))
10350 {
10351 poly_offset_int tem = wi::sext (a: wi::to_poly_offset (t: offset1),
10352 TYPE_PRECISION (sizetype));
10353 tem <<= LOG2_BITS_PER_UNIT;
10354 tem += bitpos1;
10355 if (tem.to_shwi (r: &bitpos1))
10356 offset1 = NULL_TREE;
10357 }
10358 }
10359
10360 /* If we have equivalent bases we might be able to simplify. */
10361 if (indirect_base0 == indirect_base1
10362 && operand_equal_p (arg0: base0, arg1: base1,
10363 flags: indirect_base0 ? OEP_ADDRESS_OF : 0))
10364 {
10365 /* We can fold this expression to a constant if the non-constant
10366 offset parts are equal. */
10367 if ((offset0 == offset1
10368 || (offset0 && offset1
10369 && operand_equal_p (arg0: offset0, arg1: offset1, flags: 0)))
10370 && (equality_code
10371 || (indirect_base0
10372 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10373 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10374 {
10375 if (!equality_code
10376 && maybe_ne (a: bitpos0, b: bitpos1)
10377 && (pointer_may_wrap_p (base: base0, offset: offset0, bitpos: bitpos0)
10378 || pointer_may_wrap_p (base: base1, offset: offset1, bitpos: bitpos1)))
10379 fold_overflow_warning (gmsgid: ("assuming pointer wraparound does not "
10380 "occur when comparing P +- C1 with "
10381 "P +- C2"),
10382 wc: WARN_STRICT_OVERFLOW_CONDITIONAL);
10383
10384 switch (code)
10385 {
10386 case EQ_EXPR:
10387 if (known_eq (bitpos0, bitpos1))
10388 return constant_boolean_node (value: true, type);
10389 if (known_ne (bitpos0, bitpos1))
10390 return constant_boolean_node (value: false, type);
10391 break;
10392 case NE_EXPR:
10393 if (known_ne (bitpos0, bitpos1))
10394 return constant_boolean_node (value: true, type);
10395 if (known_eq (bitpos0, bitpos1))
10396 return constant_boolean_node (value: false, type);
10397 break;
10398 case LT_EXPR:
10399 if (known_lt (bitpos0, bitpos1))
10400 return constant_boolean_node (value: true, type);
10401 if (known_ge (bitpos0, bitpos1))
10402 return constant_boolean_node (value: false, type);
10403 break;
10404 case LE_EXPR:
10405 if (known_le (bitpos0, bitpos1))
10406 return constant_boolean_node (value: true, type);
10407 if (known_gt (bitpos0, bitpos1))
10408 return constant_boolean_node (value: false, type);
10409 break;
10410 case GE_EXPR:
10411 if (known_ge (bitpos0, bitpos1))
10412 return constant_boolean_node (value: true, type);
10413 if (known_lt (bitpos0, bitpos1))
10414 return constant_boolean_node (value: false, type);
10415 break;
10416 case GT_EXPR:
10417 if (known_gt (bitpos0, bitpos1))
10418 return constant_boolean_node (value: true, type);
10419 if (known_le (bitpos0, bitpos1))
10420 return constant_boolean_node (value: false, type);
10421 break;
10422 default:;
10423 }
10424 }
10425 /* We can simplify the comparison to a comparison of the variable
10426 offset parts if the constant offset parts are equal.
10427 Be careful to use signed sizetype here because otherwise we
10428 mess with array offsets in the wrong way. This is possible
10429	 because pointer arithmetic is restricted to remain within an
10430 object and overflow on pointer differences is undefined as of
10431 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10432 else if (known_eq (bitpos0, bitpos1)
10433 && (equality_code
10434 || (indirect_base0
10435 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10436 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10437 {
10438 /* By converting to signed sizetype we cover middle-end pointer
10439 arithmetic which operates on unsigned pointer types of size
10440 type size and ARRAY_REF offsets which are properly sign or
10441 zero extended from their type in case it is narrower than
10442 sizetype. */
10443 if (offset0 == NULL_TREE)
10444 offset0 = build_int_cst (ssizetype, 0);
10445 else
10446 offset0 = fold_convert_loc (loc, ssizetype, arg: offset0);
10447 if (offset1 == NULL_TREE)
10448 offset1 = build_int_cst (ssizetype, 0);
10449 else
10450 offset1 = fold_convert_loc (loc, ssizetype, arg: offset1);
10451
10452 if (!equality_code
10453 && (pointer_may_wrap_p (base: base0, offset: offset0, bitpos: bitpos0)
10454 || pointer_may_wrap_p (base: base1, offset: offset1, bitpos: bitpos1)))
10455 fold_overflow_warning (gmsgid: ("assuming pointer wraparound does not "
10456 "occur when comparing P +- C1 with "
10457 "P +- C2"),
10458 wc: WARN_STRICT_OVERFLOW_COMPARISON);
10459
10460 return fold_build2_loc (loc, code, type, offset0, offset1);
10461 }
10462 }
10463 /* For equal offsets we can simplify to a comparison of the
10464 base addresses. */
10465 else if (known_eq (bitpos0, bitpos1)
10466 && (indirect_base0
10467 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10468 && (indirect_base1
10469 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10470 && ((offset0 == offset1)
10471 || (offset0 && offset1
10472 && operand_equal_p (arg0: offset0, arg1: offset1, flags: 0))))
10473 {
10474 if (indirect_base0)
10475 base0 = build_fold_addr_expr_loc (loc, t: base0);
10476 if (indirect_base1)
10477 base1 = build_fold_addr_expr_loc (loc, t: base1);
10478 return fold_build2_loc (loc, code, type, base0, base1);
10479 }
10480 /* Comparison between an ordinary (non-weak) symbol and a null
10481	 pointer can be eliminated since such symbols must have a
10482	 non-null address.  In C, relational expressions between pointers
10483 to objects and null pointers are undefined. The results
10484 below follow the C++ rules with the additional property that
10485 every object pointer compares greater than a null pointer.
10486 */
10487 else if (((DECL_P (base0)
10488 && maybe_nonzero_address (decl: base0) > 0
10489 /* Avoid folding references to struct members at offset 0 to
10490 prevent tests like '&ptr->firstmember == 0' from getting
10491 eliminated. When ptr is null, although the -> expression
10492 is strictly speaking invalid, GCC retains it as a matter
10493 of QoI. See PR c/44555. */
10494 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10495 || CONSTANT_CLASS_P (base0))
10496 && indirect_base0
10497 /* The caller guarantees that when one of the arguments is
10498 constant (i.e., null in this case) it is second. */
10499 && integer_zerop (arg1))
10500 {
10501 switch (code)
10502 {
10503 case EQ_EXPR:
10504 case LE_EXPR:
10505 case LT_EXPR:
10506 return constant_boolean_node (value: false, type);
10507 case GE_EXPR:
10508 case GT_EXPR:
10509 case NE_EXPR:
10510 return constant_boolean_node (value: true, type);
10511 default:
10512 gcc_unreachable ();
10513 }
10514 }
10515 }
10516
10517 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10518 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10519 the resulting offset is smaller in absolute value than the
10520 original one and has the same sign. */
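  /* A concrete instance (values are only illustrative): for signed x and y,
     x + 3 < y + 10 becomes x < y + 7, since |7| < |10| and both constants
     have the same sign, so no new overflow can be introduced.  */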
10521 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10522 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10523 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10524 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10525 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10526 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10527 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10528 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10529 {
10530 tree const1 = TREE_OPERAND (arg0, 1);
10531 tree const2 = TREE_OPERAND (arg1, 1);
10532 tree variable1 = TREE_OPERAND (arg0, 0);
10533 tree variable2 = TREE_OPERAND (arg1, 0);
10534 tree cst;
10535 const char * const warnmsg = G_("assuming signed overflow does not "
10536 "occur when combining constants around "
10537 "a comparison");
10538
10539 /* Put the constant on the side where it doesn't overflow and is
10540	 of lower absolute value than before and of the same sign.  */
10541 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10542 ? MINUS_EXPR : PLUS_EXPR,
10543 arg1: const2, arg2: const1);
10544 if (!TREE_OVERFLOW (cst)
10545 && tree_int_cst_compare (t1: const2, t2: cst) == tree_int_cst_sgn (const2)
10546 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10547 {
10548 fold_overflow_warning (gmsgid: warnmsg, wc: WARN_STRICT_OVERFLOW_COMPARISON);
10549 return fold_build2_loc (loc, code, type,
10550 variable1,
10551 fold_build2_loc (loc, TREE_CODE (arg1),
10552 TREE_TYPE (arg1),
10553 variable2, cst));
10554 }
10555
10556 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10557 ? MINUS_EXPR : PLUS_EXPR,
10558 arg1: const1, arg2: const2);
10559 if (!TREE_OVERFLOW (cst)
10560 && tree_int_cst_compare (t1: const1, t2: cst) == tree_int_cst_sgn (const1)
10561 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10562 {
10563 fold_overflow_warning (gmsgid: warnmsg, wc: WARN_STRICT_OVERFLOW_COMPARISON);
10564 return fold_build2_loc (loc, code, type,
10565 fold_build2_loc (loc, TREE_CODE (arg0),
10566 TREE_TYPE (arg0),
10567 variable1, cst),
10568 variable2);
10569 }
10570 }
10571
10572 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10573 if (tem)
10574 return tem;
10575
10576 /* If we are comparing an expression that just has comparisons
10577 of two integer values, arithmetic expressions of those comparisons,
10578 and constants, we can simplify it. There are only three cases
10579 to check: the two values can either be equal, the first can be
10580 greater, or the second can be greater. Fold the expression for
10581 those three values. Since each value must be 0 or 1, we have
10582 eight possibilities, each of which corresponds to the constant 0
10583 or 1 or one of the six possible comparisons.
10584
10585 This handles common cases like (a > b) == 0 but also handles
10586 expressions like ((x > y) - (y > x)) > 0, which supposedly
10587 occur in macroized code. */
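  /* As a worked instance, for (a > b) == 0 the three substitutions yield
     high_result = (1 == 0) = 0, equal_result = (0 == 0) = 1 and
     low_result = (0 == 0) = 1, i.e. mask 011, which selects LE_EXPR,
     so the whole expression folds to a <= b.  */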
10588
10589 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10590 {
10591 tree cval1 = 0, cval2 = 0;
10592
10593 if (twoval_comparison_p (arg: arg0, cval1: &cval1, cval2: &cval2)
10594 /* Don't handle degenerate cases here; they should already
10595 have been handled anyway. */
10596 && cval1 != 0 && cval2 != 0
10597 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10598 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10599 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10600 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10601 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10602 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10603 TYPE_MAX_VALUE (TREE_TYPE (cval2)), flags: 0))
10604 {
10605 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10606 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10607
10608 /* We can't just pass T to eval_subst in case cval1 or cval2
10609 was the same as ARG1. */
10610
10611 tree high_result
10612 = fold_build2_loc (loc, code, type,
10613 eval_subst (loc, arg: arg0, old0: cval1, new0: maxval,
10614 old1: cval2, new1: minval),
10615 arg1);
10616 tree equal_result
10617 = fold_build2_loc (loc, code, type,
10618 eval_subst (loc, arg: arg0, old0: cval1, new0: maxval,
10619 old1: cval2, new1: maxval),
10620 arg1);
10621 tree low_result
10622 = fold_build2_loc (loc, code, type,
10623 eval_subst (loc, arg: arg0, old0: cval1, new0: minval,
10624 old1: cval2, new1: maxval),
10625 arg1);
10626
10627 /* All three of these results should be 0 or 1. Confirm they are.
10628 Then use those values to select the proper code to use. */
10629
10630 if (TREE_CODE (high_result) == INTEGER_CST
10631 && TREE_CODE (equal_result) == INTEGER_CST
10632 && TREE_CODE (low_result) == INTEGER_CST)
10633 {
10634 /* Make a 3-bit mask with the high-order bit being the
10635 value for `>', the next for '=', and the low for '<'. */
10636 switch ((integer_onep (high_result) * 4)
10637 + (integer_onep (equal_result) * 2)
10638 + integer_onep (low_result))
10639 {
10640 case 0:
10641 /* Always false. */
10642 return omit_one_operand_loc (loc, type, integer_zero_node, omitted: arg0);
10643 case 1:
10644 code = LT_EXPR;
10645 break;
10646 case 2:
10647 code = EQ_EXPR;
10648 break;
10649 case 3:
10650 code = LE_EXPR;
10651 break;
10652 case 4:
10653 code = GT_EXPR;
10654 break;
10655 case 5:
10656 code = NE_EXPR;
10657 break;
10658 case 6:
10659 code = GE_EXPR;
10660 break;
10661 case 7:
10662 /* Always true. */
10663 return omit_one_operand_loc (loc, type, integer_one_node, omitted: arg0);
10664 }
10665
10666 return fold_build2_loc (loc, code, type, cval1, cval2);
10667 }
10668 }
10669 }
10670
10671 return NULL_TREE;
10672}
10673
10674
10675/* Subroutine of fold_binary. Optimize complex multiplications of the
10676 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10677 argument EXPR represents the expression "z" of type TYPE. */
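/* For z = a + b*i this computes (a + b*i) * (a - b*i) = a*a + b*b + 0*i;
   e.g. (3 + 4i) * (3 - 4i) = 25 + 0i, hence the COMPLEX_EXPR built below
   has a zero imaginary part.  */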
10678
10679static tree
10680fold_mult_zconjz (location_t loc, tree type, tree expr)
10681{
10682 tree itype = TREE_TYPE (type);
10683 tree rpart, ipart, tem;
10684
10685 if (TREE_CODE (expr) == COMPLEX_EXPR)
10686 {
10687 rpart = TREE_OPERAND (expr, 0);
10688 ipart = TREE_OPERAND (expr, 1);
10689 }
10690 else if (TREE_CODE (expr) == COMPLEX_CST)
10691 {
10692 rpart = TREE_REALPART (expr);
10693 ipart = TREE_IMAGPART (expr);
10694 }
10695 else
10696 {
10697 expr = save_expr (expr);
10698 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10699 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10700 }
10701
10702 rpart = save_expr (rpart);
10703 ipart = save_expr (ipart);
10704 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10705 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10706 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10707 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10708 build_zero_cst (itype));
10709}
10710
10711
10712/* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10713 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10714 true if successful. */
10715
10716static bool
10717vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10718{
10719 unsigned HOST_WIDE_INT i, nunits;
10720
10721 if (TREE_CODE (arg) == VECTOR_CST
10722 && VECTOR_CST_NELTS (arg).is_constant (const_value: &nunits))
10723 {
10724 for (i = 0; i < nunits; ++i)
10725 elts[i] = VECTOR_CST_ELT (arg, i);
10726 }
10727 else if (TREE_CODE (arg) == CONSTRUCTOR)
10728 {
10729 constructor_elt *elt;
10730
10731 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10732 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10733 return false;
10734 else
10735 elts[i] = elt->value;
10736 }
10737 else
10738 return false;
10739 for (; i < nelts; i++)
10740 elts[i]
10741 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10742 return true;
10743}
10744
10745/* Helper routine for fold_vec_perm_cst to check if SEL is a suitable
10746 mask for VLA vec_perm folding.
10747   REASON, if specified, will contain the reason why SEL is not suitable.
10748 Used only for debugging and unit-testing. */
10749
10750static bool
10751valid_mask_for_fold_vec_perm_cst_p (tree arg0, tree arg1,
10752 const vec_perm_indices &sel,
10753 const char **reason = NULL)
10754{
10755 unsigned sel_npatterns = sel.encoding ().npatterns ();
10756 unsigned sel_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10757
10758 if (!(pow2p_hwi (x: sel_npatterns)
10759 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg0))
10760 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg1))))
10761 {
10762 if (reason)
10763 *reason = "npatterns is not power of 2";
10764 return false;
10765 }
10766
10767 /* We want to avoid cases where sel.length is not a multiple of npatterns.
10768     E.g. sel.length = 2 + 2x and sel_npatterns = 4.  */
10769 poly_uint64 esel;
10770 if (!multiple_p (a: sel.length (), b: sel_npatterns, multiple: &esel))
10771 {
10772 if (reason)
10773 *reason = "sel.length is not multiple of sel_npatterns";
10774 return false;
10775 }
10776
10777 if (sel_nelts_per_pattern < 3)
10778 return true;
10779
10780 for (unsigned pattern = 0; pattern < sel_npatterns; pattern++)
10781 {
10782 poly_uint64 a1 = sel[pattern + sel_npatterns];
10783 poly_uint64 a2 = sel[pattern + 2 * sel_npatterns];
10784 HOST_WIDE_INT step;
10785 if (!poly_int64 (a2 - a1).is_constant (const_value: &step))
10786 {
10787 if (reason)
10788 *reason = "step is not constant";
10789 return false;
10790 }
10791 // FIXME: Punt on step < 0 for now, revisit later.
10792 if (step < 0)
10793 return false;
10794 if (step == 0)
10795 continue;
10796
10797 if (!pow2p_hwi (x: step))
10798 {
10799 if (reason)
10800 *reason = "step is not power of 2";
10801 return false;
10802 }
10803
10804      /* Ensure that the stepped sequence of the pattern selects elements
10805 only from the same input vector. */
10806 uint64_t q1, qe;
10807 poly_uint64 r1, re;
10808 poly_uint64 ae = a1 + (esel - 2) * step;
10809 poly_uint64 arg_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10810
10811 if (!(can_div_trunc_p (a: a1, b: arg_len, quotient: &q1, remainder: &r1)
10812 && can_div_trunc_p (a: ae, b: arg_len, quotient: &qe, remainder: &re)
10813 && q1 == qe))
10814 {
10815 if (reason)
10816 *reason = "crossed input vectors";
10817 return false;
10818 }
10819
10820 /* Ensure that the stepped sequence always selects from the same
10821 input pattern. */
10822 tree arg = ((q1 & 1) == 0) ? arg0 : arg1;
10823 unsigned arg_npatterns = VECTOR_CST_NPATTERNS (arg);
10824
10825 if (!multiple_p (a: step, b: arg_npatterns))
10826 {
10827 if (reason)
10828 *reason = "step is not multiple of npatterns";
10829 return false;
10830 }
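      /* E.g. if ARG has two patterns, its elements alternate between the two
	 pattern series { a0, b0, a1, b1, a2, b2, ... }; a step that is a
	 multiple of 2 keeps selecting from the same series, whereas an odd
	 step would mix elements of both series.  */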
10831
10832      /* If a1 chooses a base element from arg, ensure that it's a natural
10833	 stepped sequence, i.e., (arg[2] - arg[1]) == (arg[1] - arg[0])
10834 to preserve arg's encoding. */
10835
10836 if (maybe_lt (a: r1, b: arg_npatterns))
10837 {
10838 unsigned HOST_WIDE_INT index;
10839 if (!r1.is_constant (const_value: &index))
10840 return false;
10841
10842 tree arg_elem0 = vector_cst_elt (arg, index);
10843 tree arg_elem1 = vector_cst_elt (arg, index + arg_npatterns);
10844 tree arg_elem2 = vector_cst_elt (arg, index + arg_npatterns * 2);
10845
10846 tree step1, step2;
10847 if (!(step1 = const_binop (code: MINUS_EXPR, arg1: arg_elem1, arg2: arg_elem0))
10848 || !(step2 = const_binop (code: MINUS_EXPR, arg1: arg_elem2, arg2: arg_elem1))
10849 || !operand_equal_p (arg0: step1, arg1: step2, flags: 0))
10850 {
10851 if (reason)
10852 *reason = "not a natural stepped sequence";
10853 return false;
10854 }
10855 }
10856 }
10857
10858 return true;
10859}
10860
10861/* Try to fold permutation of ARG0 and ARG1 with SEL selector when
10862 the input vectors are VECTOR_CST. Return NULL_TREE otherwise.
10863   REASON has the same purpose as described in
10864 valid_mask_for_fold_vec_perm_cst_p. */
10865
10866static tree
10867fold_vec_perm_cst (tree type, tree arg0, tree arg1, const vec_perm_indices &sel,
10868 const char **reason = NULL)
10869{
10870 unsigned res_npatterns, res_nelts_per_pattern;
10871 unsigned HOST_WIDE_INT res_nelts;
10872
10873 /* First try to implement the fold in a VLA-friendly way.
10874
10875 (1) If the selector is simply a duplication of N elements, the
10876 result is likewise a duplication of N elements.
10877
10878 (2) If the selector is N elements followed by a duplication
10879 of N elements, the result is too.
10880
10881 (3) If the selector is N elements followed by an interleaving
10882 of N linear series, the situation is more complex.
10883
10884 valid_mask_for_fold_vec_perm_cst_p detects whether we
10885 can handle this case. If we can, then each of the N linear
10886 series either (a) selects the same element each time or
10887 (b) selects a linear series from one of the input patterns.
10888
10889 If (b) holds for one of the linear series, the result
10890 will contain a linear series, and so the result will have
10891 the same shape as the selector. If (a) holds for all of
10892 the linear series, the result will be the same as (2) above.
10893
10894 (b) can only hold if one of the input patterns has a
10895 stepped encoding. */
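  /* A simple fixed-length illustration (values hypothetical): with
     arg0 = { 10, 11, 12, 13 }, arg1 = { 20, 21, 22, 23 } and
     sel = { 0, 4, 1, 5 }, each sel[i] / 4 picks the input vector and
     sel[i] % 4 the element within it, giving { 10, 20, 11, 21 }.  */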
10896
10897 if (valid_mask_for_fold_vec_perm_cst_p (arg0, arg1, sel, reason))
10898 {
10899 res_npatterns = sel.encoding ().npatterns ();
10900 res_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10901 if (res_nelts_per_pattern == 3
10902 && VECTOR_CST_NELTS_PER_PATTERN (arg0) < 3
10903 && VECTOR_CST_NELTS_PER_PATTERN (arg1) < 3)
10904 res_nelts_per_pattern = 2;
10905 res_nelts = res_npatterns * res_nelts_per_pattern;
10906 }
10907 else if (TYPE_VECTOR_SUBPARTS (node: type).is_constant (const_value: &res_nelts))
10908 {
10909 res_npatterns = res_nelts;
10910 res_nelts_per_pattern = 1;
10911 }
10912 else
10913 return NULL_TREE;
10914
10915 tree_vector_builder out_elts (type, res_npatterns, res_nelts_per_pattern);
10916 for (unsigned i = 0; i < res_nelts; i++)
10917 {
10918 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10919 uint64_t q;
10920 poly_uint64 r;
10921 unsigned HOST_WIDE_INT index;
10922
10923      /* Punt if sel[i] / len (truncating division) cannot be determined,
10924	 because the input vector to be chosen would depend on the
10925	 runtime vector length.
10926	 For example, if len == 4 + 4x and sel[i] == 4:
10927	 if len at runtime equals 4, we choose arg1[0];
10928	 for any other runtime value of len > 4, we choose arg0[4];
10929	 thus the element choice depends on the runtime vector length.  */
10930 if (!can_div_trunc_p (a: sel[i], b: len, quotient: &q, remainder: &r))
10931 {
10932 if (reason)
10933 *reason = "cannot divide selector element by arg len";
10934 return NULL_TREE;
10935 }
10936
10937      /* sel[i] % len will give the index of the element in the chosen input
10938 vector. For example if sel[i] == 5 + 4x and len == 4 + 4x,
10939 we will choose arg1[1] since (5 + 4x) % (4 + 4x) == 1. */
10940 if (!r.is_constant (const_value: &index))
10941 {
10942 if (reason)
10943 *reason = "remainder is not constant";
10944 return NULL_TREE;
10945 }
10946
10947 tree arg = ((q & 1) == 0) ? arg0 : arg1;
10948 tree elem = vector_cst_elt (arg, index);
10949 out_elts.quick_push (obj: elem);
10950 }
10951
10952 return out_elts.build ();
10953}
10954
10955/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10956 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10957 NULL_TREE otherwise. */
10958
10959tree
10960fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10961{
10962 unsigned int i;
10963 unsigned HOST_WIDE_INT nelts;
10964
10965 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), sel.length ())
10966 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
10967 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))));
10968
10969 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10970 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10971 return NULL_TREE;
10972
10973 if (TREE_CODE (arg0) == VECTOR_CST
10974 && TREE_CODE (arg1) == VECTOR_CST)
10975 return fold_vec_perm_cst (type, arg0, arg1, sel);
10976
10977  /* For the fallback case, we want to ensure we have VLS vectors
10978     of equal length.  */
10979 if (!sel.length ().is_constant (const_value: &nelts))
10980 return NULL_TREE;
10981
10982 gcc_assert (known_eq (sel.length (),
10983 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))));
10984 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10985 if (!vec_cst_ctor_to_array (arg: arg0, nelts, elts: in_elts)
10986 || !vec_cst_ctor_to_array (arg: arg1, nelts, elts: in_elts + nelts))
10987 return NULL_TREE;
10988
10989 vec<constructor_elt, va_gc> *v;
10990 vec_alloc (v, nelems: nelts);
10991 for (i = 0; i < nelts; i++)
10992 {
10993 HOST_WIDE_INT index;
10994 if (!sel[i].is_constant (const_value: &index))
10995 return NULL_TREE;
10996 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, in_elts[index]);
10997 }
10998 return build_constructor (type, v);
10999}
11000
11001/* Try to fold a pointer difference of type TYPE between two address
11002   expressions of array references AREF0 and AREF1 using location LOC.  Return a
11003 simplified expression for the difference or NULL_TREE. */
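/* For example (assuming 4-byte ints), &a[i] - &a[j] folds to (i - j) * 4,
   i.e. a byte difference at this level rather than an element count.  */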
11004
11005static tree
11006fold_addr_of_array_ref_difference (location_t loc, tree type,
11007 tree aref0, tree aref1,
11008 bool use_pointer_diff)
11009{
11010 tree base0 = TREE_OPERAND (aref0, 0);
11011 tree base1 = TREE_OPERAND (aref1, 0);
11012 tree base_offset = build_int_cst (type, 0);
11013
11014 /* If the bases are array references as well, recurse. If the bases
11015 are pointer indirections compute the difference of the pointers.
11016 If the bases are equal, we are set. */
11017 if ((TREE_CODE (base0) == ARRAY_REF
11018 && TREE_CODE (base1) == ARRAY_REF
11019 && (base_offset
11020 = fold_addr_of_array_ref_difference (loc, type, aref0: base0, aref1: base1,
11021 use_pointer_diff)))
11022 || (INDIRECT_REF_P (base0)
11023 && INDIRECT_REF_P (base1)
11024 && (base_offset
11025 = use_pointer_diff
11026 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
11027 TREE_OPERAND (base0, 0),
11028 TREE_OPERAND (base1, 0))
11029 : fold_binary_loc (loc, MINUS_EXPR, type,
11030 fold_convert (type,
11031 TREE_OPERAND (base0, 0)),
11032 fold_convert (type,
11033 TREE_OPERAND (base1, 0)))))
11034 || operand_equal_p (arg0: base0, arg1: base1, flags: OEP_ADDRESS_OF))
11035 {
11036 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
11037 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
11038 tree esz = fold_convert_loc (loc, type, arg: array_ref_element_size (aref0));
11039 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
11040 return fold_build2_loc (loc, PLUS_EXPR, type,
11041 base_offset,
11042 fold_build2_loc (loc, MULT_EXPR, type,
11043 diff, esz));
11044 }
11045 return NULL_TREE;
11046}
11047
11048/* If the real or vector real constant CST of type TYPE has an exact
11049 inverse, return it, else return NULL. */
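/* E.g. 4.0 has the exact inverse 0.25, whereas 3.0 does not, since 1.0/3.0
   is not exactly representable in binary floating point.  */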
11050
11051tree
11052exact_inverse (tree type, tree cst)
11053{
11054 REAL_VALUE_TYPE r;
11055 tree unit_type;
11056 machine_mode mode;
11057
11058 switch (TREE_CODE (cst))
11059 {
11060 case REAL_CST:
11061 r = TREE_REAL_CST (cst);
11062
11063 if (exact_real_inverse (TYPE_MODE (type), &r))
11064 return build_real (type, r);
11065
11066 return NULL_TREE;
11067
11068 case VECTOR_CST:
11069 {
11070 unit_type = TREE_TYPE (type);
11071 mode = TYPE_MODE (unit_type);
11072
11073 tree_vector_builder elts;
11074 if (!elts.new_unary_operation (shape: type, vec: cst, allow_stepped_p: false))
11075 return NULL_TREE;
11076 unsigned int count = elts.encoded_nelts ();
11077 for (unsigned int i = 0; i < count; ++i)
11078 {
11079 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
11080 if (!exact_real_inverse (mode, &r))
11081 return NULL_TREE;
11082 elts.quick_push (obj: build_real (unit_type, r));
11083 }
11084
11085 return elts.build ();
11086 }
11087
11088 default:
11089 return NULL_TREE;
11090 }
11091}
11092
11093/* Mask out the tz least significant bits of X of type TYPE where
11094 tz is the number of trailing zeroes in Y. */
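/* E.g. for Y = 0b1000 (three trailing zeros) and X = 0b10110, the three
   least significant bits of X are cleared, yielding 0b10000.  */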
11095static wide_int
11096mask_with_tz (tree type, const wide_int &x, const wide_int &y)
11097{
11098 int tz = wi::ctz (y);
11099 if (tz > 0)
11100 return wi::mask (width: tz, negate_p: true, TYPE_PRECISION (type)) & x;
11101 return x;
11102}
11103
11104/* Return true when T is an address and is known to be nonzero.
11105 For floating point we further ensure that T is not denormal.
11106 Similar logic is present in nonzero_address in rtlanal.h.
11107
11108 If the return value is based on the assumption that signed overflow
11109 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
11110 change *STRICT_OVERFLOW_P. */
11111
11112static bool
11113tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
11114{
11115 tree type = TREE_TYPE (t);
11116 enum tree_code code;
11117
11118 /* Doing something useful for floating point would need more work. */
11119 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11120 return false;
11121
11122 code = TREE_CODE (t);
11123 switch (TREE_CODE_CLASS (code))
11124 {
11125 case tcc_unary:
11126 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
11127 strict_overflow_p);
11128 case tcc_binary:
11129 case tcc_comparison:
11130 return tree_binary_nonzero_warnv_p (code, type,
11131 TREE_OPERAND (t, 0),
11132 TREE_OPERAND (t, 1),
11133 strict_overflow_p);
11134 case tcc_constant:
11135 case tcc_declaration:
11136 case tcc_reference:
11137 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
11138
11139 default:
11140 break;
11141 }
11142
11143 switch (code)
11144 {
11145 case TRUTH_NOT_EXPR:
11146 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
11147 strict_overflow_p);
11148
11149 case TRUTH_AND_EXPR:
11150 case TRUTH_OR_EXPR:
11151 case TRUTH_XOR_EXPR:
11152 return tree_binary_nonzero_warnv_p (code, type,
11153 TREE_OPERAND (t, 0),
11154 TREE_OPERAND (t, 1),
11155 strict_overflow_p);
11156
11157 case COND_EXPR:
11158 case CONSTRUCTOR:
11159 case OBJ_TYPE_REF:
11160 case ADDR_EXPR:
11161 case WITH_SIZE_EXPR:
11162 case SSA_NAME:
11163 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
11164
11165 case COMPOUND_EXPR:
11166 case MODIFY_EXPR:
11167 case BIND_EXPR:
11168 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
11169 strict_overflow_p);
11170
11171 case SAVE_EXPR:
11172 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
11173 strict_overflow_p);
11174
11175 case CALL_EXPR:
11176 {
11177 tree fndecl = get_callee_fndecl (t);
11178 if (!fndecl) return false;
11179 if (flag_delete_null_pointer_checks && !flag_check_new
11180 && DECL_IS_OPERATOR_NEW_P (fndecl)
11181 && !TREE_NOTHROW (fndecl))
11182 return true;
11183 if (flag_delete_null_pointer_checks
11184 && lookup_attribute (attr_name: "returns_nonnull",
11185 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
11186 return true;
11187 return alloca_call_p (t);
11188 }
11189
11190 default:
11191 break;
11192 }
11193 return false;
11194}
11195
11196/* Return true when T is an address and is known to be nonzero.
11197 Handle warnings about undefined signed overflow. */
11198
11199bool
11200tree_expr_nonzero_p (tree t)
11201{
11202 bool ret, strict_overflow_p;
11203
11204 strict_overflow_p = false;
11205 ret = tree_expr_nonzero_warnv_p (t, strict_overflow_p: &strict_overflow_p);
11206 if (strict_overflow_p)
11207 fold_overflow_warning (gmsgid: ("assuming signed overflow does not occur when "
11208 "determining that expression is always "
11209 "non-zero"),
11210 wc: WARN_STRICT_OVERFLOW_MISC);
11211 return ret;
11212}
11213
11214/* Return true if T is known not to be equal to an integer W. */
11215
11216bool
11217expr_not_equal_to (tree t, const wide_int &w)
11218{
11219 int_range_max vr;
11220 switch (TREE_CODE (t))
11221 {
11222 case INTEGER_CST:
11223 return wi::to_wide (t) != w;
11224
11225 case SSA_NAME:
11226 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
11227 return false;
11228
11229 get_range_query (cfun)->range_of_expr (r&: vr, expr: t);
11230 if (!vr.undefined_p () && !vr.contains_p (w))
11231 return true;
11232 /* If T has some known zero bits and W has any of those bits set,
11233 then T is known not to be equal to W. */
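      /* For instance, if the nonzero-bits info shows T is always even (low
	 bit known zero) and W is 5, the test below proves T != W.  */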
11234 if (wi::ne_p (x: wi::zext (x: wi::bit_and_not (x: w, y: get_nonzero_bits (t)),
11235 TYPE_PRECISION (TREE_TYPE (t))), y: 0))
11236 return true;
11237 return false;
11238
11239 default:
11240 return false;
11241 }
11242}
11243
11244/* Fold a binary expression of code CODE and type TYPE with operands
11245 OP0 and OP1. LOC is the location of the resulting expression.
11246 Return the folded expression if folding is successful. Otherwise,
11247 return NULL_TREE. */
11248
11249tree
11250fold_binary_loc (location_t loc, enum tree_code code, tree type,
11251 tree op0, tree op1)
11252{
11253 enum tree_code_class kind = TREE_CODE_CLASS (code);
11254 tree arg0, arg1, tem;
11255 tree t1 = NULL_TREE;
11256 bool strict_overflow_p;
11257 unsigned int prec;
11258
11259 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11260 && TREE_CODE_LENGTH (code) == 2
11261 && op0 != NULL_TREE
11262 && op1 != NULL_TREE);
11263
11264 arg0 = op0;
11265 arg1 = op1;
11266
11267 /* Strip any conversions that don't change the mode. This is
11268 safe for every expression, except for a comparison expression
11269 because its signedness is derived from its operands. So, in
11270 the latter case, only strip conversions that don't change the
11271 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
11272 preserved.
11273
11274 Note that this is done as an internal manipulation within the
11275 constant folder, in order to find the simplest representation
11276 of the arguments so that their form can be studied. In any
11277 cases, the appropriate type conversions should be put back in
11278 the tree that will get out of the constant folder. */
11279
11280 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
11281 {
11282 STRIP_SIGN_NOPS (arg0);
11283 STRIP_SIGN_NOPS (arg1);
11284 }
11285 else
11286 {
11287 STRIP_NOPS (arg0);
11288 STRIP_NOPS (arg1);
11289 }
11290
11291 /* Note that TREE_CONSTANT isn't enough: static var addresses are
11292 constant but we can't do arithmetic on them. */
11293 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
11294 {
11295 tem = const_binop (code, type, arg1: arg0, arg2: arg1);
11296 if (tem != NULL_TREE)
11297 {
11298 if (TREE_TYPE (tem) != type)
11299 tem = fold_convert_loc (loc, type, arg: tem);
11300 return tem;
11301 }
11302 }
11303
11304 /* If this is a commutative operation, and ARG0 is a constant, move it
11305 to ARG1 to reduce the number of tests below. */
11306 if (commutative_tree_code (code)
11307 && tree_swap_operands_p (arg0, arg1))
11308 return fold_build2_loc (loc, code, type, op1, op0);
11309
11310 /* Likewise if this is a comparison, and ARG0 is a constant, move it
11311 to ARG1 to reduce the number of tests below. */
11312 if (kind == tcc_comparison
11313 && tree_swap_operands_p (arg0, arg1))
11314 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
11315
11316 tem = generic_simplify (loc, code, type, op0, op1);
11317 if (tem)
11318 return tem;
11319
11320 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
11321
11322 First check for cases where an arithmetic operation is applied to a
11323 compound, conditional, or comparison operation. Push the arithmetic
11324 operation inside the compound or conditional to see if any folding
11325 can then be done. Convert comparison to conditional for this purpose.
11326	 This also optimizes non-constant cases that used to be done in
11327 expand_expr.
11328
11329	 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
11330 one of the operands is a comparison and the other is a comparison, a
11331 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
11332 code below would make the expression more complex. Change it to a
11333 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
11334 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
11335
11336 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
11337 || code == EQ_EXPR || code == NE_EXPR)
11338 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
11339 && ((truth_value_p (TREE_CODE (arg0))
11340 && (truth_value_p (TREE_CODE (arg1))
11341 || (TREE_CODE (arg1) == BIT_AND_EXPR
11342 && integer_onep (TREE_OPERAND (arg1, 1)))))
11343 || (truth_value_p (TREE_CODE (arg1))
11344 && (truth_value_p (TREE_CODE (arg0))
11345 || (TREE_CODE (arg0) == BIT_AND_EXPR
11346 && integer_onep (TREE_OPERAND (arg0, 1)))))))
11347 {
11348 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11349 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11350 : TRUTH_XOR_EXPR,
11351 boolean_type_node,
11352 fold_convert_loc (loc, boolean_type_node, arg: arg0),
11353 fold_convert_loc (loc, boolean_type_node, arg: arg1));
11354
11355 if (code == EQ_EXPR)
11356 tem = invert_truthvalue_loc (loc, arg: tem);
11357
11358 return fold_convert_loc (loc, type, arg: tem);
11359 }
11360
11361 if (TREE_CODE_CLASS (code) == tcc_binary
11362 || TREE_CODE_CLASS (code) == tcc_comparison)
11363 {
11364 if (TREE_CODE (arg0) == COMPOUND_EXPR)
11365 {
11366 tem = fold_build2_loc (loc, code, type,
11367 fold_convert_loc (loc, TREE_TYPE (op0),
11368 TREE_OPERAND (arg0, 1)), op1);
11369 return build2_loc (loc, code: COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11370 arg1: tem);
11371 }
11372 if (TREE_CODE (arg1) == COMPOUND_EXPR)
11373 {
11374 tem = fold_build2_loc (loc, code, type, op0,
11375 fold_convert_loc (loc, TREE_TYPE (op1),
11376 TREE_OPERAND (arg1, 1)));
11377 return build2_loc (loc, code: COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11378 arg1: tem);
11379 }
11380
11381 if (TREE_CODE (arg0) == COND_EXPR
11382 || TREE_CODE (arg0) == VEC_COND_EXPR
11383 || COMPARISON_CLASS_P (arg0))
11384 {
11385 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11386 cond: arg0, arg: arg1,
11387 /*cond_first_p=*/1);
11388 if (tem != NULL_TREE)
11389 return tem;
11390 }
11391
11392 if (TREE_CODE (arg1) == COND_EXPR
11393 || TREE_CODE (arg1) == VEC_COND_EXPR
11394 || COMPARISON_CLASS_P (arg1))
11395 {
11396 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11397 cond: arg1, arg: arg0,
11398 /*cond_first_p=*/0);
11399 if (tem != NULL_TREE)
11400 return tem;
11401 }
11402 }
11403
11404 switch (code)
11405 {
11406 case MEM_REF:
11407 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11408 if (TREE_CODE (arg0) == ADDR_EXPR
11409 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11410 {
11411 tree iref = TREE_OPERAND (arg0, 0);
11412 return fold_build2 (MEM_REF, type,
11413 TREE_OPERAND (iref, 0),
11414 int_const_binop (PLUS_EXPR, arg1,
11415 TREE_OPERAND (iref, 1)));
11416 }
11417
11418 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11419 if (TREE_CODE (arg0) == ADDR_EXPR
11420 && handled_component_p (TREE_OPERAND (arg0, 0)))
11421 {
11422 tree base;
11423 poly_int64 coffset;
11424 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11425 &coffset);
11426 if (!base)
11427 return NULL_TREE;
11428 return fold_build2 (MEM_REF, type,
11429 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11430 int_const_binop (PLUS_EXPR, arg1,
11431 size_int (coffset)));
11432 }
11433
11434 return NULL_TREE;
11435
11436 case POINTER_PLUS_EXPR:
11437 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11438 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11439 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11440 return fold_convert_loc (loc, type,
11441 arg: fold_build2_loc (loc, PLUS_EXPR, sizetype,
11442 fold_convert_loc (loc, sizetype,
11443 arg: arg1),
11444 fold_convert_loc (loc, sizetype,
11445 arg: arg0)));
11446
11447 return NULL_TREE;
11448
11449 case PLUS_EXPR:
11450 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11451 {
11452 /* X + (X / CST) * -CST is X % CST. */
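	  /* For instance, with X == 17 and CST == 5:
	     17 + (17 / 5) * -5 == 17 - 15 == 2 == 17 % 5.  */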
11453 if (TREE_CODE (arg1) == MULT_EXPR
11454 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11455 && operand_equal_p (arg0,
11456 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), flags: 0))
11457 {
11458 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11459 tree cst1 = TREE_OPERAND (arg1, 1);
11460 tree sum = fold_binary_loc (loc, code: PLUS_EXPR, TREE_TYPE (cst1),
11461 op0: cst1, op1: cst0);
11462 if (sum && integer_zerop (sum))
11463 return fold_convert_loc (loc, type,
11464 arg: fold_build2_loc (loc, TRUNC_MOD_EXPR,
11465 TREE_TYPE (arg0), arg0,
11466 cst0));
11467 }
11468 }
11469
11470 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11471 one. Make sure the type is not saturating and has the signedness of
11472 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11473 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11474 if ((TREE_CODE (arg0) == MULT_EXPR
11475 || TREE_CODE (arg1) == MULT_EXPR)
11476 && !TYPE_SATURATING (type)
11477 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11478 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11479 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11480 {
11481 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11482 if (tem)
11483 return tem;
11484 }
11485
11486 if (! FLOAT_TYPE_P (type))
11487 {
11488 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11489 (plus (plus (mult) (mult)) (foo)) so that we can
11490 take advantage of the factoring cases below. */
11491 if (ANY_INTEGRAL_TYPE_P (type)
11492 && TYPE_OVERFLOW_WRAPS (type)
11493 && (((TREE_CODE (arg0) == PLUS_EXPR
11494 || TREE_CODE (arg0) == MINUS_EXPR)
11495 && TREE_CODE (arg1) == MULT_EXPR)
11496 || ((TREE_CODE (arg1) == PLUS_EXPR
11497 || TREE_CODE (arg1) == MINUS_EXPR)
11498 && TREE_CODE (arg0) == MULT_EXPR)))
11499 {
11500 tree parg0, parg1, parg, marg;
11501 enum tree_code pcode;
11502
11503 if (TREE_CODE (arg1) == MULT_EXPR)
11504 parg = arg0, marg = arg1;
11505 else
11506 parg = arg1, marg = arg0;
11507 pcode = TREE_CODE (parg);
11508 parg0 = TREE_OPERAND (parg, 0);
11509 parg1 = TREE_OPERAND (parg, 1);
11510 STRIP_NOPS (parg0);
11511 STRIP_NOPS (parg1);
11512
11513 if (TREE_CODE (parg0) == MULT_EXPR
11514 && TREE_CODE (parg1) != MULT_EXPR)
11515 return fold_build2_loc (loc, pcode, type,
11516 fold_build2_loc (loc, PLUS_EXPR, type,
11517 fold_convert_loc (loc, type,
11518 arg: parg0),
11519 fold_convert_loc (loc, type,
11520 arg: marg)),
11521 fold_convert_loc (loc, type, arg: parg1));
11522 if (TREE_CODE (parg0) != MULT_EXPR
11523 && TREE_CODE (parg1) == MULT_EXPR)
11524 return
11525 fold_build2_loc (loc, PLUS_EXPR, type,
11526 fold_convert_loc (loc, type, arg: parg0),
11527 fold_build2_loc (loc, pcode, type,
11528 fold_convert_loc (loc, type, arg: marg),
11529 fold_convert_loc (loc, type,
11530 arg: parg1)));
11531 }
11532 }
11533 else
11534 {
11535 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11536 to __complex__ ( x, y ). This is not the same for SNaNs or
11537 if signed zeros are involved. */
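	  /* (E.g. with signed zeros, __complex__ (x, +0.0)
	     + __complex__ (0, -0.0) must have a +0.0 imaginary part, since
	     +0.0 + -0.0 is +0.0 under round-to-nearest, whereas the shortcut
	     would yield -0.0.)  */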
11538 if (!HONOR_SNANS (arg0)
11539 && !HONOR_SIGNED_ZEROS (arg0)
11540 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11541 {
11542 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11543 tree arg0r = fold_unary_loc (loc, code: REALPART_EXPR, type: rtype, op0: arg0);
11544 tree arg0i = fold_unary_loc (loc, code: IMAGPART_EXPR, type: rtype, op0: arg0);
11545 bool arg0rz = false, arg0iz = false;
11546 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11547 || (arg0i && (arg0iz = real_zerop (arg0i))))
11548 {
11549 tree arg1r = fold_unary_loc (loc, code: REALPART_EXPR, type: rtype, op0: arg1);
11550 tree arg1i = fold_unary_loc (loc, code: IMAGPART_EXPR, type: rtype, op0: arg1);
11551 if (arg0rz && arg1i && real_zerop (arg1i))
11552 {
11553 tree rp = arg1r ? arg1r
11554 : build1 (REALPART_EXPR, rtype, arg1);
11555 tree ip = arg0i ? arg0i
11556 : build1 (IMAGPART_EXPR, rtype, arg0);
11557 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11558 }
11559 else if (arg0iz && arg1r && real_zerop (arg1r))
11560 {
11561 tree rp = arg0r ? arg0r
11562 : build1 (REALPART_EXPR, rtype, arg0);
11563 tree ip = arg1i ? arg1i
11564 : build1 (IMAGPART_EXPR, rtype, arg1);
11565 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11566 }
11567 }
11568 }
11569
11570 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11571 We associate floats only if the user has specified
11572 -fassociative-math. */
11573 if (flag_associative_math
11574 && TREE_CODE (arg1) == PLUS_EXPR
11575 && TREE_CODE (arg0) != MULT_EXPR)
11576 {
11577 tree tree10 = TREE_OPERAND (arg1, 0);
11578 tree tree11 = TREE_OPERAND (arg1, 1);
11579 if (TREE_CODE (tree11) == MULT_EXPR
11580 && TREE_CODE (tree10) == MULT_EXPR)
11581 {
11582 tree tree0;
11583 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11584 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11585 }
11586 }
11587 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11588 We associate floats only if the user has specified
11589 -fassociative-math. */
11590 if (flag_associative_math
11591 && TREE_CODE (arg0) == PLUS_EXPR
11592 && TREE_CODE (arg1) != MULT_EXPR)
11593 {
11594 tree tree00 = TREE_OPERAND (arg0, 0);
11595 tree tree01 = TREE_OPERAND (arg0, 1);
11596 if (TREE_CODE (tree01) == MULT_EXPR
11597 && TREE_CODE (tree00) == MULT_EXPR)
11598 {
11599 tree tree0;
11600 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11601 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11602 }
11603 }
11604 }
11605
11606 bit_rotate:
11607 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11608 is a rotate of A by C1 bits. */
11609 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11610 is a rotate of A by B bits.
11611 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11612 though in this case CODE must be | and not + or ^, otherwise
11613 it doesn't return A when B is 0. */
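    /* E.g. for a 32-bit unsigned A, (A << 8) + (A >> 24) is a rotate of A
       left by 8, and (A << B) | (A >> (-B & 31)) is a rotate of A left by
       B bits (the latter form requires | as noted above).  */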
11614 {
11615 enum tree_code code0, code1;
11616 tree rtype;
11617 code0 = TREE_CODE (arg0);
11618 code1 = TREE_CODE (arg1);
11619 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11620 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11621 && operand_equal_p (TREE_OPERAND (arg0, 0),
11622 TREE_OPERAND (arg1, 0), flags: 0)
11623 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11624 TYPE_UNSIGNED (rtype))
11625 /* Only create rotates in complete modes. Other cases are not
11626 expanded properly. */
11627 && (element_precision (rtype)
11628 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11629 {
11630 tree tree01, tree11;
11631 tree orig_tree01, orig_tree11;
11632 enum tree_code code01, code11;
11633
11634 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11635 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11636 STRIP_NOPS (tree01);
11637 STRIP_NOPS (tree11);
11638 code01 = TREE_CODE (tree01);
11639 code11 = TREE_CODE (tree11);
11640 if (code11 != MINUS_EXPR
11641 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11642 {
11643 std::swap (a&: code0, b&: code1);
11644 std::swap (a&: code01, b&: code11);
11645 std::swap (a&: tree01, b&: tree11);
11646 std::swap (a&: orig_tree01, b&: orig_tree11);
11647 }
11648 if (code01 == INTEGER_CST
11649 && code11 == INTEGER_CST
11650 && (wi::to_widest (t: tree01) + wi::to_widest (t: tree11)
11651 == element_precision (rtype)))
11652 {
11653 tem = build2_loc (loc, code: LROTATE_EXPR,
11654 type: rtype, TREE_OPERAND (arg0, 0),
11655 arg1: code0 == LSHIFT_EXPR
11656 ? orig_tree01 : orig_tree11);
11657 return fold_convert_loc (loc, type, arg: tem);
11658 }
11659 else if (code11 == MINUS_EXPR)
11660 {
11661 tree tree110, tree111;
11662 tree110 = TREE_OPERAND (tree11, 0);
11663 tree111 = TREE_OPERAND (tree11, 1);
11664 STRIP_NOPS (tree110);
11665 STRIP_NOPS (tree111);
11666 if (TREE_CODE (tree110) == INTEGER_CST
11667 && compare_tree_int (tree110,
11668 element_precision (rtype)) == 0
11669 && operand_equal_p (arg0: tree01, arg1: tree111, flags: 0))
11670 {
11671 tem = build2_loc (loc, code: (code0 == LSHIFT_EXPR
11672 ? LROTATE_EXPR : RROTATE_EXPR),
11673 type: rtype, TREE_OPERAND (arg0, 0),
11674 arg1: orig_tree01);
11675 return fold_convert_loc (loc, type, arg: tem);
11676 }
11677 }
11678 else if (code == BIT_IOR_EXPR
11679 && code11 == BIT_AND_EXPR
11680 && pow2p_hwi (x: element_precision (rtype)))
11681 {
11682 tree tree110, tree111;
11683 tree110 = TREE_OPERAND (tree11, 0);
11684 tree111 = TREE_OPERAND (tree11, 1);
11685 STRIP_NOPS (tree110);
11686 STRIP_NOPS (tree111);
11687 if (TREE_CODE (tree110) == NEGATE_EXPR
11688 && TREE_CODE (tree111) == INTEGER_CST
11689 && compare_tree_int (tree111,
11690 element_precision (rtype) - 1) == 0
11691 && operand_equal_p (arg0: tree01, TREE_OPERAND (tree110, 0), flags: 0))
11692 {
11693 tem = build2_loc (loc, code: (code0 == LSHIFT_EXPR
11694 ? LROTATE_EXPR : RROTATE_EXPR),
11695 type: rtype, TREE_OPERAND (arg0, 0),
11696 arg1: orig_tree01);
11697 return fold_convert_loc (loc, type, arg: tem);
11698 }
11699 }
11700 }
11701 }
11702
11703 associate:
11704 /* In most languages, can't associate operations on floats through
11705 parentheses. Rather than remember where the parentheses were, we
11706 don't associate floats at all, unless the user has specified
11707 -fassociative-math.
11708 And, we need to make sure type is not saturating. */
11709
11710 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11711 && !TYPE_SATURATING (type)
11712 && !TYPE_OVERFLOW_SANITIZED (type))
11713 {
11714 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11715 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11716 tree atype = type;
11717 bool ok = true;
11718
11719 /* Split both trees into variables, constants, and literals. Then
11720 associate each group together, the constants with literals,
11721 then the result with variables. This increases the chances of
11722 literals being recombined later and of generating relocatable
11723 expressions for the sum of a constant and literal. */
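      /* E.g. for (x + 3) - (y + 1) the literals 3 and 1 are combined, so the
	 result is equivalent to (x - y) + 2 (the exact shape depends on the
	 regrouping done below).  */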
11724 var0 = split_tree (in: arg0, type, code,
11725 minus_varp: &minus_var0, conp: &con0, minus_conp: &minus_con0,
11726 litp: &lit0, minus_litp: &minus_lit0, negate_p: 0);
11727 var1 = split_tree (in: arg1, type, code,
11728 minus_varp: &minus_var1, conp: &con1, minus_conp: &minus_con1,
11729 litp: &lit1, minus_litp: &minus_lit1, negate_p: code == MINUS_EXPR);
11730
11731 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11732 if (code == MINUS_EXPR)
11733 code = PLUS_EXPR;
11734
11735 /* With undefined overflow prefer doing association in a type
11736 which wraps on overflow, if that is one of the operand types. */
11737 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11738 && !TYPE_OVERFLOW_WRAPS (type))
11739 {
11740 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11741 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11742 atype = TREE_TYPE (arg0);
11743 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11744 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11745 atype = TREE_TYPE (arg1);
11746 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11747 }
11748
11749 /* With undefined overflow we can only associate constants with one
11750 variable, and constants whose association doesn't overflow. */
11751 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11752 && !TYPE_OVERFLOW_WRAPS (atype))
11753 {
11754 if ((var0 && var1) || (minus_var0 && minus_var1))
11755 {
11756 /* ??? If split_tree would handle NEGATE_EXPR we could
11757 simply reject these cases and the allowed cases would
11758 be the var0/minus_var1 ones. */
11759 tree tmp0 = var0 ? var0 : minus_var0;
11760 tree tmp1 = var1 ? var1 : minus_var1;
11761 bool one_neg = false;
11762
11763 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11764 {
11765 tmp0 = TREE_OPERAND (tmp0, 0);
11766 one_neg = !one_neg;
11767 }
11768 if (CONVERT_EXPR_P (tmp0)
11769 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11770 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11771 <= TYPE_PRECISION (atype)))
11772 tmp0 = TREE_OPERAND (tmp0, 0);
11773 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11774 {
11775 tmp1 = TREE_OPERAND (tmp1, 0);
11776 one_neg = !one_neg;
11777 }
11778 if (CONVERT_EXPR_P (tmp1)
11779 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11780 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11781 <= TYPE_PRECISION (atype)))
11782 tmp1 = TREE_OPERAND (tmp1, 0);
11783 /* The only case we can still associate with two variables
11784 is if they cancel out. */
11785 if (!one_neg
11786 || !operand_equal_p (arg0: tmp0, arg1: tmp1, flags: 0))
11787 ok = false;
11788 }
11789 else if ((var0 && minus_var1
11790 && ! operand_equal_p (arg0: var0, arg1: minus_var1, flags: 0))
11791 || (minus_var0 && var1
11792 && ! operand_equal_p (arg0: minus_var0, arg1: var1, flags: 0)))
11793 ok = false;
11794 }
11795
11796 /* Only do something if we found more than two objects. Otherwise,
11797 nothing has changed and we risk infinite recursion. */
11798 if (ok
11799 && ((var0 != 0) + (var1 != 0)
11800 + (minus_var0 != 0) + (minus_var1 != 0)
11801 + (con0 != 0) + (con1 != 0)
11802 + (minus_con0 != 0) + (minus_con1 != 0)
11803 + (lit0 != 0) + (lit1 != 0)
11804 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11805 {
11806 int var0_origin = (var0 != 0) + 2 * (var1 != 0);
11807 int minus_var0_origin
11808 = (minus_var0 != 0) + 2 * (minus_var1 != 0);
11809 int con0_origin = (con0 != 0) + 2 * (con1 != 0);
11810 int minus_con0_origin
11811 = (minus_con0 != 0) + 2 * (minus_con1 != 0);
11812 int lit0_origin = (lit0 != 0) + 2 * (lit1 != 0);
11813 int minus_lit0_origin
11814 = (minus_lit0 != 0) + 2 * (minus_lit1 != 0);
11815 var0 = associate_trees (loc, t1: var0, t2: var1, code, type: atype);
11816 minus_var0 = associate_trees (loc, t1: minus_var0, t2: minus_var1,
11817 code, type: atype);
11818 con0 = associate_trees (loc, t1: con0, t2: con1, code, type: atype);
11819 minus_con0 = associate_trees (loc, t1: minus_con0, t2: minus_con1,
11820 code, type: atype);
11821 lit0 = associate_trees (loc, t1: lit0, t2: lit1, code, type: atype);
11822 minus_lit0 = associate_trees (loc, t1: minus_lit0, t2: minus_lit1,
11823 code, type: atype);
11824
11825 if (minus_var0 && var0)
11826 {
11827 var0_origin |= minus_var0_origin;
11828 var0 = associate_trees (loc, t1: var0, t2: minus_var0,
11829 code: MINUS_EXPR, type: atype);
11830 minus_var0 = 0;
11831 minus_var0_origin = 0;
11832 }
11833 if (minus_con0 && con0)
11834 {
11835 con0_origin |= minus_con0_origin;
11836 con0 = associate_trees (loc, t1: con0, t2: minus_con0,
11837 code: MINUS_EXPR, type: atype);
11838 minus_con0 = 0;
11839 minus_con0_origin = 0;
11840 }
11841
11842 /* Preserve the MINUS_EXPR if the negative part of the literal is
11843 greater than the positive part. Otherwise, the multiplicative
11844 folding code (i.e. extract_muldiv) may be fooled when unsigned
11845 constants are subtracted, as in the following
11846 example: ((X*2 + 4) - 8U)/2. */
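 /* In that example LIT0 is 4 and MINUS_LIT0 is 8U: keeping the
    difference on the negated side, i.e. (X*2) - 4U, rather than adding
    the wrapped constant 4 - 8U, lets extract_muldiv still divide the
    whole expression by 2.  */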
11847 if (minus_lit0 && lit0)
11848 {
11849 if (TREE_CODE (lit0) == INTEGER_CST
11850 && TREE_CODE (minus_lit0) == INTEGER_CST
11851 && tree_int_cst_lt (t1: lit0, t2: minus_lit0)
11852 /* But avoid ending up with only negated parts. */
11853 && (var0 || con0))
11854 {
11855 minus_lit0_origin |= lit0_origin;
11856 minus_lit0 = associate_trees (loc, t1: minus_lit0, t2: lit0,
11857 code: MINUS_EXPR, type: atype);
11858 lit0 = 0;
11859 lit0_origin = 0;
11860 }
11861 else
11862 {
11863 lit0_origin |= minus_lit0_origin;
11864 lit0 = associate_trees (loc, t1: lit0, t2: minus_lit0,
11865 code: MINUS_EXPR, type: atype);
11866 minus_lit0 = 0;
11867 minus_lit0_origin = 0;
11868 }
11869 }
11870
11871 /* Don't introduce overflows through reassociation. */
11872 if ((lit0 && TREE_OVERFLOW_P (lit0))
11873 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11874 return NULL_TREE;
11875
11876 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11877 con0_origin |= lit0_origin;
11878 con0 = associate_trees (loc, t1: con0, t2: lit0, code, type: atype);
11879 minus_con0_origin |= minus_lit0_origin;
11880 minus_con0 = associate_trees (loc, t1: minus_con0, t2: minus_lit0,
11881 code, type: atype);
11882
11883 /* Eliminate minus_con0. */
11884 if (minus_con0)
11885 {
11886 if (con0)
11887 {
11888 con0_origin |= minus_con0_origin;
11889 con0 = associate_trees (loc, t1: con0, t2: minus_con0,
11890 code: MINUS_EXPR, type: atype);
11891 }
11892 else if (var0)
11893 {
11894 var0_origin |= minus_con0_origin;
11895 var0 = associate_trees (loc, t1: var0, t2: minus_con0,
11896 code: MINUS_EXPR, type: atype);
11897 }
11898 else
11899 gcc_unreachable ();
11900 }
11901
11902 /* Eliminate minus_var0. */
11903 if (minus_var0)
11904 {
11905 if (con0)
11906 {
11907 con0_origin |= minus_var0_origin;
11908 con0 = associate_trees (loc, t1: con0, t2: minus_var0,
11909 code: MINUS_EXPR, type: atype);
11910 }
11911 else
11912 gcc_unreachable ();
11913 }
11914
11915 /* Reassociate only if there has been any actual association
11916 between subtrees from op0 and subtrees from op1 in at
11917 least one of the operands, otherwise we risk infinite
11918 recursion. See PR114084. */
11919 if (var0_origin != 3 && con0_origin != 3)
11920 return NULL_TREE;
11921
11922 return
11923 fold_convert_loc (loc, type, arg: associate_trees (loc, t1: var0, t2: con0,
11924 code, type: atype));
11925 }
11926 }
11927
11928 return NULL_TREE;
11929
11930 case POINTER_DIFF_EXPR:
11931 case MINUS_EXPR:
11932 /* Fold &a[i] - &a[j] to i-j. */
11933 if (TREE_CODE (arg0) == ADDR_EXPR
11934 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11935 && TREE_CODE (arg1) == ADDR_EXPR
11936 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11937 {
11938 tree tem = fold_addr_of_array_ref_difference (loc, type,
11939 TREE_OPERAND (arg0, 0),
11940 TREE_OPERAND (arg1, 0),
11941 use_pointer_diff: code
11942 == POINTER_DIFF_EXPR);
11943 if (tem)
11944 return tem;
11945 }
11946
11947 /* Further transformations are not for pointers. */
11948 if (code == POINTER_DIFF_EXPR)
11949 return NULL_TREE;
11950
11951 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11952 if (TREE_CODE (arg0) == NEGATE_EXPR
11953 && negate_expr_p (t: op1)
11954 /* If arg0 is e.g. unsigned int and type is int, then this could
11955 introduce UB, because if A is INT_MIN at runtime, the original
11956 expression can be well defined while the latter is not.
11957 See PR83269. */
11958 && !(ANY_INTEGRAL_TYPE_P (type)
11959 && TYPE_OVERFLOW_UNDEFINED (type)
11960 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11961 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11962 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (t: op1),
11963 fold_convert_loc (loc, type,
11964 TREE_OPERAND (arg0, 0)));
11965
11966 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11967 __complex__ ( x, -y ). This is not the same for SNaNs or if
11968 signed zeros are involved. */
11969 if (!HONOR_SNANS (arg0)
11970 && !HONOR_SIGNED_ZEROS (arg0)
11971 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11972 {
11973 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11974 tree arg0r = fold_unary_loc (loc, code: REALPART_EXPR, type: rtype, op0: arg0);
11975 tree arg0i = fold_unary_loc (loc, code: IMAGPART_EXPR, type: rtype, op0: arg0);
11976 bool arg0rz = false, arg0iz = false;
11977 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11978 || (arg0i && (arg0iz = real_zerop (arg0i))))
11979 {
11980 tree arg1r = fold_unary_loc (loc, code: REALPART_EXPR, type: rtype, op0: arg1);
11981 tree arg1i = fold_unary_loc (loc, code: IMAGPART_EXPR, type: rtype, op0: arg1);
11982 if (arg0rz && arg1i && real_zerop (arg1i))
11983 {
11984 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11985 arg1r ? arg1r
11986 : build1 (REALPART_EXPR, rtype, arg1));
11987 tree ip = arg0i ? arg0i
11988 : build1 (IMAGPART_EXPR, rtype, arg0);
11989 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11990 }
11991 else if (arg0iz && arg1r && real_zerop (arg1r))
11992 {
11993 tree rp = arg0r ? arg0r
11994 : build1 (REALPART_EXPR, rtype, arg0);
11995 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11996 arg1i ? arg1i
11997 : build1 (IMAGPART_EXPR, rtype, arg1));
11998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11999 }
12000 }
12001 }
12002
12003 /* A - B -> A + (-B) if B is easily negatable. */
12004 if (negate_expr_p (t: op1)
12005 && ! TYPE_OVERFLOW_SANITIZED (type)
12006 && ((FLOAT_TYPE_P (type)
12007 /* Avoid this transformation if B is a positive REAL_CST. */
12008 && (TREE_CODE (op1) != REAL_CST
12009 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
12010 || INTEGRAL_TYPE_P (type)))
12011 return fold_build2_loc (loc, PLUS_EXPR, type,
12012 fold_convert_loc (loc, type, arg: arg0),
12013 negate_expr (t: op1));
12014
12015 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
12016 one. Make sure the type is not saturating and has the signedness of
12017 the stripped operands, as fold_plusminus_mult_expr will re-associate.
12018 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
12019 if ((TREE_CODE (arg0) == MULT_EXPR
12020 || TREE_CODE (arg1) == MULT_EXPR)
12021 && !TYPE_SATURATING (type)
12022 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
12023 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
12024 && (!FLOAT_TYPE_P (type) || flag_associative_math))
12025 {
12026 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
12027 if (tem)
12028 return tem;
12029 }
12030
12031 goto associate;
12032
12033 case MULT_EXPR:
12034 if (! FLOAT_TYPE_P (type))
12035 {
12036 /* Transform x * -C into -x * C if x is easily negatable. */
12037 if (TREE_CODE (op1) == INTEGER_CST
12038 && tree_int_cst_sgn (op1) == -1
12039 && negate_expr_p (t: op0)
12040 && negate_expr_p (t: op1)
12041 && (tem = negate_expr (t: op1)) != op1
12042 && ! TREE_OVERFLOW (tem))
12043 return fold_build2_loc (loc, MULT_EXPR, type,
12044 fold_convert_loc (loc, type,
12045 arg: negate_expr (t: op0)), tem);
12046
12047 strict_overflow_p = false;
12048 if (TREE_CODE (arg1) == INTEGER_CST
12049 && (tem = extract_muldiv (t: op0, c: arg1, code, NULL_TREE,
12050 strict_overflow_p: &strict_overflow_p)) != 0)
12051 {
12052 if (strict_overflow_p)
12053 fold_overflow_warning (gmsgid: ("assuming signed overflow does not "
12054 "occur when simplifying "
12055 "multiplication"),
12056 wc: WARN_STRICT_OVERFLOW_MISC);
12057 return fold_convert_loc (loc, type, arg: tem);
12058 }
12059
12060 /* Optimize z * conj(z) for integer complex numbers. */
12061 if (TREE_CODE (arg0) == CONJ_EXPR
12062 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, flags: 0))
12063 return fold_mult_zconjz (loc, type, expr: arg1);
12064 if (TREE_CODE (arg1) == CONJ_EXPR
12065 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), flags: 0))
12066 return fold_mult_zconjz (loc, type, expr: arg0);
12067 }
12068 else
12069 {
12070 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
12071 This is not the same for NaNs or if signed zeros are
12072 involved. */
12073 if (!HONOR_NANS (arg0)
12074 && !HONOR_SIGNED_ZEROS (arg0)
12075 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12076 && TREE_CODE (arg1) == COMPLEX_CST
12077 && real_zerop (TREE_REALPART (arg1)))
12078 {
12079 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
12080 if (real_onep (TREE_IMAGPART (arg1)))
12081 return
12082 fold_build2_loc (loc, COMPLEX_EXPR, type,
12083 negate_expr (t: fold_build1_loc (loc, IMAGPART_EXPR,
12084 rtype, arg0)),
12085 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
12086 else if (real_minus_onep (TREE_IMAGPART (arg1)))
12087 return
12088 fold_build2_loc (loc, COMPLEX_EXPR, type,
12089 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
12090 negate_expr (t: fold_build1_loc (loc, REALPART_EXPR,
12091 rtype, arg0)));
12092 }
12093
12094 /* Optimize z * conj(z) for floating point complex numbers.
12095 Guarded by flag_unsafe_math_optimizations as non-finite
12096 imaginary components don't produce scalar results. */
12097 if (flag_unsafe_math_optimizations
12098 && TREE_CODE (arg0) == CONJ_EXPR
12099 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, flags: 0))
12100 return fold_mult_zconjz (loc, type, expr: arg1);
12101 if (flag_unsafe_math_optimizations
12102 && TREE_CODE (arg1) == CONJ_EXPR
12103 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), flags: 0))
12104 return fold_mult_zconjz (loc, type, expr: arg0);
12105 }
12106 goto associate;
12107
12108 case BIT_IOR_EXPR:
12109 /* Canonicalize (X & C1) | C2. */
12110 if (TREE_CODE (arg0) == BIT_AND_EXPR
12111 && TREE_CODE (arg1) == INTEGER_CST
12112 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12113 {
12114 int width = TYPE_PRECISION (type), w;
12115 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
12116 wide_int c2 = wi::to_wide (t: arg1);
12117
12118 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
12119 if ((c1 & c2) == c1)
12120 return omit_one_operand_loc (loc, type, result: arg1,
12121 TREE_OPERAND (arg0, 0));
12122
12123 wide_int msk = wi::mask (width, negate_p: false,
12124 TYPE_PRECISION (TREE_TYPE (arg1)));
12125
12126 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
12127 if (wi::bit_and_not (x: msk, y: c1 | c2) == 0)
12128 {
12129 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12130 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
12131 }
12132
12133 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
12134 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
12135 mode which allows further optimizations. */
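 /* For example, (X & 0x3F) | 0x0F becomes (X & 0x30) | 0x0F here, while
    (X & 0xFF) | 0x0F keeps the byte mask 0xFF, since such a mask may
    enable further folding later.  */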
12136 c1 &= msk;
12137 c2 &= msk;
12138 wide_int c3 = wi::bit_and_not (x: c1, y: c2);
12139 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
12140 {
12141 wide_int mask = wi::mask (width: w, negate_p: false,
12142 TYPE_PRECISION (type));
12143 if (((c1 | c2) & mask) == mask
12144 && wi::bit_and_not (x: c1, y: mask) == 0)
12145 {
12146 c3 = mask;
12147 break;
12148 }
12149 }
12150
12151 if (c3 != c1)
12152 {
12153 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12154 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
12155 wide_int_to_tree (type, cst: c3));
12156 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
12157 }
12158 }
12159
12160 /* See if this can be simplified into a rotate first. If that
12161 is unsuccessful, continue in the association code. */
12162 goto bit_rotate;
12163
12164 case BIT_XOR_EXPR:
12165 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
12166 if (TREE_CODE (arg0) == BIT_AND_EXPR
12167 && INTEGRAL_TYPE_P (type)
12168 && integer_onep (TREE_OPERAND (arg0, 1))
12169 && integer_onep (arg1))
12170 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
12171 build_zero_cst (TREE_TYPE (arg0)));
12172
12173 /* See if this can be simplified into a rotate first. If that
12174 is unsuccessful, continue in the association code. */
12175 goto bit_rotate;
12176
12177 case BIT_AND_EXPR:
12178 /* Fold !X & 1 as X == 0. */
12179 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12180 && integer_onep (arg1))
12181 {
12182 tem = TREE_OPERAND (arg0, 0);
12183 return fold_build2_loc (loc, EQ_EXPR, type, tem,
12184 build_zero_cst (TREE_TYPE (tem)));
12185 }
12186
12187 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
12188 multiple of 1 << CST. */
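 /* For example, (X * 4) & -4 folds to X * 4, because the product always
    has its two low bits clear, so the mask -4 == -(1 << 2) is a no-op.  */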
12189 if (TREE_CODE (arg1) == INTEGER_CST)
12190 {
12191 wi::tree_to_wide_ref cst1 = wi::to_wide (t: arg1);
12192 wide_int ncst1 = -cst1;
12193 if ((cst1 & ncst1) == ncst1
12194 && multiple_of_p (type, arg0,
12195 wide_int_to_tree (TREE_TYPE (arg1), cst: ncst1)))
12196 return fold_convert_loc (loc, type, arg: arg0);
12197 }
12198
12199 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
12200 bits from CST2. */
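 /* For example, (X * 8) & 7 folds to 0, and (X * 4) & 6 becomes
    (X * 4) & 4, since the low two bits of X * 4 are known to be zero.  */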
12201 if (TREE_CODE (arg1) == INTEGER_CST
12202 && TREE_CODE (arg0) == MULT_EXPR
12203 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12204 {
12205 wi::tree_to_wide_ref warg1 = wi::to_wide (t: arg1);
12206 wide_int masked
12207 = mask_with_tz (type, x: warg1, y: wi::to_wide (TREE_OPERAND (arg0, 1)));
12208
12209 if (masked == 0)
12210 return omit_two_operands_loc (loc, type, result: build_zero_cst (type),
12211 omitted1: arg0, omitted2: arg1);
12212 else if (masked != warg1)
12213 {
12214 /* Avoid the transform if arg1 is a mask of some
12215 mode which allows further optimizations. */
12216 int pop = wi::popcount (warg1);
12217 if (!(pop >= BITS_PER_UNIT
12218 && pow2p_hwi (x: pop)
12219 && wi::mask (width: pop, negate_p: false, precision: warg1.get_precision ()) == warg1))
12220 return fold_build2_loc (loc, code, type, op0,
12221 wide_int_to_tree (type, cst: masked));
12222 }
12223 }
12224
12225 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12226 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12227 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12228 {
12229 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12230
12231 wide_int mask = wide_int::from (x: wi::to_wide (t: arg1), precision: prec, sgn: UNSIGNED);
12232 if (mask == -1)
12233 return
12234 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12235 }
12236
12237 goto associate;
12238
12239 case RDIV_EXPR:
12240 /* Don't touch a floating-point divide by zero unless the mode
12241 of the constant can represent infinity. */
12242 if (TREE_CODE (arg1) == REAL_CST
12243 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12244 && real_zerop (arg1))
12245 return NULL_TREE;
12246
12247 /* (-A) / (-B) -> A / B */
12248 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (t: arg1))
12249 return fold_build2_loc (loc, RDIV_EXPR, type,
12250 TREE_OPERAND (arg0, 0),
12251 negate_expr (t: arg1));
12252 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (t: arg0))
12253 return fold_build2_loc (loc, RDIV_EXPR, type,
12254 negate_expr (t: arg0),
12255 TREE_OPERAND (arg1, 0));
12256 return NULL_TREE;
12257
12258 case TRUNC_DIV_EXPR:
12259 /* Fall through */
12260
12261 case FLOOR_DIV_EXPR:
12262 /* Simplify A / (B << N) where A and B are positive and B is
12263 a power of 2, to A >> (N + log2(B)). */
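 /* For example, with unsigned A, A / (4 << N) becomes A >> (N + 2).  */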
12264 strict_overflow_p = false;
12265 if (TREE_CODE (arg1) == LSHIFT_EXPR
12266 && (TYPE_UNSIGNED (type)
12267 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12268 {
12269 tree sval = TREE_OPERAND (arg1, 0);
12270 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12271 {
12272 tree sh_cnt = TREE_OPERAND (arg1, 1);
12273 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12274 wi::exact_log2 (wi::to_wide (t: sval)));
12275
12276 if (strict_overflow_p)
12277 fold_overflow_warning (gmsgid: ("assuming signed overflow does not "
12278 "occur when simplifying A / (B << N)"),
12279 wc: WARN_STRICT_OVERFLOW_MISC);
12280
12281 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12282 sh_cnt, pow2);
12283 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12284 fold_convert_loc (loc, type, arg: arg0), sh_cnt);
12285 }
12286 }
12287
12288 /* Fall through */
12289
12290 case ROUND_DIV_EXPR:
12291 case CEIL_DIV_EXPR:
12292 case EXACT_DIV_EXPR:
12293 if (integer_zerop (arg1))
12294 return NULL_TREE;
12295
12296 /* Convert -A / -B to A / B when the type is signed and overflow is
12297 undefined. */
12298 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12299 && TREE_CODE (op0) == NEGATE_EXPR
12300 && negate_expr_p (t: op1))
12301 {
12302 if (ANY_INTEGRAL_TYPE_P (type))
12303 fold_overflow_warning (gmsgid: ("assuming signed overflow does not occur "
12304 "when distributing negation across "
12305 "division"),
12306 wc: WARN_STRICT_OVERFLOW_MISC);
12307 return fold_build2_loc (loc, code, type,
12308 fold_convert_loc (loc, type,
12309 TREE_OPERAND (arg0, 0)),
12310 negate_expr (t: op1));
12311 }
12312 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12313 && TREE_CODE (arg1) == NEGATE_EXPR
12314 && negate_expr_p (t: op0))
12315 {
12316 if (ANY_INTEGRAL_TYPE_P (type))
12317 fold_overflow_warning (gmsgid: ("assuming signed overflow does not occur "
12318 "when distributing negation across "
12319 "division"),
12320 wc: WARN_STRICT_OVERFLOW_MISC);
12321 return fold_build2_loc (loc, code, type,
12322 negate_expr (t: op0),
12323 fold_convert_loc (loc, type,
12324 TREE_OPERAND (arg1, 0)));
12325 }
12326
12327 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12328 operation, EXACT_DIV_EXPR.
12329
12330 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12331 At one time others generated faster code; it's not clear whether they
12332 still do after the last round of changes to the DIV code in expmed.cc. */
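 /* For example, a FLOOR_DIV_EXPR of X * 12 by 4 becomes an
    EXACT_DIV_EXPR, which the expander can turn into a cheaper sequence
    (a plain shift for power-of-two divisors).  */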
12333 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12334 && multiple_of_p (type, arg0, arg1))
12335 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
12336 fold_convert (type, arg0),
12337 fold_convert (type, arg1));
12338
12339 strict_overflow_p = false;
12340 if (TREE_CODE (arg1) == INTEGER_CST
12341 && (tem = extract_muldiv (t: op0, c: arg1, code, NULL_TREE,
12342 strict_overflow_p: &strict_overflow_p)) != 0)
12343 {
12344 if (strict_overflow_p)
12345 fold_overflow_warning (gmsgid: ("assuming signed overflow does not occur "
12346 "when simplifying division"),
12347 wc: WARN_STRICT_OVERFLOW_MISC);
12348 return fold_convert_loc (loc, type, arg: tem);
12349 }
12350
12351 return NULL_TREE;
12352
12353 case CEIL_MOD_EXPR:
12354 case FLOOR_MOD_EXPR:
12355 case ROUND_MOD_EXPR:
12356 case TRUNC_MOD_EXPR:
12357 strict_overflow_p = false;
12358 if (TREE_CODE (arg1) == INTEGER_CST
12359 && (tem = extract_muldiv (t: op0, c: arg1, code, NULL_TREE,
12360 strict_overflow_p: &strict_overflow_p)) != 0)
12361 {
12362 if (strict_overflow_p)
12363 fold_overflow_warning (gmsgid: ("assuming signed overflow does not occur "
12364 "when simplifying modulus"),
12365 wc: WARN_STRICT_OVERFLOW_MISC);
12366 return fold_convert_loc (loc, type, arg: tem);
12367 }
12368
12369 return NULL_TREE;
12370
12371 case LROTATE_EXPR:
12372 case RROTATE_EXPR:
12373 case RSHIFT_EXPR:
12374 case LSHIFT_EXPR:
12375 /* Since negative shift count is not well-defined,
12376 don't try to compute it in the compiler. */
12377 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12378 return NULL_TREE;
12379
12380 prec = element_precision (type);
12381
12382 /* If we have a rotate of a bit operation with the rotate count and
12383 the second operand of the bit operation both constant,
12384 permute the two operations. */
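 /* For example, on an 8-bit unsigned type, (X & 0xF0) rotated right by 4
    becomes (X r>> 4) & 0x0F: the constant mask is rotated at compile
    time.  */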
12385 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12386 && (TREE_CODE (arg0) == BIT_AND_EXPR
12387 || TREE_CODE (arg0) == BIT_IOR_EXPR
12388 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12389 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12390 {
12391 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12392 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12393 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12394 fold_build2_loc (loc, code, type,
12395 arg00, arg1),
12396 fold_build2_loc (loc, code, type,
12397 arg01, arg1));
12398 }
12399
12400 /* Two consecutive rotates adding up to some integer
12401 multiple of the precision of the type can be ignored. */
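 /* For example, on a 32-bit type, (X r>> 10) r>> 22 is simply X,
    because 10 + 22 is a multiple of the precision.  */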
12402 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12403 && TREE_CODE (arg0) == RROTATE_EXPR
12404 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12405 && wi::umod_trunc (x: wi::to_wide (t: arg1)
12406 + wi::to_wide (TREE_OPERAND (arg0, 1)),
12407 y: prec) == 0)
12408 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12409
12410 return NULL_TREE;
12411
12412 case MIN_EXPR:
12413 case MAX_EXPR:
12414 goto associate;
12415
12416 case TRUTH_ANDIF_EXPR:
12417 /* Note that the operands of this must be ints
12418 and their values must be 0 or 1.
12419 ("true" is a fixed value perhaps depending on the language.) */
12420 /* If first arg is constant zero, return it. */
12421 if (integer_zerop (arg0))
12422 return fold_convert_loc (loc, type, arg: arg0);
12423 /* FALLTHRU */
12424 case TRUTH_AND_EXPR:
12425 /* If either arg is constant true, drop it. */
12426 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12427 return non_lvalue_loc (loc, x: fold_convert_loc (loc, type, arg: arg1));
12428 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12429 /* Preserve sequence points. */
12430 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12431 return non_lvalue_loc (loc, x: fold_convert_loc (loc, type, arg: arg0));
12432 /* If second arg is constant zero, result is zero, but first arg
12433 must be evaluated. */
12434 if (integer_zerop (arg1))
12435 return omit_one_operand_loc (loc, type, result: arg1, omitted: arg0);
12436 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12437 case will be handled here. */
12438 if (integer_zerop (arg0))
12439 return omit_one_operand_loc (loc, type, result: arg0, omitted: arg1);
12440
12441 /* !X && X is always false. */
12442 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12443 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, flags: 0))
12444 return omit_one_operand_loc (loc, type, integer_zero_node, omitted: arg1);
12445 /* X && !X is always false. */
12446 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12447 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), flags: 0))
12448 return omit_one_operand_loc (loc, type, integer_zero_node, omitted: arg0);
12449
12450 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12451 means A >= Y && A != MAX, but in this case we know that
12452 A < X <= MAX. */
12453
12454 if (!TREE_SIDE_EFFECTS (arg0)
12455 && !TREE_SIDE_EFFECTS (arg1))
12456 {
12457 tem = fold_to_nonsharp_ineq_using_bound (loc, ineq: arg0, bound: arg1);
12458 if (tem && !operand_equal_p (arg0: tem, arg1: arg0, flags: 0))
12459 return fold_convert (type,
12460 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12461 tem, arg1));
12462
12463 tem = fold_to_nonsharp_ineq_using_bound (loc, ineq: arg1, bound: arg0);
12464 if (tem && !operand_equal_p (arg0: tem, arg1, flags: 0))
12465 return fold_convert (type,
12466 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12467 arg0, tem));
12468 }
12469
12470 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12471 != NULL_TREE)
12472 return tem;
12473
12474 return NULL_TREE;
12475
12476 case TRUTH_ORIF_EXPR:
12477 /* Note that the operands of this must be ints
12478 and their values must be 0 or true.
12479 ("true" is a fixed value perhaps depending on the language.) */
12480 /* If first arg is constant true, return it. */
12481 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12482 return fold_convert_loc (loc, type, arg: arg0);
12483 /* FALLTHRU */
12484 case TRUTH_OR_EXPR:
12485 /* If either arg is constant zero, drop it. */
12486 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12487 return non_lvalue_loc (loc, x: fold_convert_loc (loc, type, arg: arg1));
12488 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12489 /* Preserve sequence points. */
12490 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12491 return non_lvalue_loc (loc, x: fold_convert_loc (loc, type, arg: arg0));
12492 /* If second arg is constant true, result is true, but we must
12493 evaluate first arg. */
12494 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12495 return omit_one_operand_loc (loc, type, result: arg1, omitted: arg0);
12496 /* Likewise for first arg, but note this only occurs here for
12497 TRUTH_OR_EXPR. */
12498 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12499 return omit_one_operand_loc (loc, type, result: arg0, omitted: arg1);
12500
12501 /* !X || X is always true. */
12502 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12503 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, flags: 0))
12504 return omit_one_operand_loc (loc, type, integer_one_node, omitted: arg1);
12505 /* X || !X is always true. */
12506 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12507 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), flags: 0))
12508 return omit_one_operand_loc (loc, type, integer_one_node, omitted: arg0);
12509
12510 /* (X && !Y) || (!X && Y) is X ^ Y */
12511 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12512 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12513 {
12514 tree a0, a1, l0, l1, n0, n1;
12515
12516 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12517 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12518
12519 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12520 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12521
12522 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12523 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12524
12525 if ((operand_equal_p (arg0: n0, arg1: a0, flags: 0)
12526 && operand_equal_p (arg0: n1, arg1: a1, flags: 0))
12527 || (operand_equal_p (arg0: n0, arg1: a1, flags: 0)
12528 && operand_equal_p (arg0: n1, arg1: a0, flags: 0)))
12529 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12530 }
12531
12532 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12533 != NULL_TREE)
12534 return tem;
12535
12536 return NULL_TREE;
12537
12538 case TRUTH_XOR_EXPR:
12539 /* If the second arg is constant zero, drop it. */
12540 if (integer_zerop (arg1))
12541 return non_lvalue_loc (loc, x: fold_convert_loc (loc, type, arg: arg0));
12542 /* If the second arg is constant true, this is a logical inversion. */
12543 if (integer_onep (arg1))
12544 {
12545 tem = invert_truthvalue_loc (loc, arg: arg0);
12546 return non_lvalue_loc (loc, x: fold_convert_loc (loc, type, arg: tem));
12547 }
12548 /* Identical arguments cancel to zero. */
12549 if (operand_equal_p (arg0, arg1, flags: 0))
12550 return omit_one_operand_loc (loc, type, integer_zero_node, omitted: arg0);
12551
12552 /* !X ^ X is always true. */
12553 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, flags: 0))
12555 return omit_one_operand_loc (loc, type, integer_one_node, omitted: arg1);
12556
12557 /* X ^ !X is always true. */
12558 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12559 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), flags: 0))
12560 return omit_one_operand_loc (loc, type, integer_one_node, omitted: arg0);
12561
12562 return NULL_TREE;
12563
12564 case EQ_EXPR:
12565 case NE_EXPR:
12566 STRIP_NOPS (arg0);
12567 STRIP_NOPS (arg1);
12568
12569 tem = fold_comparison (loc, code, type, op0, op1);
12570 if (tem != NULL_TREE)
12571 return tem;
12572
12573 /* bool_var != 1 becomes !bool_var. */
12574 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12575 && code == NE_EXPR)
12576 return fold_convert_loc (loc, type,
12577 arg: fold_build1_loc (loc, TRUTH_NOT_EXPR,
12578 TREE_TYPE (arg0), arg0));
12579
12580 /* bool_var == 0 becomes !bool_var. */
12581 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12582 && code == EQ_EXPR)
12583 return fold_convert_loc (loc, type,
12584 arg: fold_build1_loc (loc, TRUTH_NOT_EXPR,
12585 TREE_TYPE (arg0), arg0));
12586
12587 /* !exp != 0 becomes !exp */
12588 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12589 && code == NE_EXPR)
12590 return non_lvalue_loc (loc, x: fold_convert_loc (loc, type, arg: arg0));
12591
12592 /* If this is an EQ or NE comparison with zero and ARG0 is
12593 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12594 two operations, but the latter can be done in one less insn
12595 on machines that have only two-operand insns or on which a
12596 constant cannot be the first operand. */
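 /* For example, ((1 << n) & flags) == 0 becomes ((flags >> n) & 1) == 0,
    avoiding the need to materialize the constant 1 before the shift.  */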
12597 if (TREE_CODE (arg0) == BIT_AND_EXPR
12598 && integer_zerop (arg1))
12599 {
12600 tree arg00 = TREE_OPERAND (arg0, 0);
12601 tree arg01 = TREE_OPERAND (arg0, 1);
12602 if (TREE_CODE (arg00) == LSHIFT_EXPR
12603 && integer_onep (TREE_OPERAND (arg00, 0)))
12604 {
12605 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12606 arg01, TREE_OPERAND (arg00, 1));
12607 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12608 build_one_cst (TREE_TYPE (arg0)));
12609 return fold_build2_loc (loc, code, type,
12610 fold_convert_loc (loc, TREE_TYPE (arg1),
12611 arg: tem), arg1);
12612 }
12613 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12614 && integer_onep (TREE_OPERAND (arg01, 0)))
12615 {
12616 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12617 arg00, TREE_OPERAND (arg01, 1));
12618 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12619 build_one_cst (TREE_TYPE (arg0)));
12620 return fold_build2_loc (loc, code, type,
12621 fold_convert_loc (loc, TREE_TYPE (arg1),
12622 arg: tem), arg1);
12623 }
12624 }
12625
12626 /* If this is a comparison of a field, we may be able to simplify it. */
12627 if ((TREE_CODE (arg0) == COMPONENT_REF
12628 || TREE_CODE (arg0) == BIT_FIELD_REF)
12629 /* Handle the constant case even without -O
12630 to make sure the warnings are given. */
12631 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12632 {
12633 t1 = optimize_bit_field_compare (loc, code, compare_type: type, lhs: arg0, rhs: arg1);
12634 if (t1)
12635 return t1;
12636 }
12637
12638 /* Optimize comparisons of strlen vs zero to a compare of the
12639 first character of the string vs zero. To wit,
12640 strlen(ptr) == 0 => *ptr == 0
12641 strlen(ptr) != 0 => *ptr != 0
12642 Other cases should reduce to one of these two (or a constant)
12643 due to the return value of strlen being unsigned. */
12644 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12645 {
12646 tree fndecl = get_callee_fndecl (arg0);
12647
12648 if (fndecl
12649 && fndecl_built_in_p (node: fndecl, name1: BUILT_IN_STRLEN)
12650 && call_expr_nargs (arg0) == 1
12651 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12652 == POINTER_TYPE))
12653 {
12654 tree ptrtype
12655 = build_pointer_type (build_qualified_type (char_type_node,
12656 TYPE_QUAL_CONST));
12657 tree ptr = fold_convert_loc (loc, type: ptrtype,
12658 CALL_EXPR_ARG (arg0, 0));
12659 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12660 return fold_build2_loc (loc, code, type, iref,
12661 build_int_cst (TREE_TYPE (iref), 0));
12662 }
12663 }
12664
12665 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12666 of X. Similarly fold (X >> C) == 0 into X >= 0. */
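 /* For example, for a 32-bit int X, (X >> 31) != 0 becomes X < 0 and
    (X >> 31) == 0 becomes X >= 0.  */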
12667 if (TREE_CODE (arg0) == RSHIFT_EXPR
12668 && integer_zerop (arg1)
12669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12670 {
12671 tree arg00 = TREE_OPERAND (arg0, 0);
12672 tree arg01 = TREE_OPERAND (arg0, 1);
12673 tree itype = TREE_TYPE (arg00);
12674 if (wi::to_wide (t: arg01) == element_precision (itype) - 1)
12675 {
12676 if (TYPE_UNSIGNED (itype))
12677 {
12678 itype = signed_type_for (itype);
12679 arg00 = fold_convert_loc (loc, type: itype, arg: arg00);
12680 }
12681 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12682 type, arg00, build_zero_cst (itype));
12683 }
12684 }
12685
12686 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12687 (X & C) == 0 when C is a single bit. */
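 /* For example, (~X & 4) == 0 becomes (X & 4) != 0: both simply test
    that bit 2 of X is set.  */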
12688 if (TREE_CODE (arg0) == BIT_AND_EXPR
12689 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12690 && integer_zerop (arg1)
12691 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12692 {
12693 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12694 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12695 TREE_OPERAND (arg0, 1));
12696 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12697 type, tem,
12698 fold_convert_loc (loc, TREE_TYPE (arg0),
12699 arg: arg1));
12700 }
12701
12702 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12703 constant C is a power of two, i.e. a single bit. */
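 /* For example, ((X & 4) ^ 4) == 0 becomes (X & 4) != 0.  */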
12704 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12705 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12706 && integer_zerop (arg1)
12707 && integer_pow2p (TREE_OPERAND (arg0, 1))
12708 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12709 TREE_OPERAND (arg0, 1), flags: OEP_ONLY_CONST))
12710 {
12711 tree arg00 = TREE_OPERAND (arg0, 0);
12712 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12713 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12714 }
12715
12716 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12717 when C is a power of two, i.e. a single bit. */
12718 if (TREE_CODE (arg0) == BIT_AND_EXPR
12719 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12720 && integer_zerop (arg1)
12721 && integer_pow2p (TREE_OPERAND (arg0, 1))
12722 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12723 TREE_OPERAND (arg0, 1), flags: OEP_ONLY_CONST))
12724 {
12725 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12726 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12727 arg000, TREE_OPERAND (arg0, 1));
12728 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12729 tem, build_int_cst (TREE_TYPE (tem), 0));
12730 }
12731
12732 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12733 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12734 {
12735 tree arg00 = TREE_OPERAND (arg0, 0);
12736 tree arg01 = TREE_OPERAND (arg0, 1);
12737 tree arg10 = TREE_OPERAND (arg1, 0);
12738 tree arg11 = TREE_OPERAND (arg1, 1);
12739 tree itype = TREE_TYPE (arg0);
12740
12741 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12742 operand_equal_p guarantees no side-effects so we don't need
12743 to use omit_one_operand on Z. */
12744 if (operand_equal_p (arg0: arg01, arg1: arg11, flags: 0))
12745 return fold_build2_loc (loc, code, type, arg00,
12746 fold_convert_loc (loc, TREE_TYPE (arg00),
12747 arg: arg10));
12748 if (operand_equal_p (arg0: arg01, arg1: arg10, flags: 0))
12749 return fold_build2_loc (loc, code, type, arg00,
12750 fold_convert_loc (loc, TREE_TYPE (arg00),
12751 arg: arg11));
12752 if (operand_equal_p (arg0: arg00, arg1: arg11, flags: 0))
12753 return fold_build2_loc (loc, code, type, arg01,
12754 fold_convert_loc (loc, TREE_TYPE (arg01),
12755 arg: arg10));
12756 if (operand_equal_p (arg0: arg00, arg1: arg10, flags: 0))
12757 return fold_build2_loc (loc, code, type, arg01,
12758 fold_convert_loc (loc, TREE_TYPE (arg01),
12759 arg: arg11));
12760
12761 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
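 /* For example, (X ^ 5) == (Y ^ 3) becomes (X ^ 6) == Y,
    since 5 ^ 3 == 6.  */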
12762 if (TREE_CODE (arg01) == INTEGER_CST
12763 && TREE_CODE (arg11) == INTEGER_CST)
12764 {
12765 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12766 fold_convert_loc (loc, type: itype, arg: arg11));
12767 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12768 return fold_build2_loc (loc, code, type, tem,
12769 fold_convert_loc (loc, type: itype, arg: arg10));
12770 }
12771 }
12772
12773 /* Attempt to simplify equality/inequality comparisons of complex
12774 values. Only lower the comparison if the result is known or
12775 can be simplified to a single scalar comparison. */
12776 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12777 || TREE_CODE (arg0) == COMPLEX_CST)
12778 && (TREE_CODE (arg1) == COMPLEX_EXPR
12779 || TREE_CODE (arg1) == COMPLEX_CST))
12780 {
12781 tree real0, imag0, real1, imag1;
12782 tree rcond, icond;
12783
12784 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12785 {
12786 real0 = TREE_OPERAND (arg0, 0);
12787 imag0 = TREE_OPERAND (arg0, 1);
12788 }
12789 else
12790 {
12791 real0 = TREE_REALPART (arg0);
12792 imag0 = TREE_IMAGPART (arg0);
12793 }
12794
12795 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12796 {
12797 real1 = TREE_OPERAND (arg1, 0);
12798 imag1 = TREE_OPERAND (arg1, 1);
12799 }
12800 else
12801 {
12802 real1 = TREE_REALPART (arg1);
12803 imag1 = TREE_IMAGPART (arg1);
12804 }
12805
12806 rcond = fold_binary_loc (loc, code, type, op0: real0, op1: real1);
12807 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12808 {
12809 if (integer_zerop (rcond))
12810 {
12811 if (code == EQ_EXPR)
12812 return omit_two_operands_loc (loc, type, boolean_false_node,
12813 omitted1: imag0, omitted2: imag1);
12814 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12815 }
12816 else
12817 {
12818 if (code == NE_EXPR)
12819 return omit_two_operands_loc (loc, type, boolean_true_node,
12820 omitted1: imag0, omitted2: imag1);
12821 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12822 }
12823 }
12824
12825 icond = fold_binary_loc (loc, code, type, op0: imag0, op1: imag1);
12826 if (icond && TREE_CODE (icond) == INTEGER_CST)
12827 {
12828 if (integer_zerop (icond))
12829 {
12830 if (code == EQ_EXPR)
12831 return omit_two_operands_loc (loc, type, boolean_false_node,
12832 omitted1: real0, omitted2: real1);
12833 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12834 }
12835 else
12836 {
12837 if (code == NE_EXPR)
12838 return omit_two_operands_loc (loc, type, boolean_true_node,
12839 omitted1: real0, omitted2: real1);
12840 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12841 }
12842 }
12843 }
12844
12845 return NULL_TREE;
12846
12847 case LT_EXPR:
12848 case GT_EXPR:
12849 case LE_EXPR:
12850 case GE_EXPR:
12851 tem = fold_comparison (loc, code, type, op0, op1);
12852 if (tem != NULL_TREE)
12853 return tem;
12854
12855 /* Transform comparisons of the form X +- C CMP X. */
12856 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12857 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, flags: 0)
12858 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12859 && !HONOR_SNANS (arg0))
12860 {
12861 tree arg01 = TREE_OPERAND (arg0, 1);
12862 enum tree_code code0 = TREE_CODE (arg0);
12863 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12864
12865 /* (X - c) > X becomes false. */
12866 if (code == GT_EXPR
12867 && ((code0 == MINUS_EXPR && is_positive >= 0)
12868 || (code0 == PLUS_EXPR && is_positive <= 0)))
12869 return constant_boolean_node (value: 0, type);
12870
12871 /* Likewise (X + c) < X becomes false. */
12872 if (code == LT_EXPR
12873 && ((code0 == PLUS_EXPR && is_positive >= 0)
12874 || (code0 == MINUS_EXPR && is_positive <= 0)))
12875 return constant_boolean_node (value: 0, type);
12876
12877 /* Convert (X - c) <= X to true. */
12878 if (!HONOR_NANS (arg1)
12879 && code == LE_EXPR
12880 && ((code0 == MINUS_EXPR && is_positive >= 0)
12881 || (code0 == PLUS_EXPR && is_positive <= 0)))
12882 return constant_boolean_node (value: 1, type);
12883
12884 /* Convert (X + c) >= X to true. */
12885 if (!HONOR_NANS (arg1)
12886 && code == GE_EXPR
12887 && ((code0 == PLUS_EXPR && is_positive >= 0)
12888 || (code0 == MINUS_EXPR && is_positive <= 0)))
12889 return constant_boolean_node (value: 1, type);
12890 }
12891
12892 /* If we are comparing an ABS_EXPR with a constant, we can
12893 convert all the cases into explicit comparisons, but they may
12894 well not be faster than doing the ABS and one comparison.
12895 But ABS (X) <= C is a range comparison, which becomes a subtraction
12896 and a comparison, and is probably faster. */
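 /* For example, ABS (X) <= 5 becomes X >= -5 && X <= 5.  */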
12897 if (code == LE_EXPR
12898 && TREE_CODE (arg1) == INTEGER_CST
12899 && TREE_CODE (arg0) == ABS_EXPR
12900 && ! TREE_SIDE_EFFECTS (arg0)
12901 && (tem = negate_expr (t: arg1)) != 0
12902 && TREE_CODE (tem) == INTEGER_CST
12903 && !TREE_OVERFLOW (tem))
12904 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12905 build2 (GE_EXPR, type,
12906 TREE_OPERAND (arg0, 0), tem),
12907 build2 (LE_EXPR, type,
12908 TREE_OPERAND (arg0, 0), arg1));
12909
12910 /* Convert ABS_EXPR<x> >= 0 to true. */
12911 strict_overflow_p = false;
12912 if (code == GE_EXPR
12913 && (integer_zerop (arg1)
12914 || (! HONOR_NANS (arg0)
12915 && real_zerop (arg1)))
12916 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12917 {
12918 if (strict_overflow_p)
12919 fold_overflow_warning (gmsgid: ("assuming signed overflow does not occur "
12920 "when simplifying comparison of "
12921 "absolute value and zero"),
12922 wc: WARN_STRICT_OVERFLOW_CONDITIONAL);
12923 return omit_one_operand_loc (loc, type,
12924 result: constant_boolean_node (value: true, type),
12925 omitted: arg0);
12926 }
12927
12928 /* Convert ABS_EXPR<x> < 0 to false. */
12929 strict_overflow_p = false;
12930 if (code == LT_EXPR
12931 && (integer_zerop (arg1) || real_zerop (arg1))
12932 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12933 {
12934 if (strict_overflow_p)
12935 fold_overflow_warning (gmsgid: ("assuming signed overflow does not occur "
12936 "when simplifying comparison of "
12937 "absolute value and zero"),
12938 wc: WARN_STRICT_OVERFLOW_CONDITIONAL);
12939 return omit_one_operand_loc (loc, type,
12940 result: constant_boolean_node (value: false, type),
12941 omitted: arg0);
12942 }
12943
12944 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12945 and similarly for >= into !=. */
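 /* For example, with unsigned X, X < (1U << Y) becomes (X >> Y) == 0:
    X is below 2**Y exactly when it has no bits set at position Y or
    above.  */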
12946 if ((code == LT_EXPR || code == GE_EXPR)
12947 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12948 && TREE_CODE (arg1) == LSHIFT_EXPR
12949 && integer_onep (TREE_OPERAND (arg1, 0)))
12950 return build2_loc (loc, code: code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12951 arg0: build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12952 TREE_OPERAND (arg1, 1)),
12953 arg1: build_zero_cst (TREE_TYPE (arg0)));
12954
12955 /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
12956 otherwise Y might be >= the number of bits in X's type and thus e.g.
12957 (unsigned char) (1 << Y) for Y == 15 might be 0.
12958 If the cast is widening, then 1 << Y should have unsigned type,
12959 otherwise if Y is the number of bits in the signed shift type minus 1,
12960 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y == 31
12961 might be 0xffffffff80000000. */
12962 if ((code == LT_EXPR || code == GE_EXPR)
12963 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12964 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12965 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12966 && CONVERT_EXPR_P (arg1)
12967 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12968 && (element_precision (TREE_TYPE (arg1))
12969 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12970 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12971 || (element_precision (TREE_TYPE (arg1))
12972 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12973 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12974 {
12975 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12976 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12977 return build2_loc (loc, code: code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12978 arg0: fold_convert_loc (loc, TREE_TYPE (arg0), arg: tem),
12979 arg1: build_zero_cst (TREE_TYPE (arg0)));
12980 }
12981
12982 return NULL_TREE;
12983
12984 case UNORDERED_EXPR:
12985 case ORDERED_EXPR:
12986 case UNLT_EXPR:
12987 case UNLE_EXPR:
12988 case UNGT_EXPR:
12989 case UNGE_EXPR:
12990 case UNEQ_EXPR:
12991 case LTGT_EXPR:
12992 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12993 {
12994 tree targ0 = strip_float_extensions (arg0);
12995 tree targ1 = strip_float_extensions (arg1);
12996 tree newtype = TREE_TYPE (targ0);
12997
12998 if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12999 newtype = TREE_TYPE (targ1);
13000
13001 if (element_precision (newtype) < element_precision (TREE_TYPE (arg0))
13002 && (!VECTOR_TYPE_P (type) || is_truth_type_for (newtype, type)))
13003 return fold_build2_loc (loc, code, type,
13004 fold_convert_loc (loc, type: newtype, arg: targ0),
13005 fold_convert_loc (loc, type: newtype, arg: targ1));
13006 }
13007
13008 return NULL_TREE;
13009
13010 case COMPOUND_EXPR:
13011 /* When pedantic, a compound expression can be neither an lvalue
13012 nor an integer constant expression. */
13013 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13014 return NULL_TREE;
13015 /* Don't let (0, 0) be a null pointer constant. */
13016 tem = integer_zerop (arg1) ? build1_loc (loc, code: NOP_EXPR, type, arg1)
13017 : fold_convert_loc (loc, type, arg: arg1);
13018 return tem;
13019
13020 default:
13021 return NULL_TREE;
13022 } /* switch (code) */
13023}
13024
13025/* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
13026 ((A & N) + B) & M -> (A + B) & M
13027 Similarly if (N & M) == 0,
13028 ((A | N) + B) & M -> (A + B) & M
13029 and for - instead of + (or unary - instead of +)
13030 and/or ^ instead of |.
13031 If B is constant and (B & M) == 0, fold into A & M.
13032
13033 This function is a helper for match.pd patterns.  If some optimization
13034 is possible, return the non-NULL type in which the simplified operation
13035 should be performed; otherwise return NULL_TREE.
13036
13037 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
13038 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
13039 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
13040 +/-. */
13041tree
13042fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
13043 tree arg00, enum tree_code code00, tree arg000, tree arg001,
13044 tree arg01, enum tree_code code01, tree arg010, tree arg011,
13045 tree *pmop)
13046{
13047 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
13048 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
13049 wi::tree_to_wide_ref cst1 = wi::to_wide (t: arg1);
13050 if (~cst1 == 0
13051 || (cst1 & (cst1 + 1)) != 0
13052 || !INTEGRAL_TYPE_P (type)
13053 || (!TYPE_OVERFLOW_WRAPS (type)
13054 && TREE_CODE (type) != INTEGER_TYPE)
13055 || (wi::max_value (type) & cst1) != cst1)
13056 return NULL_TREE;
13057
13058 enum tree_code codes[2] = { code00, code01 };
13059 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
13060 int which = 0;
13061 wide_int cst0;
13062
13063 /* Now we know that arg0 is (C + D) or (C - D) or -C and
13064 arg1 (M) is (1LL << cst) - 1.
13065 Store C into PMOP[0] and D into PMOP[1]. */
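 /* For example, for ((A & 0x00ff) + B) & 0x00ff we have M == 0x00ff and
    C == A & 0x00ff; since (0x00ff & M) == M, C can be replaced by A,
    giving (A + B) & 0x00ff.  */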
13066 pmop[0] = arg00;
13067 pmop[1] = arg01;
13068 which = code != NEGATE_EXPR;
13069
13070 for (; which >= 0; which--)
13071 switch (codes[which])
13072 {
13073 case BIT_AND_EXPR:
13074 case BIT_IOR_EXPR:
13075 case BIT_XOR_EXPR:
13076 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
13077 cst0 = wi::to_wide (t: arg0xx[2 * which + 1]) & cst1;
13078 if (codes[which] == BIT_AND_EXPR)
13079 {
13080 if (cst0 != cst1)
13081 break;
13082 }
13083 else if (cst0 != 0)
13084 break;
13085 /* If C or D is of the form (A & N) where
13086 (N & M) == M, or of the form (A | N) or
13087 (A ^ N) where (N & M) == 0, replace it with A. */
13088 pmop[which] = arg0xx[2 * which];
13089 break;
13090 case ERROR_MARK:
13091 if (TREE_CODE (pmop[which]) != INTEGER_CST)
13092 break;
13093 /* If C or D is a N where (N & M) == 0, it can be
13094 omitted (replaced with 0). */
13095 if ((code == PLUS_EXPR
13096 || (code == MINUS_EXPR && which == 0))
13097 && (cst1 & wi::to_wide (t: pmop[which])) == 0)
13098 pmop[which] = build_int_cst (type, 0);
13099 /* Similarly, with C - N where (-N & M) == 0. */
13100 if (code == MINUS_EXPR
13101 && which == 1
13102 && (cst1 & -wi::to_wide (t: pmop[which])) == 0)
13103 pmop[which] = build_int_cst (type, 0);
13104 break;
13105 default:
13106 gcc_unreachable ();
13107 }
13108
13109 /* Only build anything new if we optimized one or both arguments above. */
13110 if (pmop[0] == arg00 && pmop[1] == arg01)
13111 return NULL_TREE;
13112
13113 if (TYPE_OVERFLOW_WRAPS (type))
13114 return type;
13115 else
13116 return unsigned_type_for (type);
13117}
13118
13119/* Used by contains_label_1 and contains_label_p. */
13120
13121struct contains_label_data
13122{
13123 hash_set<tree> *pset;
13124 bool inside_switch_p;
13125};
13126
13127/* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13128 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
13129 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
13130
13131static tree
13132contains_label_1 (tree *tp, int *walk_subtrees, void *data)
13133{
13134 contains_label_data *d = (contains_label_data *) data;
13135 switch (TREE_CODE (*tp))
13136 {
13137 case LABEL_EXPR:
13138 return *tp;
13139
13140 case CASE_LABEL_EXPR:
13141 if (!d->inside_switch_p)
13142 return *tp;
13143 return NULL_TREE;
13144
13145 case SWITCH_EXPR:
13146 if (!d->inside_switch_p)
13147 {
13148 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
13149 return *tp;
13150 d->inside_switch_p = true;
13151 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
13152 return *tp;
13153 d->inside_switch_p = false;
13154 *walk_subtrees = 0;
13155 }
13156 return NULL_TREE;
13157
13158 case GOTO_EXPR:
13159 *walk_subtrees = 0;
13160 return NULL_TREE;
13161
13162 default:
13163 return NULL_TREE;
13164 }
13165}
13166
13167/* Return whether the sub-tree ST contains a label which is accessible from
13168 outside the sub-tree. */
13169
13170static bool
13171contains_label_p (tree st)
13172{
13173 hash_set<tree> pset;
13174 contains_label_data data = { .pset: &pset, .inside_switch_p: false };
13175 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
13176}
13177
13178/* Fold a ternary expression of code CODE and type TYPE with operands
13179 OP0, OP1, and OP2. Return the folded expression if folding is
13180 successful. Otherwise, return NULL_TREE. */
13181
13182tree
13183fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13184 tree op0, tree op1, tree op2)
13185{
13186 tree tem;
13187 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13188 enum tree_code_class kind = TREE_CODE_CLASS (code);
13189
13190 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13191 && TREE_CODE_LENGTH (code) == 3);
13192
13193 /* If this is a commutative operation, and OP0 is a constant, move it
13194 to OP1 to reduce the number of tests below. */
13195 if (commutative_ternary_tree_code (code)
13196 && tree_swap_operands_p (arg0: op0, arg1: op1))
13197 return fold_build3_loc (loc, code, type, op1, op0, op2);
13198
13199 tem = generic_simplify (loc, code, type, op0, op1, op2);
13200 if (tem)
13201 return tem;
13202
13203 /* Strip any conversions that don't change the mode. This is safe
13204 for every expression, except for a comparison expression because
13205 its signedness is derived from its operands. So, in the latter
13206 case, only strip conversions that don't change the signedness.
13207
13208 Note that this is done as an internal manipulation within the
13209 constant folder, in order to find the simplest representation of
13210 the arguments so that their form can be studied.  In any case,
13211 the appropriate type conversions should be put back in the tree
13212 that will get out of the constant folder. */
13213 if (op0)
13214 {
13215 arg0 = op0;
13216 STRIP_NOPS (arg0);
13217 }
13218
13219 if (op1)
13220 {
13221 arg1 = op1;
13222 STRIP_NOPS (arg1);
13223 }
13224
13225 if (op2)
13226 {
13227 arg2 = op2;
13228 STRIP_NOPS (arg2);
13229 }
13230
13231 switch (code)
13232 {
13233 case COMPONENT_REF:
13234 if (TREE_CODE (arg0) == CONSTRUCTOR
13235 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13236 {
13237 unsigned HOST_WIDE_INT idx;
13238 tree field, value;
13239 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13240 if (field == arg1)
13241 return value;
13242 }
13243 return NULL_TREE;
13244
13245 case COND_EXPR:
13246 case VEC_COND_EXPR:
13247 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13248 so all simple results must be passed through pedantic_non_lvalue. */
13249 if (TREE_CODE (arg0) == INTEGER_CST)
13250 {
13251 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13252 tem = integer_zerop (arg0) ? op2 : op1;
13253 /* Only optimize constant conditions when the selected branch
13254 has the same type as the COND_EXPR. This avoids optimizing
13255 away "c ? x : throw", where the throw has a void type.
13256 Also avoid discarding an operand that contains a label. */
13257 if ((!TREE_SIDE_EFFECTS (unused_op)
13258 || !contains_label_p (st: unused_op))
13259 && (! VOID_TYPE_P (TREE_TYPE (tem))
13260 || VOID_TYPE_P (type)))
13261 return protected_set_expr_location_unshare (x: tem, loc);
13262 return NULL_TREE;
13263 }
13264 else if (TREE_CODE (arg0) == VECTOR_CST)
13265 {
13266 unsigned HOST_WIDE_INT nelts;
13267 if ((TREE_CODE (arg1) == VECTOR_CST
13268 || TREE_CODE (arg1) == CONSTRUCTOR)
13269 && (TREE_CODE (arg2) == VECTOR_CST
13270 || TREE_CODE (arg2) == CONSTRUCTOR)
13271 && TYPE_VECTOR_SUBPARTS (node: type).is_constant (const_value: &nelts))
13272 {
13273 vec_perm_builder sel (nelts, nelts, 1);
13274 for (unsigned int i = 0; i < nelts; i++)
13275 {
13276 tree val = VECTOR_CST_ELT (arg0, i);
13277 if (integer_all_onesp (val))
13278 sel.quick_push (obj: i);
13279 else if (integer_zerop (val))
13280 sel.quick_push (obj: nelts + i);
13281 else /* Currently unreachable. */
13282 return NULL_TREE;
13283 }
13284 vec_perm_indices indices (sel, 2, nelts);
13285 tree t = fold_vec_perm (type, arg0: arg1, arg1: arg2, sel: indices);
13286 if (t != NULL_TREE)
13287 return t;
13288 }
13289 }
13290
13291 /* If we have A op B ? A : C, we may be able to convert this to a
13292 simpler expression, depending on the operation and the values
13293 of B and C. Signed zeros prevent all of these transformations,
13294 for reasons given above each one.
13295
13296 Also try swapping the arguments and inverting the conditional. */
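 /* For example, x >= y ? x : y may become MAX_EXPR <x, y>, and
    x >= 0 ? x : -x may become ABS_EXPR <x>, when the comparison
    semantics (NaNs, signed zeros) allow it.  */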
13297 if (COMPARISON_CLASS_P (arg0)
13298 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), arg1: op1)
13299 && !HONOR_SIGNED_ZEROS (op1))
13300 {
13301 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
13302 TREE_OPERAND (arg0, 0),
13303 TREE_OPERAND (arg0, 1),
13304 arg1: op1, arg2: op2);
13305 if (tem)
13306 return tem;
13307 }
13308
13309 if (COMPARISON_CLASS_P (arg0)
13310 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), arg1: op2)
13311 && !HONOR_SIGNED_ZEROS (op2))
13312 {
13313 enum tree_code comp_code = TREE_CODE (arg0);
13314 tree arg00 = TREE_OPERAND (arg0, 0);
13315 tree arg01 = TREE_OPERAND (arg0, 1);
13316 comp_code = invert_tree_comparison (code: comp_code, honor_nans: HONOR_NANS (arg00));
13317 if (comp_code != ERROR_MARK)
13318 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
13319 arg00,
13320 arg01,
13321 arg1: op2, arg2: op1);
13322 if (tem)
13323 return tem;
13324 }
13325
13326 /* If the second operand is simpler than the third, swap them
13327 since that produces better jump optimization results. */
13328 if (truth_value_p (TREE_CODE (arg0))
13329 && tree_swap_operands_p (arg0: op1, arg1: op2))
13330 {
13331 location_t loc0 = expr_location_or (t: arg0, loc);
13332 /* See if this can be inverted. If it can't, possibly because
13333 it was a floating-point inequality comparison, don't do
13334 anything. */
13335 tem = fold_invert_truthvalue (loc: loc0, arg: arg0);
13336 if (tem)
13337 return fold_build3_loc (loc, code, type, tem, op2, op1);
13338 }
13339
13340 /* Convert A ? 1 : 0 to simply A. */
13341 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13342 : (integer_onep (op1)
13343 && !VECTOR_TYPE_P (type)))
13344 && integer_zerop (op2)
13345 /* If we try to convert OP0 to our type, the
13346 call to fold will try to move the conversion inside
13347 a COND, which will recurse. In that case, the COND_EXPR
13348 is probably the best choice, so leave it alone. */
13349 && type == TREE_TYPE (arg0))
13350 return protected_set_expr_location_unshare (x: arg0, loc);
13351
13352 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13353 over COND_EXPR in cases such as floating point comparisons. */
13354 if (integer_zerop (op1)
13355 && code == COND_EXPR
13356 && integer_onep (op2)
13357 && !VECTOR_TYPE_P (type)
13358 && truth_value_p (TREE_CODE (arg0)))
13359 return fold_convert_loc (loc, type,
13360 arg: invert_truthvalue_loc (loc, arg: arg0));
13361
13362 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
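      /* For example, with 32-bit int A, "A < 0 ? INT_MIN : 0" folds to
	 "A & INT_MIN": the bitwise AND extracts exactly the sign bit,
	 which is set precisely when A is negative.  */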
13363 if (TREE_CODE (arg0) == LT_EXPR
13364 && integer_zerop (TREE_OPERAND (arg0, 1))
13365 && integer_zerop (op2)
13366 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), val: arg1)))
13367 {
13368 /* sign_bit_p looks through both zero and sign extensions,
13369 but for this optimization only sign extensions are
13370 usable. */
13371 tree tem2 = TREE_OPERAND (arg0, 0);
13372 while (tem != tem2)
13373 {
13374 if (TREE_CODE (tem2) != NOP_EXPR
13375 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13376 {
13377 tem = NULL_TREE;
13378 break;
13379 }
13380 tem2 = TREE_OPERAND (tem2, 0);
13381 }
13382 /* sign_bit_p only checks ARG1 bits within A's precision.
13383 If <sign bit of A> has wider type than A, bits outside
13384 of A's precision in <sign bit of A> need to be checked.
13385	 If they are all 0, this optimization needs to be done
13386	 in unsigned A's type; if they are all 1, in signed A's type;
13387	 otherwise this can't be done. */
13388 if (tem
13389 && TYPE_PRECISION (TREE_TYPE (tem))
13390 < TYPE_PRECISION (TREE_TYPE (arg1))
13391 && TYPE_PRECISION (TREE_TYPE (tem))
13392 < TYPE_PRECISION (type))
13393 {
13394 int inner_width, outer_width;
13395 tree tem_type;
13396
13397 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13398 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13399 if (outer_width > TYPE_PRECISION (type))
13400 outer_width = TYPE_PRECISION (type);
13401
13402 wide_int mask = wi::shifted_mask
13403 (start: inner_width, width: outer_width - inner_width, negate_p: false,
13404 TYPE_PRECISION (TREE_TYPE (arg1)));
13405
13406 wide_int common = mask & wi::to_wide (t: arg1);
13407 if (common == mask)
13408 {
13409 tem_type = signed_type_for (TREE_TYPE (tem));
13410 tem = fold_convert_loc (loc, type: tem_type, arg: tem);
13411 }
13412 else if (common == 0)
13413 {
13414 tem_type = unsigned_type_for (TREE_TYPE (tem));
13415 tem = fold_convert_loc (loc, type: tem_type, arg: tem);
13416 }
13417 else
13418 tem = NULL;
13419 }
13420
13421 if (tem)
13422 return
13423 fold_convert_loc (loc, type,
13424 arg: fold_build2_loc (loc, BIT_AND_EXPR,
13425 TREE_TYPE (tem), tem,
13426 fold_convert_loc (loc,
13427 TREE_TYPE (tem),
13428 arg: arg1)));
13429 }
13430
13431 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13432 already handled above. */
13433 if (TREE_CODE (arg0) == BIT_AND_EXPR
13434 && integer_onep (TREE_OPERAND (arg0, 1))
13435 && integer_zerop (op2)
13436 && integer_pow2p (arg1))
13437 {
13438 tree tem = TREE_OPERAND (arg0, 0);
13439 STRIP_NOPS (tem);
13440 if (TREE_CODE (tem) == RSHIFT_EXPR
13441 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13442 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13443 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13444 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13445 fold_convert_loc (loc, type,
13446 TREE_OPERAND (tem, 0)),
13447 op1);
13448 }
13449
13450 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13451 is probably obsolete because the first operand should be a
13452 truth value (that's why we have the two cases above), but let's
13453 leave it in until we can confirm this for all front-ends. */
13454 if (integer_zerop (op2)
13455 && TREE_CODE (arg0) == NE_EXPR
13456 && integer_zerop (TREE_OPERAND (arg0, 1))
13457 && integer_pow2p (arg1)
13458 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13459 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13460 arg1, flags: OEP_ONLY_CONST)
13461 /* operand_equal_p compares just value, not precision, so e.g.
13462 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13463 second operand 32-bit -128, which is not a power of two (or vice
13464	 versa). */
13465 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13466 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13467
13468 /* Disable the transformations below for vectors, since
13469 fold_binary_op_with_conditional_arg may undo them immediately,
13470 yielding an infinite loop. */
13471 if (code == VEC_COND_EXPR)
13472 return NULL_TREE;
13473
13474 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13475 if (integer_zerop (op2)
13476 && truth_value_p (TREE_CODE (arg0))
13477 && truth_value_p (TREE_CODE (arg1))
13478 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13479 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13480 : TRUTH_ANDIF_EXPR,
13481 type, fold_convert_loc (loc, type, arg: arg0), op1);
13482
13483 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13484 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13485 && truth_value_p (TREE_CODE (arg0))
13486 && truth_value_p (TREE_CODE (arg1))
13487 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13488 {
13489 location_t loc0 = expr_location_or (t: arg0, loc);
13490 /* Only perform transformation if ARG0 is easily inverted. */
13491 tem = fold_invert_truthvalue (loc: loc0, arg: arg0);
13492 if (tem)
13493 return fold_build2_loc (loc, code == VEC_COND_EXPR
13494 ? BIT_IOR_EXPR
13495 : TRUTH_ORIF_EXPR,
13496 type, fold_convert_loc (loc, type, arg: tem),
13497 op1);
13498 }
13499
13500 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13501 if (integer_zerop (arg1)
13502 && truth_value_p (TREE_CODE (arg0))
13503 && truth_value_p (TREE_CODE (op2))
13504 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13505 {
13506 location_t loc0 = expr_location_or (t: arg0, loc);
13507 /* Only perform transformation if ARG0 is easily inverted. */
13508 tem = fold_invert_truthvalue (loc: loc0, arg: arg0);
13509 if (tem)
13510 return fold_build2_loc (loc, code == VEC_COND_EXPR
13511 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13512 type, fold_convert_loc (loc, type, arg: tem),
13513 op2);
13514 }
13515
13516 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13517 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13518 && truth_value_p (TREE_CODE (arg0))
13519 && truth_value_p (TREE_CODE (op2))
13520 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13521 return fold_build2_loc (loc, code == VEC_COND_EXPR
13522 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13523 type, fold_convert_loc (loc, type, arg: arg0), op2);
13524
13525 return NULL_TREE;
13526
13527 case CALL_EXPR:
13528 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13529 of fold_ternary on them. */
13530 gcc_unreachable ();
13531
13532 case BIT_FIELD_REF:
13533 if (TREE_CODE (arg0) == VECTOR_CST
13534 && (type == TREE_TYPE (TREE_TYPE (arg0))
13535 || (VECTOR_TYPE_P (type)
13536 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13537 && tree_fits_uhwi_p (op1)
13538 && tree_fits_uhwi_p (op2))
13539 {
13540 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13541 unsigned HOST_WIDE_INT width
13542 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13543 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13544 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13545 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13546
13547 if (n != 0
13548 && (idx % width) == 0
13549 && (n % width) == 0
13550 && known_le ((idx + n) / width,
13551 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13552 {
13553 idx = idx / width;
13554 n = n / width;
13555
13556 if (TREE_CODE (arg0) == VECTOR_CST)
13557 {
13558 if (n == 1)
13559 {
13560 tem = VECTOR_CST_ELT (arg0, idx);
13561 if (VECTOR_TYPE_P (type))
13562 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13563 return tem;
13564 }
13565
13566 tree_vector_builder vals (type, n, 1);
13567 for (unsigned i = 0; i < n; ++i)
13568 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13569 return vals.build ();
13570 }
13571 }
13572 }
13573
13574 /* On constants we can use native encode/interpret to constant
13575 fold (nearly) all BIT_FIELD_REFs. */
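      /* The constant is serialized into a byte buffer with native_encode_expr,
	 starting at the byte containing the bit-field, and the selected bytes
	 are then reinterpreted in TYPE with native_interpret_expr.  This only
	 works for byte-aligned, byte-sized references on byte-addressable
	 targets (BITS_PER_UNIT == 8).  */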
13576 if (CONSTANT_CLASS_P (arg0)
13577 && can_native_interpret_type_p (type)
13578 && BITS_PER_UNIT == 8
13579 && tree_fits_uhwi_p (op1)
13580 && tree_fits_uhwi_p (op2))
13581 {
13582 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13583 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13584 /* Limit us to a reasonable amount of work. To relax the
13585 other limitations we need bit-shifting of the buffer
13586 and rounding up the size. */
13587 if (bitpos % BITS_PER_UNIT == 0
13588 && bitsize % BITS_PER_UNIT == 0
13589 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13590 {
13591 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13592 unsigned HOST_WIDE_INT len
13593 = native_encode_expr (expr: arg0, ptr: b, len: bitsize / BITS_PER_UNIT,
13594 off: bitpos / BITS_PER_UNIT);
13595 if (len > 0
13596 && len * BITS_PER_UNIT >= bitsize)
13597 {
13598 tree v = native_interpret_expr (type, ptr: b,
13599 len: bitsize / BITS_PER_UNIT);
13600 if (v)
13601 return v;
13602 }
13603 }
13604 }
13605
13606 return NULL_TREE;
13607
13608 case VEC_PERM_EXPR:
13609	 /* Perform constant folding of VEC_PERM_EXPR. */
13610 if (TREE_CODE (arg2) == VECTOR_CST
13611 && TREE_CODE (op0) == VECTOR_CST
13612 && TREE_CODE (op1) == VECTOR_CST)
13613 {
13614 /* Build a vector of integers from the tree mask. */
13615 vec_perm_builder builder;
13616 if (!tree_to_vec_perm_builder (&builder, arg2))
13617 return NULL_TREE;
13618
13619 /* Create a vec_perm_indices for the integer vector. */
13620 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (node: type);
13621 bool single_arg = (op0 == op1);
13622 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13623 return fold_vec_perm (type, arg0: op0, arg1: op1, sel);
13624 }
13625 return NULL_TREE;
13626
13627 case BIT_INSERT_EXPR:
13628 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
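      /* Two constant cases are handled below: inserting an INTEGER_CST into
	 an INTEGER_CST by masking out the destination bits and OR-ing in the
	 zero-extended, shifted source; and inserting a constant element into
	 a VECTOR_CST at an element-aligned bit position by rebuilding the
	 vector.  */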
13629 if (TREE_CODE (arg0) == INTEGER_CST
13630 && TREE_CODE (arg1) == INTEGER_CST)
13631 {
13632 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13633 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13634 wide_int tem = (wi::to_wide (t: arg0)
13635 & wi::shifted_mask (start: bitpos, width: bitsize, negate_p: true,
13636 TYPE_PRECISION (type)));
13637 wide_int tem2
13638 = wi::lshift (x: wi::zext (x: wi::to_wide (t: arg1, TYPE_PRECISION (type)),
13639 offset: bitsize), y: bitpos);
13640 return wide_int_to_tree (type, cst: wi::bit_or (x: tem, y: tem2));
13641 }
13642 else if (TREE_CODE (arg0) == VECTOR_CST
13643 && CONSTANT_CLASS_P (arg1)
13644 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13645 TREE_TYPE (arg1)))
13646 {
13647 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13648 unsigned HOST_WIDE_INT elsize
13649 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13650 if (bitpos % elsize == 0)
13651 {
13652 unsigned k = bitpos / elsize;
13653 unsigned HOST_WIDE_INT nelts;
13654 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, flags: 0))
13655 return arg0;
13656 else if (VECTOR_CST_NELTS (arg0).is_constant (const_value: &nelts))
13657 {
13658 tree_vector_builder elts (type, nelts, 1);
13659 elts.quick_grow (len: nelts);
13660 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13661 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13662 return elts.build ();
13663 }
13664 }
13665 }
13666 return NULL_TREE;
13667
13668 default:
13669 return NULL_TREE;
13670 } /* switch (code) */
13671}
13672
13673/* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13674   of an array (or vector).  If CTOR_IDX is non-NULL, it is updated with the
13675   constructor element index of the value returned.  If the element is
13676   not found, NULL_TREE is returned and *CTOR_IDX is updated to
13677 the index of the element after the ACCESS_INDEX position (which
13678 may be outside of the CTOR array). */
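/* As an illustrative example (hypothetical initializer): for the CONSTRUCTOR
   of "int a[4] = { [1] = 10, 11 }", ACCESS_INDEX 2 yields the element 11,
   because a CONSTRUCTOR element without an explicit index takes the index
   following the previous element's.  */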
13679
13680tree
13681get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13682 unsigned *ctor_idx)
13683{
13684 tree index_type = NULL_TREE;
13685 signop index_sgn = UNSIGNED;
13686 offset_int low_bound = 0;
13687
13688 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13689 {
13690 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13691 if (domain_type && TYPE_MIN_VALUE (domain_type))
13692 {
13693	 /* Static constructors for variably sized objects make no sense. */
13694 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13695 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13696 /* ??? When it is obvious that the range is signed, treat it so. */
13697 if (TYPE_UNSIGNED (index_type)
13698 && TYPE_MAX_VALUE (domain_type)
13699 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13700 TYPE_MIN_VALUE (domain_type)))
13701 {
13702 index_sgn = SIGNED;
13703 low_bound
13704 = offset_int::from (x: wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13705 sgn: SIGNED);
13706 }
13707 else
13708 {
13709 index_sgn = TYPE_SIGN (index_type);
13710 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13711 }
13712 }
13713 }
13714
13715 if (index_type)
13716 access_index = wi::ext (x: access_index, TYPE_PRECISION (index_type),
13717 sgn: index_sgn);
13718
13719 offset_int index = low_bound;
13720 if (index_type)
13721 index = wi::ext (x: index, TYPE_PRECISION (index_type), sgn: index_sgn);
13722
13723 offset_int max_index = index;
13724 unsigned cnt;
13725 tree cfield, cval;
13726 bool first_p = true;
13727
13728 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13729 {
13730	 /* An array constructor might explicitly set the index, specify a range,
13731	    or leave the index NULL, meaning that it is the next index after the
13732	    previous one. */
13733 if (cfield)
13734 {
13735 if (TREE_CODE (cfield) == INTEGER_CST)
13736 max_index = index
13737 = offset_int::from (x: wi::to_wide (t: cfield), sgn: index_sgn);
13738 else
13739 {
13740 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13741 index = offset_int::from (x: wi::to_wide (TREE_OPERAND (cfield, 0)),
13742 sgn: index_sgn);
13743 max_index
13744 = offset_int::from (x: wi::to_wide (TREE_OPERAND (cfield, 1)),
13745 sgn: index_sgn);
13746 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13747 }
13748 }
13749 else if (!first_p)
13750 {
13751 index = max_index + 1;
13752 if (index_type)
13753 index = wi::ext (x: index, TYPE_PRECISION (index_type), sgn: index_sgn);
13754 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13755 max_index = index;
13756 }
13757 else
13758 first_p = false;
13759
13760	 /* Do we have a match? */
13761 if (wi::cmp (x: access_index, y: index, sgn: index_sgn) >= 0)
13762 {
13763 if (wi::cmp (x: access_index, y: max_index, sgn: index_sgn) <= 0)
13764 {
13765 if (ctor_idx)
13766 *ctor_idx = cnt;
13767 return cval;
13768 }
13769 }
13770 else if (in_gimple_form)
13771	 /* We're past the element we searched for. Note that during parsing
13772	    the elements might not be sorted.
13773 ??? We should use a binary search and a flag on the
13774 CONSTRUCTOR as to whether elements are sorted in declaration
13775 order. */
13776 break;
13777 }
13778 if (ctor_idx)
13779 *ctor_idx = cnt;
13780 return NULL_TREE;
13781}
13782
13783/* Perform constant folding and related simplification of EXPR.
13784 The related simplifications include x*1 => x, x*0 => 0, etc.,
13785 and application of the associative law.
13786 NOP_EXPR conversions may be removed freely (as long as we
13787 are careful not to change the type of the overall expression).
13788 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13789 but we can constant-fold them if they have constant operands. */
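/* For example, fold turns "1 + 2" into the constant 3 and "x * 1" into "x",
   while returning expressions it cannot simplify unchanged.  */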
13790
13791#ifdef ENABLE_FOLD_CHECKING
13792# define fold(x) fold_1 (x)
13793static tree fold_1 (tree);
13794static
13795#endif
13796tree
13797fold (tree expr)
13798{
13799 const tree t = expr;
13800 enum tree_code code = TREE_CODE (t);
13801 enum tree_code_class kind = TREE_CODE_CLASS (code);
13802 tree tem;
13803 location_t loc = EXPR_LOCATION (expr);
13804
13805 /* Return right away if a constant. */
13806 if (kind == tcc_constant)
13807 return t;
13808
13809 /* CALL_EXPR-like objects with variable numbers of operands are
13810 treated specially. */
13811 if (kind == tcc_vl_exp)
13812 {
13813 if (code == CALL_EXPR)
13814 {
13815 tem = fold_call_expr (loc, expr, false);
13816 return tem ? tem : expr;
13817 }
13818 return expr;
13819 }
13820
13821 if (IS_EXPR_CODE_CLASS (kind))
13822 {
13823 tree type = TREE_TYPE (t);
13824 tree op0, op1, op2;
13825
13826 switch (TREE_CODE_LENGTH (code))
13827 {
13828 case 1:
13829 op0 = TREE_OPERAND (t, 0);
13830 tem = fold_unary_loc (loc, code, type, op0);
13831 return tem ? tem : expr;
13832 case 2:
13833 op0 = TREE_OPERAND (t, 0);
13834 op1 = TREE_OPERAND (t, 1);
13835 tem = fold_binary_loc (loc, code, type, op0, op1);
13836 return tem ? tem : expr;
13837 case 3:
13838 op0 = TREE_OPERAND (t, 0);
13839 op1 = TREE_OPERAND (t, 1);
13840 op2 = TREE_OPERAND (t, 2);
13841 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13842 return tem ? tem : expr;
13843 default:
13844 break;
13845 }
13846 }
13847
13848 switch (code)
13849 {
13850 case ARRAY_REF:
13851 {
13852 tree op0 = TREE_OPERAND (t, 0);
13853 tree op1 = TREE_OPERAND (t, 1);
13854
13855 if (TREE_CODE (op1) == INTEGER_CST
13856 && TREE_CODE (op0) == CONSTRUCTOR
13857 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13858 {
13859 tree val = get_array_ctor_element_at_index (ctor: op0,
13860 access_index: wi::to_offset (t: op1));
13861 if (val)
13862 return val;
13863 }
13864
13865 return t;
13866 }
13867
13868 /* Return a VECTOR_CST if possible. */
13869 case CONSTRUCTOR:
13870 {
13871 tree type = TREE_TYPE (t);
13872 if (TREE_CODE (type) != VECTOR_TYPE)
13873 return t;
13874
13875 unsigned i;
13876 tree val;
13877 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13878 if (! CONSTANT_CLASS_P (val))
13879 return t;
13880
13881 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13882 }
13883
13884 case CONST_DECL:
13885 return fold (DECL_INITIAL (t));
13886
13887 default:
13888 return t;
13889 } /* switch (code) */
13890}
13891
13892#ifdef ENABLE_FOLD_CHECKING
13893#undef fold
13894
13895static void fold_checksum_tree (const_tree, struct md5_ctx *,
13896 hash_table<nofree_ptr_hash<const tree_node> > *);
13897static void fold_check_failed (const_tree, const_tree);
13898void print_fold_checksum (const_tree);
13899
13900/* When --enable-checking=fold, compute a digest of EXPR before and
13901   after the actual fold call to verify that fold did not accidentally
13902   change the original expr. */
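/* The digest is an MD5 checksum over a walk of the tree performed by
   fold_checksum_tree, with fields that fold is allowed to touch (such as
   DECL_ASSEMBLER_NAME, type caches and the no-warning bit) masked out
   before hashing.  */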
13903
13904tree
13905fold (tree expr)
13906{
13907 tree ret;
13908 struct md5_ctx ctx;
13909 unsigned char checksum_before[16], checksum_after[16];
13910 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13911
13912 md5_init_ctx (&ctx);
13913 fold_checksum_tree (expr, &ctx, &ht);
13914 md5_finish_ctx (&ctx, checksum_before);
13915 ht.empty ();
13916
13917 ret = fold_1 (expr);
13918
13919 md5_init_ctx (&ctx);
13920 fold_checksum_tree (expr, &ctx, &ht);
13921 md5_finish_ctx (&ctx, checksum_after);
13922
13923 if (memcmp (checksum_before, checksum_after, 16))
13924 fold_check_failed (expr, ret);
13925
13926 return ret;
13927}
13928
13929void
13930print_fold_checksum (const_tree expr)
13931{
13932 struct md5_ctx ctx;
13933 unsigned char checksum[16], cnt;
13934 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13935
13936 md5_init_ctx (&ctx);
13937 fold_checksum_tree (expr, &ctx, &ht);
13938 md5_finish_ctx (&ctx, checksum);
13939 for (cnt = 0; cnt < 16; ++cnt)
13940 fprintf (stderr, "%02x", checksum[cnt]);
13941 putc ('\n', stderr);
13942}
13943
13944static void
13945fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13946{
13947 internal_error ("fold check: original tree changed by fold");
13948}
13949
13950static void
13951fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13952 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13953{
13954 const tree_node **slot;
13955 enum tree_code code;
13956 union tree_node *buf;
13957 int i, len;
13958
13959 recursive_label:
13960 if (expr == NULL)
13961 return;
13962 slot = ht->find_slot (expr, INSERT);
13963 if (*slot != NULL)
13964 return;
13965 *slot = expr;
13966 code = TREE_CODE (expr);
13967 if (TREE_CODE_CLASS (code) == tcc_declaration
13968 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13969 {
13970 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13971 size_t sz = tree_size (expr);
13972 buf = XALLOCAVAR (union tree_node, sz);
13973 memcpy ((char *) buf, expr, sz);
13974 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13975 buf->decl_with_vis.symtab_node = NULL;
13976 buf->base.nowarning_flag = 0;
13977 expr = (tree) buf;
13978 }
13979 else if (TREE_CODE_CLASS (code) == tcc_type
13980 && (TYPE_POINTER_TO (expr)
13981 || TYPE_REFERENCE_TO (expr)
13982 || TYPE_CACHED_VALUES_P (expr)
13983 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13984 || TYPE_NEXT_VARIANT (expr)
13985 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13986 {
13987 /* Allow these fields to be modified. */
13988 tree tmp;
13989 size_t sz = tree_size (expr);
13990 buf = XALLOCAVAR (union tree_node, sz);
13991 memcpy ((char *) buf, expr, sz);
13992 expr = tmp = (tree) buf;
13993 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13994 TYPE_POINTER_TO (tmp) = NULL;
13995 TYPE_REFERENCE_TO (tmp) = NULL;
13996 TYPE_NEXT_VARIANT (tmp) = NULL;
13997 TYPE_ALIAS_SET (tmp) = -1;
13998 if (TYPE_CACHED_VALUES_P (tmp))
13999 {
14000 TYPE_CACHED_VALUES_P (tmp) = 0;
14001 TYPE_CACHED_VALUES (tmp) = NULL;
14002 }
14003 }
14004 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
14005 {
14006 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
14007 that and change builtins.cc etc. instead - see PR89543. */
14008 size_t sz = tree_size (expr);
14009 buf = XALLOCAVAR (union tree_node, sz);
14010 memcpy ((char *) buf, expr, sz);
14011 buf->base.nowarning_flag = 0;
14012 expr = (tree) buf;
14013 }
14014 md5_process_bytes (expr, tree_size (expr), ctx);
14015 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14016 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14017 if (TREE_CODE_CLASS (code) != tcc_type
14018 && TREE_CODE_CLASS (code) != tcc_declaration
14019 && code != TREE_LIST
14020 && code != SSA_NAME
14021 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14022 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14023 switch (TREE_CODE_CLASS (code))
14024 {
14025 case tcc_constant:
14026 switch (code)
14027 {
14028 case STRING_CST:
14029 md5_process_bytes (TREE_STRING_POINTER (expr),
14030 TREE_STRING_LENGTH (expr), ctx);
14031 break;
14032 case COMPLEX_CST:
14033 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14034 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14035 break;
14036 case VECTOR_CST:
14037 len = vector_cst_encoded_nelts (expr);
14038 for (i = 0; i < len; ++i)
14039 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
14040 break;
14041 default:
14042 break;
14043 }
14044 break;
14045 case tcc_exceptional:
14046 switch (code)
14047 {
14048 case TREE_LIST:
14049 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14050 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14051 expr = TREE_CHAIN (expr);
14052 goto recursive_label;
14053 break;
14054 case TREE_VEC:
14055 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14056 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14057 break;
14058 default:
14059 break;
14060 }
14061 break;
14062 case tcc_expression:
14063 case tcc_reference:
14064 case tcc_comparison:
14065 case tcc_unary:
14066 case tcc_binary:
14067 case tcc_statement:
14068 case tcc_vl_exp:
14069 len = TREE_OPERAND_LENGTH (expr);
14070 for (i = 0; i < len; ++i)
14071 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14072 break;
14073 case tcc_declaration:
14074 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14075 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14076 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14077 {
14078 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14079 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14080 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14081 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14082 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14083 }
14084
14085 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14086 {
14087 if (TREE_CODE (expr) == FUNCTION_DECL)
14088 {
14089 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14090 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14091 }
14092 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14093 }
14094 break;
14095 case tcc_type:
14096 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14097 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14098 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14099 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14100 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14101 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14102 if (INTEGRAL_TYPE_P (expr)
14103 || SCALAR_FLOAT_TYPE_P (expr))
14104 {
14105 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14106 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14107 }
14108 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14109 if (RECORD_OR_UNION_TYPE_P (expr))
14110 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14111 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14112 break;
14113 default:
14114 break;
14115 }
14116}
14117
14118/* Helper function for outputting the checksum of a tree T. When
14119 debugging with gdb, you can "define mynext" to be "next" followed
14120 by "call debug_fold_checksum (op0)", then just trace down till the
14121 outputs differ. */
14122
14123DEBUG_FUNCTION void
14124debug_fold_checksum (const_tree t)
14125{
14126 int i;
14127 unsigned char checksum[16];
14128 struct md5_ctx ctx;
14129 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14130
14131 md5_init_ctx (&ctx);
14132 fold_checksum_tree (t, &ctx, &ht);
14133 md5_finish_ctx (&ctx, checksum);
14134 ht.empty ();
14135
14136 for (i = 0; i < 16; i++)
14137 fprintf (stderr, "%d ", checksum[i]);
14138
14139 fprintf (stderr, "\n");
14140}
14141
14142#endif
14143
14144/* Fold a unary tree expression with code CODE of type TYPE with an
14145 operand OP0. LOC is the location of the resulting expression.
14146 Return a folded expression if successful. Otherwise, return a tree
14147 expression with code CODE of type TYPE with an operand OP0. */
14148
14149tree
14150fold_build1_loc (location_t loc,
14151 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14152{
14153 tree tem;
14154#ifdef ENABLE_FOLD_CHECKING
14155 unsigned char checksum_before[16], checksum_after[16];
14156 struct md5_ctx ctx;
14157 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14158
14159 md5_init_ctx (&ctx);
14160 fold_checksum_tree (op0, &ctx, &ht);
14161 md5_finish_ctx (&ctx, checksum_before);
14162 ht.empty ();
14163#endif
14164
14165 tem = fold_unary_loc (loc, code, type, op0);
14166 if (!tem)
14167 tem = build1_loc (loc, code, type, arg1: op0 PASS_MEM_STAT);
14168
14169#ifdef ENABLE_FOLD_CHECKING
14170 md5_init_ctx (&ctx);
14171 fold_checksum_tree (op0, &ctx, &ht);
14172 md5_finish_ctx (&ctx, checksum_after);
14173
14174 if (memcmp (checksum_before, checksum_after, 16))
14175 fold_check_failed (op0, tem);
14176#endif
14177 return tem;
14178}
14179
14180/* Fold a binary tree expression with code CODE of type TYPE with
14181 operands OP0 and OP1. LOC is the location of the resulting
14182 expression. Return a folded expression if successful. Otherwise,
14183 return a tree expression with code CODE of type TYPE with operands
14184 OP0 and OP1. */
14185
14186tree
14187fold_build2_loc (location_t loc,
14188 enum tree_code code, tree type, tree op0, tree op1
14189 MEM_STAT_DECL)
14190{
14191 tree tem;
14192#ifdef ENABLE_FOLD_CHECKING
14193 unsigned char checksum_before_op0[16],
14194 checksum_before_op1[16],
14195 checksum_after_op0[16],
14196 checksum_after_op1[16];
14197 struct md5_ctx ctx;
14198 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14199
14200 md5_init_ctx (&ctx);
14201 fold_checksum_tree (op0, &ctx, &ht);
14202 md5_finish_ctx (&ctx, checksum_before_op0);
14203 ht.empty ();
14204
14205 md5_init_ctx (&ctx);
14206 fold_checksum_tree (op1, &ctx, &ht);
14207 md5_finish_ctx (&ctx, checksum_before_op1);
14208 ht.empty ();
14209#endif
14210
14211 tem = fold_binary_loc (loc, code, type, op0, op1);
14212 if (!tem)
14213 tem = build2_loc (loc, code, type, arg0: op0, arg1: op1 PASS_MEM_STAT);
14214
14215#ifdef ENABLE_FOLD_CHECKING
14216 md5_init_ctx (&ctx);
14217 fold_checksum_tree (op0, &ctx, &ht);
14218 md5_finish_ctx (&ctx, checksum_after_op0);
14219 ht.empty ();
14220
14221 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14222 fold_check_failed (op0, tem);
14223
14224 md5_init_ctx (&ctx);
14225 fold_checksum_tree (op1, &ctx, &ht);
14226 md5_finish_ctx (&ctx, checksum_after_op1);
14227
14228 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14229 fold_check_failed (op1, tem);
14230#endif
14231 return tem;
14232}
14233
14234/* Fold a ternary tree expression with code CODE of type TYPE with
14235 operands OP0, OP1, and OP2. Return a folded expression if
14236 successful. Otherwise, return a tree expression with code CODE of
14237 type TYPE with operands OP0, OP1, and OP2. */
14238
14239tree
14240fold_build3_loc (location_t loc, enum tree_code code, tree type,
14241 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14242{
14243 tree tem;
14244#ifdef ENABLE_FOLD_CHECKING
14245 unsigned char checksum_before_op0[16],
14246 checksum_before_op1[16],
14247 checksum_before_op2[16],
14248 checksum_after_op0[16],
14249 checksum_after_op1[16],
14250 checksum_after_op2[16];
14251 struct md5_ctx ctx;
14252 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14253
14254 md5_init_ctx (&ctx);
14255 fold_checksum_tree (op0, &ctx, &ht);
14256 md5_finish_ctx (&ctx, checksum_before_op0);
14257 ht.empty ();
14258
14259 md5_init_ctx (&ctx);
14260 fold_checksum_tree (op1, &ctx, &ht);
14261 md5_finish_ctx (&ctx, checksum_before_op1);
14262 ht.empty ();
14263
14264 md5_init_ctx (&ctx);
14265 fold_checksum_tree (op2, &ctx, &ht);
14266 md5_finish_ctx (&ctx, checksum_before_op2);
14267 ht.empty ();
14268#endif
14269
14270 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14271 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14272 if (!tem)
14273 tem = build3_loc (loc, code, type, arg0: op0, arg1: op1, arg2: op2 PASS_MEM_STAT);
14274
14275#ifdef ENABLE_FOLD_CHECKING
14276 md5_init_ctx (&ctx);
14277 fold_checksum_tree (op0, &ctx, &ht);
14278 md5_finish_ctx (&ctx, checksum_after_op0);
14279 ht.empty ();
14280
14281 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14282 fold_check_failed (op0, tem);
14283
14284 md5_init_ctx (&ctx);
14285 fold_checksum_tree (op1, &ctx, &ht);
14286 md5_finish_ctx (&ctx, checksum_after_op1);
14287 ht.empty ();
14288
14289 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14290 fold_check_failed (op1, tem);
14291
14292 md5_init_ctx (&ctx);
14293 fold_checksum_tree (op2, &ctx, &ht);
14294 md5_finish_ctx (&ctx, checksum_after_op2);
14295
14296 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14297 fold_check_failed (op2, tem);
14298#endif
14299 return tem;
14300}
14301
14302/* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14303   arguments in ARGARRAY, and a null static chain.
14304 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14305 of type TYPE from the given operands as constructed by build_call_array. */
14306
14307tree
14308fold_build_call_array_loc (location_t loc, tree type, tree fn,
14309 int nargs, tree *argarray)
14310{
14311 tree tem;
14312#ifdef ENABLE_FOLD_CHECKING
14313 unsigned char checksum_before_fn[16],
14314 checksum_before_arglist[16],
14315 checksum_after_fn[16],
14316 checksum_after_arglist[16];
14317 struct md5_ctx ctx;
14318 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14319 int i;
14320
14321 md5_init_ctx (&ctx);
14322 fold_checksum_tree (fn, &ctx, &ht);
14323 md5_finish_ctx (&ctx, checksum_before_fn);
14324 ht.empty ();
14325
14326 md5_init_ctx (&ctx);
14327 for (i = 0; i < nargs; i++)
14328 fold_checksum_tree (argarray[i], &ctx, &ht);
14329 md5_finish_ctx (&ctx, checksum_before_arglist);
14330 ht.empty ();
14331#endif
14332
14333 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14334 if (!tem)
14335 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14336
14337#ifdef ENABLE_FOLD_CHECKING
14338 md5_init_ctx (&ctx);
14339 fold_checksum_tree (fn, &ctx, &ht);
14340 md5_finish_ctx (&ctx, checksum_after_fn);
14341 ht.empty ();
14342
14343 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14344 fold_check_failed (fn, tem);
14345
14346 md5_init_ctx (&ctx);
14347 for (i = 0; i < nargs; i++)
14348 fold_checksum_tree (argarray[i], &ctx, &ht);
14349 md5_finish_ctx (&ctx, checksum_after_arglist);
14350
14351 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14352 fold_check_failed (NULL_TREE, tem);
14353#endif
14354 return tem;
14355}
14356
14357/* Perform constant folding and related simplification of initializer
14358 expression EXPR. These behave identically to "fold_buildN" but ignore
14359 potential run-time traps and exceptions that fold must preserve. */
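/* As a hypothetical example, when folding a static initializer such as
   "const double x = 1.0 / 3.0;", the division is evaluated at compile time
   even if -frounding-math or -ftrapping-math would normally prevent it,
   because the flags below are temporarily cleared.  */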
14360
14361#define START_FOLD_INIT \
14362 int saved_signaling_nans = flag_signaling_nans;\
14363 int saved_trapping_math = flag_trapping_math;\
14364 int saved_rounding_math = flag_rounding_math;\
14365 int saved_trapv = flag_trapv;\
14366 int saved_folding_initializer = folding_initializer;\
14367 flag_signaling_nans = 0;\
14368 flag_trapping_math = 0;\
14369 flag_rounding_math = 0;\
14370 flag_trapv = 0;\
14371 folding_initializer = 1;
14372
14373#define END_FOLD_INIT \
14374 flag_signaling_nans = saved_signaling_nans;\
14375 flag_trapping_math = saved_trapping_math;\
14376 flag_rounding_math = saved_rounding_math;\
14377 flag_trapv = saved_trapv;\
14378 folding_initializer = saved_folding_initializer;
14379
14380tree
14381fold_init (tree expr)
14382{
14383 tree result;
14384 START_FOLD_INIT;
14385
14386 result = fold (expr);
14387
14388 END_FOLD_INIT;
14389 return result;
14390}
14391
14392tree
14393fold_build1_initializer_loc (location_t loc, enum tree_code code,
14394 tree type, tree op)
14395{
14396 tree result;
14397 START_FOLD_INIT;
14398
14399 result = fold_build1_loc (loc, code, type, op0: op);
14400
14401 END_FOLD_INIT;
14402 return result;
14403}
14404
14405tree
14406fold_build2_initializer_loc (location_t loc, enum tree_code code,
14407 tree type, tree op0, tree op1)
14408{
14409 tree result;
14410 START_FOLD_INIT;
14411
14412 result = fold_build2_loc (loc, code, type, op0, op1);
14413
14414 END_FOLD_INIT;
14415 return result;
14416}
14417
14418tree
14419fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14420 int nargs, tree *argarray)
14421{
14422 tree result;
14423 START_FOLD_INIT;
14424
14425 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14426
14427 END_FOLD_INIT;
14428 return result;
14429}
14430
14431tree
14432fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14433 tree lhs, tree rhs)
14434{
14435 tree result;
14436 START_FOLD_INIT;
14437
14438 result = fold_binary_loc (loc, code, type, op0: lhs, op1: rhs);
14439
14440 END_FOLD_INIT;
14441 return result;
14442}
14443
14444#undef START_FOLD_INIT
14445#undef END_FOLD_INIT
14446
14447/* Determine if the first argument is a multiple of the second argument.
14448   Return false if it is not, or if we cannot easily determine it to be.
14449
14450 An example of the sort of thing we care about (at this point; this routine
14451 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14452 fold cases do now) is discovering that
14453
14454 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14455
14456 is a multiple of
14457
14458 SAVE_EXPR (J * 8)
14459
14460 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14461
14462 This code also handles discovering that
14463
14464 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14465
14466 is a multiple of 8 so we don't have to worry about dealing with a
14467 possible remainder.
14468
14469 Note that we *look* inside a SAVE_EXPR only to determine how it was
14470 calculated; it is not safe for fold to do much of anything else with the
14471 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14472 at run time. For example, the latter example above *cannot* be implemented
14473 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14474 evaluation time of the original SAVE_EXPR is not necessarily the same at
14475 the time the new expression is evaluated. The only optimization of this
14476 sort that would be valid is changing
14477
14478 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14479
14480 divided by 8 to
14481
14482 SAVE_EXPR (I) * SAVE_EXPR (J)
14483
14484 (where the same SAVE_EXPR (J) is used in the original and the
14485 transformed version).
14486
14487 NOWRAP specifies whether all outer operations in TYPE should
14488 be considered not wrapping. Any type conversion within TOP acts
14489 as a barrier and we will fall back to NOWRAP being false.
14490 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14491 as not wrapping even though they are generally using unsigned arithmetic. */
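/* For instance, multiple_of_p returns true for TOP = J * 8 and BOTTOM = 4
   even when J is unknown, because the constant factor 8 is itself a multiple
   of 4; it returns false for TOP = J * 6 and BOTTOM = 4, where the answer
   depends on J.  */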
14492
14493bool
14494multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14495{
14496 gimple *stmt;
14497 tree op1, op2;
14498
14499 if (operand_equal_p (arg0: top, arg1: bottom, flags: 0))
14500 return true;
14501
14502 if (TREE_CODE (type) != INTEGER_TYPE)
14503 return false;
14504
14505 switch (TREE_CODE (top))
14506 {
14507 case BIT_AND_EXPR:
14508 /* Bitwise and provides a power of two multiple. If the mask is
14509 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14510 if (!integer_pow2p (bottom))
14511 return false;
14512 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14513 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14514
14515 case MULT_EXPR:
14516	 /* If the multiplication can wrap we cannot recurse further unless
14517	    the bottom is a power of two, in which case wrapping does not
14518	    matter. */
14519 if (!nowrap
14520 && !TYPE_OVERFLOW_UNDEFINED (type)
14521 && !integer_pow2p (bottom))
14522 return false;
14523 if (TREE_CODE (bottom) == INTEGER_CST)
14524 {
14525 op1 = TREE_OPERAND (top, 0);
14526 op2 = TREE_OPERAND (top, 1);
14527 if (TREE_CODE (op1) == INTEGER_CST)
14528 std::swap (a&: op1, b&: op2);
14529 if (TREE_CODE (op2) == INTEGER_CST)
14530 {
14531 if (multiple_of_p (type, top: op2, bottom, nowrap))
14532 return true;
14533 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14534 if (multiple_of_p (type, top: bottom, bottom: op2, nowrap))
14535 {
14536 widest_int w = wi::sdiv_trunc (x: wi::to_widest (t: bottom),
14537 y: wi::to_widest (t: op2));
14538 if (wi::fits_to_tree_p (x: w, TREE_TYPE (bottom)))
14539 {
14540 op2 = wide_int_to_tree (TREE_TYPE (bottom), cst: w);
14541 return multiple_of_p (type, top: op1, bottom: op2, nowrap);
14542 }
14543 }
14544 return multiple_of_p (type, top: op1, bottom, nowrap);
14545 }
14546 }
14547 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14548 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14549
14550 case LSHIFT_EXPR:
14551 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14552 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14553 {
14554 op1 = TREE_OPERAND (top, 1);
14555 if (wi::to_widest (t: op1) < TYPE_PRECISION (type))
14556 {
14557 wide_int mul_op
14558 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (t: op1);
14559 return multiple_of_p (type,
14560 top: wide_int_to_tree (type, cst: mul_op), bottom,
14561 nowrap);
14562 }
14563 }
14564 return false;
14565
14566 case MINUS_EXPR:
14567 case PLUS_EXPR:
14568	 /* If the addition or subtraction can wrap we cannot recurse further
14569	    unless bottom is a power of two, in which case wrapping does not
14570	    matter. */
14571 if (!nowrap
14572 && !TYPE_OVERFLOW_UNDEFINED (type)
14573 && !integer_pow2p (bottom))
14574 return false;
14575
14576 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14577	 unsigned type. For example, (X / 3) + 0xfffffffd is a multiple of 3,
14578 but 0xfffffffd is not. */
14579 op1 = TREE_OPERAND (top, 1);
14580 if (TREE_CODE (top) == PLUS_EXPR
14581 && nowrap
14582 && TYPE_UNSIGNED (type)
14583 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14584 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14585
14586	 /* It is impossible to prove precisely whether op0 +- op1 is a multiple
14587	    of bottom, so be conservative here and check whether both op0 and op1
14588	    are multiples of bottom. Note we check the second operand first
14589	    since it's usually simpler. */
14590 return (multiple_of_p (type, top: op1, bottom, nowrap)
14591 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14592
14593 CASE_CONVERT:
14594	 /* Can't handle conversions from non-integral or wider integral types. */
14595 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14596 || (TYPE_PRECISION (type)
14597 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14598 return false;
14599 /* NOWRAP only extends to operations in the outermost type so
14600 make sure to strip it off here. */
14601 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14602 TREE_OPERAND (top, 0), bottom, nowrap: false);
14603
14604 case SAVE_EXPR:
14605 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14606
14607 case COND_EXPR:
14608 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14609 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14610
14611 case INTEGER_CST:
14612 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14613 return false;
14614 return wi::multiple_of_p (x: wi::to_widest (t: top), y: wi::to_widest (t: bottom),
14615 sgn: SIGNED);
14616
14617 case SSA_NAME:
14618 if (TREE_CODE (bottom) == INTEGER_CST
14619 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14620 && gimple_code (g: stmt) == GIMPLE_ASSIGN)
14621 {
14622 enum tree_code code = gimple_assign_rhs_code (gs: stmt);
14623
14624 /* Check for special cases to see if top is defined as multiple
14625 of bottom:
14626
14627	 top = X & ~(bottom - 1); bottom is a power of 2
14628
14629 or
14630
14631 Y = X % bottom
14632 top = X - Y. */
14633 if (code == BIT_AND_EXPR
14634 && (op2 = gimple_assign_rhs2 (gs: stmt)) != NULL_TREE
14635 && TREE_CODE (op2) == INTEGER_CST
14636 && integer_pow2p (bottom)
14637 && wi::multiple_of_p (x: wi::to_widest (t: op2),
14638 y: wi::to_widest (t: bottom), sgn: SIGNED))
14639 return true;
14640
14641 op1 = gimple_assign_rhs1 (gs: stmt);
14642 if (code == MINUS_EXPR
14643 && (op2 = gimple_assign_rhs2 (gs: stmt)) != NULL_TREE
14644 && TREE_CODE (op2) == SSA_NAME
14645 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14646 && gimple_code (g: stmt) == GIMPLE_ASSIGN
14647 && (code = gimple_assign_rhs_code (gs: stmt)) == TRUNC_MOD_EXPR
14648 && operand_equal_p (arg0: op1, arg1: gimple_assign_rhs1 (gs: stmt), flags: 0)
14649 && operand_equal_p (arg0: bottom, arg1: gimple_assign_rhs2 (gs: stmt), flags: 0))
14650 return true;
14651 }
14652
14653 /* fall through */
14654
14655 default:
14656 if (POLY_INT_CST_P (top) && poly_int_tree_p (t: bottom))
14657 return multiple_p (a: wi::to_poly_widest (t: top),
14658 b: wi::to_poly_widest (t: bottom));
14659
14660 return false;
14661 }
14662}
14663
14664/* Return true if expression X cannot be (or contain) a NaN or infinity.
14665 This function returns true for integer expressions, and returns
14666 false if uncertain. */
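/* For example, ABS_EXPR <x> is known finite exactly when x is, and a
   FLOAT_EXPR (an integer-to-floating conversion) is treated as always
   producing a finite value.  */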
14667
14668bool
14669tree_expr_finite_p (const_tree x)
14670{
14671 machine_mode mode = element_mode (x);
14672 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14673 return true;
14674 switch (TREE_CODE (x))
14675 {
14676 case REAL_CST:
14677 return real_isfinite (TREE_REAL_CST_PTR (x));
14678 case COMPLEX_CST:
14679 return tree_expr_finite_p (TREE_REALPART (x))
14680 && tree_expr_finite_p (TREE_IMAGPART (x));
14681 case FLOAT_EXPR:
14682 return true;
14683 case ABS_EXPR:
14684 case CONVERT_EXPR:
14685 case NON_LVALUE_EXPR:
14686 case NEGATE_EXPR:
14687 case SAVE_EXPR:
14688 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14689 case MIN_EXPR:
14690 case MAX_EXPR:
14691 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14692 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14693 case COND_EXPR:
14694 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14695 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14696 case CALL_EXPR:
14697 switch (get_call_combined_fn (x))
14698 {
14699 CASE_CFN_FABS:
14700 CASE_CFN_FABS_FN:
14701 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14702 CASE_CFN_FMAX:
14703 CASE_CFN_FMAX_FN:
14704 CASE_CFN_FMIN:
14705 CASE_CFN_FMIN_FN:
14706 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14707 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14708 default:
14709 return false;
14710 }
14711
14712 default:
14713 return false;
14714 }
14715}
14716
14717/* Return true if expression X evaluates to an infinity.
14718 This function returns false for integer expressions. */
14719
14720bool
14721tree_expr_infinite_p (const_tree x)
14722{
14723 if (!HONOR_INFINITIES (x))
14724 return false;
14725 switch (TREE_CODE (x))
14726 {
14727 case REAL_CST:
14728 return real_isinf (TREE_REAL_CST_PTR (x));
14729 case ABS_EXPR:
14730 case NEGATE_EXPR:
14731 case NON_LVALUE_EXPR:
14732 case SAVE_EXPR:
14733 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14734 case COND_EXPR:
14735 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14736 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14737 default:
14738 return false;
14739 }
14740}
14741
14742/* Return true if expression X could evaluate to an infinity.
14743 This function returns false for integer expressions, and returns
14744 true if uncertain. */
14745
14746bool
14747tree_expr_maybe_infinite_p (const_tree x)
14748{
14749 if (!HONOR_INFINITIES (x))
14750 return false;
14751 switch (TREE_CODE (x))
14752 {
14753 case REAL_CST:
14754 return real_isinf (TREE_REAL_CST_PTR (x));
14755 case FLOAT_EXPR:
14756 return false;
14757 case ABS_EXPR:
14758 case NEGATE_EXPR:
14759 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14760 case COND_EXPR:
14761 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14762 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14763 default:
14764 return true;
14765 }
14766}
14767
14768/* Return true if expression X evaluates to a signaling NaN.
14769 This function returns false for integer expressions. */
14770
14771bool
14772tree_expr_signaling_nan_p (const_tree x)
14773{
14774 if (!HONOR_SNANS (x))
14775 return false;
14776 switch (TREE_CODE (x))
14777 {
14778 case REAL_CST:
14779 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14780 case NON_LVALUE_EXPR:
14781 case SAVE_EXPR:
14782 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14783 case COND_EXPR:
14784 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14785 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14786 default:
14787 return false;
14788 }
14789}
14790
14791/* Return true if expression X could evaluate to a signaling NaN.
14792 This function returns false for integer expressions, and returns
14793 true if uncertain. */
14794
14795bool
14796tree_expr_maybe_signaling_nan_p (const_tree x)
14797{
14798 if (!HONOR_SNANS (x))
14799 return false;
14800 switch (TREE_CODE (x))
14801 {
14802 case REAL_CST:
14803 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14804 case FLOAT_EXPR:
14805 return false;
14806 case ABS_EXPR:
14807 case CONVERT_EXPR:
14808 case NEGATE_EXPR:
14809 case NON_LVALUE_EXPR:
14810 case SAVE_EXPR:
14811 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14812 case MIN_EXPR:
14813 case MAX_EXPR:
14814 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14815 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14816 case COND_EXPR:
14817 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14818 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14819 case CALL_EXPR:
14820 switch (get_call_combined_fn (x))
14821 {
14822 CASE_CFN_FABS:
14823 CASE_CFN_FABS_FN:
14824 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14825 CASE_CFN_FMAX:
14826 CASE_CFN_FMAX_FN:
14827 CASE_CFN_FMIN:
14828 CASE_CFN_FMIN_FN:
14829 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14830 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14831 default:
14832 return true;
14833 }
14834 default:
14835 return true;
14836 }
14837}
14838
14839/* Return true if expression X evaluates to a NaN.
14840 This function returns false for integer expressions. */
14841
14842bool
14843tree_expr_nan_p (const_tree x)
14844{
14845 if (!HONOR_NANS (x))
14846 return false;
14847 switch (TREE_CODE (x))
14848 {
14849 case REAL_CST:
14850 return real_isnan (TREE_REAL_CST_PTR (x));
14851 case NON_LVALUE_EXPR:
14852 case SAVE_EXPR:
14853 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14854 case COND_EXPR:
14855 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14856 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14857 default:
14858 return false;
14859 }
14860}
14861
14862/* Return true if expression X could evaluate to a NaN.
14863 This function returns false for integer expressions, and returns
14864 true if uncertain. */
14865
14866bool
14867tree_expr_maybe_nan_p (const_tree x)
14868{
14869 if (!HONOR_NANS (x))
14870 return false;
14871 switch (TREE_CODE (x))
14872 {
14873 case REAL_CST:
14874 return real_isnan (TREE_REAL_CST_PTR (x));
14875 case FLOAT_EXPR:
14876 return false;
14877 case PLUS_EXPR:
14878 case MINUS_EXPR:
14879 case MULT_EXPR:
14880 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14881 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14882 case ABS_EXPR:
14883 case CONVERT_EXPR:
14884 case NEGATE_EXPR:
14885 case NON_LVALUE_EXPR:
14886 case SAVE_EXPR:
14887 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14888 case MIN_EXPR:
14889 case MAX_EXPR:
14890 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14891 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14892 case COND_EXPR:
14893 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14894 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14895 case CALL_EXPR:
14896 switch (get_call_combined_fn (x))
14897 {
14898 CASE_CFN_FABS:
14899 CASE_CFN_FABS_FN:
14900 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14901 CASE_CFN_FMAX:
14902 CASE_CFN_FMAX_FN:
14903 CASE_CFN_FMIN:
14904 CASE_CFN_FMIN_FN:
14905 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14906 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14907 default:
14908 return true;
14909 }
14910 default:
14911 return true;
14912 }
14913}
14914
14915/* Return true if expression X could evaluate to -0.0.
14916 This function returns true if uncertain. */
14917
14918bool
14919tree_expr_maybe_real_minus_zero_p (const_tree x)
14920{
14921 if (!HONOR_SIGNED_ZEROS (x))
14922 return false;
14923 switch (TREE_CODE (x))
14924 {
14925 case REAL_CST:
14926 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14927 case INTEGER_CST:
14928 case FLOAT_EXPR:
14929 case ABS_EXPR:
14930 return false;
14931 case NON_LVALUE_EXPR:
14932 case SAVE_EXPR:
14933 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14934 case COND_EXPR:
14935 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14936 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14937 case CALL_EXPR:
14938 switch (get_call_combined_fn (x))
14939 {
14940 CASE_CFN_FABS:
14941 CASE_CFN_FABS_FN:
14942 return false;
14943 default:
14944 break;
14945 }
14946 default:
14947 break;
14948 }
14949 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14950 * but currently those predicates require tree and not const_tree. */
14951 return true;
14952}
14953
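/* The macro below deliberately poisons direct recursive calls to
   tree_expr_nonnegative_warnv_p in this part of the file; RECURSE must be
   used instead so that each nested query passes an incremented DEPTH and the
   recursion depth limit is enforced.  */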
14954#define tree_expr_nonnegative_warnv_p(X, Y) \
14955 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14956
14957#define RECURSE(X) \
14958 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14959
14960/* Return true if CODE or TYPE is known to be non-negative. */
14961
14962static bool
14963tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14964{
14965 if (!VECTOR_TYPE_P (type)
14966 && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14967 && truth_value_p (code))
14968 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14969	 have a signed:1 type (where the values are -1 and 0). */
14970 return true;
14971 return false;
14972}
14973
14974/* Return true if (CODE OP0) is known to be non-negative. If the return
14975 value is based on the assumption that signed overflow is undefined,
14976 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14977 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14978
14979bool
14980tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14981 bool *strict_overflow_p, int depth)
14982{
14983 if (TYPE_UNSIGNED (type))
14984 return true;
14985
14986 switch (code)
14987 {
14988 case ABS_EXPR:
14989 /* We can't return 1 if flag_wrapv is set because
14990 ABS_EXPR<INT_MIN> = INT_MIN. */
14991 if (!ANY_INTEGRAL_TYPE_P (type))
14992 return true;
14993 if (TYPE_OVERFLOW_UNDEFINED (type))
14994 {
14995 *strict_overflow_p = true;
14996 return true;
14997 }
14998 break;
14999
15000 case NON_LVALUE_EXPR:
15001 case FLOAT_EXPR:
15002 case FIX_TRUNC_EXPR:
15003 return RECURSE (op0);
15004
15005 CASE_CONVERT:
15006 {
15007 tree inner_type = TREE_TYPE (op0);
15008 tree outer_type = type;
15009
15010 if (SCALAR_FLOAT_TYPE_P (outer_type))
15011 {
15012 if (SCALAR_FLOAT_TYPE_P (inner_type))
15013 return RECURSE (op0);
15014 if (INTEGRAL_TYPE_P (inner_type))
15015 {
15016 if (TYPE_UNSIGNED (inner_type))
15017 return true;
15018 return RECURSE (op0);
15019 }
15020 }
15021 else if (INTEGRAL_TYPE_P (outer_type))
15022 {
15023 if (SCALAR_FLOAT_TYPE_P (inner_type))
15024 return RECURSE (op0);
15025 if (INTEGRAL_TYPE_P (inner_type))
15026 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15027 && TYPE_UNSIGNED (inner_type);
15028 }
15029 }
15030 break;
15031
15032 default:
15033 return tree_simple_nonnegative_warnv_p (code, type);
15034 }
15035
15036	 /* We don't know the sign of `t', so be conservative and return false. */
15037 return false;
15038}
15039
15040/* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15041 value is based on the assumption that signed overflow is undefined,
15042 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15043 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15044
15045bool
15046tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15047 tree op1, bool *strict_overflow_p,
15048 int depth)
15049{
15050 if (TYPE_UNSIGNED (type))
15051 return true;
15052
15053 switch (code)
15054 {
15055 case POINTER_PLUS_EXPR:
15056 case PLUS_EXPR:
15057 if (FLOAT_TYPE_P (type))
15058 return RECURSE (op0) && RECURSE (op1);
15059
15060 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15061 both unsigned and at least 2 bits shorter than the result. */
15062 if (TREE_CODE (type) == INTEGER_TYPE
15063 && TREE_CODE (op0) == NOP_EXPR
15064 && TREE_CODE (op1) == NOP_EXPR)
15065 {
15066 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15067 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15068 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15069 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15070 {
15071 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15072 TYPE_PRECISION (inner2)) + 1;
15073 return prec < TYPE_PRECISION (type);
15074 }
15075 }
15076 break;
15077
15078 case MULT_EXPR:
15079 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15080 {
15081 /* x * x is always non-negative for floating point x
15082 or without overflow. */
15083 if (operand_equal_p (arg0: op0, arg1: op1, flags: 0)
15084 || (RECURSE (op0) && RECURSE (op1)))
15085 {
15086 if (ANY_INTEGRAL_TYPE_P (type)
15087 && TYPE_OVERFLOW_UNDEFINED (type))
15088 *strict_overflow_p = true;
15089 return true;
15090 }
15091 }
15092
15093 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15094	 both unsigned and the sum of their precisions is less than that of the result. */
15095 if (TREE_CODE (type) == INTEGER_TYPE
15096 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15097 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15098 {
15099 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15100 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15101 : TREE_TYPE (op0);
15102 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15103 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15104 : TREE_TYPE (op1);
15105
15106 bool unsigned0 = TYPE_UNSIGNED (inner0);
15107 bool unsigned1 = TYPE_UNSIGNED (inner1);
15108
15109 if (TREE_CODE (op0) == INTEGER_CST)
15110 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15111
15112 if (TREE_CODE (op1) == INTEGER_CST)
15113 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15114
15115 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15116 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15117 {
15118 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15119 ? tree_int_cst_min_precision (op0, UNSIGNED)
15120 : TYPE_PRECISION (inner0);
15121
15122 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15123 ? tree_int_cst_min_precision (op1, UNSIGNED)
15124 : TYPE_PRECISION (inner1);
15125
15126 return precision0 + precision1 < TYPE_PRECISION (type);
15127 }
15128 }
15129 return false;
15130
15131 case BIT_AND_EXPR:
15132 return RECURSE (op0) || RECURSE (op1);
15133
15134 case MAX_EXPR:
15135 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
15136 things. */
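      /* E.g. if op0 may be a NaN, MAX_EXPR may end up yielding op1, so knowing
         only that op0 is non-negative proves nothing; require both.  */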
15137 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
15138 return RECURSE (op0) && RECURSE (op1);
15139 return RECURSE (op0) || RECURSE (op1);
15140
15141 case BIT_IOR_EXPR:
15142 case BIT_XOR_EXPR:
15143 case MIN_EXPR:
15144 case RDIV_EXPR:
15145 case TRUNC_DIV_EXPR:
15146 case CEIL_DIV_EXPR:
15147 case FLOOR_DIV_EXPR:
15148 case ROUND_DIV_EXPR:
15149 return RECURSE (op0) && RECURSE (op1);
15150
15151 case TRUNC_MOD_EXPR:
15152 return RECURSE (op0);
15153
15154 case FLOOR_MOD_EXPR:
15155 return RECURSE (op1);
15156
15157 case CEIL_MOD_EXPR:
15158 case ROUND_MOD_EXPR:
15159 default:
15160 return tree_simple_nonnegative_warnv_p (code, type);
15161 }
15162
15163 /* We don't know sign of `t', so be conservative and return false. */
15164 return false;
15165}
15166
15167/* Return true if T is known to be non-negative. If the return
15168 value is based on the assumption that signed overflow is undefined,
15169 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15170 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15171
15172bool
15173tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15174{
15175 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15176 return true;
15177
15178 switch (TREE_CODE (t))
15179 {
15180 case INTEGER_CST:
15181 return tree_int_cst_sgn (t) >= 0;
15182
15183 case REAL_CST:
15184 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15185
15186 case FIXED_CST:
15187 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15188
15189 case COND_EXPR:
15190 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15191
15192 case SSA_NAME:
15193 /* Limit the depth of recursion to avoid quadratic behavior.
15194 This is expected to catch almost all occurrences in practice.
15195 If this code misses important cases that unbounded recursion
15196 would not, passes that need this information could be revised
15197 to provide it through dataflow propagation. */
15198 return (!name_registered_for_update_p (t)
15199 && depth < param_max_ssa_name_query_depth
15200 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
15201 strict_overflow_p, depth));
15202
15203 default:
15204 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15205 }
15206}
15207
15208/* Return true if T is known to be non-negative. If the return
15209 value is based on the assumption that signed overflow is undefined,
15210 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15211 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15212
15213bool
15214tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
15215 bool *strict_overflow_p, int depth)
15216{
15217 switch (fn)
15218 {
15219 CASE_CFN_ACOS:
15220 CASE_CFN_ACOS_FN:
15221 CASE_CFN_ACOSH:
15222 CASE_CFN_ACOSH_FN:
15223 CASE_CFN_CABS:
15224 CASE_CFN_CABS_FN:
15225 CASE_CFN_COSH:
15226 CASE_CFN_COSH_FN:
15227 CASE_CFN_ERFC:
15228 CASE_CFN_ERFC_FN:
15229 CASE_CFN_EXP:
15230 CASE_CFN_EXP_FN:
15231 CASE_CFN_EXP10:
15232 CASE_CFN_EXP2:
15233 CASE_CFN_EXP2_FN:
15234 CASE_CFN_FABS:
15235 CASE_CFN_FABS_FN:
15236 CASE_CFN_FDIM:
15237 CASE_CFN_FDIM_FN:
15238 CASE_CFN_HYPOT:
15239 CASE_CFN_HYPOT_FN:
15240 CASE_CFN_POW10:
15241 CASE_CFN_FFS:
15242 CASE_CFN_PARITY:
15243 CASE_CFN_POPCOUNT:
15244 CASE_CFN_CLZ:
15245 CASE_CFN_CLRSB:
15246 case CFN_BUILT_IN_BSWAP16:
15247 case CFN_BUILT_IN_BSWAP32:
15248 case CFN_BUILT_IN_BSWAP64:
15249 case CFN_BUILT_IN_BSWAP128:
15250 /* Always true. */
15251 return true;
15252
15253 CASE_CFN_SQRT:
15254 CASE_CFN_SQRT_FN:
15255 /* sqrt(-0.0) is -0.0. */
15256 if (!HONOR_SIGNED_ZEROS (type))
15257 return true;
15258 return RECURSE (arg0);
15259
15260 CASE_CFN_ASINH:
15261 CASE_CFN_ASINH_FN:
15262 CASE_CFN_ATAN:
15263 CASE_CFN_ATAN_FN:
15264 CASE_CFN_ATANH:
15265 CASE_CFN_ATANH_FN:
15266 CASE_CFN_CBRT:
15267 CASE_CFN_CBRT_FN:
15268 CASE_CFN_CEIL:
15269 CASE_CFN_CEIL_FN:
15270 CASE_CFN_ERF:
15271 CASE_CFN_ERF_FN:
15272 CASE_CFN_EXPM1:
15273 CASE_CFN_EXPM1_FN:
15274 CASE_CFN_FLOOR:
15275 CASE_CFN_FLOOR_FN:
15276 CASE_CFN_FMOD:
15277 CASE_CFN_FMOD_FN:
15278 CASE_CFN_FREXP:
15279 CASE_CFN_FREXP_FN:
15280 CASE_CFN_ICEIL:
15281 CASE_CFN_IFLOOR:
15282 CASE_CFN_IRINT:
15283 CASE_CFN_IROUND:
15284 CASE_CFN_LCEIL:
15285 CASE_CFN_LDEXP:
15286 CASE_CFN_LFLOOR:
15287 CASE_CFN_LLCEIL:
15288 CASE_CFN_LLFLOOR:
15289 CASE_CFN_LLRINT:
15290 CASE_CFN_LLRINT_FN:
15291 CASE_CFN_LLROUND:
15292 CASE_CFN_LLROUND_FN:
15293 CASE_CFN_LRINT:
15294 CASE_CFN_LRINT_FN:
15295 CASE_CFN_LROUND:
15296 CASE_CFN_LROUND_FN:
15297 CASE_CFN_MODF:
15298 CASE_CFN_MODF_FN:
15299 CASE_CFN_NEARBYINT:
15300 CASE_CFN_NEARBYINT_FN:
15301 CASE_CFN_RINT:
15302 CASE_CFN_RINT_FN:
15303 CASE_CFN_ROUND:
15304 CASE_CFN_ROUND_FN:
15305 CASE_CFN_ROUNDEVEN:
15306 CASE_CFN_ROUNDEVEN_FN:
15307 CASE_CFN_SCALB:
15308 CASE_CFN_SCALBLN:
15309 CASE_CFN_SCALBLN_FN:
15310 CASE_CFN_SCALBN:
15311 CASE_CFN_SCALBN_FN:
15312 CASE_CFN_SIGNBIT:
15313 CASE_CFN_SIGNIFICAND:
15314 CASE_CFN_SINH:
15315 CASE_CFN_SINH_FN:
15316 CASE_CFN_TANH:
15317 CASE_CFN_TANH_FN:
15318 CASE_CFN_TRUNC:
15319 CASE_CFN_TRUNC_FN:
15320 /* True if the 1st argument is nonnegative. */
15321 return RECURSE (arg0);
15322
15323 CASE_CFN_FMAX:
15324 CASE_CFN_FMAX_FN:
15325 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
15326 things. In the presence of sNaNs, we're only guaranteed to be
15327 non-negative if both operands are non-negative. In the presence
15328 of qNaNs, we're non-negative if either operand is non-negative
15329 and can't be a qNaN, or if both operands are non-negative. */
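         E.g. fmax (x, qNaN) yields x, so a non-negative x that cannot itself
         be a qNaN already guarantees a non-negative result.  */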
15330 if (tree_expr_maybe_signaling_nan_p (arg0)
15331 || tree_expr_maybe_signaling_nan_p (arg1))
15332 return RECURSE (arg0) && RECURSE (arg1);
15333 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
15334 || RECURSE (arg1))
15335 : (RECURSE (arg1)
15336 && !tree_expr_maybe_nan_p (arg1));
15337
15338 CASE_CFN_FMIN:
15339 CASE_CFN_FMIN_FN:
15340 /* True if the 1st AND 2nd arguments are nonnegative. */
15341 return RECURSE (arg0) && RECURSE (arg1);
15342
15343 CASE_CFN_COPYSIGN:
15344 CASE_CFN_COPYSIGN_FN:
15345 /* True if the 2nd argument is nonnegative. */
15346 return RECURSE (arg1);
15347
15348 CASE_CFN_POWI:
15349 /* True if the 1st argument is nonnegative or the second
15350 argument is an even integer. */
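      /* E.g. __builtin_powi (-3.0, 4) is 81.0, which is non-negative even
         though the base is negative.  */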
15351 if (TREE_CODE (arg1) == INTEGER_CST
15352 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15353 return true;
15354 return RECURSE (arg0);
15355
15356 CASE_CFN_POW:
15357 CASE_CFN_POW_FN:
15358 /* True if the 1st argument is nonnegative or the second
15359 argument is an even integer valued real. */
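      /* E.g. pow (-2.0, 2.0) is 4.0; an even integral exponent makes the
         result non-negative regardless of the sign of the base.  */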
15360 if (TREE_CODE (arg1) == REAL_CST)
15361 {
15362 REAL_VALUE_TYPE c;
15363 HOST_WIDE_INT n;
15364
15365 c = TREE_REAL_CST (arg1);
15366 n = real_to_integer (&c);
15367 if ((n & 1) == 0)
15368 {
15369 REAL_VALUE_TYPE cint;
15370 real_from_integer (&cint, VOIDmode, n, SIGNED);
15371 if (real_identical (&c, &cint))
15372 return true;
15373 }
15374 }
15375 return RECURSE (arg0);
15376
15377 default:
15378 break;
15379 }
15380 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
15381}
15382
15383/* Return true if T is known to be non-negative. If the return
15384 value is based on the assumption that signed overflow is undefined,
15385 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15386 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15387
15388static bool
15389tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15390{
15391 enum tree_code code = TREE_CODE (t);
15392 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15393 return true;
15394
15395 switch (code)
15396 {
15397 case TARGET_EXPR:
15398 {
15399 tree temp = TARGET_EXPR_SLOT (t);
15400 t = TARGET_EXPR_INITIAL (t);
15401
15402 /* If the initializer is non-void, then it's a normal expression
15403 that will be assigned to the slot. */
15404 if (!VOID_TYPE_P (TREE_TYPE (t)))
15405 return RECURSE (t);
15406
15407 /* Otherwise, the initializer sets the slot in some way. One common
15408 way is an assignment statement at the end of the initializer. */
15409 while (1)
15410 {
15411 if (TREE_CODE (t) == BIND_EXPR)
15412 t = expr_last (BIND_EXPR_BODY (t));
15413 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15414 || TREE_CODE (t) == TRY_CATCH_EXPR)
15415 t = expr_last (TREE_OPERAND (t, 0));
15416 else if (TREE_CODE (t) == STATEMENT_LIST)
15417 t = expr_last (t);
15418 else
15419 break;
15420 }
15421 if (TREE_CODE (t) == MODIFY_EXPR
15422 && TREE_OPERAND (t, 0) == temp)
15423 return RECURSE (TREE_OPERAND (t, 1));
15424
15425 return false;
15426 }
15427
15428 case CALL_EXPR:
15429 {
15430 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15431 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15432
15433 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15434 get_call_combined_fn (t),
15435 arg0,
15436 arg1,
15437 strict_overflow_p, depth);
15438 }
15439 case COMPOUND_EXPR:
15440 case MODIFY_EXPR:
15441 return RECURSE (TREE_OPERAND (t, 1));
15442
15443 case BIND_EXPR:
15444 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15445
15446 case SAVE_EXPR:
15447 return RECURSE (TREE_OPERAND (t, 0));
15448
15449 default:
15450 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15451 }
15452}
15453
15454#undef RECURSE
15455#undef tree_expr_nonnegative_warnv_p
15456
15457/* Return true if T is known to be non-negative. If the return
15458 value is based on the assumption that signed overflow is undefined,
15459 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15460 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15461
15462bool
15463tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15464{
15465 enum tree_code code;
15466 if (t == error_mark_node)
15467 return false;
15468
15469 code = TREE_CODE (t);
15470 switch (TREE_CODE_CLASS (code))
15471 {
15472 case tcc_binary:
15473 case tcc_comparison:
15474 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15475 TREE_TYPE (t),
15476 TREE_OPERAND (t, 0),
15477 TREE_OPERAND (t, 1),
15478 strict_overflow_p, depth);
15479
15480 case tcc_unary:
15481 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15482 TREE_TYPE (t),
15483 TREE_OPERAND (t, 0),
15484 strict_overflow_p, depth);
15485
15486 case tcc_constant:
15487 case tcc_declaration:
15488 case tcc_reference:
15489 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15490
15491 default:
15492 break;
15493 }
15494
15495 switch (code)
15496 {
15497 case TRUTH_AND_EXPR:
15498 case TRUTH_OR_EXPR:
15499 case TRUTH_XOR_EXPR:
15500 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15501 TREE_TYPE (t),
15502 TREE_OPERAND (t, 0),
15503 TREE_OPERAND (t, 1),
15504 strict_overflow_p, depth);
15505 case TRUTH_NOT_EXPR:
15506 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15507 TREE_TYPE (t),
15508 TREE_OPERAND (t, 0),
15509 strict_overflow_p, depth);
15510
15511 case COND_EXPR:
15512 case CONSTRUCTOR:
15513 case OBJ_TYPE_REF:
15514 case ADDR_EXPR:
15515 case WITH_SIZE_EXPR:
15516 case SSA_NAME:
15517 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15518
15519 default:
15520 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15521 }
15522}
15523
15524/* Return true if `t' is known to be non-negative. Handle warnings
15525 about undefined signed overflow. */
15526
15527bool
15528tree_expr_nonnegative_p (tree t)
15529{
15530 bool ret, strict_overflow_p;
15531
15532 strict_overflow_p = false;
15533 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15534 if (strict_overflow_p)
15535 fold_overflow_warning (("assuming signed overflow does not occur when "
15536 "determining that expression is always "
15537 "non-negative"),
15538 WARN_STRICT_OVERFLOW_MISC);
15539 return ret;
15540}
15541
15542
15543/* Return true when (CODE OP0) is an address and is known to be nonzero.
15544 For floating point we further ensure that T is not denormal.
15545 Similar logic is present in nonzero_address in rtlanal.h.
15546
15547 If the return value is based on the assumption that signed overflow
15548 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15549 change *STRICT_OVERFLOW_P. */
15550
15551bool
15552tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15553 bool *strict_overflow_p)
15554{
15555 switch (code)
15556 {
15557 case ABS_EXPR:
15558 return tree_expr_nonzero_warnv_p (op0,
15559 strict_overflow_p);
15560
15561 case NOP_EXPR:
15562 {
15563 tree inner_type = TREE_TYPE (op0);
15564 tree outer_type = type;
15565
15566 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15567 && tree_expr_nonzero_warnv_p (op0,
15568 strict_overflow_p));
15569 }
15570 break;
15571
15572 case NON_LVALUE_EXPR:
15573 return tree_expr_nonzero_warnv_p (op0,
15574 strict_overflow_p);
15575
15576 default:
15577 break;
15578 }
15579
15580 return false;
15581}
15582
15583/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15584 For floating point we further ensure that T is not denormal.
15585 Similar logic is present in nonzero_address in rtlanal.h.
15586
15587 If the return value is based on the assumption that signed overflow
15588 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15589 change *STRICT_OVERFLOW_P. */
15590
15591bool
15592tree_binary_nonzero_warnv_p (enum tree_code code,
15593 tree type,
15594 tree op0,
15595 tree op1, bool *strict_overflow_p)
15596{
15597 bool sub_strict_overflow_p;
15598 switch (code)
15599 {
15600 case POINTER_PLUS_EXPR:
15601 case PLUS_EXPR:
15602 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15603 {
15604 /* In the presence of negative values it is hard
15605 to say anything. */
15606 sub_strict_overflow_p = false;
15607 if (!tree_expr_nonnegative_warnv_p (op0,
15608 &sub_strict_overflow_p)
15609 || !tree_expr_nonnegative_warnv_p (op1,
15610 &sub_strict_overflow_p))
15611 return false;
15612 /* One of operands must be positive and the other non-negative. */
15613 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15614 overflows, on a twos-complement machine the sum of two
15615 nonnegative numbers can never be zero. */
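	  /* E.g. for 32-bit int, two values in [0, INT_MAX] sum to at most
	     2 * INT_MAX, which is below 2^32, so a wrapped sum of zero would
	     require both addends to be zero.  */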
15616 return (tree_expr_nonzero_warnv_p (op0,
15617 strict_overflow_p)
15618 || tree_expr_nonzero_warnv_p (op1,
15619 strict_overflow_p));
15620 }
15621 break;
15622
15623 case MULT_EXPR:
15624 if (TYPE_OVERFLOW_UNDEFINED (type))
15625 {
15626 if (tree_expr_nonzero_warnv_p (op0,
15627 strict_overflow_p)
15628 && tree_expr_nonzero_warnv_p (op1,
15629 strict_overflow_p))
15630 {
15631 *strict_overflow_p = true;
15632 return true;
15633 }
15634 }
15635 break;
15636
15637 case MIN_EXPR:
15638 sub_strict_overflow_p = false;
15639 if (tree_expr_nonzero_warnv_p (op0,
15640 &sub_strict_overflow_p)
15641 && tree_expr_nonzero_warnv_p (op1,
15642 &sub_strict_overflow_p))
15643 {
15644 if (sub_strict_overflow_p)
15645 *strict_overflow_p = true;
15646 }
15647 break;
15648
15649 case MAX_EXPR:
15650 sub_strict_overflow_p = false;
15651 if (tree_expr_nonzero_warnv_p (op0,
15652 &sub_strict_overflow_p))
15653 {
15654 if (sub_strict_overflow_p)
15655 *strict_overflow_p = true;
15656
15657 /* When both operands are nonzero, then MAX must be too. */
15658 if (tree_expr_nonzero_warnv_p (op1,
15659 strict_overflow_p))
15660 return true;
15661
15662 /* MAX where operand 0 is positive is positive. */
15663 return tree_expr_nonnegative_warnv_p (op0,
15664 strict_overflow_p);
15665 }
15666 /* MAX where operand 1 is positive is positive. */
15667 else if (tree_expr_nonzero_warnv_p (op1,
15668 &sub_strict_overflow_p)
15669 && tree_expr_nonnegative_warnv_p (op1,
15670 &sub_strict_overflow_p))
15671 {
15672 if (sub_strict_overflow_p)
15673 *strict_overflow_p = true;
15674 return true;
15675 }
15676 break;
15677
15678 case BIT_IOR_EXPR:
15679 return (tree_expr_nonzero_warnv_p (op1,
15680 strict_overflow_p)
15681 || tree_expr_nonzero_warnv_p (op0,
15682 strict_overflow_p));
15683
15684 default:
15685 break;
15686 }
15687
15688 return false;
15689}
15690
15691/* Return true when T is an address and is known to be nonzero.
15692 For floating point we further ensure that T is not denormal.
15693 Similar logic is present in nonzero_address in rtlanal.h.
15694
15695 If the return value is based on the assumption that signed overflow
15696 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15697 change *STRICT_OVERFLOW_P. */
15698
15699bool
15700tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15701{
15702 bool sub_strict_overflow_p;
15703 switch (TREE_CODE (t))
15704 {
15705 case INTEGER_CST:
15706 return !integer_zerop (t);
15707
15708 case ADDR_EXPR:
15709 {
15710 tree base = TREE_OPERAND (t, 0);
15711
15712 if (!DECL_P (base))
15713 base = get_base_address (base);
15714
15715 if (base && TREE_CODE (base) == TARGET_EXPR)
15716 base = TARGET_EXPR_SLOT (base);
15717
15718 if (!base)
15719 return false;
15720
15721 /* For objects in symbol table check if we know they are non-zero.
15722 Don't do anything for variables and functions before symtab is built;
15723 it is quite possible that they will be declared weak later. */
15724 int nonzero_addr = maybe_nonzero_address (base);
15725 if (nonzero_addr >= 0)
15726 return nonzero_addr;
15727
15728 /* Constants are never weak. */
15729 if (CONSTANT_CLASS_P (base))
15730 return true;
15731
15732 return false;
15733 }
15734
15735 case COND_EXPR:
15736 sub_strict_overflow_p = false;
15737 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15738 &sub_strict_overflow_p)
15739 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15740 &sub_strict_overflow_p))
15741 {
15742 if (sub_strict_overflow_p)
15743 *strict_overflow_p = true;
15744 return true;
15745 }
15746 break;
15747
15748 case SSA_NAME:
15749 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15750 break;
15751 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15752
15753 default:
15754 break;
15755 }
15756 return false;
15757}
15758
15759#define integer_valued_real_p(X) \
15760 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15761
15762#define RECURSE(X) \
15763 ((integer_valued_real_p) (X, depth + 1))
15764
15765/* Return true if the floating point result of (CODE OP0) has an
15766 integer value. We also allow +Inf, -Inf and NaN to be considered
15767 integer values. Return false for signaling NaN.
15768
15769 DEPTH is the current nesting depth of the query. */
15770
15771bool
15772integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15773{
15774 switch (code)
15775 {
15776 case FLOAT_EXPR:
15777 return true;
15778
15779 case ABS_EXPR:
15780 return RECURSE (op0);
15781
15782 CASE_CONVERT:
15783 {
15784 tree type = TREE_TYPE (op0);
15785 if (TREE_CODE (type) == INTEGER_TYPE)
15786 return true;
15787 if (SCALAR_FLOAT_TYPE_P (type))
15788 return RECURSE (op0);
15789 break;
15790 }
15791
15792 default:
15793 break;
15794 }
15795 return false;
15796}
15797
15798/* Return true if the floating point result of (CODE OP0 OP1) has an
15799 integer value. We also allow +Inf, -Inf and NaN to be considered
15800 integer values. Return false for signaling NaN.
15801
15802 DEPTH is the current nesting depth of the query. */
15803
15804bool
15805integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15806{
15807 switch (code)
15808 {
15809 case PLUS_EXPR:
15810 case MINUS_EXPR:
15811 case MULT_EXPR:
15812 case MIN_EXPR:
15813 case MAX_EXPR:
15814 return RECURSE (op0) && RECURSE (op1);
15815
15816 default:
15817 break;
15818 }
15819 return false;
15820}
15821
15822/* Return true if the floating point result of calling FNDECL with arguments
15823 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15824 considered integer values. Return false for signaling NaN. If FNDECL
15825 takes fewer than 2 arguments, the remaining ARGn are null.
15826
15827 DEPTH is the current nesting depth of the query. */
15828
15829bool
15830integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15831{
15832 switch (fn)
15833 {
15834 CASE_CFN_CEIL:
15835 CASE_CFN_CEIL_FN:
15836 CASE_CFN_FLOOR:
15837 CASE_CFN_FLOOR_FN:
15838 CASE_CFN_NEARBYINT:
15839 CASE_CFN_NEARBYINT_FN:
15840 CASE_CFN_RINT:
15841 CASE_CFN_RINT_FN:
15842 CASE_CFN_ROUND:
15843 CASE_CFN_ROUND_FN:
15844 CASE_CFN_ROUNDEVEN:
15845 CASE_CFN_ROUNDEVEN_FN:
15846 CASE_CFN_TRUNC:
15847 CASE_CFN_TRUNC_FN:
15848 return true;
15849
15850 CASE_CFN_FMIN:
15851 CASE_CFN_FMIN_FN:
15852 CASE_CFN_FMAX:
15853 CASE_CFN_FMAX_FN:
15854 return RECURSE (arg0) && RECURSE (arg1);
15855
15856 default:
15857 break;
15858 }
15859 return false;
15860}
15861
15862/* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15863 has an integer value. We also allow +Inf, -Inf and NaN to be
15864 considered integer values. Return false for signaling NaN.
15865
15866 DEPTH is the current nesting depth of the query. */
15867
15868bool
15869integer_valued_real_single_p (tree t, int depth)
15870{
15871 switch (TREE_CODE (t))
15872 {
15873 case REAL_CST:
15874 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15875
15876 case COND_EXPR:
15877 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15878
15879 case SSA_NAME:
15880 /* Limit the depth of recursion to avoid quadratic behavior.
15881 This is expected to catch almost all occurrences in practice.
15882 If this code misses important cases that unbounded recursion
15883 would not, passes that need this information could be revised
15884 to provide it through dataflow propagation. */
15885 return (!name_registered_for_update_p (t)
15886 && depth < param_max_ssa_name_query_depth
15887 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15888 depth));
15889
15890 default:
15891 break;
15892 }
15893 return false;
15894}
15895
15896/* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15897 has an integer value. We also allow +Inf, -Inf and NaN to be
15898 considered integer values. Return false for signaling NaN.
15899
15900 DEPTH is the current nesting depth of the query. */
15901
15902static bool
15903integer_valued_real_invalid_p (tree t, int depth)
15904{
15905 switch (TREE_CODE (t))
15906 {
15907 case COMPOUND_EXPR:
15908 case MODIFY_EXPR:
15909 case BIND_EXPR:
15910 return RECURSE (TREE_OPERAND (t, 1));
15911
15912 case SAVE_EXPR:
15913 return RECURSE (TREE_OPERAND (t, 0));
15914
15915 default:
15916 break;
15917 }
15918 return false;
15919}
15920
15921#undef RECURSE
15922#undef integer_valued_real_p
15923
15924/* Return true if the floating point expression T has an integer value.
15925 We also allow +Inf, -Inf and NaN to be considered integer values.
15926 Return false for signaling NaN.
15927
15928 DEPTH is the current nesting depth of the query. */
15929
15930bool
15931integer_valued_real_p (tree t, int depth)
15932{
15933 if (t == error_mark_node)
15934 return false;
15935
15936 STRIP_ANY_LOCATION_WRAPPER (t);
15937
15938 tree_code code = TREE_CODE (t);
15939 switch (TREE_CODE_CLASS (code))
15940 {
15941 case tcc_binary:
15942 case tcc_comparison:
15943 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15944 TREE_OPERAND (t, 1), depth);
15945
15946 case tcc_unary:
15947 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15948
15949 case tcc_constant:
15950 case tcc_declaration:
15951 case tcc_reference:
15952 return integer_valued_real_single_p (t, depth);
15953
15954 default:
15955 break;
15956 }
15957
15958 switch (code)
15959 {
15960 case COND_EXPR:
15961 case SSA_NAME:
15962 return integer_valued_real_single_p (t, depth);
15963
15964 case CALL_EXPR:
15965 {
15966 tree arg0 = (call_expr_nargs (t) > 0
15967 ? CALL_EXPR_ARG (t, 0)
15968 : NULL_TREE);
15969 tree arg1 = (call_expr_nargs (t) > 1
15970 ? CALL_EXPR_ARG (t, 1)
15971 : NULL_TREE);
15972 return integer_valued_real_call_p (get_call_combined_fn (t),
15973 arg0, arg1, depth);
15974 }
15975
15976 default:
15977 return integer_valued_real_invalid_p (t, depth);
15978 }
15979}
15980
15981/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15982 attempt to fold the expression to a constant without modifying TYPE,
15983 OP0 or OP1.
15984
15985 If the expression could be simplified to a constant, then return
15986 the constant. If the expression would not be simplified to a
15987 constant, then return NULL_TREE. */
15988
15989tree
15990fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15991{
15992 tree tem = fold_binary (code, type, op0, op1);
15993 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15994}
15995
15996/* Given the components of a unary expression CODE, TYPE and OP0,
15997 attempt to fold the expression to a constant without modifying
15998 TYPE or OP0.
15999
16000 If the expression could be simplified to a constant, then return
16001 the constant. If the expression would not be simplified to a
16002 constant, then return NULL_TREE. */
16003
16004tree
16005fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16006{
16007 tree tem = fold_unary (code, type, op0);
16008 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16009}
16010
16011/* If EXP represents referencing an element in a constant string
16012 (either via pointer arithmetic or array indexing), return the
16013 tree representing the value accessed, otherwise return NULL. */
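/* For example, a read such as "abc"[1] or *("abc" + 1) can typically be
   folded to the character constant 'b'.  */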
16014
16015tree
16016fold_read_from_constant_string (tree exp)
16017{
16018 if ((INDIRECT_REF_P (exp)
16019 || TREE_CODE (exp) == ARRAY_REF)
16020 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16021 {
16022 tree exp1 = TREE_OPERAND (exp, 0);
16023 tree index;
16024 tree string;
16025 location_t loc = EXPR_LOCATION (exp);
16026
16027 if (INDIRECT_REF_P (exp))
16028 string = string_constant (exp1, &index, NULL, NULL);
16029 else
16030 {
16031 tree low_bound = array_ref_low_bound (exp);
16032 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16033
16034 /* Optimize the special-case of a zero lower bound.
16035
16036 We convert the low_bound to sizetype to avoid some problems
16037 with constant folding. (E.g. suppose the lower bound is 1,
16038 and its mode is QI. Without the conversion, (ARRAY
16039 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16040 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16041 if (! integer_zerop (low_bound))
16042 index = size_diffop_loc (loc, index,
16043 fold_convert_loc (loc, sizetype, low_bound));
16044
16045 string = exp1;
16046 }
16047
16048 scalar_int_mode char_mode;
16049 if (string
16050 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16051 && TREE_CODE (string) == STRING_CST
16052 && tree_fits_uhwi_p (index)
16053 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16054 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
16055 &char_mode)
16056 && GET_MODE_SIZE (char_mode) == 1)
16057 return build_int_cst_type (TREE_TYPE (exp),
16058 (TREE_STRING_POINTER (string)
16059 [TREE_INT_CST_LOW (index)]));
16060 }
16061 return NULL;
16062}
16063
16064/* Folds a read from vector element at IDX of vector ARG. */
16065
16066tree
16067fold_read_from_vector (tree arg, poly_uint64 idx)
16068{
16069 unsigned HOST_WIDE_INT i;
16070 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
16071 && known_ge (idx, 0u)
16072 && idx.is_constant (&i))
16073 {
16074 if (TREE_CODE (arg) == VECTOR_CST)
16075 return VECTOR_CST_ELT (arg, i);
16076 else if (TREE_CODE (arg) == CONSTRUCTOR)
16077 {
16078 if (CONSTRUCTOR_NELTS (arg)
16079 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
16080 return NULL_TREE;
16081 if (i >= CONSTRUCTOR_NELTS (arg))
16082 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
16083 return CONSTRUCTOR_ELT (arg, i)->value;
16084 }
16085 }
16086 return NULL_TREE;
16087}
16088
16089/* Return the tree for neg (ARG0) when ARG0 is known to be either
16090 an integer constant, real, or fixed-point constant.
16091
16092 TYPE is the type of the result. */
16093
16094static tree
16095fold_negate_const (tree arg0, tree type)
16096{
16097 tree t = NULL_TREE;
16098
16099 switch (TREE_CODE (arg0))
16100 {
16101 case REAL_CST:
16102 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16103 break;
16104
16105 case FIXED_CST:
16106 {
16107 FIXED_VALUE_TYPE f;
16108 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16109 &(TREE_FIXED_CST (arg0)), NULL,
16110 TYPE_SATURATING (type));
16111 t = build_fixed (type, f);
16112 /* Propagate overflow flags. */
16113 if (overflow_p | TREE_OVERFLOW (arg0))
16114 TREE_OVERFLOW (t) = 1;
16115 break;
16116 }
16117
16118 default:
16119 if (poly_int_tree_p (arg0))
16120 {
16121 wi::overflow_type overflow;
16122 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
16123 t = force_fit_type (type, res, 1,
16124 (overflow && ! TYPE_UNSIGNED (type))
16125 || TREE_OVERFLOW (arg0));
16126 break;
16127 }
16128
16129 gcc_unreachable ();
16130 }
16131
16132 return t;
16133}
16134
16135/* Return the tree for abs (ARG0) when ARG0 is known to be either
16136 an integer constant or real constant.
16137
16138 TYPE is the type of the result. */
16139
16140tree
16141fold_abs_const (tree arg0, tree type)
16142{
16143 tree t = NULL_TREE;
16144
16145 switch (TREE_CODE (arg0))
16146 {
16147 case INTEGER_CST:
16148 {
16149 /* If the value is unsigned or non-negative, then the absolute value
16150 is the same as the ordinary value. */
16151 wide_int val = wi::to_wide (arg0);
16152 wi::overflow_type overflow = wi::OVF_NONE;
16153 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
16154 ;
16155
16156 /* If the value is negative, then the absolute value is
16157 its negation. */
16158 else
16159 val = wi::neg (val, &overflow);
16160
16161 /* Force to the destination type, set TREE_OVERFLOW for signed
16162 TYPE only. */
16163 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
16164 }
16165 break;
16166
16167 case REAL_CST:
16168 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16169 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16170 else
16171 t = arg0;
16172 break;
16173
16174 default:
16175 gcc_unreachable ();
16176 }
16177
16178 return t;
16179}
16180
16181/* Return the tree for not (ARG0) when ARG0 is known to be an integer
16182 constant. TYPE is the type of the result. */
16183
16184static tree
16185fold_not_const (const_tree arg0, tree type)
16186{
16187 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16188
16189 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
16190}
16191
16192/* Given CODE, a relational operator, the target type, TYPE and two
16193 constant operands OP0 and OP1, return the result of the
16194 relational operation. If the result is not a compile time
16195 constant, then return NULL_TREE. */
16196
16197static tree
16198fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16199{
16200 int result, invert;
16201
16202 /* From here on, the only cases we handle are when the result is
16203 known to be a constant. */
16204
16205 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16206 {
16207 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16208 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16209
16210 /* Handle the cases where either operand is a NaN. */
16211 if (real_isnan (c0) || real_isnan (c1))
16212 {
16213 switch (code)
16214 {
16215 case EQ_EXPR:
16216 case ORDERED_EXPR:
16217 result = 0;
16218 break;
16219
16220 case NE_EXPR:
16221 case UNORDERED_EXPR:
16222 case UNLT_EXPR:
16223 case UNLE_EXPR:
16224 case UNGT_EXPR:
16225 case UNGE_EXPR:
16226 case UNEQ_EXPR:
16227 result = 1;
16228 break;
16229
16230 case LT_EXPR:
16231 case LE_EXPR:
16232 case GT_EXPR:
16233 case GE_EXPR:
16234 case LTGT_EXPR:
16235 if (flag_trapping_math)
16236 return NULL_TREE;
16237 result = 0;
16238 break;
16239
16240 default:
16241 gcc_unreachable ();
16242 }
16243
16244 return constant_boolean_node (result, type);
16245 }
16246
16247 return constant_boolean_node (real_compare (code, c0, c1), type);
16248 }
16249
16250 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16251 {
16252 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16253 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16254 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16255 }
16256
16257 /* Handle equality/inequality of complex constants. */
16258 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16259 {
16260 tree rcond = fold_relational_const (code, type,
16261 TREE_REALPART (op0),
16262 TREE_REALPART (op1));
16263 tree icond = fold_relational_const (code, type,
16264 TREE_IMAGPART (op0),
16265 TREE_IMAGPART (op1));
16266 if (code == EQ_EXPR)
16267 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16268 else if (code == NE_EXPR)
16269 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16270 else
16271 return NULL_TREE;
16272 }
16273
16274 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16275 {
16276 if (!VECTOR_TYPE_P (type))
16277 {
16278 /* Have vector comparison with scalar boolean result. */
16279 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
16280 && known_eq (VECTOR_CST_NELTS (op0),
16281 VECTOR_CST_NELTS (op1)));
16282 unsigned HOST_WIDE_INT nunits;
16283 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
16284 return NULL_TREE;
16285 for (unsigned i = 0; i < nunits; i++)
16286 {
16287 tree elem0 = VECTOR_CST_ELT (op0, i);
16288 tree elem1 = VECTOR_CST_ELT (op1, i);
16289 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
16290 if (tmp == NULL_TREE)
16291 return NULL_TREE;
16292 if (integer_zerop (tmp))
16293 return constant_boolean_node (code == NE_EXPR, type);
16294 }
16295 return constant_boolean_node (code == EQ_EXPR, type);
16296 }
16297 tree_vector_builder elts;
16298 if (!elts.new_binary_operation (type, op0, op1, false))
16299 return NULL_TREE;
16300 unsigned int count = elts.encoded_nelts ();
16301 for (unsigned i = 0; i < count; i++)
16302 {
16303 tree elem_type = TREE_TYPE (type);
16304 tree elem0 = VECTOR_CST_ELT (op0, i);
16305 tree elem1 = VECTOR_CST_ELT (op1, i);
16306
16307 tree tem = fold_relational_const (code, elem_type,
16308 elem0, elem1);
16309
16310 if (tem == NULL_TREE)
16311 return NULL_TREE;
16312
16313 elts.quick_push (build_int_cst (elem_type,
16314 integer_zerop (tem) ? 0 : -1));
16315 }
16316
16317 return elts.build ();
16318 }
16319
16320 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16321
16322 To compute GT, swap the arguments and do LT.
16323 To compute GE, do LT and invert the result.
16324 To compute LE, swap the arguments, do LT and invert the result.
16325 To compute NE, do EQ and invert the result.
16326
16327 Therefore, the code below must handle only EQ and LT. */
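  /* E.g. 3 >= 5 is evaluated by computing 3 < 5 and inverting the result.  */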
16328
16329 if (code == LE_EXPR || code == GT_EXPR)
16330 {
16331 std::swap (op0, op1);
16332 code = swap_tree_comparison (code);
16333 }
16334
16335 /* Note that it is safe to invert for real values here because we
16336 have already handled the one case that it matters. */
16337
16338 invert = 0;
16339 if (code == NE_EXPR || code == GE_EXPR)
16340 {
16341 invert = 1;
16342 code = invert_tree_comparison (code, false);
16343 }
16344
16345 /* Compute a result for LT or EQ if args permit;
16346 otherwise return NULL_TREE. */
16347 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16348 {
16349 if (code == EQ_EXPR)
16350 result = tree_int_cst_equal (op0, op1);
16351 else
16352 result = tree_int_cst_lt (op0, op1);
16353 }
16354 else
16355 return NULL_TREE;
16356
16357 if (invert)
16358 result ^= 1;
16359 return constant_boolean_node (result, type);
16360}
16361
16362/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16363 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16364 itself. */
16365
16366tree
16367fold_build_cleanup_point_expr (tree type, tree expr)
16368{
16369 /* If the expression does not have side effects then we don't have to wrap
16370 it with a cleanup point expression. */
16371 if (!TREE_SIDE_EFFECTS (expr))
16372 return expr;
16373
16374 /* If the expression is a return, check the expression inside the return
16375 and, for a modify expression inside the return, its right-hand side.
16376 If either has no side effects, we don't need to wrap the expression in
16377 a cleanup point expression. Note we don't check the left-hand side of
16378 the modify because it should always be a return decl. */
16379 if (TREE_CODE (expr) == RETURN_EXPR)
16380 {
16381 tree op = TREE_OPERAND (expr, 0);
16382 if (!op || !TREE_SIDE_EFFECTS (op))
16383 return expr;
16384 op = TREE_OPERAND (op, 1);
16385 if (!TREE_SIDE_EFFECTS (op))
16386 return expr;
16387 }
16388
16389 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16390}
16391
16392/* Given a pointer value OP0 and a type TYPE, return a simplified version
16393 of an indirection through OP0, or NULL_TREE if no simplification is
16394 possible. */
16395
16396tree
16397fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16398{
16399 tree sub = op0;
16400 tree subtype;
16401 poly_uint64 const_op01;
16402
16403 STRIP_NOPS (sub);
16404 subtype = TREE_TYPE (sub);
16405 if (!POINTER_TYPE_P (subtype)
16406 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16407 return NULL_TREE;
16408
16409 if (TREE_CODE (sub) == ADDR_EXPR)
16410 {
16411 tree op = TREE_OPERAND (sub, 0);
16412 tree optype = TREE_TYPE (op);
16413
16414 /* *&CONST_DECL -> to the value of the const decl. */
16415 if (TREE_CODE (op) == CONST_DECL)
16416 return DECL_INITIAL (op);
16417 /* *&p => p; make sure to handle *&"str"[cst] here. */
16418 if (type == optype)
16419 {
16420 tree fop = fold_read_from_constant_string (op);
16421 if (fop)
16422 return fop;
16423 else
16424 return op;
16425 }
16426 /* *(foo *)&fooarray => fooarray[0] */
16427 else if (TREE_CODE (optype) == ARRAY_TYPE
16428 && type == TREE_TYPE (optype)
16429 && (!in_gimple_form
16430 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16431 {
16432 tree type_domain = TYPE_DOMAIN (optype);
16433 tree min_val = size_zero_node;
16434 if (type_domain && TYPE_MIN_VALUE (type_domain))
16435 min_val = TYPE_MIN_VALUE (type_domain);
16436 if (in_gimple_form
16437 && TREE_CODE (min_val) != INTEGER_CST)
16438 return NULL_TREE;
16439 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16440 NULL_TREE, NULL_TREE);
16441 }
16442 /* *(foo *)&complexfoo => __real__ complexfoo */
16443 else if (TREE_CODE (optype) == COMPLEX_TYPE
16444 && type == TREE_TYPE (optype))
16445 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16446 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16447 else if (VECTOR_TYPE_P (optype)
16448 && type == TREE_TYPE (optype))
16449 {
16450 tree part_width = TYPE_SIZE (type);
16451 tree index = bitsize_int (0);
16452 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16453 index);
16454 }
16455 }
16456
16457 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16458 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16459 {
16460 tree op00 = TREE_OPERAND (sub, 0);
16461 tree op01 = TREE_OPERAND (sub, 1);
16462
16463 STRIP_NOPS (op00);
16464 if (TREE_CODE (op00) == ADDR_EXPR)
16465 {
16466 tree op00type;
16467 op00 = TREE_OPERAND (op00, 0);
16468 op00type = TREE_TYPE (op00);
16469
16470 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16471 if (VECTOR_TYPE_P (op00type)
16472 && type == TREE_TYPE (op00type)
16473 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16474 but we want to treat offsets with MSB set as negative.
16475 For the code below negative offsets are invalid and
16476 TYPE_SIZE of the element is something unsigned, so
16477 check whether op01 fits into poly_int64, which implies
16478 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16479 then just use poly_uint64 because we want to treat the
16480 value as unsigned. */
16481 && tree_fits_poly_int64_p (op01))
16482 {
16483 tree part_width = TYPE_SIZE (type);
16484 poly_uint64 max_offset
16485 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16486 * TYPE_VECTOR_SUBPARTS (op00type));
16487 if (known_lt (const_op01, max_offset))
16488 {
16489 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16490 return fold_build3_loc (loc,
16491 BIT_FIELD_REF, type, op00,
16492 part_width, index);
16493 }
16494 }
16495 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16496 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16497 && type == TREE_TYPE (op00type))
16498 {
16499 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16500 const_op01))
16501 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16502 }
16503 /* ((foo *)&fooarray)[1] => fooarray[1] */
16504 else if (TREE_CODE (op00type) == ARRAY_TYPE
16505 && type == TREE_TYPE (op00type))
16506 {
16507 tree type_domain = TYPE_DOMAIN (op00type);
16508 tree min_val = size_zero_node;
16509 if (type_domain && TYPE_MIN_VALUE (type_domain))
16510 min_val = TYPE_MIN_VALUE (type_domain);
16511 poly_uint64 type_size, index;
16512 if (poly_int_tree_p (min_val)
16513 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16514 && multiple_p (const_op01, type_size, &index))
16515 {
16516 poly_offset_int off = index + wi::to_poly_offset (min_val);
16517 op01 = wide_int_to_tree (sizetype, off);
16518 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16519 NULL_TREE, NULL_TREE);
16520 }
16521 }
16522 }
16523 }
16524
16525 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16526 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16527 && type == TREE_TYPE (TREE_TYPE (subtype))
16528 && (!in_gimple_form
16529 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16530 {
16531 tree type_domain;
16532 tree min_val = size_zero_node;
16533 sub = build_fold_indirect_ref_loc (loc, sub);
16534 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16535 if (type_domain && TYPE_MIN_VALUE (type_domain))
16536 min_val = TYPE_MIN_VALUE (type_domain);
16537 if (in_gimple_form
16538 && TREE_CODE (min_val) != INTEGER_CST)
16539 return NULL_TREE;
16540 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16541 NULL_TREE);
16542 }
16543
16544 return NULL_TREE;
16545}
16546
16547/* Builds an expression for an indirection through T, simplifying some
16548 cases. */
16549
16550tree
16551build_fold_indirect_ref_loc (location_t loc, tree t)
16552{
16553 tree type = TREE_TYPE (TREE_TYPE (t));
16554 tree sub = fold_indirect_ref_1 (loc, type, t);
16555
16556 if (sub)
16557 return sub;
16558
16559 return build1_loc (loc, INDIRECT_REF, type, t);
16560}
16561
16562/* Given an INDIRECT_REF T, return either T or a simplified version. */
16563
16564tree
16565fold_indirect_ref_loc (location_t loc, tree t)
16566{
16567 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16568
16569 if (sub)
16570 return sub;
16571 else
16572 return t;
16573}
16574
16575/* Strip non-trapping, non-side-effecting tree nodes from an expression
16576 whose result is ignored. The type of the returned tree need not be
16577 the same as the original expression. */
16578
16579tree
16580fold_ignored_result (tree t)
16581{
16582 if (!TREE_SIDE_EFFECTS (t))
16583 return integer_zero_node;
16584
16585 for (;;)
16586 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16587 {
16588 case tcc_unary:
16589 t = TREE_OPERAND (t, 0);
16590 break;
16591
16592 case tcc_binary:
16593 case tcc_comparison:
16594 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16595 t = TREE_OPERAND (t, 0);
16596 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16597 t = TREE_OPERAND (t, 1);
16598 else
16599 return t;
16600 break;
16601
16602 case tcc_expression:
16603 switch (TREE_CODE (t))
16604 {
16605 case COMPOUND_EXPR:
16606 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16607 return t;
16608 t = TREE_OPERAND (t, 0);
16609 break;
16610
16611 case COND_EXPR:
16612 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16613 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16614 return t;
16615 t = TREE_OPERAND (t, 0);
16616 break;
16617
16618 default:
16619 return t;
16620 }
16621 break;
16622
16623 default:
16624 return t;
16625 }
16626}
16627
16628/* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
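/* E.g. rounding 10 up to a multiple of 8 yields 16; for a power-of-two
   divisor this is computed below as (10 + 7) & -8.  */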
16629
16630tree
16631round_up_loc (location_t loc, tree value, unsigned int divisor)
16632{
16633 tree div = NULL_TREE;
16634
16635 if (divisor == 1)
16636 return value;
16637
16638 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16639 have to do anything. Only do this when we are not given a const,
16640 because in that case, this check is more expensive than just
16641 doing it. */
16642 if (TREE_CODE (value) != INTEGER_CST)
16643 {
16644 div = build_int_cst (TREE_TYPE (value), divisor);
16645
16646 if (multiple_of_p (TREE_TYPE (value), value, div))
16647 return value;
16648 }
16649
16650 /* If divisor is a power of two, simplify this to bit manipulation. */
16651 if (pow2_or_zerop (divisor))
16652 {
16653 if (TREE_CODE (value) == INTEGER_CST)
16654 {
16655 wide_int val = wi::to_wide (value);
16656 bool overflow_p;
16657
16658 if ((val & (divisor - 1)) == 0)
16659 return value;
16660
16661 overflow_p = TREE_OVERFLOW (value);
16662 val += divisor - 1;
16663 val &= (int) -divisor;
16664 if (val == 0)
16665 overflow_p = true;
16666
16667 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16668 }
16669 else
16670 {
16671 tree t;
16672
16673 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16674 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16675 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16676 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16677 }
16678 }
16679 else
16680 {
16681 if (!div)
16682 div = build_int_cst (TREE_TYPE (value), divisor);
16683 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16684 value = size_binop_loc (loc, MULT_EXPR, value, div);
16685 }
16686
16687 return value;
16688}
16689
16690/* Likewise, but round down. */
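/* E.g. rounding 10 down to a multiple of 8 yields 10 & -8, i.e. 8.  */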
16691
16692tree
16693round_down_loc (location_t loc, tree value, int divisor)
16694{
16695 tree div = NULL_TREE;
16696
16697 gcc_assert (divisor > 0);
16698 if (divisor == 1)
16699 return value;
16700
16701 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16702 have to do anything. Only do this when we are not given a const,
16703 because in that case, this check is more expensive than just
16704 doing it. */
16705 if (TREE_CODE (value) != INTEGER_CST)
16706 {
16707 div = build_int_cst (TREE_TYPE (value), divisor);
16708
16709 if (multiple_of_p (TREE_TYPE (value), value, div))
16710 return value;
16711 }
16712
16713 /* If divisor is a power of two, simplify this to bit manipulation. */
16714 if (pow2_or_zerop (divisor))
16715 {
16716 tree t;
16717
16718 t = build_int_cst (TREE_TYPE (value), -divisor);
16719 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16720 }
16721 else
16722 {
16723 if (!div)
16724 div = build_int_cst (TREE_TYPE (value), divisor);
16725 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16726 value = size_binop_loc (loc, MULT_EXPR, value, div);
16727 }
16728
16729 return value;
16730}
16731
16732/* Returns the pointer to the base of the object addressed by EXP and
16733 extracts the information about the offset of the access, storing it
16734 to PBITPOS and POFFSET. */
16735
16736static tree
16737split_address_to_core_and_offset (tree exp,
16738 poly_int64 *pbitpos, tree *poffset)
16739{
16740 tree core;
16741 machine_mode mode;
16742 int unsignedp, reversep, volatilep;
16743 poly_int64 bitsize;
16744 location_t loc = EXPR_LOCATION (exp);
16745
16746 if (TREE_CODE (exp) == SSA_NAME)
16747 if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16748 if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16749 exp = gimple_assign_rhs1 (def);
16750
16751 if (TREE_CODE (exp) == ADDR_EXPR)
16752 {
16753 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16754 poffset, &mode, &unsignedp, &reversep,
16755 &volatilep);
16756 core = build_fold_addr_expr_loc (loc, core);
16757 }
16758 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16759 {
16760 core = TREE_OPERAND (exp, 0);
16761 STRIP_NOPS (core);
16762 *pbitpos = 0;
16763 *poffset = TREE_OPERAND (exp, 1);
16764 if (poly_int_tree_p (*poffset))
16765 {
16766 poly_offset_int tem
16767 = wi::sext (wi::to_poly_offset (*poffset),
16768 TYPE_PRECISION (TREE_TYPE (*poffset)));
16769 tem <<= LOG2_BITS_PER_UNIT;
16770 if (tem.to_shwi (pbitpos))
16771 *poffset = NULL_TREE;
16772 }
16773 }
16774 else
16775 {
16776 core = exp;
16777 *pbitpos = 0;
16778 *poffset = NULL_TREE;
16779 }
16780
16781 return core;
16782}
16783
16784/* Returns true if addresses of E1 and E2 differ by a constant, false
16785 otherwise. If they do, E1 - E2 is stored in *DIFF. */
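/* E.g. for char a[16], the addresses &a[5] and &a[2] differ by the
   constant 3.  */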
16786
16787bool
16788ptr_difference_const (tree e1, tree e2, poly_int64 *diff)
16789{
16790 tree core1, core2;
16791 poly_int64 bitpos1, bitpos2;
16792 tree toffset1, toffset2, tdiff, type;
16793
16794 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16795 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16796
16797 poly_int64 bytepos1, bytepos2;
16798 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16799 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16800 || !operand_equal_p (core1, core2, 0))
16801 return false;
16802
16803 if (toffset1 && toffset2)
16804 {
16805 type = TREE_TYPE (toffset1);
16806 if (type != TREE_TYPE (toffset2))
16807 toffset2 = fold_convert (type, toffset2);
16808
16809 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16810 if (!cst_and_fits_in_hwi (tdiff))
16811 return false;
16812
16813 *diff = int_cst_value (tdiff);
16814 }
16815 else if (toffset1 || toffset2)
16816 {
16817 /* If only one of the offsets is non-constant, the difference cannot
16818 be a constant. */
16819 return false;
16820 }
16821 else
16822 *diff = 0;
16823
16824 *diff += bytepos1 - bytepos2;
16825 return true;
16826}
16827
16828/* Return OFF converted to a pointer offset type suitable as offset for
16829 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16830tree
16831convert_to_ptrofftype_loc (location_t loc, tree off)
16832{
16833 if (ptrofftype_p (TREE_TYPE (off)))
16834 return off;
16835 return fold_convert_loc (loc, sizetype, off);
16836}
16837
16838/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16839tree
16840fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16841{
16842 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16843 ptr, convert_to_ptrofftype_loc (loc, off));
16844}
16845
16846/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16847tree
16848fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16849{
16850 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16851 ptr, size_int (off));
16852}
16853
16854/* Return a pointer to a NUL-terminated string containing the sequence
16855 of bytes corresponding to the representation of the object referred to
16856 by SRC (or a subsequence of such bytes within it if SRC is a reference
16857 to an initialized constant array plus some constant offset).
16858 Set *STRSIZE the number of bytes in the constant sequence including
16859 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16860 where A is the array that stores the constant sequence that SRC points
16861 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16862 need not point to a string or even an array of characters but may point
16863 to an object of any type. */
16864
16865const char *
16866getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16867{
16868 /* The offset into the array A storing the string, and A's byte size. */
16869 tree offset_node;
16870 tree mem_size;
16871
16872 if (strsize)
16873 *strsize = 0;
16874
16875 if (strsize)
16876 src = byte_representation (src, &offset_node, &mem_size, NULL);
16877 else
16878 src = string_constant (src, &offset_node, &mem_size, NULL);
16879 if (!src)
16880 return NULL;
16881
16882 unsigned HOST_WIDE_INT offset = 0;
16883 if (offset_node != NULL_TREE)
16884 {
16885 if (!tree_fits_uhwi_p (offset_node))
16886 return NULL;
16887 else
16888 offset = tree_to_uhwi (offset_node);
16889 }
16890
16891 if (!tree_fits_uhwi_p (mem_size))
16892 return NULL;
16893
16894 /* ARRAY_SIZE is the byte size of the array the constant sequence
16895 is stored in and equal to sizeof A. INIT_BYTES is the number
16896 of bytes in the constant sequence used to initialize the array,
16897 including any embedded NULs as well as the terminating NUL (for
16898 strings), but not including any trailing zeros/NULs past
16899 the terminating one appended implicitly to a string literal to
16900 zero out the remainder of the array it's stored in. For example,
16901 given:
16902 const char a[7] = "abc\0d";
16903 n = strlen (a + 1);
16904 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16905 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16906 is equal to strlen (A) + 1. */
16907 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16908 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16909 const char *string = TREE_STRING_POINTER (src);
16910
16911 /* Ideally this would turn into a gcc_checking_assert over time. */
16912 if (init_bytes > array_size)
16913 init_bytes = array_size;
16914
16915 if (init_bytes == 0 || offset >= array_size)
16916 return NULL;
16917
16918 if (strsize)
16919 {
16920 /* Compute and store the number of characters from the beginning
16921 of the substring at OFFSET to the end, including the terminating
16922 nul. Offsets past the initial length refer to null strings. */
16923 if (offset < init_bytes)
16924 *strsize = init_bytes - offset;
16925 else
16926 *strsize = 1;
16927 }
16928 else
16929 {
16930 tree eltype = TREE_TYPE (TREE_TYPE (src));
16931 /* Support only properly NUL-terminated single byte strings. */
16932 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16933 return NULL;
16934 if (string[init_bytes - 1] != '\0')
16935 return NULL;
16936 }
16937
16938 return offset < init_bytes ? string + offset : "";
16939}

/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR either doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}
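
/* Illustrative sketch of the two entry points above (arguments shown in
   source syntax; the actual arguments are trees that must fold to a
   constant address):  given
     static const char a[] = "hello";
   c_getstr on the address of a[2] returns a pointer to "llo", while
   getbyterep with a non-null STRSIZE additionally sets *STRSIZE to 4,
   i.e. the three remaining characters plus the terminating NUL.  */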

/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                          tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                         tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
                         tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                             TYPE_PRECISION (TREE_TYPE (t)),
                             TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
          if (wi::bit_and (nzbits1, nzbits2) == 0)
            return wi::bit_or (nzbits1, nzbits2);
        }
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree type = TREE_TYPE (t);
          wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
                                       TYPE_PRECISION (type));
          return wi::neg_p (arg1)
                 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
                 : wi::lshift (nzbits, arg1);
        }
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree type = TREE_TYPE (t);
          wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
                                       TYPE_PRECISION (type));
          return wi::neg_p (arg1)
                 ? wi::lshift (nzbits, -arg1)
                 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
        }
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
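
/* For illustration: if V is a variable about which nothing is known, the
   default case above yields an all-ones mask for it, so for the expression
   V & 0xff the result is 0xff, and for V << 4 the result is all-ones with
   the low 4 bits clear, while for an INTEGER_CST the result is simply the
   constant's own bit pattern.  */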

/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   TYPE is the type of comparison operands.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true if GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */

int
address_compare (tree_code code, tree type, tree op0, tree op1,
                 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
                 bool generic)
{
  if (TREE_CODE (op0) == SSA_NAME)
    op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
  if (TREE_CODE (op1) == SSA_NAME)
    op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
          || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
            || TREE_CODE (base0) == SSA_NAME
            || TREE_CODE (base0) == STRING_CST)
           && (DECL_P (base1)
               || TREE_CODE (base1) == SSA_NAME
               || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  /* Assume different STRING_CSTs with the same content will be
     merged.  */
  if (equal == 0
      && TREE_CODE (base0) == STRING_CST
      && TREE_CODE (base1) == STRING_CST
      && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
      && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
                 TREE_STRING_LENGTH (base0)) == 0)
    equal = 1;
  if (equal == 1)
    {
      if (code == EQ_EXPR
          || code == NE_EXPR
          /* If the offsets are equal we can ignore overflow.  */
          || known_eq (off0, off1)
          || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
          /* Or if we compare using pointers to decls or strings.  */
          || (POINTER_TYPE_P (type)
              && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
        return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 2;

  /* At this point we know (or assume) the two pointers point at
     different objects.  */
  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  /* Punt on non-zero offsets from functions.  */
  if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
      || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
    return 2;
  /* Or if the bases are neither decls nor string literals.  */
  if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
    return 2;
  if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
    return 2;
  /* For initializers, assume addresses of different functions are
     different.  */
  if (folding_initializer
      && TREE_CODE (base0) == FUNCTION_DECL
      && TREE_CODE (base1) == FUNCTION_DECL)
    return 0;

  /* Compute whether one address points to the start of one
     object and another one to the end of another one.  */
  poly_int64 size0 = 0, size1 = 0;
  if (TREE_CODE (base0) == STRING_CST)
    {
      if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
        equal = 2;
      else
        size0 = TREE_STRING_LENGTH (base0);
    }
  else if (TREE_CODE (base0) == FUNCTION_DECL)
    size0 = 1;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      if (!tree_fits_poly_int64_p (sz0))
        equal = 2;
      else
        size0 = tree_to_poly_int64 (sz0);
    }
  if (TREE_CODE (base1) == STRING_CST)
    {
      if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
        equal = 2;
      else
        size1 = TREE_STRING_LENGTH (base1);
    }
  else if (TREE_CODE (base1) == FUNCTION_DECL)
    size1 = 1;
  else
    {
      tree sz1 = DECL_SIZE_UNIT (base1);
      if (!tree_fits_poly_int64_p (sz1))
        equal = 2;
      else
        size1 = tree_to_poly_int64 (sz1);
    }
  if (equal == 0)
    {
      /* If one offset is pointing (or could be) to the beginning of one
         object and the other is pointing to one past the last byte of the
         other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
        equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
        equal = 2;
      /* If both offsets are the same, there are some cases we know are
         OK: either we know the offsets aren't zero, or we know both
         sizes are nonzero.  */
      if (equal == 2
          && known_eq (off0, off1)
          && (known_ne (off0, 0)
              || (known_ne (size0, 0) && known_ne (size1, 0))))
        equal = 0;
    }

  /* At this point, equal is 2 if either one or both pointers are out of
     bounds of their object, or one points to start of its object and the
     other points to end of its object.  This is unspecified behavior
     e.g. in C++.  Otherwise equal is 0.  */
  if (folding_cxx_constexpr && equal)
    return equal;

  /* When both pointers point to string literals, even when equal is 0,
     due to tail merging of string literals the pointers might be the same.  */
  if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
    {
      if (ioff0 < 0
          || ioff1 < 0
          || ioff0 > TREE_STRING_LENGTH (base0)
          || ioff1 > TREE_STRING_LENGTH (base1))
        return 2;

      /* If the bytes in the string literals starting at the pointers
         differ, the pointers need to be different.  */
      if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
                  TREE_STRING_POINTER (base1) + ioff1,
                  MIN (TREE_STRING_LENGTH (base0) - ioff0,
                       TREE_STRING_LENGTH (base1) - ioff1)) == 0)
        {
          HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
          if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
                      TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
                      ioffmin) == 0)
            /* If even the bytes in the string literal before the
               pointers are the same, the string literals could be
               tail merged.  */
            return 2;
        }
      return 0;
    }

  if (folding_cxx_constexpr)
    return 0;

  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other to one past end of another one.  */
  if (!INTEGRAL_TYPE_P (type))
    return 0;

  /* Assume that string literals can't be adjacent to variables
     (automatic or global).  */
  if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
    return 0;

  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  if (is_global_var (base0) != is_global_var (base1))
    return 0;

  return equal;
}
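
/* A minimal sketch of the contract above, with hypothetical decls:
   for distinct local arrays "int a[4], b[4];", comparing &a[1] with &b[2]
   finds different bases, so for EQ_EXPR/NE_EXPR the function returns 0
   (known unequal regardless of the offsets).  Comparing &a[1] with &a[2]
   finds the same base and returns 1, i.e. the comparison reduces to
   comparing OFF0 with OFF1 (here 4 and 8, assuming 4-byte int).  */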

/* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE.  */
tree
ctor_single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;
  tree elt = NULL_TREE;

  if (TREE_CODE (t) != CONSTRUCTOR)
    return NULL_TREE;
  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    if (!integer_zerop (ce->value) && !real_zerop (ce->value))
      {
        if (elt)
          return NULL_TREE;
        elt = ce->value;
      }
  return elt;
}
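
/* For example, for a CONSTRUCTOR representing { 0, 7, 0, 0 } the function
   above returns the INTEGER_CST 7, whereas for { 1, 2, 0 } or for an
   all-zero constructor it returns NULL_TREE.  */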

#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
                             tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
                                 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one, one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero, one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero, x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x, zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero, x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero, zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one, x);
}

namespace test_fold_vec_perm_cst {

/* Build a VECTOR_CST whose type corresponds to VMODE and whose
   encoding is given by NPATTERNS, NELTS_PER_PATTERN and STEP.
   Fill it with randomized elements, using rand() % THRESHOLD.  */

static tree
build_vec_cst_rand (machine_mode vmode, unsigned npatterns,
                    unsigned nelts_per_pattern,
                    int step = 0, bool natural_stepped = false,
                    int threshold = 100)
{
  tree inner_type = lang_hooks.types.type_for_mode (GET_MODE_INNER (vmode), 1);
  tree vectype = build_vector_type_for_mode (inner_type, vmode);
  tree_vector_builder builder (vectype, npatterns, nelts_per_pattern);

  // Fill a0 for each pattern
  for (unsigned i = 0; i < npatterns; i++)
    builder.quick_push (build_int_cst (inner_type, rand () % threshold));

  if (nelts_per_pattern == 1)
    return builder.build ();

  // Fill a1 for each pattern
  for (unsigned i = 0; i < npatterns; i++)
    {
      tree a1;
      if (natural_stepped)
        {
          tree a0 = builder[i];
          wide_int a0_val = wi::to_wide (a0);
          wide_int a1_val = a0_val + step;
          a1 = wide_int_to_tree (inner_type, a1_val);
        }
      else
        a1 = build_int_cst (inner_type, rand () % threshold);
      builder.quick_push (a1);
    }
  if (nelts_per_pattern == 2)
    return builder.build ();

  for (unsigned i = npatterns * 2; i < npatterns * nelts_per_pattern; i++)
    {
      tree prev_elem = builder[i - npatterns];
      wide_int prev_elem_val = wi::to_wide (prev_elem);
      wide_int val = prev_elem_val + step;
      builder.quick_push (wide_int_to_tree (inner_type, val));
    }

  return builder.build ();
}
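
/* Sketch of the encodings produced above (element values hypothetical):
   build_vec_cst_rand (vmode, 1, 3, 1, true) with a random a0 of 7 yields
   the naturally stepped sequence { 7, 8, 9, 10, ... }, while with
   NATURAL_STEPPED false a1 is also random, say 3, and only the elements
   from a1 onwards step: { 7, 3, 4, 5, ... }.  */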

/* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
   when result is VLA.  */

static void
validate_res (unsigned npatterns, unsigned nelts_per_pattern,
              tree res, tree *expected_res)
{
  /* Actual npatterns and encoded_elts in res may be less than expected due
     to canonicalization.  */
  ASSERT_TRUE (res != NULL_TREE);
  ASSERT_TRUE (VECTOR_CST_NPATTERNS (res) <= npatterns);
  ASSERT_TRUE (vector_cst_encoded_nelts (res) <= npatterns * nelts_per_pattern);

  for (unsigned i = 0; i < npatterns * nelts_per_pattern; i++)
    ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
}

/* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
   when the result is VLS.  */

static void
validate_res_vls (tree res, tree *expected_res, unsigned expected_nelts)
{
  ASSERT_TRUE (known_eq (VECTOR_CST_NELTS (res), expected_nelts));
  for (unsigned i = 0; i < expected_nelts; i++)
    ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
}

/* Helper routine to push multiple elements into BUILDER.  */
template<unsigned N>
static void builder_push_elems (vec_perm_builder& builder,
                                poly_uint64 (&elems)[N])
{
  for (unsigned i = 0; i < N; i++)
    builder.quick_push (elems[i]);
}

#define ARG0(index) vector_cst_elt (arg0, index)
#define ARG1(index) vector_cst_elt (arg1, index)

/* Test cases where result is VNx4SI and input vectors are V4SI.  */

static void
test_vnx4si_v4si (machine_mode vnx4si_mode, machine_mode v4si_mode)
{
  for (int i = 0; i < 10; i++)
    {
      /* Case 1:
         sel = { 0, 4, 1, 5, ... }
         res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1)  */
      {
        tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
        tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);

        tree inner_type
          = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
        tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);

        poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
        vec_perm_builder builder (res_len, 4, 1);
        poly_uint64 mask_elems[] = { 0, 4, 1, 5 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, res_len);
        tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
        validate_res (4, 1, res, expected_res);
      }

      /* Case 2: Same as case 1, but contains an out of bounds access which
         should wrap around.
         sel = { 0, 8, 4, 12, ... } (4, 1)
         res = { arg0[0], arg0[0], arg1[0], arg1[0], ... } (4, 1).  */
      {
        tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
        tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);

        tree inner_type
          = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
        tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);

        poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
        vec_perm_builder builder (res_len, 4, 1);
        poly_uint64 mask_elems[] = { 0, 8, 4, 12 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, res_len);
        tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG0(0), ARG1(0), ARG1(0) };
        validate_res (4, 1, res, expected_res);
      }
    }
}

/* Test cases where result is V4SI and input vectors are VNx4SI.  */

static void
test_v4si_vnx4si (machine_mode v4si_mode, machine_mode vnx4si_mode)
{
  for (int i = 0; i < 10; i++)
    {
      /* Case 1:
         sel = { 0, 1, 2, 3 }
         res = { arg0[0], arg0[1], arg0[2], arg0[3] }.  */
      {
        tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
        tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);

        tree inner_type
          = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
        tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);

        poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
        vec_perm_builder builder (res_len, 4, 1);
        poly_uint64 mask_elems[] = { 0, 1, 2, 3 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, res_len);
        tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2), ARG0(3) };
        validate_res_vls (res, expected_res, 4);
      }

      /* Case 2: Same as Case 1, but crossing input vector.
         sel = { 0, 2, 4, 6 }
         In this case, the index 4 is ambiguous since len = 4 + 4x.
         Since we cannot determine at compile time which vector to choose
         from, fold_vec_perm_cst should return NULL_TREE.  */
      {
        tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
        tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);

        tree inner_type
          = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
        tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);

        poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
        vec_perm_builder builder (res_len, 4, 1);
        poly_uint64 mask_elems[] = { 0, 2, 4, 6 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, res_len);
        const char *reason;
        tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel, &reason);

        ASSERT_TRUE (res == NULL_TREE);
        ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
      }
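
      /* Concretely, with len = 4 + 4x the selector element 4 selects
         arg1[0] when x == 0 but arg0[4] when x > 0, so it cannot be
         resolved to one input vector at compile time.  */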
    }
}

/* Test all input vectors.  */

static void
test_all_nunits (machine_mode vmode)
{
  /* Test with 10 different inputs.  */
  for (int i = 0; i < 10; i++)
    {
      tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
      tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
      poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

      /* Case 1: mask = { 0, ... } // (1, 1)
         res = { arg0[0], ... } // (1, 1)  */
      {
        vec_perm_builder builder (len, 1, 1);
        builder.quick_push (0);
        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
        tree expected_res[] = { ARG0(0) };
        validate_res (1, 1, res, expected_res);
      }

      /* Case 2: mask = { len, ... } // (1, 1)
         res = { arg1[0], ... } // (1, 1)  */
      {
        vec_perm_builder builder (len, 1, 1);
        builder.quick_push (len);
        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

        tree expected_res[] = { ARG1(0) };
        validate_res (1, 1, res, expected_res);
      }
    }
}

/* Test all vectors which contain at least 2 elements.  */

static void
test_nunits_min_2 (machine_mode vmode)
{
  for (int i = 0; i < 10; i++)
    {
      /* Case 1: mask = { 0, len, ... } // (2, 1)
         res = { arg0[0], arg1[0], ... } // (2, 1)  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 2, 1);
        poly_uint64 mask_elems[] = { 0, len };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG1(0) };
        validate_res (2, 1, res, expected_res);
      }

      /* Case 2: mask = { 0, len, 1, len+1, ... } // (2, 2)
         res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 2, 2);
        poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
        validate_res (2, 2, res, expected_res);
      }

      /* Case 4: mask = { 0, 0, 1, ... } // (1, 3)
         Test that the stepped sequence of the pattern selects from the
         same input pattern.  Since the input vectors have npatterns = 2
         and step (a2 - a1) = 1, step is not a multiple of npatterns in
         the input vector, so fold_vec_perm_cst should return NULL_TREE.  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 2, 3, 1, true);
        tree arg1 = build_vec_cst_rand (vmode, 2, 3, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 1, 3);
        poly_uint64 mask_elems[] = { 0, 0, 1 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        const char *reason;
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
                                      &reason);
        ASSERT_TRUE (res == NULL_TREE);
        ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
      }

      /* Case 5: mask = { len, 0, 1, ... } // (1, 3)
         Test that the stepped sequence of the pattern selects from arg0.
         res = { arg1[0], arg0[0], arg0[1], ... } // (1, 3)  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1, true);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 1, 3);
        poly_uint64 mask_elems[] = { len, 0, 1 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

        tree expected_res[] = { ARG1(0), ARG0(0), ARG0(1) };
        validate_res (1, 3, res, expected_res);
      }

      /* Case 6: PR111648 - a1 chooses base element from input vector arg.
         In this case ensure that arg has a natural stepped sequence
         to preserve arg's encoding.

         As a concrete example, consider:
         arg0: { -16, -9, -10, ... } // (1, 3)
         arg1: { -12, -5, -6, ... } // (1, 3)
         sel = { 0, len, len + 1, ... } // (1, 3)

         This will create res with the following encoding:
         res = { arg0[0], arg1[0], arg1[1], ... } // (1, 3)
             = { -16, -12, -5, ... }

         The step in the above encoding would be: (-5) - (-12) = 7,
         and hence res[3] would be computed as -5 + 7 = 2,
         instead of arg1[2], i.e., -6.
         Ensure that valid_mask_for_fold_vec_perm_cst returns false
         for this case.  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 1, 3);
        poly_uint64 mask_elems[] = { 0, len, len + 1 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        const char *reason;
        /* FIXME: It may happen that build_vec_cst_rand may build a natural
           stepped pattern, even if we didn't explicitly tell it to.  So
           folding may not always fail, but if it does, ensure that's because
           arg1 does not have a natural stepped sequence (and not due to some
           other reason).  */
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
                                      &reason);
        if (res == NULL_TREE)
          ASSERT_TRUE (!strcmp (reason, "not a natural stepped sequence"));
      }

      /* Case 7: Same as Case 6, except that arg1 contains a natural stepped
         sequence and thus folding should be valid for this case.  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1, true);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 1, 3);
        poly_uint64 mask_elems[] = { 0, len, len + 1 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG1(0), ARG1(1) };
        validate_res (1, 3, res, expected_res);
      }

      /* Case 8: Same as aarch64/sve/slp_3.c:
         arg0, arg1 are dup vectors.
         sel = { 0, len, 1, len+1, 2, len+2, ... } // (2, 3)
         So res = { arg0[0], arg1[0], ... } // (2, 1)

         In this case, since the input vectors are dup, only the first two
         elements per pattern in sel are considered significant.  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 1);
        tree arg1 = build_vec_cst_rand (vmode, 1, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 2, 3);
        poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG1(0) };
        validate_res (2, 1, res, expected_res);
      }
    }
}

/* Test all vectors which contain at least 4 elements.  */

static void
test_nunits_min_4 (machine_mode vmode)
{
  for (int i = 0; i < 10; i++)
    {
      /* Case 1: mask = { 0, len, 1, len+1, ... } // (4, 1)
         res: { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1)  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 4, 1);
        poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
        validate_res (4, 1, res, expected_res);
      }

      /* Case 2: sel = { 0, 1, 2, ... } // (1, 3)
         res: { arg0[0], arg0[1], arg0[2], ... } // (1, 3)  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
        poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (arg0_len, 1, 3);
        poly_uint64 mask_elems[] = { 0, 1, 2 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, arg0_len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
        tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2) };
        validate_res (1, 3, res, expected_res);
      }

      /* Case 3: sel = { len, len+1, len+2, ... } // (1, 3)
         res: { arg1[0], arg1[1], arg1[2], ... } // (1, 3)  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 1, 3);
        poly_uint64 mask_elems[] = { len, len + 1, len + 2 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
        tree expected_res[] = { ARG1(0), ARG1(1), ARG1(2) };
        validate_res (1, 3, res, expected_res);
      }

      /* Case 4:
         sel = { len, 0, 2, ... } // (1, 3)
         This should return NULL because the stepped sequence crosses the
         input vectors.  To see why, let's assume len = C + Cx:
         a1 = 0
         S = 2
         esel = arg0_len / sel_npatterns = C + Cx
         ae = 0 + (esel - 2) * S
            = 0 + (C + Cx - 2) * 2
            = 2(C-2) + 2Cx

         For C >= 4:
         Let q1 = a1 / arg0_len = 0 / (C + Cx) = 0
         Let qe = ae / arg0_len = (2(C-2) + 2Cx) / (C + Cx) = 1
         Since q1 != qe, we cross input vectors.
         So return NULL_TREE.  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
        poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (arg0_len, 1, 3);
        poly_uint64 mask_elems[] = { arg0_len, 0, 2 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, arg0_len);
        const char *reason;
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
                                      &reason);
        ASSERT_TRUE (res == NULL_TREE);
        ASSERT_TRUE (!strcmp (reason, "crossed input vectors"));
      }
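
      /* As a concrete instance of the above, for C == 4 and x == 0 the
         expanded selector is { 4, 0, 2, 4 }: the stepped part starts at
         index 0 in arg0 but its last element, 4, already selects arg1[0],
         so the stepped sequence crosses from arg0 into arg1.  */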

      /* Case 5: npatterns(arg0) = 4 > npatterns(sel) = 2
         mask = { 0, len, 1, len + 1, ... } // (2, 2)
         res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)

         Note that fold_vec_perm_cst will set
         res_npatterns = max(4, max(4, 2)) = 4
         However after canonicalizing, we will end up with shape (2, 2).  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 4, 1);
        tree arg1 = build_vec_cst_rand (vmode, 4, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 2, 2);
        poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
        tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
        validate_res (2, 2, res, expected_res);
      }

      /* Case 6: Test combination in sel, where one pattern is a dup and
         the other is a stepped sequence.
         sel = { 0, 0, 0, 1, 0, 2, ... } // (2, 3)
         res = { arg0[0], arg0[0], arg0[0],
                 arg0[1], arg0[0], arg0[2], ... } // (2, 3)  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 2, 3);
        poly_uint64 mask_elems[] = { 0, 0, 0, 1, 0, 2 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG0(0), ARG0(0),
                                ARG0(1), ARG0(0), ARG0(2) };
        validate_res (2, 3, res, expected_res);
      }

      /* Case 7: PR111048: Check that we set arg_npatterns correctly,
         when arg0, arg1 and sel have different number of patterns.
         arg0 is of shape (1, 1)
         arg1 is of shape (4, 1)
         sel is of shape (2, 3) = { 1, len, 2, len+1, 3, len+2, ... }

         In this case the pattern: { len, len+1, len+2, ... } chooses arg1.
         However,
         step = (len+2) - (len+1) = 1
         arg_npatterns = VECTOR_CST_NPATTERNS (arg1) = 4
         Since step is not a multiple of arg_npatterns,
         valid_mask_for_fold_vec_perm_cst should return false,
         and thus fold_vec_perm_cst should return NULL_TREE.  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 1);
        tree arg1 = build_vec_cst_rand (vmode, 4, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 2, 3);
        poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        const char *reason;
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
                                      &reason);

        ASSERT_TRUE (res == NULL_TREE);
        ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
      }

      /* Case 8: PR111754: When the input vector is not a stepped sequence,
         check that the result is not a stepped sequence either, even
         if sel has a stepped sequence.  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 2);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 1, 3);
        poly_uint64 mask_elems[] = { 0, 1, 2 };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 1, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg0, sel);

        tree expected_res[] = { ARG0(0), ARG0(1) };
        validate_res (sel.encoding ().npatterns (), 2, res, expected_res);
      }

      /* Case 9: If sel doesn't contain a stepped sequence,
         check that the result has the same encoding as sel, irrespective
         of the shape of the input vectors.  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
        tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 1, 2);
        poly_uint64 mask_elems[] = { 0, len };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG1(0) };
        validate_res (sel.encoding ().npatterns (),
                      sel.encoding ().nelts_per_pattern (), res, expected_res);
      }
    }
}

/* Test all vectors which contain at least 8 elements.  */

static void
test_nunits_min_8 (machine_mode vmode)
{
  for (int i = 0; i < 10; i++)
    {
      /* Case 1: sel_npatterns (4) > input npatterns (2)
         sel: { 0, 0, 1, len, 2, 0, 3, len, 4, 0, 5, len, ... } // (4, 3)
         res: { arg0[0], arg0[0], arg0[1], arg1[0],
                arg0[2], arg0[0], arg0[3], arg1[0],
                arg0[4], arg0[0], arg0[5], arg1[0], ... } // (4, 3)  */
      {
        tree arg0 = build_vec_cst_rand (vmode, 2, 3, 2);
        tree arg1 = build_vec_cst_rand (vmode, 2, 3, 2);
        poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        vec_perm_builder builder (len, 4, 3);
        poly_uint64 mask_elems[] = { 0, 0, 1, len, 2, 0, 3, len,
                                     4, 0, 5, len };
        builder_push_elems (builder, mask_elems);

        vec_perm_indices sel (builder, 2, len);
        tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

        tree expected_res[] = { ARG0(0), ARG0(0), ARG0(1), ARG1(0),
                                ARG0(2), ARG0(0), ARG0(3), ARG1(0),
                                ARG0(4), ARG0(0), ARG0(5), ARG1(0) };
        validate_res (4, 3, res, expected_res);
      }
    }
}

/* Test vectors for which nunits[0] <= 4.  */

static void
test_nunits_max_4 (machine_mode vmode)
{
  /* Case 1: mask = { 0, 4, ... } // (1, 2)
     This should return NULL_TREE because the index 4 may choose
     from either arg0 or arg1 depending on vector length.  */
  {
    tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
    tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
    poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

    vec_perm_builder builder (len, 1, 2);
    poly_uint64 mask_elems[] = { 0, 4 };
    builder_push_elems (builder, mask_elems);

    vec_perm_indices sel (builder, 2, len);
    const char *reason;
    tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
    ASSERT_TRUE (res == NULL_TREE);
    ASSERT_TRUE (reason != NULL);
    ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
  }
}

#undef ARG0
#undef ARG1

/* Return true if SIZE is of the form C + Cx and C is a power of 2.  */

static bool
is_simple_vla_size (poly_uint64 size)
{
  if (size.is_constant ()
      || !pow2p_hwi (size.coeffs[0]))
    return false;
  for (unsigned i = 1; i < ARRAY_SIZE (size.coeffs); ++i)
    if (size.coeffs[i] != (i <= 1 ? size.coeffs[0] : 0))
      return false;
  return true;
}
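
/* For instance, the VLA sizes 2 + 2x, 4 + 4x and 8 + 8x are "simple" in
   this sense, whereas a constant size such as 4, or 6 + 6x (6 is not a
   power of 2), is not.  */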

/* Execute fold_vec_perm_cst unit tests.  */

static void
test ()
{
  machine_mode vnx4si_mode = E_VOIDmode;
  machine_mode v4si_mode = E_VOIDmode;

  machine_mode vmode;
  FOR_EACH_MODE_IN_CLASS (vmode, MODE_VECTOR_INT)
    {
      /* Obtain modes corresponding to VNx4SI and V4SI,
         to call mixed mode tests below.
         FIXME: Is there a better way to do this?  */
      if (GET_MODE_INNER (vmode) == SImode)
        {
          poly_uint64 nunits = GET_MODE_NUNITS (vmode);
          if (is_simple_vla_size (nunits)
              && nunits.coeffs[0] == 4)
            vnx4si_mode = vmode;
          else if (known_eq (nunits, poly_uint64 (4)))
            v4si_mode = vmode;
        }

      if (!is_simple_vla_size (GET_MODE_NUNITS (vmode))
          || !targetm.vector_mode_supported_p (vmode))
        continue;

      poly_uint64 nunits = GET_MODE_NUNITS (vmode);
      test_all_nunits (vmode);
      if (nunits.coeffs[0] >= 2)
        test_nunits_min_2 (vmode);
      if (nunits.coeffs[0] >= 4)
        test_nunits_min_4 (vmode);
      if (nunits.coeffs[0] >= 8)
        test_nunits_min_8 (vmode);

      if (nunits.coeffs[0] <= 4)
        test_nunits_max_4 (vmode);
    }

  if (vnx4si_mode != E_VOIDmode && v4si_mode != E_VOIDmode
      && targetm.vector_mode_supported_p (vnx4si_mode)
      && targetm.vector_mode_supported_p (v4si_mode))
    {
      test_vnx4si_v4si (vnx4si_mode, v4si_mode);
      test_v4si_vnx4si (v4si_mode, vnx4si_mode);
    }
}
} // end of test_fold_vec_perm_cst namespace

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
                                               index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
                                               index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
                                              index, index)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_cc_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
  test_fold_vec_perm_cst::test ();
}

} // namespace selftest

#endif /* CHECKING_P */