/* Helpers for the autogenerated gimple-match.cc file.
   Copyright (C) 2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
#include "vec-perm-indices.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "calls.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "gimple-match.h"
#include "tree-pass.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimplify.h"
#include "optabs-tree.h"
#include "tree-eh.h"
#include "dbgcnt.h"
#include "tm.h"
#include "gimple-range.h"
#include "langhooks.h"

tree (*mprts_hook) (gimple_match_op *);

extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree);
extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree);
extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree);
extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree, tree);
extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree, tree, tree);
extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree, tree, tree,
                             tree);
extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree, tree, tree,
                             tree, tree);

/* Functions that are needed by gimple-match but that are exported and used in
   other places in the compiler.  */

tree gimple_simplify (enum tree_code, tree, tree, gimple_seq *,
                      tree (*)(tree));
tree gimple_simplify (enum tree_code, tree, tree, tree, gimple_seq *,
                      tree (*)(tree));
tree gimple_simplify (enum tree_code, tree, tree, tree, tree, gimple_seq *,
                      tree (*)(tree));
tree gimple_simplify (combined_fn, tree, tree, gimple_seq *,
                      tree (*)(tree));
tree gimple_simplify (combined_fn, tree, tree, tree, gimple_seq *,
                      tree (*)(tree));
tree gimple_simplify (combined_fn, tree, tree, tree, tree, gimple_seq *,
                      tree (*)(tree));

tree do_valueize (tree, tree (*)(tree), bool &);
tree do_valueize (tree (*)(tree), tree);

/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  */

static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *, tree (*)(tree));
static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *, tree (*)(tree));
static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *, tree (*)(tree));
static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *, tree (*)(tree));
static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *, tree (*)(tree));
static bool gimple_resimplify6 (gimple_seq *, gimple_match_op *, tree (*)(tree));
static bool gimple_resimplify7 (gimple_seq *, gimple_match_op *, tree (*)(tree));

/* Match and simplify the toplevel valueized operation THIS.
   Replaces THIS with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
{
  switch (num_ops)
    {
    case 1:
      return gimple_resimplify1 (seq, this, valueize);
    case 2:
      return gimple_resimplify2 (seq, this, valueize);
    case 3:
      return gimple_resimplify3 (seq, this, valueize);
    case 4:
      return gimple_resimplify4 (seq, this, valueize);
    case 5:
      return gimple_resimplify5 (seq, this, valueize);
    case 6:
      return gimple_resimplify6 (seq, this, valueize);
    case 7:
      return gimple_resimplify7 (seq, this, valueize);
    default:
      gcc_unreachable ();
    }
}

/* Return whether T is a constant that we'll dispatch to fold to
   evaluate fully constant expressions.  */

static inline bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
          /* The following is only interesting to string builtins.  */
          || (TREE_CODE (t) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}

/* Try to convert conditional operation ORIG_OP into an IFN_COND_*
   operation.  Return true on success, storing the new operation in NEW_OP.  */

static bool
convert_conditional_op (gimple_match_op *orig_op,
                        gimple_match_op *new_op)
{
  internal_fn ifn;
  if (orig_op->code.is_tree_code ())
    ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
  else
    {
      auto cfn = combined_fn (orig_op->code);
      if (!internal_fn_p (cfn))
        return false;
      ifn = get_conditional_internal_fn (as_internal_fn (cfn));
    }
  if (ifn == IFN_LAST)
    return false;
  unsigned int num_ops = orig_op->num_ops;
  unsigned int num_cond_ops = 2;
  if (orig_op->cond.len)
    {
      /* Add the length and bias parameters.  */
      ifn = get_len_internal_fn (ifn);
      num_cond_ops = 4;
    }
  new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + num_cond_ops);
  new_op->ops[0] = orig_op->cond.cond;
  for (unsigned int i = 0; i < num_ops; ++i)
    new_op->ops[i + 1] = orig_op->ops[i];
  tree else_value = orig_op->cond.else_value;
  if (!else_value)
    else_value = targetm.preferred_else_value (ifn, orig_op->type,
                                               num_ops, orig_op->ops);
  new_op->ops[num_ops + 1] = else_value;
  if (orig_op->cond.len)
    {
      new_op->ops[num_ops + 2] = orig_op->cond.len;
      new_op->ops[num_ops + 3] = orig_op->cond.bias;
    }
  return true;
}
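
/* For example (an illustrative note): a conditional (plus A B) with
   condition COND and else value ELSE is rewritten above to

     IFN_COND_ADD (COND, A, B, ELSE)

   and, when a length/bias pair is present, to

     IFN_COND_LEN_ADD (COND, A, B, ELSE, LEN, BIAS).  */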

/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
   VALUEIZED to true if valueization changed OP.  */

inline tree
do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem && tem != op)
        {
          op = tem;
          valueized = true;
        }
    }
  return op;
}
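
/* Valueization hooks passed in here and below map an SSA name to a value
   it is known to be equivalent to.  A minimal sketch of such a hook
   (my_valueize is hypothetical; real callers pass functions such as
   no_follow_ssa_edges or follow_single_use_edges from gimple-fold.cc,
   or a value-numbering lattice lookup):

     static tree
     my_valueize (tree name)
     {
       return name;
     }

   Returning NAME, or a constant or SSA name NAME is known to equal,
   lets the matchers use that value; returning NULL_TREE tells them not
   to look through NAME's definition.  */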

/* If in GIMPLE the operation described by RES_OP should be single-rhs,
   build a GENERIC tree for that expression and update RES_OP accordingly.  */

void
maybe_build_generic_op (gimple_match_op *res_op)
{
  tree_code code = (tree_code) res_op->code;
  tree val;
  switch (code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      val = build1 (code, res_op->type, res_op->ops[0]);
      res_op->set_value (val);
      break;
    case BIT_FIELD_REF:
      val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
                    res_op->ops[2]);
      REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
      res_op->set_value (val);
      break;
    default:;
    }
}

/* Try to build RES_OP, which is known to be a call to FN.  Return null
   if the target doesn't support the function.  */

static gcall *
build_call_internal (internal_fn fn, gimple_match_op *res_op)
{
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, res_op->type,
                                                  res_op->ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
        return NULL;
    }
  return gimple_build_call_internal (fn, res_op->num_ops,
                                     res_op->op_or_null (0),
                                     res_op->op_or_null (1),
                                     res_op->op_or_null (2),
                                     res_op->op_or_null (3),
                                     res_op->op_or_null (4),
                                     res_op->op_or_null (5),
                                     res_op->op_or_null (6));
}

/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
                                 tree (*valueize) (tree))
{
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
         gimple value, just use it unconditionally.  This isn't a
         simplification in itself, since there was no operation to
         build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
        {
          res_op->cond.cond = NULL_TREE;
          return false;
        }

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
                          && TYPE_OVERFLOW_TRAPS (res_op->type));
      tree_code op_code = (tree_code) res_op->code;
      bool op_could_trap;

      /* COND_EXPR will trap if, and only if, the condition
         traps and hence we have to check this.  For all other operations, we
         don't need to consider the operands.  */
      if (op_code == COND_EXPR)
        op_could_trap = generic_expr_could_trap_p (res_op->ops[0]);
      else
        op_could_trap = operation_could_trap_p ((tree_code) res_op->code,
                                                FLOAT_TYPE_P (res_op->type),
                                                honor_trapv,
                                                res_op->op_or_null (1));

      if (!op_could_trap)
        {
          res_op->cond.cond = NULL_TREE;
          return false;
        }
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      tree len = res_op->cond.len;
      if (!len)
        new_op.set_op (VEC_COND_EXPR, res_op->type,
                       res_op->cond.cond, res_op->ops[0],
                       res_op->cond.else_value);
      else
        new_op.set_op (IFN_VCOND_MASK_LEN, res_op->type,
                       res_op->cond.cond, res_op->ops[0],
                       res_op->cond.else_value,
                       res_op->cond.len, res_op->cond.bias);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}

/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
                                gimple_seq *seq, tree (*valueize) (tree))
{
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  int len_index = internal_fn_len_index (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
        return false;
      op = as_combined_fn (ifn);
    }

  unsigned int num_ops = res_op->num_ops;
  /* num_cond_ops = 2 for COND_ADD (MASK and ELSE),
     whereas num_cond_ops = 4 for COND_LEN_ADD (MASK, ELSE, LEN and BIAS).  */
  unsigned int num_cond_ops = len_index < 0 ? 2 : 4;
  tree else_value
    = len_index < 0 ? res_op->ops[num_ops - 1] : res_op->ops[num_ops - 3];
  tree len = len_index < 0 ? NULL_TREE : res_op->ops[num_ops - 2];
  tree bias = len_index < 0 ? NULL_TREE : res_op->ops[num_ops - 1];
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
                                              else_value, len, bias),
                           op, res_op->type, num_ops - num_cond_ops);

  memcpy (cond_op.ops, res_op->ops + 1, (num_ops - 1) * sizeof *cond_op.ops);
  switch (num_ops - num_cond_ops)
    {
    case 1:
      if (!gimple_resimplify1 (seq, &cond_op, valueize))
        return false;
      break;
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
        return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
        return false;
      break;
    default:
      gcc_unreachable ();
    }
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}
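
/* For instance (an illustrative note): with RES_OP describing
   IFN_COND_LEN_ADD (MASK, A, B, ELSE, LEN, BIAS), num_ops is 6 and
   num_cond_ops is 4, so the code above simplifies (plus A B) under the
   extracted gimple_match_cond and then rebuilds a conditional operation
   from whatever that simplification produced.  */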

/* Helper for the autogenerated code, valueize OP.  */

tree
do_valueize (tree (*valueize)(tree), tree op)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem)
        return tem;
    }
  return op;
}

/* Push the exploded expression described by RES_OP as a statement to
   SEQ if necessary and return a gimple value denoting the value of the
   expression.  If RES is not NULL then the result will be always RES
   and even gimple values are pushed to SEQ.  */

tree
maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
{
  tree *ops = res_op->ops;
  unsigned num_ops = res_op->num_ops;

  /* The caller should have converted conditional operations into an UNCOND
     form and resimplified as appropriate.  The conditional form only
     survives this far if that conversion failed.  */
  if (res_op->cond.cond)
    return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      if (!res
          && gimple_simplified_result_is_gimple_val (res_op))
        return ops[0];
      if (mprts_hook)
        {
          tree tem = mprts_hook (res_op);
          if (tem)
            return tem;
        }
    }

  if (!seq)
    return NULL_TREE;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
        && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
      return NULL_TREE;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
        return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      auto code = tree_code (res_op->code);
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (res_op->type);
          else
            res = create_tmp_reg (res_op->type);
        }
      maybe_build_generic_op (res_op);
      gimple *new_stmt = gimple_build_assign (res, code,
                                              res_op->op_or_null (0),
                                              res_op->op_or_null (1),
                                              res_op->op_or_null (2));
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      gcc_assert (num_ops != 0);
      auto fn = combined_fn (res_op->code);
      gcall *new_stmt = NULL;
      if (internal_fn_p (fn))
        {
          /* Generate the given function if we can.  */
          internal_fn ifn = as_internal_fn (fn);
          new_stmt = build_call_internal (ifn, res_op);
          if (!new_stmt)
            return NULL_TREE;
        }
      else
        {
          /* Find the function we want to call.  */
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          if (!decl)
            return NULL;

          /* We can't and should not emit calls to non-const functions.  */
          if (!(flags_from_decl_or_type (decl) & ECF_CONST))
            return NULL;

          new_stmt = gimple_build_call (decl, num_ops,
                                        res_op->op_or_null (0),
                                        res_op->op_or_null (1),
                                        res_op->op_or_null (2),
                                        res_op->op_or_null (3),
                                        res_op->op_or_null (4));
        }
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (res_op->type);
          else
            res = create_tmp_reg (res_op->type);
        }
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}


/* Public API overloads follow for operation being tree_code or
   built_in_function and for one to three operands or arguments.
   They return NULL_TREE if nothing could be simplified or
   the resulting simplified value with parts pushed to SEQ.
   If SEQ is NULL then if the simplification needs to create
   new stmts it will fail.  If VALUEIZE is non-NULL then all
   SSA names will be valueized using that hook prior to
   applying simplifications.  */

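/* A minimal usage sketch (illustrative only, not part of this file):
   try to fold OP0 + OP1 of type TYPE, allowing new statements.

     gimple_seq stmts = NULL;
     tree val = gimple_simplify (PLUS_EXPR, type, op0, op1, &stmts, NULL);

   If VAL is non-NULL it is a GIMPLE value for the simplified expression
   and any statements it depends on have been appended to STMTS; passing
   a NULL seq instead makes such simplifications fail.  */
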
/* Unary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Binary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        code = swap_tree_comparison (code);
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Ternary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1, tree op2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    std::swap (op0, op1);

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with one argument.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree res = fold_const_call (fn, type, arg0);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with two arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0, tree arg1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1))
    {
      tree res = fold_const_call (fn, type, arg0, arg1);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with three arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0, tree arg1, tree arg2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree res = fold_const_call (fn, type, arg0, arg1, arg2);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Common subroutine of gimple_extract_op and gimple_simplify.  Try to
   describe STMT in RES_OP, returning true on success.  Before recording
   an operand, call:

   - VALUEIZE_CONDITION for a COND_EXPR condition
   - VALUEIZE_OP for every other top-level operand

   Both routines take a tree argument and return a tree.  */

template<typename ValueizeOp, typename ValueizeCondition>
inline bool
gimple_extract (gimple *stmt, gimple_match_op *res_op,
                ValueizeOp valueize_op,
                ValueizeCondition valueize_condition)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree type = TREE_TYPE (gimple_assign_lhs (stmt));
        switch (gimple_assign_rhs_class (stmt))
          {
          case GIMPLE_SINGLE_RHS:
            if (code == REALPART_EXPR
                || code == IMAGPART_EXPR
                || code == VIEW_CONVERT_EXPR)
              {
                tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
                res_op->set_op (code, type, valueize_op (op0));
                return true;
              }
            else if (code == BIT_FIELD_REF)
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                tree op0 = valueize_op (TREE_OPERAND (rhs1, 0));
                res_op->set_op (code, type, op0,
                                TREE_OPERAND (rhs1, 1),
                                TREE_OPERAND (rhs1, 2),
                                REF_REVERSE_STORAGE_ORDER (rhs1));
                return true;
              }
            else if (code == SSA_NAME)
              {
                tree op0 = gimple_assign_rhs1 (stmt);
                res_op->set_op (TREE_CODE (op0), type, valueize_op (op0));
                return true;
              }
            break;
          case GIMPLE_UNARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              res_op->set_op (code, type, valueize_op (rhs1));
              return true;
            }
          case GIMPLE_BINARY_RHS:
            {
              tree rhs1 = valueize_op (gimple_assign_rhs1 (stmt));
              tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
              res_op->set_op (code, type, rhs1, rhs2);
              return true;
            }
          case GIMPLE_TERNARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              if (code == COND_EXPR && COMPARISON_CLASS_P (rhs1))
                rhs1 = valueize_condition (rhs1);
              else
                rhs1 = valueize_op (rhs1);
              tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
              tree rhs3 = valueize_op (gimple_assign_rhs3 (stmt));
              res_op->set_op (code, type, rhs1, rhs2, rhs3);
              return true;
            }
          default:
            gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
          && gimple_call_num_args (stmt) >= 1
          && gimple_call_num_args (stmt) <= 7)
        {
          combined_fn cfn;
          if (gimple_call_internal_p (stmt))
            cfn = as_combined_fn (gimple_call_internal_fn (stmt));
          else
            {
              tree fn = gimple_call_fn (stmt);
              if (!fn)
                return false;

              fn = valueize_op (fn);
              if (TREE_CODE (fn) != ADDR_EXPR
                  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
                return false;

              tree decl = TREE_OPERAND (fn, 0);
              if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
                  || !gimple_builtin_call_types_compatible_p (stmt, decl))
                return false;

              cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
            }

          unsigned int num_args = gimple_call_num_args (stmt);
          res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
          for (unsigned i = 0; i < num_args; ++i)
            res_op->ops[i] = valueize_op (gimple_call_arg (stmt, i));
          return true;
        }
      break;

    case GIMPLE_COND:
      {
        tree lhs = valueize_op (gimple_cond_lhs (stmt));
        tree rhs = valueize_op (gimple_cond_rhs (stmt));
        res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
        return true;
      }

    default:
      break;
    }

  return false;
}

/* Try to describe STMT in RES_OP, returning true on success.
   For GIMPLE_CONDs, describe the condition that is being tested.
   For GIMPLE_ASSIGNs, describe the rhs of the assignment.
   For GIMPLE_CALLs, describe the call.  */

bool
gimple_extract_op (gimple *stmt, gimple_match_op *res_op)
{
  auto nop = [](tree op) { return op; };
  return gimple_extract (stmt, res_op, nop, nop);
}
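
/* A minimal usage sketch (illustrative only, not part of this file):

     gimple_match_op op;
     if (gimple_extract_op (stmt, &op))
       do_something_with (op.code, op.type, op.num_ops, op.ops);

   where do_something_with is a hypothetical consumer; OP now describes
   the assignment rhs, the call or the tested condition of STMT, without
   any valueization applied.  */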

/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
                 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  bool valueized = false;
  auto valueize_op = [&](tree op)
    {
      return do_valueize (op, top_valueize, valueized);
    };
  auto valueize_condition = [&](tree op) -> tree
    {
      bool cond_valueized = false;
      tree lhs = do_valueize (TREE_OPERAND (op, 0), top_valueize,
                              cond_valueized);
      tree rhs = do_valueize (TREE_OPERAND (op, 1), top_valueize,
                              cond_valueized);
      gimple_match_op res_op2 (res_op->cond, TREE_CODE (op),
                               TREE_TYPE (op), lhs, rhs);
      if ((gimple_resimplify2 (seq, &res_op2, valueize)
           || cond_valueized)
          && res_op2.code.is_tree_code ())
        {
          auto code = tree_code (res_op2.code);
          if (TREE_CODE_CLASS (code) == tcc_comparison)
            {
              valueized = true;
              return build2 (code, TREE_TYPE (op),
                             res_op2.ops[0], res_op2.ops[1]);
            }
          else if (code == SSA_NAME
                   || code == INTEGER_CST
                   || code == VECTOR_CST)
            {
              valueized = true;
              return res_op2.ops[0];
            }
        }
      return valueize_op (op);
    };

  if (!gimple_extract (stmt, res_op, valueize_op, valueize_condition))
    return false;

  if (res_op->code.is_internal_fn ())
    {
      internal_fn ifn = internal_fn (res_op->code);
      if (try_conditional_simplification (ifn, res_op, seq, valueize))
        return true;
    }

  if (!res_op->reverse
      && res_op->num_ops
      && res_op->resimplify (seq, valueize))
    return true;

  return valueized;
}
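
/* A minimal usage sketch (illustrative only; fold_stmt and
   fold_stmt_to_constant_1 in gimple-fold.cc are the real callers):

     gimple_seq seq = NULL;
     gimple_match_op res_op;
     if (gimple_simplify (stmt, &res_op, &seq, valueize, valueize))
       replace_with (maybe_push_res_to_seq (&res_op, &seq));

   where valueize is the caller's valueization hook and replace_with
   stands for whatever the caller does with the (possibly null) gimple
   value and the statements accumulated in SEQ.  */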

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        {
          auto code = tree_code (res_op->code);
          if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
              && TREE_CODE_LENGTH (code) == 1)
            tem = const_unop (code, res_op->type, res_op->ops[0]);
        }
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        {
          auto code = tree_code (res_op->code);
          if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
              && TREE_CODE_LENGTH (code) == 2)
            tem = const_binop (code, res_op->type,
                               res_op->ops[0], res_op->ops[1]);
        }
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  bool is_comparison
    = (res_op->code.is_tree_code ()
       && TREE_CODE_CLASS (tree_code (res_op->code)) == tcc_comparison);
  if ((is_comparison || commutative_binary_op_p (res_op->code, res_op->type))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      if (is_comparison)
        res_op->code = swap_tree_comparison (tree_code (res_op->code));
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        {
          auto code = tree_code (res_op->code);
          if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
              && TREE_CODE_LENGTH (code) == 3)
            tem = fold_ternary/*_to_constant*/ (code, res_op->type,
                                                res_op->ops[0], res_op->ops[1],
                                                res_op->ops[2]);
        }
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  /* No constant folding is defined for four-operand functions.  */

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2],
                       res_op->ops[3]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  /* No constant folding is defined for five-operand functions.  */

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2],
                       res_op->ops[3], res_op->ops[4]))
    {
      *res_op = res_op2;
      return true;
    }

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify6 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  /* No constant folding is defined for six-operand functions.  */

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2],
                       res_op->ops[3], res_op->ops[4], res_op->ops[5]))
    {
      *res_op = res_op2;
      return true;
    }

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify7 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  /* No constant folding is defined for seven-operand functions.  */

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2],
                       res_op->ops[3], res_op->ops[4], res_op->ops[5],
                       res_op->ops[6]))
    {
      *res_op = res_op2;
      return true;
    }

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

/* Return a canonical form for CODE when operating on TYPE.  The idea
   is to remove redundant ways of representing the same operation so
   that code_helpers can be hashed and compared for equality.

   The only current canonicalization is to replace built-in functions
   with internal functions, in cases where internal-fn.def defines
   such an internal function.

   Note that the new code_helper cannot necessarily be used in place of
   the original code_helper.  For example, the new code_helper might be
   an internal function that the target does not support.  */

code_helper
canonicalize_code (code_helper code, tree type)
{
  if (code.is_fn_code ())
    return associated_internal_fn (combined_fn (code), type);
  return code;
}
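
/* For example (an illustrative note): a code_helper holding
   CFN_BUILT_IN_FMA canonicalizes to IFN_FMA when internal-fn.def
   defines that internal function for TYPE, so it then hashes and
   compares equal to a code_helper that came from a direct IFN_FMA
   call.  */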

/* Return true if CODE is a binary operation and if CODE is commutative when
   operating on type TYPE.  */

bool
commutative_binary_op_p (code_helper code, tree type)
{
  if (code.is_tree_code ())
    return commutative_tree_code (tree_code (code));
  auto cfn = combined_fn (code);
  return commutative_binary_fn_p (associated_internal_fn (cfn, type));
}

/* Return true if CODE represents a ternary operation and if the first two
   operands are commutative when CODE is operating on TYPE.  */

bool
commutative_ternary_op_p (code_helper code, tree type)
{
  if (code.is_tree_code ())
    return commutative_ternary_tree_code (tree_code (code));
  auto cfn = combined_fn (code);
  return commutative_ternary_fn_p (associated_internal_fn (cfn, type));
}

/* If CODE is commutative in two consecutive operands, return the
   index of the first, otherwise return -1.  */

int
first_commutative_argument (code_helper code, tree type)
{
  if (code.is_tree_code ())
    {
      auto tcode = tree_code (code);
      if (commutative_tree_code (tcode)
          || commutative_ternary_tree_code (tcode))
        return 0;
      return -1;
    }
  auto cfn = combined_fn (code);
  return first_commutative_argument (associated_internal_fn (cfn, type));
}

/* Return true if CODE is a binary operation that is associative when
   operating on type TYPE.  */

bool
associative_binary_op_p (code_helper code, tree type)
{
  if (code.is_tree_code ())
    return associative_tree_code (tree_code (code));
  auto cfn = combined_fn (code);
  return associative_binary_fn_p (associated_internal_fn (cfn, type));
}

/* Return true if the target directly supports operation CODE on type TYPE.
   QUERY_TYPE acts as for optab_for_tree_code.  */

bool
directly_supported_p (code_helper code, tree type, optab_subtype query_type)
{
  if (code.is_tree_code ())
    {
      direct_optab optab = optab_for_tree_code (tree_code (code), type,
                                                query_type);
      return (optab != unknown_optab
              && optab_handler (optab, TYPE_MODE (type)) != CODE_FOR_nothing);
    }
  gcc_assert (query_type == optab_default
              || (query_type == optab_vector && VECTOR_TYPE_P (type))
              || (query_type == optab_scalar && !VECTOR_TYPE_P (type)));
  internal_fn ifn = associated_internal_fn (combined_fn (code), type);
  return (direct_internal_fn_p (ifn)
          && direct_internal_fn_supported_p (ifn, type, OPTIMIZE_FOR_SPEED));
}

/* A wrapper around the internal-fn.cc versions of get_conditional_internal_fn
   for a code_helper CODE operating on type TYPE.  */

internal_fn
get_conditional_internal_fn (code_helper code, tree type)
{
  if (code.is_tree_code ())
    return get_conditional_internal_fn (tree_code (code));
  auto cfn = combined_fn (code);
  return get_conditional_internal_fn (associated_internal_fn (cfn, type));
}
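
/* For example (an illustrative note): passing PLUS_EXPR returns
   IFN_COND_ADD, and passing a code_helper holding CFN_FMA returns
   IFN_COND_FMA; IFN_LAST is returned when internal-fn.def defines no
   conditional form for the operation.  */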