/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2024 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.cc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"
#include "internal-fn.h"
#include "gimple-range.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we have already decided that we will not output
        the function body, and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting not to give up, because the copy will be output
     elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in the units where they are used, and when the other unit was compiled
     with LTO it is possible that the vtable was kept public while the
     function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
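
/* As an illustrative usage sketch (mirroring the pattern used later in
   this file): a folder that needs a fresh temporary of type TYPE defined
   by NEW_STMT writes

     tree tem = create_tmp_reg_or_ssa_name (type, new_stmt);
     gimple_assign_set_lhs (new_stmt, tem);

   In SSA form TEM is a new SSA name with NEW_STMT as its defining
   statement; otherwise it is a fresh temporary register.  */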

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val
	      && is_gimple_min_invariant (val)
	      && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
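
/* For example (illustrative only), given

     static const int seven = 7;

   get_symbol_constant_value on SEVEN's VAR_DECL returns the INTEGER_CST 7,
   while a 'const' register-type variable without an initializer that may
   not be overridden yields build_zero_cst of its type.  */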

/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
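
/* For instance (illustrative only), REALPART_EXPR of the COMPLEX_CST
   3 + 4i folds to the constant 3, and a BIT_FIELD_REF selecting one
   element of a VECTOR_CST folds to that element, provided the folded
   result is a GIMPLE invariant.  */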

/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (!is_gimple_val (TREE_OPERAND (expr, 0))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
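
/* For example (illustrative only), _1 + _2 is a valid GIMPLE RHS, while
   a folded tree like a[i_1 + 1] embeds an index addition that is not a
   GIMPLE value, so this predicate rejects it and the caller must give
   up on the folding or re-gimplify the expression.  */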

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *> targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
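
/* To illustrate (a sketch with made-up SSA names): if the original
   statement had VUSE .MEM_3 and VDEF .MEM_4 and STMTS contains two
   stores, the backward pass gives the last store the original VDEF
   .MEM_4 and the first store a fresh name, say .MEM_7; the forward
   pass then threads the uses as .MEM_3 -> first store -> .MEM_7 ->
   second store, so the virtual operands of all following statements
   stay valid without renaming.  */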

/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to be a call to FN
   with NARGS arguments, where the arguments, already in GIMPLE form,
   follow the NARGS argument.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
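
/* A caller that has simplified one call into another might write, e.g.
   (illustrative sketch; GSI, FNDECL, A and B stand for values the caller
   already has):

     update_gimple_call (gsi, fndecl, 2, a, b);

   which builds the new call from the two GIMPLE-valid arguments and
   replaces the old statement in place, preserving the lhs, the virtual
   operands and the location.  */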

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
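
/* For example (illustrative only), after proving a strlen call returns 3
   a folder can do

     replace_call_with_value (gsi, build_int_cst (size_type_node, 3));

   turning the call into either an assignment of 3 to its lhs or a
   GIMPLE_NOP when the result is unused.  */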

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  wide_int zero = wi::zero (TYPE_PRECISION (type));
  value_range valid_range (type, zero, ssize_max);
  value_range vr;
  get_range_query (cfun)->range_of_expr (vr, size);

  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (valid_range);
  return vr.zero_p ();
}
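
/* Worked example (illustrative): with a 64-bit size_t the valid range is
   [0, 0x7fffffffffffffff].  If ranger knows SIZE is either zero or has
   its sign bit set (say, the negation of a nonnegative value), the
   intersection with the valid range collapses to [0, 0] and the function
   returns true, so the corresponding mem* call can be folded to its
   DEST argument.  */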

/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores, inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies, but to avoid false
		 positives for unreachable code defer the warning until
		 after DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode imode;
	      machine_mode mode;
	      if (int_mode_for_size (ilen * BITS_PER_UNIT, 0).exists (&imode)
		  && bitwise_mode_for_size (ilen
					    * BITS_PER_UNIT).exists (&mode)
		  && known_eq (GET_MODE_BITSIZE (mode), ilen * BITS_PER_UNIT)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = bitwise_type_for_mode (mode);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original
	 length.  As soon as strlenopt tests that rely on it for passing
	 are adjusted, this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies, but to avoid
	 false positives for unreachable code defer the warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

 set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
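
/* For example (illustrative only), given sufficiently aligned objects

     char a[8], b[8];
     __builtin_memcpy (a, b, 8);

   on a target with an 8-byte integer mode the call is folded into a
   single load/store pair through that mode, roughly

     _1 = MEM[(char * {ref-all})&b];
     MEM[(char * {ref-all})&a] = _1;  */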

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if ((!INTEGRAL_TYPE_P (etype)
       && !POINTER_TYPE_P (etype))
      || TREE_CODE (etype) == BITINT_TYPE)
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
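
/* Worked example (illustrative): memset (&x, 0xab, 8) with an 8-byte,
   sufficiently aligned integral X replicates the byte into
   cval = 0xabababababababab (0xab -> 0xabab -> 0xabababab -> full width)
   and a single 8-byte store of that constant replaces the call.  The
   last step is written (cval << 31) << 1 rather than cval << 32 so the
   shift stays well defined even if the type were only 32 bits wide.  */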
1534 | |
1535 | /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */ |
1536 | |
1537 | static bool |
1538 | get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind, |
1539 | c_strlen_data *pdata, unsigned eltsize) |
1540 | { |
1541 | gcc_assert (TREE_CODE (arg) != SSA_NAME); |
1542 | |
1543 | /* The length computed by this invocation of the function. */ |
1544 | tree val = NULL_TREE; |
1545 | |
1546 | /* True if VAL is an optimistic (tight) bound determined from |
1547 | the size of the character array in which the string may be |
1548 | stored. In that case, the computed VAL is used to set |
1549 | PDATA->MAXBOUND. */ |
1550 | bool tight_bound = false; |
1551 | |
1552 | /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */ |
1553 | if (TREE_CODE (arg) == ADDR_EXPR |
1554 | && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF) |
1555 | { |
1556 | tree op = TREE_OPERAND (arg, 0); |
1557 | if (integer_zerop (TREE_OPERAND (op, 1))) |
1558 | { |
1559 | tree aop0 = TREE_OPERAND (op, 0); |
1560 | if (TREE_CODE (aop0) == INDIRECT_REF |
1561 | && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME) |
1562 | return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind, |
1563 | pdata, eltsize); |
1564 | } |
1565 | else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF |
1566 | && rkind == SRK_LENRANGE) |
1567 | { |
1568 | /* Fail if an array is the last member of a struct object |
1569 | since it could be treated as a (fake) flexible array |
1570 | member. */ |
1571 | tree idx = TREE_OPERAND (op, 1); |
1572 | |
1573 | arg = TREE_OPERAND (op, 0); |
1574 | tree optype = TREE_TYPE (arg); |
1575 | if (tree dom = TYPE_DOMAIN (optype)) |
1576 | if (tree bound = TYPE_MAX_VALUE (dom)) |
1577 | if (TREE_CODE (bound) == INTEGER_CST |
1578 | && TREE_CODE (idx) == INTEGER_CST |
	      && tree_int_cst_lt (bound, idx))
1580 | return false; |
1581 | } |
1582 | } |
1583 | |
1584 | if (rkind == SRK_INT_VALUE) |
1585 | { |
1586 | /* We are computing the maximum value (not string length). */ |
1587 | val = arg; |
1588 | if (TREE_CODE (val) != INTEGER_CST |
1589 | || tree_int_cst_sgn (val) < 0) |
1590 | return false; |
1591 | } |
1592 | else |
1593 | { |
1594 | c_strlen_data lendata = { }; |
1595 | val = c_strlen (arg, 1, &lendata, eltsize); |
1596 | |
1597 | if (!val && lendata.decl) |
1598 | { |
	  /* ARG refers to an unterminated const character array
	     DATA.DECL with size DATA.LEN.  */
1601 | val = lendata.minlen; |
1602 | pdata->decl = lendata.decl; |
1603 | } |
1604 | } |
1605 | |
1606 | /* Set if VAL represents the maximum length based on array size (set |
1607 | when exact length cannot be determined). */ |
1608 | bool maxbound = false; |
1609 | |
1610 | if (!val && rkind == SRK_LENRANGE) |
1611 | { |
1612 | if (TREE_CODE (arg) == ADDR_EXPR) |
1613 | return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind, |
1614 | pdata, eltsize); |
1615 | |
1616 | if (TREE_CODE (arg) == ARRAY_REF) |
1617 | { |
1618 | tree optype = TREE_TYPE (TREE_OPERAND (arg, 0)); |
1619 | |
1620 | /* Determine the "innermost" array type. */ |
1621 | while (TREE_CODE (optype) == ARRAY_TYPE |
1622 | && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE) |
1623 | optype = TREE_TYPE (optype); |
1624 | |
1625 | /* Avoid arrays of pointers. */ |
1626 | tree eltype = TREE_TYPE (optype); |
1627 | if (TREE_CODE (optype) != ARRAY_TYPE |
1628 | || !INTEGRAL_TYPE_P (eltype)) |
1629 | return false; |
1630 | |
1631 | /* Fail when the array bound is unknown or zero. */ |
1632 | val = TYPE_SIZE_UNIT (optype); |
1633 | if (!val |
1634 | || TREE_CODE (val) != INTEGER_CST |
1635 | || integer_zerop (val)) |
1636 | return false; |
1637 | |
1638 | val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val, |
1639 | integer_one_node); |
1640 | |
1641 | /* Set the minimum size to zero since the string in |
1642 | the array could have zero length. */ |
1643 | pdata->minlen = ssize_int (0); |
1644 | |
1645 | tight_bound = true; |
1646 | } |
1647 | else if (TREE_CODE (arg) == COMPONENT_REF |
1648 | && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) |
1649 | == ARRAY_TYPE)) |
1650 | { |
1651 | /* Use the type of the member array to determine the upper |
1652 | bound on the length of the array. This may be overly |
1653 | optimistic if the array itself isn't NUL-terminated and |
1654 | the caller relies on the subsequent member to contain |
1655 | the NUL but that would only be considered valid if |
1656 | the array were the last member of a struct. */ |
1657 | |
1658 | tree fld = TREE_OPERAND (arg, 1); |
1659 | |
1660 | tree optype = TREE_TYPE (fld); |
1661 | |
1662 | /* Determine the "innermost" array type. */ |
1663 | while (TREE_CODE (optype) == ARRAY_TYPE |
1664 | && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE) |
1665 | optype = TREE_TYPE (optype); |
1666 | |
1667 | /* Fail when the array bound is unknown or zero. */ |
1668 | val = TYPE_SIZE_UNIT (optype); |
1669 | if (!val |
1670 | || TREE_CODE (val) != INTEGER_CST |
1671 | || integer_zerop (val)) |
1672 | return false; |
1673 | val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val, |
1674 | integer_one_node); |
1675 | |
1676 | /* Set the minimum size to zero since the string in |
1677 | the array could have zero length. */ |
1678 | pdata->minlen = ssize_int (0); |
1679 | |
1680 | /* The array size determined above is an optimistic bound |
1681 | on the length. If the array isn't nul-terminated the |
1682 | length computed by the library function would be greater. |
1683 | Even though using strlen to cross the subobject boundary |
1684 | is undefined, avoid drawing conclusions from the member |
1685 | type about the length here. */ |
1686 | tight_bound = true; |
1687 | } |
1688 | else if (TREE_CODE (arg) == MEM_REF |
1689 | && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE |
1690 | && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE |
1691 | && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR) |
1692 | { |
1693 | /* Handle a MEM_REF into a DECL accessing an array of integers, |
1694 | being conservative about references to extern structures with |
1695 | flexible array members that can be initialized to arbitrary |
1696 | numbers of elements as an extension (static structs are okay). */ |
1697 | tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); |
1698 | if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref)) |
1699 | && (decl_binds_to_current_def_p (ref) |
1700 | || !array_ref_flexible_size_p (arg))) |
1701 | { |
1702 | /* Fail if the offset is out of bounds. Such accesses |
1703 | should be diagnosed at some point. */ |
1704 | val = DECL_SIZE_UNIT (ref); |
1705 | if (!val |
1706 | || TREE_CODE (val) != INTEGER_CST |
1707 | || integer_zerop (val)) |
1708 | return false; |
1709 | |
	      poly_offset_int psiz = wi::to_offset (val);
1711 | poly_offset_int poff = mem_ref_offset (arg); |
1712 | if (known_le (psiz, poff)) |
1713 | return false; |
1714 | |
1715 | pdata->minlen = ssize_int (0); |
1716 | |
1717 | /* Subtract the offset and one for the terminating nul. */ |
1718 | psiz -= poff; |
1719 | psiz -= 1; |
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight bound.  */
1723 | } |
1724 | } |
1725 | else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg)) |
1726 | { |
1727 | /* Avoid handling pointers to arrays. GCC might misuse |
1728 | a pointer to an array of one bound to point to an array |
1729 | object of a greater bound. */ |
1730 | tree argtype = TREE_TYPE (arg); |
1731 | if (TREE_CODE (argtype) == ARRAY_TYPE) |
1732 | { |
1733 | val = TYPE_SIZE_UNIT (argtype); |
1734 | if (!val |
1735 | || TREE_CODE (val) != INTEGER_CST |
1736 | || integer_zerop (val)) |
1737 | return false; |
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));
1740 | |
1741 | /* Set the minimum size to zero since the string in |
1742 | the array could have zero length. */ |
1743 | pdata->minlen = ssize_int (0); |
1744 | } |
1745 | } |
1746 | maxbound = true; |
1747 | } |
1748 | |
1749 | if (!val) |
1750 | return false; |
1751 | |
1752 | /* Adjust the lower bound on the string length as necessary. */ |
1753 | if (!pdata->minlen |
1754 | || (rkind != SRK_STRLEN |
1755 | && TREE_CODE (pdata->minlen) == INTEGER_CST |
1756 | && TREE_CODE (val) == INTEGER_CST |
	  && tree_int_cst_lt (val, pdata->minlen)))
1758 | pdata->minlen = val; |
1759 | |
1760 | if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST) |
1761 | { |
1762 | /* Adjust the tighter (more optimistic) string length bound |
1763 | if necessary and proceed to adjust the more conservative |
1764 | bound. */ |
1765 | if (TREE_CODE (val) == INTEGER_CST) |
1766 | { |
	  if (tree_int_cst_lt (pdata->maxbound, val))
1768 | pdata->maxbound = val; |
1769 | } |
1770 | else |
1771 | pdata->maxbound = val; |
1772 | } |
1773 | else if (pdata->maxbound || maxbound) |
1774 | /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or |
1775 | if VAL corresponds to the maximum length determined based |
1776 | on the type of the object. */ |
1777 | pdata->maxbound = val; |
1778 | |
1779 | if (tight_bound) |
1780 | { |
1781 | /* VAL computed above represents an optimistically tight bound |
1782 | on the length of the string based on the referenced object's |
1783 | or subobject's type. Determine the conservative upper bound |
1784 | based on the enclosing object's size if possible. */ |
1785 | if (rkind == SRK_LENRANGE) |
1786 | { |
1787 | poly_int64 offset; |
1788 | tree base = get_addr_base_and_unit_offset (arg, &offset); |
1789 | if (!base) |
1790 | { |
1791 | /* When the call above fails due to a non-constant offset |
1792 | assume the offset is zero and use the size of the whole |
1793 | enclosing object instead. */ |
	      base = get_base_address (arg);
1795 | offset = 0; |
1796 | } |
1797 | /* If the base object is a pointer no upper bound on the length |
1798 | can be determined. Otherwise the maximum length is equal to |
1799 | the size of the enclosing object minus the offset of |
1800 | the referenced subobject minus 1 (for the terminating nul). */ |
1801 | tree type = TREE_TYPE (base); |
1802 | if (TREE_CODE (type) == POINTER_TYPE |
1803 | || (TREE_CODE (base) != PARM_DECL && !VAR_P (base)) |
1804 | || !(val = DECL_SIZE_UNIT (base))) |
1805 | val = build_all_ones_cst (size_type_node); |
1806 | else |
1807 | { |
1808 | val = DECL_SIZE_UNIT (base); |
1809 | val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val, |
1810 | size_int (offset + 1)); |
1811 | } |
1812 | } |
1813 | else |
1814 | return false; |
1815 | } |
1816 | |
1817 | if (pdata->maxlen) |
1818 | { |
1819 | /* Adjust the more conservative bound if possible/necessary |
1820 | and fail otherwise. */ |
1821 | if (rkind != SRK_STRLEN) |
1822 | { |
1823 | if (TREE_CODE (pdata->maxlen) != INTEGER_CST |
1824 | || TREE_CODE (val) != INTEGER_CST) |
1825 | return false; |
1826 | |
	  if (tree_int_cst_lt (pdata->maxlen, val))
1828 | pdata->maxlen = val; |
1829 | return true; |
1830 | } |
1831 | else if (simple_cst_equal (val, pdata->maxlen) != 1) |
1832 | { |
1833 | /* Fail if the length of this ARG is different from that |
1834 | previously determined from another ARG. */ |
1835 | return false; |
1836 | } |
1837 | } |
1838 | |
1839 | pdata->maxlen = val; |
1840 | return rkind == SRK_LENRANGE || !integer_all_onesp (val); |
1841 | } |
1842 | |
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
1846 | For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine |
1847 | the maximum constant value. |
1848 | If ARG is an SSA_NAME, follow its use-def chains. When RKIND == |
1849 | SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined |
1850 | length or if we are unable to determine the length, return false. |
1851 | VISITED is a bitmap of visited variables. |
1852 | RKIND determines the kind of value or range to obtain (see |
1853 | strlen_range_kind). |
1854 | Set PDATA->DECL if ARG refers to an unterminated constant array. |
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
1857 | Return true if *PDATA was successfully populated and false otherwise. */ |
1858 | |
1859 | static bool |
1860 | get_range_strlen (tree arg, bitmap visited, |
1861 | strlen_range_kind rkind, |
1862 | c_strlen_data *pdata, unsigned eltsize) |
1863 | { |
1864 | |
1865 | if (TREE_CODE (arg) != SSA_NAME) |
1866 | return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize); |
1867 | |
1868 | /* If ARG is registered for SSA update we cannot look at its defining |
1869 | statement. */ |
1870 | if (name_registered_for_update_p (arg)) |
1871 | return false; |
1872 | |
1873 | /* If we were already here, break the infinite cycle. */ |
1874 | if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg))) |
1875 | return true; |
1876 | |
1877 | tree var = arg; |
1878 | gimple *def_stmt = SSA_NAME_DEF_STMT (var); |
1879 | |
  switch (gimple_code (def_stmt))
1881 | { |
1882 | case GIMPLE_ASSIGN: |
1883 | /* The RHS of the statement defining VAR must either have a |
1884 | constant length or come from another SSA_NAME with a constant |
1885 | length. */ |
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1899 | { |
1900 | if (rkind != SRK_LENRANGE) |
1901 | return false; |
1902 | /* Set the upper bound to the maximum to prevent |
1903 | it from being adjusted in the next iteration but |
1904 | leave MINLEN and the more conservative MAXBOUND |
1905 | determined so far alone (or leave them null if |
1906 | they haven't been set yet). That the MINLEN is |
1907 | in fact zero can be determined from MAXLEN being |
1908 | unbounded but the discovered minimum is used for |
1909 | diagnostics. */ |
1910 | pdata->maxlen = build_all_ones_cst (size_type_node); |
1911 | } |
1912 | return true; |
1913 | } |
1914 | return false; |
1915 | |
1916 | case GIMPLE_PHI: |
1917 | /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node |
1918 | must have a constant length. */ |
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
1930 | continue; |
1931 | |
1932 | if (!get_range_strlen (arg, visited, rkind, pdata, eltsize)) |
1933 | { |
1934 | if (rkind != SRK_LENRANGE) |
1935 | return false; |
1936 | /* Set the upper bound to the maximum to prevent |
1937 | it from being adjusted in the next iteration but |
1938 | leave MINLEN and the more conservative MAXBOUND |
1939 | determined so far alone (or leave them null if |
1940 | they haven't been set yet). That the MINLEN is |
1941 | in fact zero can be determined from MAXLEN being |
1942 | unbounded but the discovered minimum is used for |
1943 | diagnostics. */ |
1944 | pdata->maxlen = build_all_ones_cst (size_type_node); |
1945 | } |
1946 | } |
1947 | return true; |
1948 | |
1949 | default: |
1950 | return false; |
1951 | } |
1952 | } |
1953 | |
1954 | /* Try to obtain the range of the lengths of the string(s) referenced |
1955 | by ARG, or the size of the largest array ARG refers to if the range |
1956 | of lengths cannot be determined, and store all in *PDATA which must |
1957 | be zero-initialized on input except PDATA->MAXBOUND may be set to |
1958 | a non-null tree node other than INTEGER_CST to request to have it |
1959 | set to the length of the longest string in a PHI. ELTSIZE is |
1960 | the expected size of the string element in bytes: 1 for char and |
1961 | some power of 2 for wide characters. |
1962 | Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable |
1963 | for optimization. Returning false means that a nonzero PDATA->MINLEN |
1964 | doesn't reflect the true lower bound of the range when PDATA->MAXLEN |
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */
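
/* A minimal usage sketch (names illustrative): callers typically do

     c_strlen_data lendata = { };
     get_range_strlen (arg, &lendata, 1);

   and then consult [lendata.minlen, lendata.maxlen]; on failure the
   function sets MINLEN to 0 and MAXLEN to all-ones (unbounded).  */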
1967 | |
1968 | bool |
1969 | get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize) |
1970 | { |
1971 | auto_bitmap visited; |
1972 | tree maxbound = pdata->maxbound; |
1973 | |
  if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
1975 | { |
1976 | /* On failure extend the length range to an impossible maximum |
1977 | (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other |
1978 | members can stay unchanged regardless. */ |
1979 | pdata->minlen = ssize_int (0); |
1980 | pdata->maxlen = build_all_ones_cst (size_type_node); |
1981 | } |
1982 | else if (!pdata->minlen) |
1983 | pdata->minlen = ssize_int (0); |
1984 | |
  /* If it's unchanged from its initial non-null value, set the conservative
     MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
1987 | if (maxbound && pdata->maxbound == maxbound) |
1988 | pdata->maxbound = build_all_ones_cst (size_type_node); |
1989 | |
1990 | return !integer_all_onesp (pdata->maxlen); |
1991 | } |
1992 | |
1993 | /* Return the maximum value for ARG given RKIND (see strlen_range_kind). |
1994 | For ARG of pointer types, NONSTR indicates if the caller is prepared |
1995 | to handle unterminated strings. For integer ARG and when RKIND == |
1996 | SRK_INT_VALUE, NONSTR must be null. |
1997 | |
1998 | If an unterminated array is discovered and our caller handles |
1999 | unterminated arrays, then bubble up the offending DECL and |
2000 | return the maximum size. Otherwise return NULL. */ |
2001 | |
2002 | static tree |
2003 | get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL) |
2004 | { |
2005 | /* A non-null NONSTR is meaningless when determining the maximum |
2006 | value of an integer ARG. */ |
2007 | gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL); |
2008 | /* ARG must have an integral type when RKIND says so. */ |
2009 | gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg))); |
2010 | |
2011 | auto_bitmap visited; |
2012 | |
2013 | /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN |
2014 | is unbounded. */ |
2015 | c_strlen_data lendata = { }; |
  if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
2017 | lendata.maxlen = NULL_TREE; |
2018 | else if (lendata.maxlen && integer_all_onesp (lendata.maxlen)) |
2019 | lendata.maxlen = NULL_TREE; |
2020 | |
2021 | if (nonstr) |
2022 | { |
2023 | /* For callers prepared to handle unterminated arrays set |
2024 | *NONSTR to point to the declaration of the array and return |
2025 | the maximum length/size. */ |
2026 | *nonstr = lendata.decl; |
2027 | return lendata.maxlen; |
2028 | } |
2029 | |
2030 | /* Fail if the constant array isn't nul-terminated. */ |
2031 | return lendata.decl ? NULL_TREE : lendata.maxlen; |
2032 | } |
2033 | |
2034 | /* Return true if LEN is known to be less than or equal to (or if STRICT is |
2035 | true, strictly less than) the lower bound of SIZE at compile time and false |
2036 | otherwise. */ |
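
/* As an illustrative sketch: if range analysis shows LEN in [0, 8] and
   SIZE in [16, 32], the largest possible LEN is below the smallest
   possible SIZE, so this returns true; when either range is unknown it
   conservatively returns false.  */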
2037 | |
2038 | static bool |
2039 | known_lower (gimple *stmt, tree len, tree size, bool strict = false) |
2040 | { |
2041 | if (len == NULL_TREE) |
2042 | return false; |
2043 | |
2044 | wide_int size_range[2]; |
2045 | wide_int len_range[2]; |
2046 | if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range)) |
2047 | { |
      if (strict)
	return wi::ltu_p (len_range[1], size_range[0]);
      else
	return wi::leu_p (len_range[1], size_range[0]);
2052 | } |
2053 | |
2054 | return false; |
2055 | } |
2056 | |
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */
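
/* For example (an illustrative sketch): when SRC is a known string,

     strcpy (buf, "abc");

   is rewritten into

     memcpy (buf, "abc", 4);

   with the constant length covering the terminating nul.  */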
2060 | |
2061 | static bool |
2062 | gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi, |
2063 | tree dest, tree src) |
2064 | { |
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
2067 | tree fn; |
2068 | |
2069 | /* If SRC and DEST are the same (and not volatile), return DEST. */ |
  if (operand_equal_p (src, dest, 0))
2071 | { |
2072 | /* Issue -Wrestrict unless the pointers are null (those do |
2073 | not point to objects and so do not indicate an overlap; |
2074 | such calls could be the result of sanitization and jump |
2075 | threading). */ |
2076 | if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict)) |
2077 | { |
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
2086 | return true; |
2087 | } |
2088 | |
2089 | if (optimize_function_for_size_p (cfun)) |
2090 | return false; |
2091 | |
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2093 | if (!fn) |
2094 | return false; |
2095 | |
2096 | /* Set to non-null if ARG refers to an unterminated array. */ |
2097 | tree nonstr = NULL; |
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2099 | |
2100 | if (nonstr) |
2101 | { |
2102 | /* Avoid folding calls with unterminated arrays. */ |
2103 | if (!warning_suppressed_p (stmt, OPT_Wstringop_overread)) |
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
2105 | suppress_warning (stmt, OPT_Wstringop_overread); |
2106 | return false; |
2107 | } |
2108 | |
2109 | if (!len) |
2110 | return false; |
2111 | |
2112 | len = fold_convert_loc (loc, size_type_node, len); |
2113 | len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1)); |
2114 | len = force_gimple_operand_gsi (gsi, len, true, |
2115 | NULL_TREE, true, GSI_SAME_STMT); |
2116 | gimple *repl = gimple_build_call (fn, 3, dest, src, len); |
2117 | replace_call_with_call_and_fold (gsi, repl); |
2118 | return true; |
2119 | } |
2120 | |
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Return false if no simplification can be made.  */
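
/* For example (an illustrative sketch): when the bound covers the whole
   source string including its terminating nul,

     strncpy (buf, "ab", 3);

   is rewritten into

     memcpy (buf, "ab", 3);

   since no padding bytes beyond the nul would be written.  */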
2124 | |
2125 | static bool |
2126 | gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi, |
2127 | tree dest, tree src, tree len) |
2128 | { |
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
2131 | bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE; |
2132 | |
2133 | /* If the LEN parameter is zero, return DEST. */ |
2134 | if (integer_zerop (len)) |
2135 | { |
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
2138 | if (!nonstring) |
2139 | { |
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
2157 | return true; |
2158 | } |
2159 | |
2160 | /* We can't compare slen with len as constants below if len is not a |
2161 | constant. */ |
2162 | if (TREE_CODE (len) != INTEGER_CST) |
2163 | return false; |
2164 | |
2165 | /* Now, we must be passed a constant src ptr parameter. */ |
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
2167 | if (!slen || TREE_CODE (slen) != INTEGER_CST) |
2168 | return false; |
2169 | |
2170 | /* The size of the source string including the terminating nul. */ |
2171 | tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1)); |
2172 | |
2173 | /* We do not support simplification of this case, though we do |
2174 | support it when expanding trees into RTL. */ |
2175 | /* FIXME: generate a call to __builtin_memset. */ |
  if (tree_int_cst_lt (ssize, len))
2177 | return false; |
2178 | |
2179 | /* Diagnose truncation that leaves the copy unterminated. */ |
2180 | maybe_diag_stxncpy_trunc (*gsi, src, len); |
2181 | |
2182 | /* OK transform into builtin memcpy. */ |
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2184 | if (!fn) |
2185 | return false; |
2186 | |
2187 | len = fold_convert_loc (loc, size_type_node, len); |
2188 | len = force_gimple_operand_gsi (gsi, len, true, |
2189 | NULL_TREE, true, GSI_SAME_STMT); |
2190 | gimple *repl = gimple_build_call (fn, 3, dest, src, len); |
2191 | replace_call_with_call_and_fold (gsi, repl); |
2192 | |
2193 | return true; |
2194 | } |
2195 | |
2196 | /* Fold function call to builtin strchr or strrchr. |
2197 | If both arguments are constant, evaluate and fold the result, |
2198 | otherwise simplify str(r)chr (str, 0) into str + strlen (str). |
2199 | In general strlen is significantly faster than strchr |
2200 | due to being a simpler operation. */ |
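
/* For example (illustrative): strchr ("abc", 'b') folds to the constant
   "abc" + 1, and strchr (s, 0) with a non-constant S becomes
   s + strlen (s).  */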
2201 | static bool |
2202 | gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr) |
2203 | { |
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
2208 | const char *p; |
2209 | char ch; |
2210 | |
  if (!gimple_call_lhs (stmt))
2212 | return false; |
2213 | |
2214 | /* Avoid folding if the first argument is not a nul-terminated array. |
2215 | Defer warning until later. */ |
2216 | if (!check_nul_terminated_array (NULL_TREE, str)) |
2217 | return false; |
2218 | |
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2222 | |
2223 | if (p1 == NULL) |
2224 | { |
2225 | replace_call_with_value (gsi, integer_zero_node); |
2226 | return true; |
2227 | } |
2228 | |
2229 | tree len = build_int_cst (size_type_node, p1 - p); |
2230 | gimple_seq stmts = NULL; |
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
2233 | gimple_seq_add_stmt_without_update (&stmts, new_stmt); |
      gsi_replace_with_seq_vops (gsi, stmts);
2235 | return true; |
2236 | } |
2237 | |
2238 | if (!integer_zerop (c)) |
2239 | return false; |
2240 | |
2241 | /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */ |
2242 | if (is_strrchr && optimize_function_for_size_p (cfun)) |
2243 | { |
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2245 | |
2246 | if (strchr_fn) |
2247 | { |
2248 | gimple *repl = gimple_build_call (strchr_fn, 2, str, c); |
2249 | replace_call_with_call_and_fold (gsi, repl); |
2250 | return true; |
2251 | } |
2252 | |
2253 | return false; |
2254 | } |
2255 | |
2256 | tree len; |
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2258 | |
2259 | if (!strlen_fn) |
2260 | return false; |
2261 | |
2262 | /* Create newstr = strlen (str). */ |
2263 | gimple_seq stmts = NULL; |
2264 | gimple *new_stmt = gimple_build_call (strlen_fn, 1, str); |
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
2268 | gimple_seq_add_stmt_without_update (&stmts, new_stmt); |
2269 | |
2270 | /* Create (str p+ strlen (str)). */ |
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
2275 | /* gsi now points at the assignment to the lhs, get a |
2276 | stmt iterator to the strlen. |
2277 | ??? We can't use gsi_for_stmt as that doesn't work when the |
2278 | CFG isn't built yet. */ |
2279 | gimple_stmt_iterator gsi2 = *gsi; |
  gsi_prev (&gsi2);
2281 | fold_stmt (&gsi2); |
2282 | return true; |
2283 | } |
2284 | |
2285 | /* Fold function call to builtin strstr. |
2286 | If both arguments are constant, evaluate and fold the result, |
2287 | additionally fold strstr (x, "") into x and strstr (x, "c") |
2288 | into strchr (x, 'c'). */ |
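
/* For example (illustrative): strstr (s, "") folds to S itself,
   strstr (s, "c") becomes strchr (s, 'c'), and when both arguments are
   constant, strstr ("abcd", "cd") folds to "abcd" + 2.  */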
2289 | static bool |
2290 | gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi) |
2291 | { |
  gimple *stmt = gsi_stmt (*gsi);
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
2298 | |
2299 | /* Avoid folding if either argument is not a nul-terminated array. |
2300 | Defer warning until later. */ |
2301 | if (!check_nul_terminated_array (NULL_TREE, haystack) |
2302 | || !check_nul_terminated_array (NULL_TREE, needle)) |
2303 | return false; |
2304 | |
2305 | const char *q = c_getstr (needle); |
2306 | if (q == NULL) |
2307 | return false; |
2308 | |
2309 | if (const char *p = c_getstr (haystack)) |
2310 | { |
      const char *r = strstr (p, q);
2312 | |
2313 | if (r == NULL) |
2314 | { |
2315 | replace_call_with_value (gsi, integer_zero_node); |
2316 | return true; |
2317 | } |
2318 | |
2319 | tree len = build_int_cst (size_type_node, r - p); |
2320 | gimple_seq stmts = NULL; |
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
2326 | return true; |
2327 | } |
2328 | |
2329 | /* For strstr (x, "") return x. */ |
2330 | if (q[0] == '\0') |
2331 | { |
      replace_call_with_value (gsi, haystack);
2333 | return true; |
2334 | } |
2335 | |
2336 | /* Transform strstr (x, "c") into strchr (x, 'c'). */ |
2337 | if (q[1] == '\0') |
2338 | { |
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2340 | if (strchr_fn) |
2341 | { |
2342 | tree c = build_int_cst (integer_type_node, q[0]); |
2343 | gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c); |
2344 | replace_call_with_call_and_fold (gsi, repl); |
2345 | return true; |
2346 | } |
2347 | } |
2348 | |
2349 | return false; |
2350 | } |
2351 | |
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise return true
   after replacing the call with an equivalent but more efficient
   sequence (possibly including calls to other builtin functions).  */
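
/* For example (an illustrative sketch): when the source length is known,

     strcat (dst, "ab");

   is split into

     tmp = strlen (dst);
     memcpy (dst + tmp, "ab", 3);

   which exposes the copy to further folding.  */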
2369 | |
2370 | static bool |
2371 | gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src) |
2372 | { |
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
2375 | |
2376 | const char *p = c_getstr (src); |
2377 | |
2378 | /* If the string length is zero, return the dst parameter. */ |
2379 | if (p && *p == '\0') |
2380 | { |
      replace_call_with_value (gsi, dst);
2382 | return true; |
2383 | } |
2384 | |
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2386 | return false; |
2387 | |
2388 | /* See if we can store by pieces into (dst + strlen(dst)). */ |
2389 | tree newdst; |
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2392 | |
2393 | if (!strlen_fn || !memcpy_fn) |
2394 | return false; |
2395 | |
2396 | /* If the length of the source string isn't computable don't |
2397 | split strcat into strlen and memcpy. */ |
  tree len = get_maxval_strlen (src, SRK_STRLEN);
2399 | if (! len) |
2400 | return false; |
2401 | |
2402 | /* Create strlen (dst). */ |
2403 | gimple_seq stmts = NULL, stmts2; |
2404 | gimple *repl = gimple_build_call (strlen_fn, 1, dst); |
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
2408 | gimple_seq_add_stmt_without_update (&stmts, repl); |
2409 | |
2410 | /* Create (dst p+ strlen (dst)). */ |
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2412 | newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE); |
2413 | gimple_seq_add_seq_without_update (&stmts, stmts2); |
2414 | |
2415 | len = fold_convert_loc (loc, size_type_node, len); |
2416 | len = size_binop_loc (loc, PLUS_EXPR, len, |
2417 | build_int_cst (size_type_node, 1)); |
2418 | len = force_gimple_operand (len, &stmts2, true, NULL_TREE); |
2419 | gimple_seq_add_seq_without_update (&stmts, stmts2); |
2420 | |
2421 | repl = gimple_build_call (memcpy_fn, 3, newdst, src, len); |
2422 | gimple_seq_add_stmt_without_update (&stmts, repl); |
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
2428 | /* gsi now points at the assignment to the lhs, get a |
2429 | stmt iterator to the memcpy call. |
2430 | ??? We can't use gsi_for_stmt as that doesn't work when the |
2431 | CFG isn't built yet. */ |
2432 | gimple_stmt_iterator gsi2 = *gsi; |
      gsi_prev (&gsi2);
2434 | fold_stmt (&gsi2); |
2435 | } |
2436 | else |
2437 | { |
      gsi_replace_with_seq_vops (gsi, stmts);
2439 | fold_stmt (gsi); |
2440 | } |
2441 | return true; |
2442 | } |
2443 | |
2444 | /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE |
2445 | are the arguments to the call. */ |
2446 | |
2447 | static bool |
2448 | gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi) |
2449 | { |
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
2459 | /* If the SRC parameter is "", return DEST. */ |
2460 | if (p && *p == '\0') |
2461 | { |
      replace_call_with_value (gsi, dest);
2463 | return true; |
2464 | } |
2465 | |
2466 | if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size)) |
2467 | return false; |
2468 | |
2469 | /* If __builtin_strcat_chk is used, assume strcat is available. */ |
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2471 | if (!fn) |
2472 | return false; |
2473 | |
2474 | gimple *repl = gimple_build_call (fn, 2, dest, src); |
2475 | replace_call_with_call_and_fold (gsi, repl); |
2476 | return true; |
2477 | } |
2478 | |
2479 | /* Simplify a call to the strncat builtin. */ |
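
/* For example (illustrative): when the bound is provably at least the
   source length, strncat (dst, "ab", 8) behaves like strcat (dst, "ab")
   and is rewritten as such.  */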
2480 | |
2481 | static bool |
2482 | gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi) |
2483 | { |
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
2488 | tree src_len = c_strlen (src, 1); |
2489 | |
2490 | /* If the requested length is zero, or the src parameter string |
2491 | length is zero, return the dst parameter. */ |
2492 | if (integer_zerop (len) || (src_len && integer_zerop (src_len))) |
2493 | { |
      replace_call_with_value (gsi, dst);
2495 | return true; |
2496 | } |
2497 | |
2498 | /* Return early if the requested len is less than the string length. |
2499 | Warnings will be issued elsewhere later. */ |
  if (!src_len || known_lower (stmt, len, src_len, true))
2501 | return false; |
2502 | |
2503 | /* Warn on constant LEN. */ |
2504 | if (TREE_CODE (len) == INTEGER_CST) |
2505 | { |
2506 | bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_); |
2507 | tree dstsize; |
2508 | |
2509 | if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize) |
2510 | && TREE_CODE (dstsize) == INTEGER_CST) |
2511 | { |
	  int cmpdst = tree_int_cst_compare (len, dstsize);
2513 | |
2514 | if (cmpdst >= 0) |
2515 | { |
	      tree fndecl = gimple_call_fndecl (stmt);
2517 | |
2518 | /* Strncat copies (at most) LEN bytes and always appends |
2519 | the terminating NUL so the specified bound should never |
2520 | be equal to (or greater than) the size of the destination. |
2521 | If it is, the copy could overflow. */ |
	      location_t loc = gimple_location (stmt);
	      nowarn = warning_at (loc, OPT_Wstringop_overflow_,
				   cmpdst == 0
				   ? G_("%qD specified bound %E equals "
					"destination size")
				   : G_("%qD specified bound %E exceeds "
					"destination size %E"),
				   fndecl, len, dstsize);
2530 | if (nowarn) |
2531 | suppress_warning (stmt, OPT_Wstringop_overflow_); |
2532 | } |
2533 | } |
2534 | |
      if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
	  && tree_int_cst_compare (src_len, len) == 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  location_t loc = gimple_location (stmt);

	  /* To avoid possible overflow the specified bound should also
	     not be equal to the length of the source, even when the size
	     of the destination is unknown (it's not an uncommon mistake
	     to pass the length of the source as the bound).  */
	  if (warning_at (loc, OPT_Wstringop_overflow_,
			  "%qD specified bound %E equals source length",
			  fndecl, len))
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
2549 | } |
2550 | } |
2551 | |
  if (!known_lower (stmt, src_len, len))
2553 | return false; |
2554 | |
  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2556 | |
2557 | /* If the replacement _DECL isn't initialized, don't do the |
2558 | transformation. */ |
2559 | if (!fn) |
2560 | return false; |
2561 | |
2562 | /* Otherwise, emit a call to strcat. */ |
2563 | gcall *repl = gimple_build_call (fn, 2, dst, src); |
2564 | replace_call_with_call_and_fold (gsi, repl); |
2565 | return true; |
2566 | } |
2567 | |
2568 | /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC, |
2569 | LEN, and SIZE. */ |
2570 | |
2571 | static bool |
2572 | gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi) |
2573 | { |
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
2579 | tree fn; |
2580 | const char *p; |
2581 | |
2582 | p = c_getstr (src); |
2583 | /* If the SRC parameter is "" or if LEN is 0, return DEST. */ |
2584 | if ((p && *p == '\0') |
2585 | || integer_zerop (len)) |
2586 | { |
      replace_call_with_value (gsi, dest);
2588 | return true; |
2589 | } |
2590 | |
2591 | if (! integer_all_onesp (size)) |
2592 | { |
2593 | tree src_len = c_strlen (src, 1); |
      if (known_lower (stmt, src_len, len))
2595 | { |
2596 | /* If LEN >= strlen (SRC), optimize into __strcat_chk. */ |
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2598 | if (!fn) |
2599 | return false; |
2600 | |
2601 | gimple *repl = gimple_build_call (fn, 3, dest, src, size); |
2602 | replace_call_with_call_and_fold (gsi, repl); |
2603 | return true; |
2604 | } |
2605 | return false; |
2606 | } |
2607 | |
2608 | /* If __builtin_strncat_chk is used, assume strncat is available. */ |
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2610 | if (!fn) |
2611 | return false; |
2612 | |
2613 | gimple *repl = gimple_build_call (fn, 3, dest, src, len); |
2614 | replace_call_with_call_and_fold (gsi, repl); |
2615 | return true; |
2616 | } |
2617 | |
/* Build and append gimple statements to STMTS that load the first
   character of the memory location identified by STR.  LOC is the
   location of the statement.  */
2621 | |
2622 | static tree |
2623 | gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts) |
2624 | { |
2625 | tree var; |
2626 | |
2627 | tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0); |
2628 | tree cst_uchar_ptr_node |
2629 | = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true); |
2630 | tree off0 = build_int_cst (cst_uchar_ptr_node, 0); |
2631 | |
2632 | tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0); |
2633 | gassign *stmt = gimple_build_assign (NULL_TREE, temp); |
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
2637 | gimple_seq_add_stmt_without_update (stmts, stmt); |
2638 | |
2639 | return var; |
2640 | } |
2641 | |
2642 | /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */ |
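
/* For example (illustrative): strcmp (s, s) and strncmp (s1, s2, 0)
   fold to 0 without reading either argument, and strncmp (s1, s2, 1)
   reduces to the byte difference
   *(const unsigned char *) s1 - *(const unsigned char *) s2.  */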
2643 | |
2644 | static bool |
2645 | gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi) |
2646 | { |
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2650 | |
2651 | tree type = integer_type_node; |
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
2655 | |
2656 | tree bound_node = NULL_TREE; |
2657 | unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U; |
2658 | |
2659 | /* Handle strncmp and strncasecmp functions. */ |
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
2663 | if (tree_fits_uhwi_p (bound_node)) |
2664 | bound = tree_to_uhwi (bound_node); |
2665 | } |
2666 | |
2667 | /* If the BOUND parameter is zero, return zero. */ |
2668 | if (bound == 0) |
2669 | { |
2670 | replace_call_with_value (gsi, integer_zero_node); |
2671 | return true; |
2672 | } |
2673 | |
2674 | /* If ARG1 and ARG2 are the same (and not volatile), return zero. */ |
  if (operand_equal_p (str1, str2, 0))
2676 | { |
2677 | replace_call_with_value (gsi, integer_zero_node); |
2678 | return true; |
2679 | } |
2680 | |
  /* LEN1 and LEN2 are initially set to the number of characters,
     including the terminating nul if each array has one.  LENx
     == strnlen (Sx, LENx) implies that the array Sx is not terminated
     by a nul.  For nul-terminated strings they are then adjusted to
     the string length so that LENx == NULPOSx holds.  */
2686 | unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1; |
2687 | const char *p1 = getbyterep (str1, &len1); |
2688 | const char *p2 = getbyterep (str2, &len2); |
2689 | |
2690 | /* The position of the terminating nul character if one exists, otherwise |
2691 | a value greater than LENx. */ |
2692 | unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1; |
2693 | |
2694 | if (p1) |
2695 | { |
      size_t n = strnlen (p1, len1);
2697 | if (n < len1) |
2698 | len1 = nulpos1 = n; |
2699 | } |
2700 | |
2701 | if (p2) |
2702 | { |
      size_t n = strnlen (p2, len2);
2704 | if (n < len2) |
2705 | len2 = nulpos2 = n; |
2706 | } |
2707 | |
2708 | /* For known strings, return an immediate value. */ |
2709 | if (p1 && p2) |
2710 | { |
2711 | int r = 0; |
2712 | bool known_result = false; |
2713 | |
2714 | switch (fcode) |
2715 | { |
2716 | case BUILT_IN_STRCMP: |
2717 | case BUILT_IN_STRCMP_EQ: |
2718 | if (len1 != nulpos1 || len2 != nulpos2) |
2719 | break; |
2720 | |
	  r = strcmp (p1, p2);
2722 | known_result = true; |
2723 | break; |
2724 | |
2725 | case BUILT_IN_STRNCMP: |
2726 | case BUILT_IN_STRNCMP_EQ: |
2727 | { |
2728 | if (bound == HOST_WIDE_INT_M1U) |
2729 | break; |
2730 | |
2731 | /* Reduce the bound to be no more than the length |
2732 | of the shorter of the two strings, or the sizes |
2733 | of the unterminated arrays. */ |
2734 | unsigned HOST_WIDE_INT n = bound; |
2735 | |
2736 | if (len1 == nulpos1 && len1 < n) |
2737 | n = len1 + 1; |
2738 | if (len2 == nulpos2 && len2 < n) |
2739 | n = len2 + 1; |
2740 | |
2741 | if (MIN (nulpos1, nulpos2) + 1 < n) |
2742 | break; |
2743 | |
	    r = strncmp (p1, p2, n);
2745 | known_result = true; |
2746 | break; |
2747 | } |
	/* The only handleable situation is where the strings are equal
	   (result 0), which is already covered by the operand_equal_p
	   case above.  */
2750 | case BUILT_IN_STRCASECMP: |
2751 | break; |
2752 | case BUILT_IN_STRNCASECMP: |
2753 | { |
2754 | if (bound == HOST_WIDE_INT_M1U) |
2755 | break; |
	  r = strncmp (p1, p2, bound);
2757 | if (r == 0) |
2758 | known_result = true; |
2759 | break; |
2760 | } |
2761 | default: |
2762 | gcc_unreachable (); |
2763 | } |
2764 | |
2765 | if (known_result) |
2766 | { |
      replace_call_with_value (gsi, build_cmp_result (type, r));
2768 | return true; |
2769 | } |
2770 | } |
2771 | |
2772 | bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U) |
2773 | || fcode == BUILT_IN_STRCMP |
2774 | || fcode == BUILT_IN_STRCMP_EQ |
2775 | || fcode == BUILT_IN_STRCASECMP; |
2776 | |
  location_t loc = gimple_location (stmt);
2778 | |
2779 | /* If the second arg is "", return *(const unsigned char*)arg1. */ |
2780 | if (p2 && *p2 == '\0' && nonzero_bound) |
2781 | { |
2782 | gimple_seq stmts = NULL; |
      tree var = gimple_load_first_char (loc, str1, &stmts);
2784 | if (lhs) |
2785 | { |
2786 | stmt = gimple_build_assign (lhs, NOP_EXPR, var); |
2787 | gimple_seq_add_stmt_without_update (&stmts, stmt); |
2788 | } |
2789 | |
      gsi_replace_with_seq_vops (gsi, stmts);
2791 | return true; |
2792 | } |
2793 | |
2794 | /* If the first arg is "", return -*(const unsigned char*)arg2. */ |
2795 | if (p1 && *p1 == '\0' && nonzero_bound) |
2796 | { |
2797 | gimple_seq stmts = NULL; |
      tree var = gimple_load_first_char (loc, str2, &stmts);
2799 | |
2800 | if (lhs) |
2801 | { |
2802 | tree c = create_tmp_reg_or_ssa_name (integer_type_node); |
2803 | stmt = gimple_build_assign (c, NOP_EXPR, var); |
2804 | gimple_seq_add_stmt_without_update (&stmts, stmt); |
2805 | |
2806 | stmt = gimple_build_assign (lhs, NEGATE_EXPR, c); |
2807 | gimple_seq_add_stmt_without_update (&stmts, stmt); |
2808 | } |
2809 | |
      gsi_replace_with_seq_vops (gsi, stmts);
2811 | return true; |
2812 | } |
2813 | |
  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
2816 | if (fcode == BUILT_IN_STRNCMP && bound == 1) |
2817 | { |
2818 | gimple_seq stmts = NULL; |
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2821 | |
2822 | if (lhs) |
2823 | { |
2824 | tree c1 = create_tmp_reg_or_ssa_name (integer_type_node); |
2825 | gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1); |
2826 | gimple_seq_add_stmt_without_update (&stmts, convert1); |
2827 | |
2828 | tree c2 = create_tmp_reg_or_ssa_name (integer_type_node); |
2829 | gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2); |
2830 | gimple_seq_add_stmt_without_update (&stmts, convert2); |
2831 | |
2832 | stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2); |
2833 | gimple_seq_add_stmt_without_update (&stmts, stmt); |
2834 | } |
2835 | |
      gsi_replace_with_seq_vops (gsi, stmts);
2837 | return true; |
2838 | } |
2839 | |
2840 | /* If BOUND is greater than the length of one constant string, |
2841 | and the other argument is also a nul-terminated string, replace |
2842 | strncmp with strcmp. */ |
2843 | if (fcode == BUILT_IN_STRNCMP |
2844 | && bound > 0 && bound < HOST_WIDE_INT_M1U |
2845 | && ((p2 && len2 < bound && len2 == nulpos2) |
2846 | || (p1 && len1 < bound && len1 == nulpos1))) |
2847 | { |
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2849 | if (!fn) |
2850 | return false; |
2851 | gimple *repl = gimple_build_call (fn, 2, str1, str2); |
2852 | replace_call_with_call_and_fold (gsi, repl); |
2853 | return true; |
2854 | } |
2855 | |
2856 | return false; |
2857 | } |
2858 | |
/* Fold the call to the memchr builtin pointed to by the GSI iterator.  */
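
/* For example (illustrative): memchr ("abcd", 'c', 4) folds to the
   constant "abcd" + 2, and memchr (s, c, 0) folds to a null pointer
   without reading S.  */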
2860 | |
2861 | static bool |
2862 | gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi) |
2863 | { |
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
2869 | |
2870 | /* If the LEN parameter is zero, return zero. */ |
2871 | if (integer_zerop (len)) |
2872 | { |
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2874 | return true; |
2875 | } |
2876 | |
2877 | char c; |
2878 | if (TREE_CODE (arg2) != INTEGER_CST |
2879 | || !tree_fits_uhwi_p (len) |
      || !target_char_cst_p (arg2, &c))
2881 | return false; |
2882 | |
2883 | unsigned HOST_WIDE_INT length = tree_to_uhwi (len); |
2884 | unsigned HOST_WIDE_INT string_length; |
2885 | const char *p1 = getbyterep (arg1, &string_length); |
2886 | |
2887 | if (p1) |
2888 | { |
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2890 | if (r == NULL) |
2891 | { |
2892 | tree mem_size, offset_node; |
2893 | byte_representation (arg1, &offset_node, &mem_size, NULL); |
2894 | unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE) |
2895 | ? 0 : tree_to_uhwi (offset_node); |
2896 | /* MEM_SIZE is the size of the array the string literal |
2897 | is stored in. */ |
2898 | unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset; |
2899 | gcc_checking_assert (string_length <= string_size); |
2900 | if (length <= string_size) |
2901 | { |
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2903 | return true; |
2904 | } |
2905 | } |
2906 | else |
2907 | { |
2908 | unsigned HOST_WIDE_INT offset = r - p1; |
2909 | gimple_seq stmts = NULL; |
2910 | if (lhs != NULL_TREE) |
2911 | { |
2912 | tree offset_cst = build_int_cst (sizetype, offset); |
2913 | gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR, |
2914 | arg1, offset_cst); |
2915 | gimple_seq_add_stmt_without_update (&stmts, stmt); |
2916 | } |
2917 | else |
2918 | gimple_seq_add_stmt_without_update (&stmts, |
2919 | gimple_build_nop ()); |
2920 | |
	  gsi_replace_with_seq_vops (gsi, stmts);
2922 | return true; |
2923 | } |
2924 | } |
2925 | |
2926 | return false; |
2927 | } |
2928 | |
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */
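
/* As an illustrative sketch of the transformations below, when the
   result of the call is unused:

     fputs ("", f)     is removed entirely,
     fputs ("x", f)    becomes fputc ('x', f), and
     fputs ("xyz", f)  becomes fwrite ("xyz", 1, 3, f).  */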
2935 | |
2936 | static bool |
2937 | gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi, |
2938 | tree arg0, tree arg1, |
2939 | bool unlocked) |
2940 | { |
  gimple *stmt = gsi_stmt (*gsi);
2942 | |
2943 | /* If we're using an unlocked function, assume the other unlocked |
2944 | functions exist explicitly. */ |
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));
2951 | |
2952 | /* If the return value is used, don't do the transformation. */ |
  if (gimple_call_lhs (stmt))
2954 | return false; |
2955 | |
2956 | /* Get the length of the string passed to fputs. If the length |
2957 | can't be determined, punt. */ |
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2959 | if (!len || TREE_CODE (len) != INTEGER_CST) |
2960 | return false; |
2961 | |
2962 | switch (compare_tree_int (len, 1)) |
2963 | { |
    case -1: /* length is 0, delete the call entirely.  */
2965 | replace_call_with_value (gsi, integer_zero_node); |
2966 | return true; |
2967 | |
2968 | case 0: /* length is 1, call fputc. */ |
2969 | { |
2970 | const char *p = c_getstr (arg0); |
2971 | if (p != NULL) |
2972 | { |
2973 | if (!fn_fputc) |
2974 | return false; |
2975 | |
2976 | gimple *repl |
2977 | = gimple_build_call (fn_fputc, 2, |
2978 | build_int_cst (integer_type_node, p[0]), |
2979 | arg1); |
2980 | replace_call_with_call_and_fold (gsi, repl); |
2981 | return true; |
2982 | } |
2983 | } |
2984 | /* FALLTHROUGH */ |
2985 | case 1: /* length is greater than 1, call fwrite. */ |
2986 | { |
2987 | /* If optimizing for size keep fputs. */ |
2988 | if (optimize_function_for_size_p (cfun)) |
2989 | return false; |
2990 | /* New argument list transforming fputs(string, stream) to |
2991 | fwrite(string, 1, len, stream). */ |
2992 | if (!fn_fwrite) |
2993 | return false; |
2994 | |
2995 | gimple *repl |
2996 | = gimple_build_call (fn_fwrite, 4, arg0, size_one_node, |
2997 | fold_convert (size_type_node, len), arg1); |
2998 | replace_call_with_call_and_fold (gsi, repl); |
2999 | return true; |
3000 | } |
3001 | default: |
3002 | gcc_unreachable (); |
3003 | } |
3004 | } |
3005 | |
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  Return false if no
   simplification can be made.  */
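
/* For example (an illustrative sketch): when LEN provably fits in the
   object size,

     __builtin___memcpy_chk (d, s, n, os);

   is strength-reduced to plain

     memcpy (d, s, n);

   and a __mempcpy_chk whose result is unused degrades to
   __memcpy_chk.  */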
3011 | |
3012 | static bool |
3013 | gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi, |
3014 | tree dest, tree src, tree len, tree size, |
3015 | enum built_in_function fcode) |
3016 | { |
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3020 | tree fn; |
3021 | |
3022 | /* If SRC and DEST are the same (and not volatile), return DEST |
3023 | (resp. DEST+LEN for __mempcpy_chk). */ |
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
3025 | { |
3026 | if (fcode != BUILT_IN_MEMPCPY_CHK) |
3027 | { |
	  replace_call_with_value (gsi, dest);
3029 | return true; |
3030 | } |
3031 | else |
3032 | { |
3033 | gimple_seq stmts = NULL; |
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
3039 | return true; |
3040 | } |
3041 | } |
3042 | |
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size)
      && !known_lower (stmt, maxlen, size))
3047 | { |
3048 | /* MAXLEN and LEN both cannot be proved to be less than SIZE, at |
3049 | least try to optimize (void) __mempcpy_chk () into |
3050 | (void) __memcpy_chk () */ |
3051 | if (fcode == BUILT_IN_MEMPCPY_CHK && ignore) |
3052 | { |
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3054 | if (!fn) |
3055 | return false; |
3056 | |
3057 | gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); |
3058 | replace_call_with_call_and_fold (gsi, repl); |
3059 | return true; |
3060 | } |
3061 | return false; |
3062 | } |
3063 | |
3064 | fn = NULL_TREE; |
3065 | /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume |
3066 | mem{cpy,pcpy,move,set} is available. */ |
3067 | switch (fcode) |
3068 | { |
3069 | case BUILT_IN_MEMCPY_CHK: |
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3080 | break; |
3081 | default: |
3082 | break; |
3083 | } |
3084 | |
3085 | if (!fn) |
3086 | return false; |
3087 | |
3088 | gimple *repl = gimple_build_call (fn, 3, dest, src, len); |
3089 | replace_call_with_call_and_fold (gsi, repl); |
3090 | return true; |
3091 | } |
3092 | |
3093 | /* Print a message in the dump file recording transformation of FROM to TO. */ |
3094 | |
3095 | static void |
3096 | dump_transformation (gcall *from, gcall *to) |
3097 | { |
3098 | if (dump_enabled_p ()) |
    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
		     gimple_call_fn (from), gimple_call_fn (to));
3101 | } |
3102 | |
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call; FCODE is the
   BUILT_IN_* code of the builtin.  The return value of the call is
   considered ignored when the statement has no lhs.  Return true if
   the call was simplified.  */
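
/* Two illustrative instances of the folds below:
     __builtin___strcpy_chk (d, s, os) -> strcpy (d, s)
       when strlen (s) is provably smaller than OS, and
     __builtin___strcpy_chk (d, s, os) -> __memcpy_chk (d, s, len + 1, os)
       when strlen (s) == len is known but not provably below OS.  */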
3108 | |
3109 | static bool |
3110 | gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi, |
3111 | tree dest, |
3112 | tree src, tree size, |
3113 | enum built_in_function fcode) |
3114 | { |
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3118 | tree len, fn; |
3119 | |
3120 | /* If SRC and DEST are the same (and not volatile), return DEST. */ |
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3122 | { |
3123 | /* Issue -Wrestrict unless the pointers are null (those do |
3124 | not point to objects and so do not indicate an overlap; |
3125 | such calls could be the result of sanitization and jump |
3126 | threading). */ |
3127 | if (!integer_zerop (dest) |
3128 | && !warning_suppressed_p (stmt, OPT_Wrestrict)) |
3129 | { |
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
3138 | return true; |
3139 | } |
3140 | |
  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (!known_lower (stmt, len, size, true)
	  && !known_lower (stmt, maxlen, size, true))
3147 | { |
3148 | if (fcode == BUILT_IN_STPCPY_CHK) |
3149 | { |
3150 | if (! ignore) |
3151 | return false; |
3152 | |
3153 | /* If return value of __stpcpy_chk is ignored, |
3154 | optimize into __strcpy_chk. */ |
	      fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3156 | if (!fn) |
3157 | return false; |
3158 | |
3159 | gimple *repl = gimple_build_call (fn, 3, dest, src, size); |
3160 | replace_call_with_call_and_fold (gsi, repl); |
3161 | return true; |
3162 | } |
3163 | |
3164 | if (! len || TREE_SIDE_EFFECTS (len)) |
3165 | return false; |
3166 | |
3167 | /* If c_strlen returned something, but not provably less than size, |
3168 | transform __strcpy_chk into __memcpy_chk. */ |
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3170 | if (!fn) |
3171 | return false; |
3172 | |
3173 | gimple_seq stmts = NULL; |
3174 | len = force_gimple_operand (len, &stmts, true, NULL_TREE); |
	  len = gimple_convert (&stmts, loc, size_type_node, len);
	  len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
			      build_int_cst (size_type_node, 1));
3178 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
3179 | gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); |
3180 | replace_call_with_call_and_fold (gsi, repl); |
3181 | return true; |
3182 | } |
3183 | } |
3184 | |
3185 | /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */ |
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
3187 | ? BUILT_IN_STPCPY : BUILT_IN_STRCPY); |
3188 | if (!fn) |
3189 | return false; |
3190 | |
3191 | gcall *repl = gimple_build_call (fn, 2, dest, src); |
  dump_transformation (stmt, repl);
3193 | replace_call_with_call_and_fold (gsi, repl); |
3194 | return true; |
3195 | } |
3196 | |
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and
   SIZE are the arguments to the call; FCODE is the BUILT_IN_* code of
   the builtin.  The return value of the call is considered ignored
   when the statement has no lhs.  Return true if the call was
   simplified.  */
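
/* For instance (illustrative), __builtin___strncpy_chk (d, s, n, os)
   folds to strncpy (d, s, n) once N is provably at most OS, and an
   ignored __stpncpy_chk call falls back to __strncpy_chk otherwise.  */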
3201 | |
3202 | static bool |
3203 | gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi, |
3204 | tree dest, tree src, |
3205 | tree len, tree size, |
3206 | enum built_in_function fcode) |
3207 | { |
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3215 | { |
3216 | if (fcode == BUILT_IN_STPNCPY_CHK && ignore) |
3217 | { |
3218 | /* If return value of __stpncpy_chk is ignored, |
3219 | optimize into __strncpy_chk. */ |
	  fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3221 | if (fn) |
3222 | { |
3223 | gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); |
3224 | replace_call_with_call_and_fold (gsi, repl); |
3225 | return true; |
3226 | } |
3227 | } |
3228 | return false; |
3229 | } |
3230 | |
3231 | /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */ |
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
3233 | ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY); |
3234 | if (!fn) |
3235 | return false; |
3236 | |
3237 | gcall *repl = gimple_build_call (fn, 3, dest, src, len); |
  dump_transformation (stmt, repl);
3239 | replace_call_with_call_and_fold (gsi, repl); |
3240 | return true; |
3241 | } |
3242 | |
/* Fold a function call to builtin stpcpy with arguments DEST and SRC.
   Return true if simplification was made, false otherwise.  */
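
/* A sketch of the transformation when strlen (SRC) is a known
   constant LEN and the result is used:
     p = stpcpy (d, s);
   becomes
     memcpy (d, s, LEN + 1);
     p = d + LEN;
   while an unused result turns the call into plain strcpy.  */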
3245 | |
3246 | static bool |
3247 | gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi) |
3248 | { |
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
3262 | fold_stmt (gsi); |
3263 | return true; |
3264 | } |
3265 | |
  /* Set to non-null if SRC refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
3269 | tree size; |
3270 | /* True if the size is exact/constant, false if it's the lower bound |
3271 | of a range. */ |
3272 | bool exact; |
3273 | tree len = c_strlen (src, 1, &data, 1); |
3274 | if (!len |
3275 | || TREE_CODE (len) != INTEGER_CST) |
3276 | { |
3277 | data.decl = unterminated_array (src, &size, &exact); |
3278 | if (!data.decl) |
3279 | return false; |
3280 | } |
3281 | |
3282 | if (data.decl) |
3283 | { |
3284 | /* Avoid folding calls with unterminated arrays. */ |
3285 | if (!warning_suppressed_p (stmt, OPT_Wstringop_overread)) |
	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
3287 | exact); |
3288 | suppress_warning (stmt, OPT_Wstringop_overread); |
3289 | return false; |
3290 | } |
3291 | |
3292 | if (optimize_function_for_size_p (cfun) |
3293 | /* If length is zero it's small enough. */ |
3294 | && !integer_zerop (len)) |
3295 | return false; |
3296 | |
3297 | /* If the source has a known length replace stpcpy with memcpy. */ |
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3299 | if (!fn) |
3300 | return false; |
3301 | |
3302 | gimple_seq stmts = NULL; |
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
3306 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
3307 | gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1); |
3308 | gimple_move_vops (repl, stmt); |
3309 | gsi_insert_before (gsi, repl, GSI_SAME_STMT); |
3310 | /* Replace the result with dest + len. */ |
3311 | stmts = NULL; |
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3315 | POINTER_PLUS_EXPR, dest, tem); |
3316 | gsi_replace (gsi, ret, false); |
3317 | /* Finally fold the memcpy call. */ |
3318 | gimple_stmt_iterator gsi2 = *gsi; |
  gsi_prev (&gsi2);
3320 | fold_stmt (&gsi2); |
3321 | return true; |
3322 | } |
3323 | |
/* Fold a call to the __{,v}snprintf_chk builtin.  Return true if the
   call was simplified in place, false if a normal call should be
   emitted instead.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK; the maximum value of the length argument is
   computed internally via get_maxval_strlen.  */
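
/* For example (illustrative), when N is provably no larger than the
   object size OS,
     __builtin___snprintf_chk (d, n, 0, os, "%s", s)
   folds to
     snprintf (d, n, "%s", s)
   dropping the flag and object-size arguments.  */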
3329 | |
3330 | static bool |
3331 | gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi, |
3332 | enum built_in_function fcode) |
3333 | { |
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3351 | return false; |
3352 | |
3353 | if (!init_target_chars ()) |
3354 | return false; |
3355 | |
3356 | /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0 |
3357 | or if format doesn't contain % chars or is "%s". */ |
3358 | if (! integer_zerop (flag)) |
3359 | { |
3360 | fmt_str = c_getstr (fmt); |
3361 | if (fmt_str == NULL) |
3362 | return false; |
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
3365 | return false; |
3366 | } |
3367 | |
3368 | /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is |
3369 | available. */ |
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3371 | ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF); |
3372 | if (!fn) |
3373 | return false; |
3374 | |
  /* Replace the called function and the first 5 arguments by 3,
     retaining the trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3385 | fold_stmt (gsi); |
3386 | return true; |
3387 | } |
3388 | |
/* Fold a call to the __{,v}sprintf_chk builtin.  Return true if the
   call was simplified in place, false if a normal call should be
   emitted instead.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */
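
/* Illustrative example: with a format free of % directives,
     __builtin___sprintf_chk (d, 0, os, "abc")
   folds to
     sprintf (d, "abc")
   provided the known output length 3 is below OS.  */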
3393 | |
3394 | static bool |
3395 | gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi, |
3396 | enum built_in_function fcode) |
3397 | { |
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);
3402 | |
3403 | /* Verify the required arguments in the original call. */ |
3404 | if (nargs < 4) |
3405 | return false; |
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);
3410 | |
3411 | len = NULL_TREE; |
3412 | |
3413 | if (!init_target_chars ()) |
3414 | return false; |
3415 | |
3416 | /* Check whether the format is a literal string constant. */ |
3417 | fmt_str = c_getstr (fmt); |
3418 | if (fmt_str != NULL) |
3419 | { |
3420 | /* If the format doesn't contain % args or %%, we know the size. */ |
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
3425 | } |
3426 | /* If the format is "%s" and first ... argument is a string literal, |
3427 | we know the size too. */ |
3428 | else if (fcode == BUILT_IN_SPRINTF_CHK |
	       && strcmp (fmt_str, target_percent_s) == 0)
3430 | { |
3431 | tree arg; |
3432 | |
3433 | if (nargs == 5) |
3434 | { |
	      arg = gimple_call_arg (stmt, 4);
3436 | if (POINTER_TYPE_P (TREE_TYPE (arg))) |
3437 | len = c_strlen (arg, 1); |
3438 | } |
3439 | } |
3440 | } |
3441 | |
  if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
3443 | return false; |
3444 | |
3445 | /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0 |
3446 | or if format doesn't contain % chars or is "%s". */ |
3447 | if (! integer_zerop (flag)) |
3448 | { |
3449 | if (fmt_str == NULL) |
3450 | return false; |
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
3453 | return false; |
3454 | } |
3455 | |
3456 | /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */ |
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3458 | ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF); |
3459 | if (!fn) |
3460 | return false; |
3461 | |
  /* Replace the called function and the first 4 arguments by 2,
     retaining the trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3471 | fold_stmt (gsi); |
3472 | return true; |
3473 | } |
3474 | |
3475 | /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG. |
3476 | ORIG may be null if this is a 2-argument call. We don't attempt to |
3477 | simplify calls with more than 3 arguments. |
3478 | |
3479 | Return true if simplification was possible, otherwise false. */ |
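
/* Two illustrative instances of the folds implemented below:
     sprintf (d, "hello")  -> strcpy (d, "hello"), result 5
     sprintf (d, "%s", s)  -> strcpy (d, s), result strlen (s)
   where the result value is only materialized when the original call
   has an lhs.  */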
3480 | |
3481 | bool |
3482 | gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi) |
3483 | { |
  gimple *stmt = gsi_stmt (*gsi);
3485 | |
3486 | /* Verify the required arguments in the original call. We deal with two |
3487 | types of sprintf() calls: 'sprintf (str, fmt)' and |
3488 | 'sprintf (dest, "%s", orig)'. */ |
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);
3495 | |
3496 | /* Check whether the format is a literal string constant. */ |
  tree fmt = gimple_call_arg (stmt, 1);
3498 | const char *fmt_str = c_getstr (fmt); |
3499 | if (fmt_str == NULL) |
3500 | return false; |
3501 | |
  tree dest = gimple_call_arg (stmt, 0);
3503 | |
3504 | if (!init_target_chars ()) |
3505 | return false; |
3506 | |
  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3508 | if (!fn) |
3509 | return false; |
3510 | |
3511 | /* If the format doesn't contain % args or %%, use strcpy. */ |
  if (strchr (fmt_str, target_percent) == NULL)
3513 | { |
3514 | /* Don't optimize sprintf (buf, "abc", ptr++). */ |
3515 | if (orig) |
3516 | return false; |
3517 | |
3518 | /* Convert sprintf (str, fmt) into strcpy (str, fmt) when |
3519 | 'format' is known to contain no % formats. */ |
3520 | gimple_seq stmts = NULL; |
3521 | gimple *repl = gimple_build_call (fn, 2, dest, fmt); |
3522 | |
3523 | /* Propagate the NO_WARNING bit to avoid issuing the same |
3524 | warning more than once. */ |
3525 | copy_warning (repl, stmt); |
3526 | |
3527 | gimple_seq_add_stmt_without_update (&stmts, repl); |
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
3540 | fold_stmt (&gsi2); |
3541 | } |
3542 | else |
3543 | { |
	  gsi_replace_with_seq_vops (gsi, stmts);
3545 | fold_stmt (gsi); |
3546 | } |
3547 | return true; |
3548 | } |
3549 | |
  /* If the format is "%s", use strcpy, provided the result is unused
     or the length of the source string is known.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3552 | { |
3553 | /* Don't crash on sprintf (str1, "%s"). */ |
3554 | if (!orig) |
3555 | return false; |
3556 | |
3557 | /* Don't fold calls with source arguments of invalid (nonpointer) |
3558 | types. */ |
3559 | if (!POINTER_TYPE_P (TREE_TYPE (orig))) |
3560 | return false; |
3561 | |
3562 | tree orig_len = NULL_TREE; |
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3566 | if (!orig_len) |
3567 | return false; |
3568 | } |
3569 | |
3570 | /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */ |
3571 | gimple_seq stmts = NULL; |
3572 | gimple *repl = gimple_build_call (fn, 2, dest, orig); |
3573 | |
3574 | /* Propagate the NO_WARNING bit to avoid issuing the same |
3575 | warning more than once. */ |
3576 | copy_warning (repl, stmt); |
3577 | |
3578 | gimple_seq_add_stmt_without_update (&stmts, repl); |
      if (tree lhs = gimple_call_lhs (stmt))
3580 | { |
3581 | if (!useless_type_conversion_p (TREE_TYPE (lhs), |
3582 | TREE_TYPE (orig_len))) |
3583 | orig_len = fold_convert (TREE_TYPE (lhs), orig_len); |
3584 | repl = gimple_build_assign (lhs, orig_len); |
3585 | gimple_seq_add_stmt_without_update (&stmts, repl); |
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
3593 | fold_stmt (&gsi2); |
3594 | } |
3595 | else |
3596 | { |
	  gsi_replace_with_seq_vops (gsi, stmts);
3598 | fold_stmt (gsi); |
3599 | } |
3600 | return true; |
3601 | } |
3602 | return false; |
3603 | } |
3604 | |
3605 | /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE, |
3606 | FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't |
3607 | attempt to simplify calls with more than 4 arguments. |
3608 | |
3609 | Return true if simplification was possible, otherwise false. */ |
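
/* For instance (illustrative), with a constant destination size,
     snprintf (d, 8, "abc")   -> strcpy (d, "abc")
     snprintf (d, 8, "%s", s) -> strcpy (d, s)
   the latter only when strlen (s) is a known constant below 8.  */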
3610 | |
3611 | bool |
3612 | gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi) |
3613 | { |
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);
3626 | |
3627 | /* Check whether the format is a literal string constant. */ |
3628 | fmt_str = c_getstr (fmt); |
3629 | if (fmt_str == NULL) |
3630 | return false; |
3631 | |
3632 | if (!init_target_chars ()) |
3633 | return false; |
3634 | |
3635 | /* If the format doesn't contain % args or %%, use strcpy. */ |
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3639 | if (!fn) |
3640 | return false; |
3641 | |
3642 | /* Don't optimize snprintf (buf, 4, "abc", ptr++). */ |
3643 | if (orig) |
3644 | return false; |
3645 | |
      tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));
3647 | |
3648 | /* We could expand this as |
3649 | memcpy (str, fmt, cst - 1); str[cst - 1] = '\0'; |
3650 | or to |
3651 | memcpy (str, fmt_with_nul_at_cstm1, cst); |
3652 | but in the former case that might increase code size |
3653 | and in the latter case grow .rodata section too much. |
3654 | So punt for now. */ |
      if (!known_lower (stmt, len, destsize, true))
3656 | return false; |
3657 | |
3658 | gimple_seq stmts = NULL; |
3659 | gimple *repl = gimple_build_call (fn, 2, dest, fmt); |
3660 | gimple_seq_add_stmt_without_update (&stmts, repl); |
      if (tree lhs = gimple_call_lhs (stmt))
3662 | { |
3663 | repl = gimple_build_assign (lhs, |
3664 | fold_convert (TREE_TYPE (lhs), len)); |
3665 | gimple_seq_add_stmt_without_update (&stmts, repl); |
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
3673 | fold_stmt (&gsi2); |
3674 | } |
3675 | else |
3676 | { |
	  gsi_replace_with_seq_vops (gsi, stmts);
3678 | fold_stmt (gsi); |
3679 | } |
3680 | return true; |
3681 | } |
3682 | |
  /* If the format is "%s", use strcpy, provided the result is unused
     or the length of the source string is known and below DESTSIZE.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3687 | if (!fn) |
3688 | return false; |
3689 | |
3690 | /* Don't crash on snprintf (str1, cst, "%s"). */ |
3691 | if (!orig) |
3692 | return false; |
3693 | |
      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3695 | |
3696 | /* We could expand this as |
3697 | memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0'; |
3698 | or to |
3699 | memcpy (str1, str2_with_nul_at_cstm1, cst); |
3700 | but in the former case that might increase code size |
3701 | and in the latter case grow .rodata section too much. |
3702 | So punt for now. */ |
      if (!known_lower (stmt, orig_len, destsize, true))
3704 | return false; |
3705 | |
3706 | /* Convert snprintf (str1, cst, "%s", str2) into |
3707 | strcpy (str1, str2) if strlen (str2) < cst. */ |
3708 | gimple_seq stmts = NULL; |
3709 | gimple *repl = gimple_build_call (fn, 2, dest, orig); |
3710 | gimple_seq_add_stmt_without_update (&stmts, repl); |
      if (tree lhs = gimple_call_lhs (stmt))
3712 | { |
3713 | if (!useless_type_conversion_p (TREE_TYPE (lhs), |
3714 | TREE_TYPE (orig_len))) |
3715 | orig_len = fold_convert (TREE_TYPE (lhs), orig_len); |
3716 | repl = gimple_build_assign (lhs, orig_len); |
3717 | gimple_seq_add_stmt_without_update (&stmts, repl); |
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
3725 | fold_stmt (&gsi2); |
3726 | } |
3727 | else |
3728 | { |
	  gsi_replace_with_seq_vops (gsi, stmts);
3730 | fold_stmt (gsi); |
3731 | } |
3732 | return true; |
3733 | } |
3734 | return false; |
3735 | } |
3736 | |
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call was simplified, false otherwise.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */
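
/* Illustrative examples of the folds below:
     fprintf (f, "abc")   -> fputs ("abc", f)
     fprintf (f, "%s", s) -> fputs (s, f)
     fprintf (f, "%c", c) -> fputc (c, f)
   all applied only when the fprintf return value is unused.  */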
3744 | |
3745 | static bool |
3746 | gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi, |
3747 | tree fp, tree fmt, tree arg, |
3748 | enum built_in_function fcode) |
3749 | { |
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3751 | tree fn_fputc, fn_fputs; |
3752 | const char *fmt_str = NULL; |
3753 | |
3754 | /* If the return value is used, don't do the transformation. */ |
  if (gimple_call_lhs (stmt) != NULL_TREE)
3756 | return false; |
3757 | |
3758 | /* Check whether the format is a literal string constant. */ |
3759 | fmt_str = c_getstr (fmt); |
3760 | if (fmt_str == NULL) |
3761 | return false; |
3762 | |
3763 | if (fcode == BUILT_IN_FPRINTF_UNLOCKED) |
3764 | { |
3765 | /* If we're using an unlocked function, assume the other |
3766 | unlocked functions exist explicitly. */ |
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3769 | } |
3770 | else |
3771 | { |
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3774 | } |
3775 | |
3776 | if (!init_target_chars ()) |
3777 | return false; |
3778 | |
  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
3781 | { |
3782 | if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK |
3783 | && arg) |
3784 | return false; |
3785 | |
3786 | /* If the format specifier was "", fprintf does nothing. */ |
3787 | if (fmt_str[0] == '\0') |
3788 | { |
3789 | replace_call_with_value (gsi, NULL_TREE); |
3790 | return true; |
3791 | } |
3792 | |
3793 | /* When "string" doesn't contain %, replace all cases of |
3794 | fprintf (fp, string) with fputs (string, fp). The fputs |
3795 | builtin will take care of special cases like length == 1. */ |
3796 | if (fn_fputs) |
3797 | { |
3798 | gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp); |
3799 | replace_call_with_call_and_fold (gsi, repl); |
3800 | return true; |
3801 | } |
3802 | } |
3803 | |
3804 | /* The other optimizations can be done only on the non-va_list variants. */ |
3805 | else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK) |
3806 | return false; |
3807 | |
3808 | /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */ |
  else if (strcmp (fmt_str, target_percent_s) == 0)
3810 | { |
3811 | if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg))) |
3812 | return false; |
3813 | if (fn_fputs) |
3814 | { |
3815 | gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp); |
3816 | replace_call_with_call_and_fold (gsi, repl); |
3817 | return true; |
3818 | } |
3819 | } |
3820 | |
3821 | /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */ |
  else if (strcmp (fmt_str, target_percent_c) == 0)
3823 | { |
3824 | if (!arg |
3825 | || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg))) |
3826 | return false; |
3827 | if (fn_fputc) |
3828 | { |
3829 | gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp); |
3830 | replace_call_with_call_and_fold (gsi, repl); |
3831 | return true; |
3832 | } |
3833 | } |
3834 | |
3835 | return false; |
3836 | } |
3837 | |
3838 | /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins. |
3839 | FMT and ARG are the arguments to the call; we don't fold cases with |
3840 | more than 2 arguments, and ARG may be null if this is a 1-argument case. |
3841 | |
   Return true if the call was simplified, false otherwise.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */
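
/* Illustrative examples of the folds below:
     printf ("hello\n") -> puts ("hello")
     printf ("x")       -> putchar ('x')
     printf ("%s\n", s) -> puts (s)
     printf ("%c", c)   -> putchar (c)
   again only when the printf return value is unused.  */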
3845 | |
3846 | static bool |
3847 | gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt, |
3848 | tree arg, enum built_in_function fcode) |
3849 | { |
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3851 | tree fn_putchar, fn_puts, newarg; |
3852 | const char *fmt_str = NULL; |
3853 | |
3854 | /* If the return value is used, don't do the transformation. */ |
  if (gimple_call_lhs (stmt) != NULL_TREE)
3856 | return false; |
3857 | |
3858 | /* Check whether the format is a literal string constant. */ |
3859 | fmt_str = c_getstr (fmt); |
3860 | if (fmt_str == NULL) |
3861 | return false; |
3862 | |
3863 | if (fcode == BUILT_IN_PRINTF_UNLOCKED) |
3864 | { |
3865 | /* If we're using an unlocked function, assume the other |
3866 | unlocked functions exist explicitly. */ |
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3869 | } |
3870 | else |
3871 | { |
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3874 | } |
3875 | |
3876 | if (!init_target_chars ()) |
3877 | return false; |
3878 | |
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
3881 | { |
3882 | const char *str; |
3883 | |
      if (strcmp (fmt_str, target_percent_s) == 0)
3885 | { |
3886 | if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK) |
3887 | return false; |
3888 | |
3889 | if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg))) |
3890 | return false; |
3891 | |
3892 | str = c_getstr (arg); |
3893 | if (str == NULL) |
3894 | return false; |
3895 | } |
3896 | else |
3897 | { |
3898 | /* The format specifier doesn't contain any '%' characters. */ |
3899 | if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK |
3900 | && arg) |
3901 | return false; |
3902 | str = fmt_str; |
3903 | } |
3904 | |
3905 | /* If the string was "", printf does nothing. */ |
3906 | if (str[0] == '\0') |
3907 | { |
3908 | replace_call_with_value (gsi, NULL_TREE); |
3909 | return true; |
3910 | } |
3911 | |
3912 | /* If the string has length of 1, call putchar. */ |
3913 | if (str[1] == '\0') |
3914 | { |
	  /* Given printf ("c"), where c is any single character,
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
3918 | newarg = build_int_cst (integer_type_node, str[0]); |
3919 | if (fn_putchar) |
3920 | { |
3921 | gcall *repl = gimple_build_call (fn_putchar, 1, newarg); |
3922 | replace_call_with_call_and_fold (gsi, repl); |
3923 | return true; |
3924 | } |
3925 | } |
3926 | else |
3927 | { |
3928 | /* If the string was "string\n", call puts("string"). */ |
	  size_t len = strlen (str);
3930 | if ((unsigned char)str[len - 1] == target_newline |
3931 | && (size_t) (int) len == len |
3932 | && (int) len > 0) |
3933 | { |
3934 | char *newstr; |
3935 | |
3936 | /* Create a NUL-terminated string that's one char shorter |
3937 | than the original, stripping off the trailing '\n'. */ |
3938 | newstr = xstrdup (str); |
3939 | newstr[len - 1] = '\0'; |
3940 | newarg = build_string_literal (len, newstr); |
	      free (newstr);
3942 | if (fn_puts) |
3943 | { |
3944 | gcall *repl = gimple_build_call (fn_puts, 1, newarg); |
3945 | replace_call_with_call_and_fold (gsi, repl); |
3946 | return true; |
3947 | } |
3948 | } |
3949 | else |
3950 | /* We'd like to arrange to call fputs(string,stdout) here, |
3951 | but we need stdout and don't have a way to get it yet. */ |
3952 | return false; |
3953 | } |
3954 | } |
3955 | |
3956 | /* The other optimizations can be done only on the non-va_list variants. */ |
3957 | else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK) |
3958 | return false; |
3959 | |
3960 | /* If the format specifier was "%s\n", call __builtin_puts(arg). */ |
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3962 | { |
3963 | if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg))) |
3964 | return false; |
3965 | if (fn_puts) |
3966 | { |
3967 | gcall *repl = gimple_build_call (fn_puts, 1, arg); |
3968 | replace_call_with_call_and_fold (gsi, repl); |
3969 | return true; |
3970 | } |
3971 | } |
3972 | |
3973 | /* If the format specifier was "%c", call __builtin_putchar(arg). */ |
  else if (strcmp (fmt_str, target_percent_c) == 0)
3975 | { |
3976 | if (!arg || ! useless_type_conversion_p (integer_type_node, |
3977 | TREE_TYPE (arg))) |
3978 | return false; |
3979 | if (fn_putchar) |
3980 | { |
3981 | gcall *repl = gimple_build_call (fn_putchar, 1, arg); |
3982 | replace_call_with_call_and_fold (gsi, repl); |
3983 | return true; |
3984 | } |
3985 | } |
3986 | |
3987 | return false; |
3988 | } |
3989 | |
3990 | |
3991 | |
/* Fold a call to __builtin_strlen when the length of the argument can
   be computed or at least bounded.  */
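
/* For example, strlen of a string known to be "abcd" folds to the
   constant 4, while strlen of an unknown string stored in a char[16]
   array is not folded but has its result range narrowed to [0, 15]
   (the array bound 16 here is purely illustrative).  */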
3993 | |
3994 | static bool |
3995 | gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi) |
3996 | { |
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
3999 | |
4000 | wide_int minlen; |
4001 | wide_int maxlen; |
4002 | |
4003 | c_strlen_data lendata = { }; |
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4005 | && !lendata.decl |
4006 | && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST |
4007 | && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST) |
4008 | { |
4009 | /* The range of lengths refers to either a single constant |
4010 | string or to the longest and shortest constant string |
4011 | referenced by the argument of the strlen() call, or to |
4012 | the strings that can possibly be stored in the arrays |
4013 | the argument refers to. */ |
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
4016 | } |
4017 | else |
4018 | { |
4019 | unsigned prec = TYPE_PRECISION (sizetype); |
4020 | |
      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
4023 | } |
4024 | |
4025 | /* For -fsanitize=address, don't optimize the upper bound of the |
4026 | length to be able to diagnose UB on non-zero terminated arrays. */ |
  if (sanitize_flags_p (SANITIZE_ADDRESS))
4028 | maxlen = wi::max_value (TYPE_PRECISION (sizetype), UNSIGNED); |
4029 | |
4030 | if (minlen == maxlen) |
4031 | { |
4032 | /* Fold the strlen call to a constant. */ |
4033 | tree type = TREE_TYPE (lendata.minlen); |
4034 | tree len = force_gimple_operand_gsi (gsi, |
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
4038 | return true; |
4039 | } |
4040 | |
4041 | /* Set the strlen() range to [0, MAXLEN]. */ |
  if (tree lhs = gimple_call_lhs (stmt))
4043 | set_strlen_range (lhs, minlen, maxlen); |
4044 | |
4045 | return false; |
4046 | } |
4047 | |
4048 | /* Fold a call to __builtin_acc_on_device. */ |
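
/* In the host compiler this lowers acc_on_device (ARG0) to the
   boolean expression
     (arg0 == GOMP_DEVICE_HOST) | (arg0 == GOMP_DEVICE_NONE)
   while an accelerator compiler substitutes its own device value via
   ACCEL_COMPILER_acc_device below.  */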
4049 | |
4050 | static bool |
4051 | gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0) |
4052 | { |
4053 | /* Defer folding until we know which compiler we're in. */ |
4054 | if (symtab->state != EXPANSION) |
4055 | return false; |
4056 | |
4057 | unsigned val_host = GOMP_DEVICE_HOST; |
4058 | unsigned val_dev = GOMP_DEVICE_NONE; |
4059 | |
4060 | #ifdef ACCEL_COMPILER |
4061 | val_host = GOMP_DEVICE_NOT_HOST; |
4062 | val_dev = ACCEL_COMPILER_acc_device; |
4063 | #endif |
4064 | |
  location_t loc = gimple_location (gsi_stmt (*gsi));
4066 | |
4067 | tree host_eq = make_ssa_name (boolean_type_node); |
4068 | gimple *host_ass = gimple_build_assign |
4069 | (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host)); |
  gimple_set_location (host_ass, loc);
4071 | gsi_insert_before (gsi, host_ass, GSI_SAME_STMT); |
4072 | |
4073 | tree dev_eq = make_ssa_name (boolean_type_node); |
4074 | gimple *dev_ass = gimple_build_assign |
4075 | (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev)); |
  gimple_set_location (dev_ass, loc);
4077 | gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT); |
4078 | |
4079 | tree result = make_ssa_name (boolean_type_node); |
4080 | gimple *result_ass = gimple_build_assign |
4081 | (result, BIT_IOR_EXPR, host_eq, dev_eq); |
  gimple_set_location (result_ass, loc);
4083 | gsi_insert_before (gsi, result_ass, GSI_SAME_STMT); |
4084 | |
  replace_call_with_value (gsi, result);
4086 | |
4087 | return true; |
4088 | } |
4089 | |
4090 | /* Fold realloc (0, n) -> malloc (n). */ |
4091 | |
4092 | static bool |
4093 | gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi) |
4094 | { |
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);
4098 | |
  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4102 | if (fn_malloc) |
4103 | { |
4104 | gcall *repl = gimple_build_call (fn_malloc, 1, size); |
4105 | replace_call_with_call_and_fold (gsi, repl); |
4106 | return true; |
4107 | } |
4108 | } |
4109 | return false; |
4110 | } |
4111 | |
4112 | /* Number of bytes into which any type but aggregate, vector or |
4113 | _BitInt types should fit. */ |
4114 | static constexpr size_t clear_padding_unit |
4115 | = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT; |
4116 | /* Buffer size on which __builtin_clear_padding folding code works. */ |
4117 | static const size_t clear_padding_buf_size = 32 * clear_padding_unit; |
4118 | |
4119 | /* Data passed through __builtin_clear_padding folding. */ |
4120 | struct clear_padding_struct { |
4121 | location_t loc; |
  /* False during __builtin_clear_padding folding, true during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in the union_ptr array, clear the padding bits in
     there.  */
4125 | bool clear_in_mask; |
4126 | tree base; |
4127 | tree alias_type; |
4128 | gimple_stmt_iterator *gsi; |
4129 | /* Alignment of buf->base + 0. */ |
4130 | unsigned align; |
4131 | /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */ |
4132 | HOST_WIDE_INT off; |
4133 | /* Number of padding bytes before buf->off that don't have padding clear |
4134 | code emitted yet. */ |
4135 | HOST_WIDE_INT padding_bytes; |
4136 | /* The size of the whole object. Never emit code to touch |
4137 | buf->base + buf->sz or following bytes. */ |
4138 | HOST_WIDE_INT sz; |
4139 | /* Number of bytes recorded in buf->buf. */ |
4140 | size_t size; |
  /* When inside a union, instead of emitting code we AND the bits
     into the union_ptr array.  */
4143 | unsigned char *union_ptr; |
4144 | /* Set bits mean padding bits that need to be cleared by the builtin. */ |
4145 | unsigned char buf[clear_padding_buf_size + clear_padding_unit]; |
4146 | }; |
4147 | |
4148 | /* Emit code to clear padding requested in BUF->buf - set bits |
4149 | in there stand for padding that should be cleared. FULL is true |
4150 | if everything from the buffer should be flushed, otherwise |
4151 | it can leave up to 2 * clear_padding_unit bytes for further |
4152 | processing. */ |
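
/* As a small illustrative example, for
     struct S { char c; int i; };
   bytes 1 through 3 are padding, so buf->buf would hold
     00 ff ff ff 00 00 00 00
   and flushing emits a single store of a 3-byte zero array at offset 1
   (or, with union_ptr set, adjusts the mask bytes instead).  */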
4153 | |
4154 | static void |
4155 | clear_padding_flush (clear_padding_struct *buf, bool full) |
4156 | { |
4157 | gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0); |
4158 | if (!full && buf->size < 2 * clear_padding_unit) |
4159 | return; |
4160 | gcc_assert ((buf->off % UNITS_PER_WORD) == 0); |
4161 | size_t end = buf->size; |
4162 | if (!full) |
4163 | end = ((end - clear_padding_unit - 1) / clear_padding_unit |
4164 | * clear_padding_unit); |
4165 | size_t padding_bytes = buf->padding_bytes; |
4166 | if (buf->union_ptr) |
4167 | { |
4168 | if (buf->clear_in_mask) |
4169 | { |
4170 | /* During clear_type_padding_in_mask, clear the padding |
4171 | bits set in buf->buf in the buf->union_ptr mask. */ |
4172 | for (size_t i = 0; i < end; i++) |
4173 | { |
4174 | if (buf->buf[i] == (unsigned char) ~0) |
4175 | padding_bytes++; |
4176 | else |
4177 | { |
	      memset (&buf->union_ptr[buf->off + i - padding_bytes],
		      0, padding_bytes);
4180 | padding_bytes = 0; |
4181 | buf->union_ptr[buf->off + i] &= ~buf->buf[i]; |
4182 | } |
4183 | } |
4184 | if (full) |
4185 | { |
	  memset (&buf->union_ptr[buf->off + end - padding_bytes],
		  0, padding_bytes);
4188 | buf->off = 0; |
4189 | buf->size = 0; |
4190 | buf->padding_bytes = 0; |
4191 | } |
4192 | else |
4193 | { |
	  memmove (buf->buf, buf->buf + end, buf->size - end);
4195 | buf->off += end; |
4196 | buf->size -= end; |
4197 | buf->padding_bytes = padding_bytes; |
4198 | } |
4199 | return; |
4200 | } |
      /* Inside of a union, instead of emitting any code, clear all
	 bits in the union_ptr buffer that are clear in buf.
	 Whole padding bytes don't clear anything.  */
4204 | for (size_t i = 0; i < end; i++) |
4205 | { |
4206 | if (buf->buf[i] == (unsigned char) ~0) |
4207 | padding_bytes++; |
4208 | else |
4209 | { |
4210 | padding_bytes = 0; |
4211 | buf->union_ptr[buf->off + i] &= buf->buf[i]; |
4212 | } |
4213 | } |
4214 | if (full) |
4215 | { |
4216 | buf->off = 0; |
4217 | buf->size = 0; |
4218 | buf->padding_bytes = 0; |
4219 | } |
4220 | else |
4221 | { |
      memmove (buf->buf, buf->buf + end, buf->size - end);
4223 | buf->off += end; |
4224 | buf->size -= end; |
4225 | buf->padding_bytes = padding_bytes; |
4226 | } |
4227 | return; |
4228 | } |
4229 | size_t wordsize = UNITS_PER_WORD; |
4230 | for (size_t i = 0; i < end; i += wordsize) |
4231 | { |
4232 | size_t nonzero_first = wordsize; |
4233 | size_t nonzero_last = 0; |
4234 | size_t zero_first = wordsize; |
4235 | size_t zero_last = 0; |
4236 | bool all_ones = true, bytes_only = true; |
4237 | if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize) |
4238 | > (unsigned HOST_WIDE_INT) buf->sz) |
4239 | { |
4240 | gcc_assert (wordsize > 1); |
4241 | wordsize /= 2; |
4242 | i -= wordsize; |
4243 | continue; |
4244 | } |
4245 | for (size_t j = i; j < i + wordsize && j < end; j++) |
4246 | { |
4247 | if (buf->buf[j]) |
4248 | { |
4249 | if (nonzero_first == wordsize) |
4250 | { |
4251 | nonzero_first = j - i; |
4252 | nonzero_last = j - i; |
4253 | } |
4254 | if (nonzero_last != j - i) |
4255 | all_ones = false; |
4256 | nonzero_last = j + 1 - i; |
4257 | } |
4258 | else |
4259 | { |
4260 | if (zero_first == wordsize) |
4261 | zero_first = j - i; |
4262 | zero_last = j + 1 - i; |
4263 | } |
4264 | if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0) |
4265 | { |
4266 | all_ones = false; |
4267 | bytes_only = false; |
4268 | } |
4269 | } |
4270 | size_t padding_end = i; |
4271 | if (padding_bytes) |
4272 | { |
4273 | if (nonzero_first == 0 |
4274 | && nonzero_last == wordsize |
4275 | && all_ones) |
4276 | { |
4277 | /* All bits are padding and we had some padding |
4278 | before too. Just extend it. */ |
4279 | padding_bytes += wordsize; |
4280 | continue; |
4281 | } |
4282 | if (all_ones && nonzero_first == 0) |
4283 | { |
4284 | padding_bytes += nonzero_last; |
4285 | padding_end += nonzero_last; |
4286 | nonzero_first = wordsize; |
4287 | nonzero_last = 0; |
4288 | } |
4289 | else if (bytes_only && nonzero_first == 0) |
4290 | { |
4291 | gcc_assert (zero_first && zero_first != wordsize); |
4292 | padding_bytes += zero_first; |
4293 | padding_end += zero_first; |
4294 | } |
4295 | tree atype, src; |
4296 | if (padding_bytes == 1) |
4297 | { |
4298 | atype = char_type_node; |
4299 | src = build_zero_cst (char_type_node); |
4300 | } |
4301 | else |
4302 | { |
4303 | atype = build_array_type_nelts (char_type_node, padding_bytes); |
4304 | src = build_constructor (atype, NULL); |
4305 | } |
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
4312 | gsi_insert_before (buf->gsi, g, GSI_SAME_STMT); |
4313 | padding_bytes = 0; |
4314 | buf->padding_bytes = 0; |
4315 | } |
4316 | if (nonzero_first == wordsize) |
4317 | /* All bits in a word are 0, there are no padding bits. */ |
4318 | continue; |
4319 | if (all_ones && nonzero_last == wordsize) |
4320 | { |
4321 | /* All bits between nonzero_first and end of word are padding |
4322 | bits, start counting padding_bytes. */ |
4323 | padding_bytes = nonzero_last - nonzero_first; |
4324 | continue; |
4325 | } |
4326 | if (bytes_only) |
4327 | { |
4328 | /* If bitfields aren't involved in this word, prefer storing |
4329 | individual bytes or groups of them over performing a RMW |
4330 | operation on the whole word. */ |
4331 | gcc_assert (i + zero_last <= end); |
4332 | for (size_t j = padding_end; j < i + zero_last; j++) |
4333 | { |
4334 | if (buf->buf[j]) |
4335 | { |
4336 | size_t k; |
4337 | for (k = j; k < i + zero_last; k++) |
4338 | if (buf->buf[k] == 0) |
4339 | break; |
4340 | HOST_WIDE_INT off = buf->off + j; |
4341 | tree atype, src; |
4342 | if (k - j == 1) |
4343 | { |
4344 | atype = char_type_node; |
4345 | src = build_zero_cst (char_type_node); |
4346 | } |
4347 | else |
4348 | { |
4349 | atype = build_array_type_nelts (char_type_node, k - j); |
4350 | src = build_constructor (atype, NULL); |
4351 | } |
		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
					 buf->base,
					 build_int_cst (buf->alias_type, off));
		  gimple *g = gimple_build_assign (dst, src);
		  gimple_set_location (g, buf->loc);
4357 | gsi_insert_before (buf->gsi, g, GSI_SAME_STMT); |
4358 | j = k; |
4359 | } |
4360 | } |
4361 | if (nonzero_last == wordsize) |
4362 | padding_bytes = nonzero_last - zero_last; |
4363 | continue; |
4364 | } |
4365 | for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1) |
4366 | { |
4367 | if (nonzero_last - nonzero_first <= eltsz |
4368 | && ((nonzero_first & ~(eltsz - 1)) |
4369 | == ((nonzero_last - 1) & ~(eltsz - 1)))) |
4370 | { |
4371 | tree type; |
4372 | if (eltsz == 1) |
4373 | type = char_type_node; |
4374 | else |
4375 | type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT, |
4376 | 0); |
4377 | size_t start = nonzero_first & ~(eltsz - 1); |
4378 | HOST_WIDE_INT off = buf->off + i + start; |
4379 | tree atype = type; |
4380 | if (eltsz > 1 && buf->align < TYPE_ALIGN (type)) |
4381 | atype = build_aligned_type (type, buf->align); |
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type, off));
4384 | tree src; |
4385 | gimple *g; |
4386 | if (all_ones |
4387 | && nonzero_first == start |
4388 | && nonzero_last == start + eltsz) |
4389 | src = build_zero_cst (type); |
4390 | else |
4391 | { |
	      src = make_ssa_name (type);
4393 | tree tmp_dst = unshare_expr (dst); |
	      /* The folding introduces a read from tmp_dst; keep the
		 uninitialized-warning analysis from warning about this
		 fake read.  To suppress the warning only for this expr,
		 set the location of tmp_dst to UNKNOWN_LOCATION first;
		 suppress_warning will then call set_no_warning_bit to
		 set the no_warning flag only for tmp_dst.  */
4401 | SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION); |
4402 | suppress_warning (tmp_dst, OPT_Wuninitialized); |
4403 | g = gimple_build_assign (src, tmp_dst); |
	      gimple_set_location (g, buf->loc);
4405 | gsi_insert_before (buf->gsi, g, GSI_SAME_STMT); |
4406 | tree mask = native_interpret_expr (type, |
4407 | buf->buf + i + start, |
4408 | eltsz); |
4409 | gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST); |
4410 | mask = fold_build1 (BIT_NOT_EXPR, type, mask); |
	      tree src_masked = make_ssa_name (type);
	      g = gimple_build_assign (src_masked, BIT_AND_EXPR,
				       src, mask);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      src = src_masked;
	    }
	  g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
4420 | gsi_insert_before (buf->gsi, g, GSI_SAME_STMT); |
4421 | break; |
4422 | } |
4423 | } |
4424 | } |
4425 | if (full) |
4426 | { |
4427 | if (padding_bytes) |
4428 | { |
4429 | tree atype, src; |
4430 | if (padding_bytes == 1) |
4431 | { |
4432 | atype = char_type_node; |
4433 | src = build_zero_cst (char_type_node); |
4434 | } |
4435 | else |
4436 | { |
4437 | atype = build_array_type_nelts (char_type_node, padding_bytes); |
4438 | src = build_constructor (atype, NULL); |
4439 | } |
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
4446 | gsi_insert_before (buf->gsi, g, GSI_SAME_STMT); |
4447 | } |
4448 | size_t end_rem = end % UNITS_PER_WORD; |
4449 | buf->off += end - end_rem; |
4450 | buf->size = end_rem; |
      memset (buf->buf, 0, buf->size);
4452 | buf->padding_bytes = 0; |
4453 | } |
4454 | else |
4455 | { |
      memmove (buf->buf, buf->buf + end, buf->size - end);
4457 | buf->off += end; |
4458 | buf->size -= end; |
4459 | buf->padding_bytes = padding_bytes; |
4460 | } |
4461 | } |
4462 | |
4463 | /* Append PADDING_BYTES padding bytes. */ |
4464 | |
4465 | static void |
4466 | clear_padding_add_padding (clear_padding_struct *buf, |
4467 | HOST_WIDE_INT padding_bytes) |
4468 | { |
4469 | if (padding_bytes == 0) |
4470 | return; |
4471 | if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size |
4472 | > (unsigned HOST_WIDE_INT) clear_padding_buf_size) |
    clear_padding_flush (buf, false);
4474 | if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size |
4475 | > (unsigned HOST_WIDE_INT) clear_padding_buf_size) |
4476 | { |
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
4481 | gcc_assert (buf->padding_bytes); |
4482 | /* At this point buf->buf[0] through buf->buf[buf->size - 1] |
4483 | is guaranteed to be all ones. */ |
4484 | padding_bytes += buf->size; |
4485 | buf->size = padding_bytes % UNITS_PER_WORD; |
      memset (buf->buf, ~0, buf->size);
4487 | buf->off += padding_bytes - buf->size; |
4488 | buf->padding_bytes += padding_bytes - buf->size; |
4489 | } |
4490 | else |
4491 | { |
      memset (buf->buf + buf->size, ~0, padding_bytes);
4493 | buf->size += padding_bytes; |
4494 | } |
4495 | } |
4496 | |
4497 | static void clear_padding_type (clear_padding_struct *, tree, |
4498 | HOST_WIDE_INT, bool); |
4499 | |
4500 | /* Clear padding bits of union type TYPE. */ |
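
/* A bit counts as padding here only if it is padding in every member
   of the union.  E.g. for
     union U { char c; int i; };
   bytes 1-3 are padding for the char member but data for the int
   member, so their intersection is empty and nothing is cleared.  */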
4501 | |
4502 | static void |
4503 | clear_padding_union (clear_padding_struct *buf, tree type, |
4504 | HOST_WIDE_INT sz, bool for_auto_init) |
4505 | { |
4506 | clear_padding_struct *union_buf; |
4507 | HOST_WIDE_INT start_off = 0, next_off = 0; |
4508 | size_t start_size = 0; |
4509 | if (buf->union_ptr) |
4510 | { |
4511 | start_off = buf->off + buf->size; |
4512 | next_off = start_off + sz; |
4513 | start_size = start_off % UNITS_PER_WORD; |
4514 | start_off -= start_size; |
      clear_padding_flush (buf, true);
4516 | union_buf = buf; |
4517 | } |
4518 | else |
4519 | { |
4520 | if (sz + buf->size > clear_padding_buf_size) |
	clear_padding_flush (buf, false);
4522 | union_buf = XALLOCA (clear_padding_struct); |
4523 | union_buf->loc = buf->loc; |
4524 | union_buf->clear_in_mask = buf->clear_in_mask; |
4525 | union_buf->base = NULL_TREE; |
4526 | union_buf->alias_type = NULL_TREE; |
4527 | union_buf->gsi = NULL; |
4528 | union_buf->align = 0; |
4529 | union_buf->off = 0; |
4530 | union_buf->padding_bytes = 0; |
4531 | union_buf->sz = sz; |
4532 | union_buf->size = 0; |
4533 | if (sz + buf->size <= clear_padding_buf_size) |
4534 | union_buf->union_ptr = buf->buf + buf->size; |
4535 | else |
4536 | union_buf->union_ptr = XNEWVEC (unsigned char, sz); |
      memset (union_buf->union_ptr, ~0, sz);
4538 | } |
4539 | |
4540 | for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
4541 | if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field)) |
4542 | { |
4543 | if (DECL_SIZE_UNIT (field) == NULL_TREE) |
4544 | { |
4545 | if (TREE_TYPE (field) == error_mark_node) |
4546 | continue; |
4547 | gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE |
4548 | && !COMPLETE_TYPE_P (TREE_TYPE (field))); |
4549 | if (!buf->clear_in_mask && !for_auto_init) |
	    error_at (buf->loc, "flexible array member %qD does not have "
		      "well defined padding bits for %qs",
		      field, "__builtin_clear_padding");
4553 | continue; |
4554 | } |
4555 | HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field)); |
4556 | gcc_assert (union_buf->size == 0); |
4557 | union_buf->off = start_off; |
4558 | union_buf->size = start_size; |
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
4563 | } |
4564 | |
4565 | if (buf == union_buf) |
4566 | { |
4567 | buf->off = next_off; |
4568 | buf->size = next_off % UNITS_PER_WORD; |
4569 | buf->off -= buf->size; |
      memset (buf->buf, ~0, buf->size);
4571 | } |
4572 | else if (sz + buf->size <= clear_padding_buf_size) |
4573 | buf->size += sz; |
4574 | else |
4575 | { |
4576 | unsigned char *union_ptr = union_buf->union_ptr; |
4577 | while (sz) |
4578 | { |
	  clear_padding_flush (buf, false);
4580 | HOST_WIDE_INT this_sz |
4581 | = MIN ((unsigned HOST_WIDE_INT) sz, |
4582 | clear_padding_buf_size - buf->size); |
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
4584 | buf->size += this_sz; |
4585 | union_ptr += this_sz; |
4586 | sz -= this_sz; |
4587 | } |
4588 | XDELETE (union_buf->union_ptr); |
4589 | } |
4590 | } |
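
/* For example (illustrative), in
     union U { char c; int i; };
   byte 0 is data for both members, while bytes 1..3 are data for I but
   merely trail C.  Each member is scanned into the union-local buffer
   with its tail bytes marked ~0, and flushing intersects the per-member
   results, so only bytes that are padding for every member survive as
   padding (here: none).  */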

/* The only known floating point formats with padding bits are the
   IEEE extended ones.  */

static bool
clear_padding_real_needs_padding_p (tree type)
{
  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
  return (fmt->b == 2
	  && fmt->signbit_ro == fmt->signbit_rw
	  && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
}
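
/* E.g., the x86 80-bit "long double" is stored in 12 or 16 bytes, so
   everything above the sign bit at position 79 is padding, and the m68k
   96-bit extended format (sign bit at position 95) carries 16 internal
   padding bits; IEEE single and double have no storage padding and
   answer false here.  */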

/* A _BitInt type has padding bits if it is not extended in the ABI and
   its precision is smaller than the number of bits in a limb (or in the
   corresponding number of limbs).  */

static bool
clear_padding_bitint_needs_padding_p (tree type)
{
  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
  gcc_assert (ok);
  if (info.extended)
    return false;
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.abi_limb_mode);
  if (TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
    return true;
  else if (TYPE_PRECISION (type) == GET_MODE_PRECISION (limb_mode))
    return false;
  else
    return (((unsigned) TYPE_PRECISION (type))
	    % GET_MODE_PRECISION (limb_mode)) != 0;
}
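
/* Illustrative examples, assuming 64-bit limbs that the ABI does not
   extend: _BitInt(37) occupies one limb with bits 37..63 undefined, so
   it needs padding cleared; _BitInt(128) fills two limbs exactly, so it
   does not; _BitInt(135) has a partially filled top limb, so it does
   again.  */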

/* Return true if TYPE might contain any padding bits.  */

bool
clear_padding_type_may_have_padding_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
      return true;
    case ARRAY_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
    case REAL_TYPE:
      return clear_padding_real_needs_padding_p (type);
    case BITINT_TYPE:
      return clear_padding_bitint_needs_padding_p (type);
    default:
      return false;
    }
}
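
/* E.g., RECORD_TYPE and UNION_TYPE conservatively answer true,
   "int[8]" answers false, and "long double[8]" answers true whenever
   the element's floating point format has padding (see above).  */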

/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type,
			 tree end, bool for_auto_init)
{
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  clear_padding_type (buf, type, buf->sz, for_auto_init);
  clear_padding_flush (buf, true);
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}

/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding was not written by the end user but
   inserted by the compiler to initialize the padding of an automatic
   variable, so don't emit the error messages for flexible array
   members, which would only confuse the end user.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
				      " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (ftype))
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		if (tree asbase = lang_hooks.types.classtype_as_base (field))
		  ftype = asbase;
		clear_padding_type (buf, ftype, fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For a sufficiently large array of more than one element,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_real + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    case BITINT_TYPE:
      {
	struct bitint_info info;
	bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
	gcc_assert (ok);
	scalar_int_mode limb_mode
	  = as_a <scalar_int_mode> (info.abi_limb_mode);
	if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
	  {
	    gcc_assert ((size_t) sz <= clear_padding_unit);
	    if ((unsigned HOST_WIDE_INT) sz + buf->size
		> clear_padding_buf_size)
	      clear_padding_flush (buf, false);
	    if (!info.extended
		&& TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
	      {
		int tprec = GET_MODE_PRECISION (limb_mode);
		int prec = TYPE_PRECISION (type);
		tree t = build_nonstandard_integer_type (tprec, 1);
		tree cst = wide_int_to_tree (t, wi::mask (prec, true, tprec));
		int len = native_encode_expr (cst, buf->buf + buf->size, sz);
		gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	      }
	    else
	      memset (buf->buf + buf->size, 0, sz);
	    buf->size += sz;
	    break;
	  }
	tree limbtype
	  = build_nonstandard_integer_type (GET_MODE_PRECISION (limb_mode), 1);
	fldsz = int_size_in_bytes (limbtype);
	nelts = int_size_in_bytes (type) / fldsz;
	for (HOST_WIDE_INT i = 0; i < nelts; i++)
	  {
	    if (!info.extended
		&& i == (info.big_endian ? 0 : nelts - 1)
		&& (((unsigned) TYPE_PRECISION (type))
		    % TYPE_PRECISION (limbtype)) != 0)
	      {
		int tprec = GET_MODE_PRECISION (limb_mode);
		int prec = (((unsigned) TYPE_PRECISION (type)) % tprec);
		tree cst = wide_int_to_tree (limbtype,
					     wi::mask (prec, true, tprec));
		int len = native_encode_expr (cst, buf->buf + buf->size,
					      fldsz);
		gcc_assert (len > 0 && (size_t) len == (size_t) fldsz);
		buf->size += fldsz;
	      }
	    else
	      clear_padding_type (buf, limbtype, fldsz, for_auto_init);
	  }
	break;
      }
    default:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}

/* Clear padding bits of TYPE in MASK.  */

void
clear_type_padding_in_mask (tree type, unsigned char *mask)
{
  clear_padding_struct buf;
  buf.loc = UNKNOWN_LOCATION;
  buf.clear_in_mask = true;
  buf.base = NULL_TREE;
  buf.alias_type = NULL_TREE;
  buf.gsi = NULL;
  buf.align = 0;
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.sz = int_size_in_bytes (type);
  buf.size = 0;
  buf.union_ptr = mask;
  clear_padding_type (&buf, type, buf.sz, false);
  clear_padding_flush (&buf, true);
}
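
/* Illustrative use: for
     struct S { char c; int i; };
   on a target where int is 4 bytes with 4-byte alignment, an 8-byte
   MASK that starts out all ones gets the bits of bytes 1..3 cleared,
   leaving only the value bits of C and I set.  */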

/* Fold __builtin_clear_padding builtin.  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The value of the second argument distinguishes whether this call was
     written by the user or inserted by the compiler for automatic variable
     initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lowering pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
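
/* Illustrative fold, assuming 4-byte int size and alignment: for
     struct S { char c; int i; } s;
     __builtin_clear_padding (&s);
   the call is replaced by stores that zero bytes 1..3 of S, and the
   call statement itself goes away.  */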

/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}

/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic, we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}
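
/* E.g., for an OpenACC region compiled with a compile-time vector
   length of 1, IFN_GOACC_DIM_POS on the vector axis folds to 0 and
   IFN_GOACC_DIM_SIZE folds to 1; a dynamic dimension makes
   oacc_get_fn_dim_size return 0 and no folding happens.  */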

/* Return true if STMT is an __atomic_compare_exchange_N call which is
   suitable for conversion into ATOMIC_COMPARE_EXCHANGE if the second
   argument is &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}

/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}

/* Return true if the result of ARG0 CODE ARG1, computed in infinite
   signed precision, doesn't fit into TYPE.  The overflow test is made
   regardless of -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
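
/* E.g., for a 32-bit unsigned TYPE, 0xffffffff + 1 is 0x100000000 in
   infinite precision, which needs 33 bits, so this returns true even
   though the wrapped result 0 would be representable.  Likewise 0 - 1
   is negative and therefore overflows any unsigned TYPE.  */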

/* If the IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL is unconditional,
   return a MEM_REF for the memory it references, otherwise return null.
   VECTYPE is the type of the memory vector.  MASK_P is true for a MASK
   operation and false for a LEN operation.  */

static tree
gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype, bool mask_p)
{
  tree ptr = gimple_call_arg (call, 0);
  tree alias_align = gimple_call_arg (call, 1);
  if (!tree_fits_uhwi_p (alias_align))
    return NULL_TREE;

  if (mask_p)
    {
      tree mask = gimple_call_arg (call, 2);
      if (!integer_all_onesp (mask))
	return NULL_TREE;
    }
  else
    {
      internal_fn ifn = gimple_call_internal_fn (call);
      int len_index = internal_fn_len_index (ifn);
      tree basic_len = gimple_call_arg (call, len_index);
      if (!poly_int_tree_p (basic_len))
	return NULL_TREE;
      tree bias = gimple_call_arg (call, len_index + 1);
      gcc_assert (TREE_CODE (bias) == INTEGER_CST);
      /* For LEN_LOAD/LEN_STORE/MASK_LEN_LOAD/MASK_LEN_STORE,
	 we don't fold when (bias + len) != VF.  */
      if (maybe_ne (wi::to_poly_widest (basic_len) + wi::to_widest (bias),
		    GET_MODE_NUNITS (TYPE_MODE (vectype))))
	return NULL_TREE;

      /* For MASK_LEN_{LOAD,STORE}, we should also check whether
	 the mask is an all-ones mask.  */
      if (ifn == IFN_MASK_LEN_LOAD || ifn == IFN_MASK_LEN_STORE)
	{
	  tree mask = gimple_call_arg (call, internal_fn_mask_index (ifn));
	  if (!integer_all_onesp (mask))
	    return NULL_TREE;
	}
    }

  unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
  if (TYPE_ALIGN (vectype) != align)
    vectype = build_aligned_type (vectype, align);
  tree offset = build_zero_cst (TREE_TYPE (alias_align));
  return fold_build2 (MEM_REF, vectype, ptr, offset);
}
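
/* Illustrative fold: an unconditional masked load such as
     lhs_2 = .MASK_LOAD (ptr_1, 32B, { -1, -1, -1, -1 });
   passes the checks above and yields
     MEM <vector(4) int> [(int *)ptr_1]
   which the callers below install as a plain load or store.  */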

/* Try to fold IFN_{MASK,LEN}_LOAD call CALL.  Return true on success.
   MASK_P is true for a MASK operation and false for a LEN operation.  */

static bool
gimple_fold_partial_load (gimple_stmt_iterator *gsi, gcall *call, bool mask_p)
{
  tree lhs = gimple_call_lhs (call);
  if (!lhs)
    return false;

  if (tree rhs
      = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (lhs), mask_p))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}

/* Try to fold IFN_{MASK,LEN}_STORE call CALL.  Return true on success.
   MASK_P is true for a MASK operation and false for a LEN operation.  */

static bool
gimple_fold_partial_store (gimple_stmt_iterator *gsi, gcall *call,
			   bool mask_p)
{
  internal_fn ifn = gimple_call_internal_fn (call);
  tree rhs = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
  if (tree lhs
      = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (rhs), mask_p))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						       (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  location_t loc = gimple_location (stmt);
		  gimple *new_stmt = gimple_build_builtin_unreachable (loc);
		  gimple_call_set_ctrl_altering (new_stmt, false);
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      bool uaddc_usubc = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_UADDC:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  uaddc_usubc = true;
	  break;
	case IFN_USUBC:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  uaddc_usubc = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_partial_load (gsi, stmt, true);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_partial_store (gsi, stmt, true);
	  break;
	case IFN_LEN_LOAD:
	case IFN_MASK_LEN_LOAD:
	  changed |= gimple_fold_partial_load (gsi, stmt, false);
	  break;
	case IFN_LEN_STORE:
	case IFN_MASK_LEN_STORE:
	  changed |= gimple_fold_partial_store (gsi, stmt, false);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree arg2 = NULL_TREE;
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	      if (uaddc_usubc)
		arg2 = gimple_call_arg (stmt, 2);
	    }
	  if (type == NULL_TREE)
	    ;
	  else if (uaddc_usubc)
	    {
	      if (!integer_zerop (arg2))
		;
	      /* x = y + 0 + 0; x = y - 0 - 0; */
	      else if (integer_zerop (arg1))
		result = arg0;
	      /* x = 0 + y + 0; */
	      else if (subcode != MINUS_EXPR && integer_zerop (arg0))
		result = arg1;
	      /* x = y - y - 0; */
	      else if (subcode == MINUS_EXPR
		       && operand_equal_p (arg0, arg1, 0))
		result = integer_zero_node;
	    }
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}


/* Return true if NAME has a use on STMT.  Note this can return
   false even though there's a use on STMT if SSA operands are not
   up-to-date.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  ssa_op_iter iter;
  tree op;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    if (op == name)
      return true;
  return false;
}

/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RES_OP and the
   associated statements in *SEQ.  Does the replacement according to
   INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      auto code = tree_code (res_op->code);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
      else if (code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      auto code = tree_code (res_op->code);
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
    {
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
6039 | |
6040 | /* Canonicalize MEM_REFs invariant address operand after propagation. */ |
6041 | |
6042 | static bool |
6043 | maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false) |
6044 | { |
6045 | bool res = false; |
6046 | tree *orig_t = t; |
6047 | |
6048 | if (TREE_CODE (*t) == ADDR_EXPR) |
6049 | t = &TREE_OPERAND (*t, 0); |
6050 | |
6051 | /* The C and C++ frontends use an ARRAY_REF for indexing with their |
6052 | generic vector extension. The actual vector referenced is |
6053 | view-converted to an array type for this purpose. If the index |
6054 | is constant the canonical representation in the middle-end is a |
6055 | BIT_FIELD_REF so re-write the former to the latter here. */ |
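  /* Illustrative example: for a V4SI vector V with 32-bit elements,
     the frontend form VIEW_CONVERT_EXPR<int[4]>(V)[2] is rewritten
     here to BIT_FIELD_REF <V, 32, 64>, i.e. a 32-bit piece of V
     starting at bit position 2 * 32.  */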
6056 | if (TREE_CODE (*t) == ARRAY_REF |
6057 | && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR |
6058 | && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST |
6059 | && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))) |
6060 | { |
6061 | tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)); |
6062 | if (VECTOR_TYPE_P (vtype)) |
6063 | { |
6064 | tree low = array_ref_low_bound (*t); |
6065 | if (TREE_CODE (low) == INTEGER_CST) |
6066 | { |
	  if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
	    {
	      widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					wi::to_widest (low));
	      idx = wi::mul (idx, wi::to_widest
				    (TYPE_SIZE (TREE_TYPE (*t))));
	      widest_int ext
		= wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
	      if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		{
		  *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				   TREE_TYPE (*t),
				   TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				   TYPE_SIZE (TREE_TYPE (*t)),
				   wide_int_to_tree (bitsizetype, idx));
		  res = true;
		}
	    }
6082 | res = true; |
6083 | } |
6084 | } |
6085 | } |
6086 | } |
6087 | } |
6088 | |
  while (handled_component_p (*t))
6090 | t = &TREE_OPERAND (*t, 0); |
6091 | |
6092 | /* Canonicalize MEM [&foo.bar, 0] which appears after propagating |
6093 | of invariant addresses into a SSA name MEM_REF address. */ |
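  /* Illustrative example: MEM[&s.f, 8] becomes MEM[&s, C + 8], where C
     is the constant byte offset of member f within s as computed by
     get_addr_base_and_unit_offset below.  */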
6094 | if (TREE_CODE (*t) == MEM_REF |
6095 | || TREE_CODE (*t) == TARGET_MEM_REF) |
6096 | { |
6097 | tree addr = TREE_OPERAND (*t, 0); |
6098 | if (TREE_CODE (addr) == ADDR_EXPR |
6099 | && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF |
6100 | || handled_component_p (TREE_OPERAND (addr, 0)))) |
6101 | { |
6102 | tree base; |
6103 | poly_int64 coffset; |
6104 | base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0), |
6105 | &coffset); |
6106 | if (!base) |
6107 | { |
6108 | if (is_debug) |
6109 | return false; |
6110 | gcc_unreachable (); |
6111 | } |
6112 | |
6113 | TREE_OPERAND (*t, 0) = build_fold_addr_expr (base); |
6114 | TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR, |
6115 | TREE_OPERAND (*t, 1), |
6116 | size_int (coffset)); |
6117 | res = true; |
6118 | } |
6119 | gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL |
6120 | || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0))); |
6121 | } |
6122 | |
6123 | /* Canonicalize back MEM_REFs to plain reference trees if the object |
6124 | accessed is a decl that has the same access semantics as the MEM_REF. */ |
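  /* Illustrative example: MEM[(int *)&i, 0] becomes plain i for an int
     decl i, provided volatility, TBAA type, alignment and the access
     type all match as checked below.  */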
6125 | if (TREE_CODE (*t) == MEM_REF |
6126 | && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR |
6127 | && integer_zerop (TREE_OPERAND (*t, 1)) |
6128 | && MR_DEPENDENCE_CLIQUE (*t) == 0) |
6129 | { |
6130 | tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0); |
6131 | tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1)); |
6132 | if (/* Same volatile qualification. */ |
6133 | TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl) |
6134 | /* Same TBAA behavior with -fstrict-aliasing. */ |
6135 | && !TYPE_REF_CAN_ALIAS_ALL (alias_type) |
6136 | && (TYPE_MAIN_VARIANT (TREE_TYPE (decl)) |
6137 | == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type))) |
6138 | /* Same alignment. */ |
6139 | && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t)) |
6140 | /* We have to look out here to not drop a required conversion |
6141 | from the rhs to the lhs if *t appears on the lhs or vice-versa |
6142 | if it appears on the rhs. Thus require strict type |
6143 | compatibility. */ |
6144 | && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl))) |
6145 | { |
6146 | *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0); |
6147 | res = true; |
6148 | } |
6149 | } |
6150 | |
6151 | else if (TREE_CODE (*orig_t) == ADDR_EXPR |
6152 | && TREE_CODE (*t) == MEM_REF |
6153 | && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST) |
6154 | { |
6155 | tree base; |
6156 | poly_int64 coffset; |
6157 | base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0), |
6158 | &coffset); |
6159 | if (base) |
6160 | { |
6161 | gcc_assert (TREE_CODE (base) == MEM_REF); |
6162 | poly_int64 moffset; |
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6167 | { |
6168 | coffset += moffset; |
6169 | *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset); |
6170 | return true; |
6171 | } |
6172 | } |
6173 | } |
6174 | } |
6175 | |
6176 | /* Canonicalize TARGET_MEM_REF in particular with respect to |
6177 | the indexes becoming constant. */ |
6178 | else if (TREE_CODE (*t) == TARGET_MEM_REF) |
6179 | { |
6180 | tree tem = maybe_fold_tmr (*t); |
6181 | if (tem) |
6182 | { |
6183 | *t = tem; |
6184 | if (TREE_CODE (*orig_t) == ADDR_EXPR) |
6185 | recompute_tree_invariant_for_addr_expr (*orig_t); |
6186 | res = true; |
6187 | } |
6188 | } |
6189 | |
6190 | return res; |
6191 | } |
6192 | |
6193 | /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument |
6194 | distinguishes both cases. */ |
6195 | |
6196 | static bool |
6197 | fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree)) |
6198 | { |
6199 | bool changed = false; |
  gimple *stmt = gsi_stmt (*gsi);
6201 | bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow); |
6202 | unsigned i; |
6203 | fold_defer_overflow_warnings (); |
6204 | |
6205 | /* First do required canonicalization of [TARGET_]MEM_REF addresses |
6206 | after propagation. |
6207 | ??? This shouldn't be done in generic folding but in the |
6208 | propagation helpers which also know whether an address was |
6209 | propagated. |
6210 | Also canonicalize operand order. */ |
  switch (gimple_code (stmt))
6212 | { |
6213 | case GIMPLE_ASSIGN: |
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
6224 | changed = true; |
	  /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
	     This cannot be done in maybe_canonicalize_mem_ref_addr
	     as the resulting gimple has two operands rather than one,
	     and for the same reason it cannot be done inplace.  */
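	  /* Illustrative example: lhs = &MEM[p_1, 16] becomes
	     lhs = p_1 p+ 16, i.e. a POINTER_PLUS_EXPR with a sizetype
	     offset operand.  */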
6231 | if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR) |
6232 | { |
6233 | tree inner = TREE_OPERAND (*rhs, 0); |
6234 | if (TREE_CODE (inner) == MEM_REF |
6235 | && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME |
6236 | && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST) |
6237 | { |
6238 | tree ptr = TREE_OPERAND (inner, 0); |
6239 | tree addon = TREE_OPERAND (inner, 1); |
6240 | addon = fold_convert (sizetype, addon); |
		  gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
						  ptr, addon);
6243 | changed = true; |
		  stmt = gsi_stmt (*gsi);
6245 | } |
6246 | } |
6247 | } |
6248 | else |
6249 | { |
6250 | /* Canonicalize operand order. */ |
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
6265 | changed = true; |
6266 | } |
6267 | } |
6268 | } |
6269 | break; |
6270 | case GIMPLE_CALL: |
6271 | { |
	gcall *call = as_a<gcall *> (stmt);
	for (i = 0; i < gimple_call_num_args (call); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (call, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (call);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
6284 | changed = true; |
6285 | if (*lhs) |
6286 | { |
6287 | combined_fn cfn = gimple_call_combined_fn (call); |
6288 | internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs)); |
6289 | int opno = first_commutative_argument (ifn); |
6290 | if (opno >= 0) |
6291 | { |
		tree arg1 = gimple_call_arg (call, opno);
		tree arg2 = gimple_call_arg (call, opno + 1);
		if (tree_swap_operands_p (arg1, arg2))
		  {
		    gimple_call_set_arg (call, opno, arg2);
		    gimple_call_set_arg (call, opno + 1, arg1);
6298 | changed = true; |
6299 | } |
6300 | } |
6301 | } |
6302 | break; |
6303 | } |
6304 | case GIMPLE_ASM: |
6305 | { |
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6322 | changed = true; |
6323 | } |
6324 | } |
6325 | break; |
6326 | case GIMPLE_DEBUG: |
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
6334 | changed = true; |
6335 | } |
6336 | break; |
6337 | case GIMPLE_COND: |
6338 | { |
6339 | /* Canonicalize operand order. */ |
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
6349 | changed = true; |
6350 | } |
6351 | } |
6352 | default:; |
6353 | } |
6354 | |
6355 | /* Dispatch to pattern-based folding. */ |
6356 | if (!inplace |
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
6359 | { |
6360 | gimple_seq seq = NULL; |
6361 | gimple_match_op res_op; |
6362 | if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq, |
6363 | valueize, valueize)) |
6364 | { |
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6366 | changed = true; |
6367 | else |
6368 | gimple_seq_discard (seq); |
6369 | } |
6370 | } |
6371 | |
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
6376 | { |
6377 | case GIMPLE_ASSIGN: |
6378 | { |
6379 | /* Try to canonicalize for boolean-typed X the comparisons |
6380 | X == 0, X == 1, X != 0, and X != 1. */ |
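	/* Illustrative example: for _Bool X (one-bit precision), X == 0
	   becomes ~X below, while for a wider boolean type X == 0
	   becomes X ^ 1.  */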
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
6383 | { |
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
6387 | tree type = TREE_TYPE (op1); |
6388 | |
6389 | /* Check whether the comparison operands are of the same boolean |
6390 | type as the result type is. |
6391 | Check that second operand is an integer-constant with value |
6392 | one or zero. */ |
6393 | if (TREE_CODE (op2) == INTEGER_CST |
6394 | && (integer_zerop (op2) || integer_onep (op2)) |
6395 | && useless_type_conversion_p (TREE_TYPE (lhs), type)) |
6396 | { |
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6398 | bool is_logical_not = false; |
6399 | |
		/* X == 0 and X != 1 is a logical-not of X,
		   X == 1 and X != 0 is X itself.  */
6402 | if ((cmp_code == EQ_EXPR && integer_zerop (op2)) |
6403 | || (cmp_code == NE_EXPR && integer_onep (op2))) |
6404 | is_logical_not = true; |
6405 | |
		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for X of one-bit precision is the transformation
		   !X -> ~X valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
6416 | changed = true; |
6417 | break; |
6418 | } |
6419 | } |
6420 | |
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
6424 | if (new_rhs |
6425 | && !useless_type_conversion_p (TREE_TYPE (lhs), |
6426 | TREE_TYPE (new_rhs))) |
6427 | new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs); |
6428 | if (new_rhs |
6429 | && (!inplace |
6430 | || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)) |
6431 | { |
6432 | gimple_assign_set_rhs_from_tree (gsi, new_rhs); |
6433 | changed = true; |
6434 | } |
6435 | break; |
6436 | } |
6437 | |
6438 | case GIMPLE_CALL: |
6439 | changed |= gimple_fold_call (gsi, inplace); |
6440 | break; |
6441 | |
6442 | case GIMPLE_DEBUG: |
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
6452 | changed = true; |
6453 | } |
6454 | } |
6455 | } |
6456 | break; |
6457 | |
6458 | case GIMPLE_RETURN: |
6459 | { |
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);
6462 | |
6463 | if (ret && TREE_CODE (ret) == SSA_NAME && valueize) |
6464 | { |
6465 | tree val = valueize (ret); |
6466 | if (val && val != ret |
6467 | && may_propagate_copy (ret, val)) |
6468 | { |
		gimple_return_set_retval (ret_stmt, val);
6470 | changed = true; |
6471 | } |
6472 | } |
6473 | } |
6474 | break; |
6475 | |
6476 | default:; |
6477 | } |
6478 | |
  stmt = gsi_stmt (*gsi);
6480 | |
6481 | fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0); |
6482 | return changed; |
6483 | } |
6484 | |
/* Valueization callback that ends up not following SSA edges.  */
6486 | |
6487 | tree |
6488 | no_follow_ssa_edges (tree) |
6489 | { |
6490 | return NULL_TREE; |
6491 | } |
6492 | |
6493 | /* Valueization callback that ends up following single-use SSA edges only. */ |
6494 | |
6495 | tree |
6496 | follow_single_use_edges (tree val) |
6497 | { |
6498 | if (TREE_CODE (val) == SSA_NAME |
      && !has_single_use (val))
6500 | return NULL_TREE; |
6501 | return val; |
6502 | } |
6503 | |
6504 | /* Valueization callback that follows all SSA edges. */ |
6505 | |
6506 | tree |
6507 | follow_all_ssa_edges (tree val) |
6508 | { |
6509 | return val; |
6510 | } |
6511 | |
6512 | /* Fold the statement pointed to by GSI. In some cases, this function may |
6513 | replace the whole statement with a new one. Returns true iff folding |
6514 | makes any changes. |
6515 | The statement pointed to by GSI should be in valid gimple form but may |
6516 | be in unfolded state as resulting from for example constant propagation |
6517 | which can produce *&x = 0. */ |
6518 | |
6519 | bool |
6520 | fold_stmt (gimple_stmt_iterator *gsi) |
6521 | { |
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6523 | } |
6524 | |
6525 | bool |
6526 | fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree)) |
6527 | { |
  return fold_stmt_1 (gsi, false, valueize);
6529 | } |
6530 | |
6531 | /* Perform the minimal folding on statement *GSI. Only operations like |
6532 | *&x created by constant propagation are handled. The statement cannot |
6533 | be replaced with a new one. Return true if the statement was |
6534 | changed, false otherwise. |
6535 | The statement *GSI should be in valid gimple form but may |
6536 | be in unfolded state as resulting from for example constant propagation |
6537 | which can produce *&x = 0. */ |
6538 | |
6539 | bool |
6540 | fold_stmt_inplace (gimple_stmt_iterator *gsi) |
6541 | { |
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6544 | gcc_assert (gsi_stmt (*gsi) == stmt); |
6545 | return changed; |
6546 | } |
6547 | |
6548 | /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE |
6549 | if EXPR is null or we don't know how. |
6550 | If non-null, the result always has boolean type. */ |
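/* Illustrative example (assuming integer operands): with INVERT true, a
   comparison a_2 < b_3 canonicalizes to a_2 >= b_3 and a non-boolean
   SSA name x_1 to x_1 == 0; with INVERT false, x_1 becomes x_1 != 0.  */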
6551 | |
6552 | static tree |
6553 | canonicalize_bool (tree expr, bool invert) |
6554 | { |
6555 | if (!expr) |
6556 | return NULL_TREE; |
6557 | else if (invert) |
6558 | { |
6559 | if (integer_nonzerop (expr)) |
6560 | return boolean_false_node; |
6561 | else if (integer_zerop (expr)) |
6562 | return boolean_true_node; |
6563 | else if (TREE_CODE (expr) == SSA_NAME) |
6564 | return fold_build2 (EQ_EXPR, boolean_type_node, expr, |
6565 | build_int_cst (TREE_TYPE (expr), 0)); |
6566 | else if (COMPARISON_CLASS_P (expr)) |
6567 | return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false), |
6568 | boolean_type_node, |
6569 | TREE_OPERAND (expr, 0), |
6570 | TREE_OPERAND (expr, 1)); |
6571 | else |
6572 | return NULL_TREE; |
6573 | } |
6574 | else |
6575 | { |
6576 | if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE) |
6577 | return expr; |
6578 | if (integer_nonzerop (expr)) |
6579 | return boolean_true_node; |
6580 | else if (integer_zerop (expr)) |
6581 | return boolean_false_node; |
6582 | else if (TREE_CODE (expr) == SSA_NAME) |
6583 | return fold_build2 (NE_EXPR, boolean_type_node, expr, |
6584 | build_int_cst (TREE_TYPE (expr), 0)); |
6585 | else if (COMPARISON_CLASS_P (expr)) |
6586 | return fold_build2 (TREE_CODE (expr), |
6587 | boolean_type_node, |
6588 | TREE_OPERAND (expr, 0), |
6589 | TREE_OPERAND (expr, 1)); |
6590 | else |
6591 | return NULL_TREE; |
6592 | } |
6593 | } |
6594 | |
6595 | /* Check to see if a boolean expression EXPR is logically equivalent to the |
6596 | comparison (OP1 CODE OP2). Check for various identities involving |
6597 | SSA_NAMEs. */ |
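/* Illustrative example: with x_1 defined as x_1 = a_2 < b_3, the
   expressions x_1, (x_1 != 0) and (a_2 < b_3) are all recognized here
   as equivalent to the comparison a_2 < b_3.  */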
6598 | |
6599 | static bool |
6600 | same_bool_comparison_p (const_tree expr, enum tree_code code, |
6601 | const_tree op1, const_tree op2) |
6602 | { |
6603 | gimple *s; |
6604 | |
6605 | /* The obvious case. */ |
6606 | if (TREE_CODE (expr) == code |
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6609 | return true; |
6610 | |
6611 | /* Check for comparing (name, name != 0) and the case where expr |
6612 | is an SSA_NAME with a definition matching the comparison. */ |
6613 | if (TREE_CODE (expr) == SSA_NAME |
6614 | && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE) |
6615 | { |
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6624 | return true; |
6625 | } |
6626 | |
6627 | /* If op1 is of the form (name != 0) or (name == 0), and the definition |
6628 | of name is a comparison, recurse. */ |
6629 | if (TREE_CODE (op1) == SSA_NAME |
6630 | && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE) |
6631 | { |
6632 | s = SSA_NAME_DEF_STMT (op1); |
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
6648 | } |
6649 | } |
6650 | return false; |
6651 | } |
6652 | |
6653 | /* Check to see if two boolean expressions OP1 and OP2 are logically |
6654 | equivalent. */ |
6655 | |
6656 | static bool |
6657 | same_bool_result_p (const_tree op1, const_tree op2) |
6658 | { |
6659 | /* Simple cases first. */ |
  if (operand_equal_p (op1, op2, 0))
6661 | return true; |
6662 | |
  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
6666 | if (COMPARISON_CLASS_P (op2) |
      && same_bool_comparison_p (op1, TREE_CODE (op2),
6668 | TREE_OPERAND (op2, 0), |
6669 | TREE_OPERAND (op2, 1))) |
6670 | return true; |
6671 | if (COMPARISON_CLASS_P (op1) |
      && same_bool_comparison_p (op2, TREE_CODE (op1),
6673 | TREE_OPERAND (op1, 0), |
6674 | TREE_OPERAND (op1, 1))) |
6675 | return true; |
6676 | |
6677 | /* Default case. */ |
6678 | return false; |
6679 | } |
6680 | |
6681 | /* Forward declarations for some mutually recursive functions. */ |
6682 | |
6683 | static tree |
6684 | and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b, |
6685 | enum tree_code code2, tree op2a, tree op2b, basic_block); |
6686 | static tree |
6687 | and_var_with_comparison (tree type, tree var, bool invert, |
6688 | enum tree_code code2, tree op2a, tree op2b, |
6689 | basic_block); |
6690 | static tree |
6691 | and_var_with_comparison_1 (tree type, gimple *stmt, |
6692 | enum tree_code code2, tree op2a, tree op2b, |
6693 | basic_block); |
6694 | static tree |
6695 | or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b, |
6696 | enum tree_code code2, tree op2a, tree op2b, |
6697 | basic_block); |
6698 | static tree |
6699 | or_var_with_comparison (tree, tree var, bool invert, |
6700 | enum tree_code code2, tree op2a, tree op2b, |
6701 | basic_block); |
6702 | static tree |
6703 | or_var_with_comparison_1 (tree, gimple *stmt, |
6704 | enum tree_code code2, tree op2a, tree op2b, |
6705 | basic_block); |
6706 | |
6707 | /* Helper function for and_comparisons_1: try to simplify the AND of the |
6708 | ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B). |
6709 | If INVERT is true, invert the value of the VAR before doing the AND. |
   Return NULL_TREE if we can't simplify this to a single expression.  */
6711 | |
6712 | static tree |
6713 | and_var_with_comparison (tree type, tree var, bool invert, |
6714 | enum tree_code code2, tree op2a, tree op2b, |
6715 | basic_block outer_cond_bb) |
6716 | { |
6717 | tree t; |
6718 | gimple *stmt = SSA_NAME_DEF_STMT (var); |
6719 | |
6720 | /* We can only deal with variables whose definitions are assignments. */ |
  if (!is_gimple_assign (stmt))
6722 | return NULL_TREE; |
6723 | |
6724 | /* If we have an inverted comparison, apply DeMorgan's law and rewrite |
6725 | !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b)) |
6726 | Then we only have to consider the simpler non-inverted cases. */ |
6727 | if (invert) |
6728 | t = or_var_with_comparison_1 (type, stmt, |
				 invert_tree_comparison (code2, false),
6730 | op2a, op2b, outer_cond_bb); |
6731 | else |
6732 | t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b, |
6733 | outer_cond_bb); |
  return canonicalize_bool (t, invert);
6735 | } |
6736 | |
6737 | /* Try to simplify the AND of the ssa variable defined by the assignment |
6738 | STMT with the comparison specified by (OP2A CODE2 OP2B). |
   Return NULL_TREE if we can't simplify this to a single expression.  */
6740 | |
6741 | static tree |
6742 | and_var_with_comparison_1 (tree type, gimple *stmt, |
6743 | enum tree_code code2, tree op2a, tree op2b, |
6744 | basic_block outer_cond_bb) |
6745 | { |
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);
6750 | |
6751 | /* Check for identities like (var AND (var == 0)) => false. */ |
6752 | if (TREE_CODE (op2a) == SSA_NAME |
6753 | && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE) |
6754 | { |
6755 | if ((code2 == NE_EXPR && integer_zerop (op2b)) |
6756 | || (code2 == EQ_EXPR && integer_nonzerop (op2b))) |
6757 | { |
6758 | true_test_var = op2a; |
6759 | if (var == true_test_var) |
6760 | return var; |
6761 | } |
6762 | else if ((code2 == EQ_EXPR && integer_zerop (op2b)) |
6763 | || (code2 == NE_EXPR && integer_nonzerop (op2b))) |
6764 | { |
6765 | false_test_var = op2a; |
6766 | if (var == false_test_var) |
6767 | return boolean_false_node; |
6768 | } |
6769 | } |
6770 | |
6771 | /* If the definition is a comparison, recurse on it. */ |
6772 | if (TREE_CODE_CLASS (innercode) == tcc_comparison) |
6773 | { |
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b, outer_cond_bb);
6780 | if (t) |
6781 | return t; |
6782 | } |
6783 | |
6784 | /* If the definition is an AND or OR expression, we may be able to |
6785 | simplify by reassociating. */ |
6786 | if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE |
6787 | && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)) |
6788 | { |
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
6791 | gimple *s; |
6792 | tree t; |
6793 | tree partial = NULL_TREE; |
6794 | bool is_and = (innercode == BIT_AND_EXPR); |
6795 | |
6796 | /* Check for boolean identities that don't require recursive examination |
6797 | of inner1/inner2: |
6798 | inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var |
6799 | inner1 AND (inner1 OR inner2) => inner1 |
6800 | !inner1 AND (inner1 AND inner2) => false |
6801 | !inner1 AND (inner1 OR inner2) => !inner1 AND inner2 |
6802 | Likewise for similar cases involving inner2. */ |
6803 | if (inner1 == true_test_var) |
6804 | return (is_and ? var : inner1); |
6805 | else if (inner2 == true_test_var) |
6806 | return (is_and ? var : inner2); |
6807 | else if (inner1 == false_test_var) |
6808 | return (is_and |
6809 | ? boolean_false_node |
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b, outer_cond_bb));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b, outer_cond_bb));
6817 | |
6818 | /* Next, redistribute/reassociate the AND across the inner tests. |
6819 | Compute the first partial result, (inner1 AND (op2a code op2b)) */ |
6820 | if (TREE_CODE (inner1) == SSA_NAME |
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
6826 | code2, op2a, op2b, |
6827 | outer_cond_bb))) |
6828 | { |
6829 | /* Handle the AND case, where we are reassociating: |
6830 | (inner1 AND inner2) AND (op2a code2 op2b) |
6831 | => (t AND inner2) |
6832 | If the partial result t is a constant, we win. Otherwise |
6833 | continue on to try reassociating with the other inner test. */ |
6834 | if (is_and) |
6835 | { |
6836 | if (integer_onep (t)) |
6837 | return inner2; |
6838 | else if (integer_zerop (t)) |
6839 | return boolean_false_node; |
6840 | } |
6841 | |
6842 | /* Handle the OR case, where we are redistributing: |
6843 | (inner1 OR inner2) AND (op2a code2 op2b) |
6844 | => (t OR (inner2 AND (op2a code2 op2b))) */ |
6845 | else if (integer_onep (t)) |
6846 | return boolean_true_node; |
6847 | |
6848 | /* Save partial result for later. */ |
6849 | partial = t; |
6850 | } |
6851 | |
6852 | /* Compute the second partial result, (inner2 AND (op2a code op2b)) */ |
6853 | if (TREE_CODE (inner2) == SSA_NAME |
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
6859 | code2, op2a, op2b, |
6860 | outer_cond_bb))) |
6861 | { |
6862 | /* Handle the AND case, where we are reassociating: |
6863 | (inner1 AND inner2) AND (op2a code2 op2b) |
6864 | => (inner1 AND t) */ |
6865 | if (is_and) |
6866 | { |
6867 | if (integer_onep (t)) |
6868 | return inner1; |
6869 | else if (integer_zerop (t)) |
6870 | return boolean_false_node; |
6871 | /* If both are the same, we can apply the identity |
6872 | (x AND x) == x. */ |
	      else if (partial && same_bool_result_p (t, partial))
6874 | return t; |
6875 | } |
6876 | |
	  /* Handle the OR case, where we are redistributing:
6878 | (inner1 OR inner2) AND (op2a code2 op2b) |
6879 | => (t OR (inner1 AND (op2a code2 op2b))) |
6880 | => (t OR partial) */ |
6881 | else |
6882 | { |
6883 | if (integer_onep (t)) |
6884 | return boolean_true_node; |
6885 | else if (partial) |
6886 | { |
6887 | /* We already got a simplification for the other |
6888 | operand to the redistributed OR expression. The |
6889 | interesting case is when at least one is false. |
6890 | Or, if both are the same, we can apply the identity |
6891 | (x OR x) == x. */ |
6892 | if (integer_zerop (partial)) |
6893 | return t; |
6894 | else if (integer_zerop (t)) |
6895 | return partial; |
		  else if (same_bool_result_p (t, partial))
6897 | return t; |
6898 | } |
6899 | } |
6900 | } |
6901 | } |
6902 | return NULL_TREE; |
6903 | } |
6904 | |
6905 | /* Try to simplify the AND of two comparisons defined by |
6906 | (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively. |
6907 | If this can be done without constructing an intermediate value, |
6908 | return the resulting tree; otherwise NULL_TREE is returned. |
6909 | This function is deliberately asymmetric as it recurses on SSA_DEFs |
6910 | in the first comparison but not the second. */ |
6911 | |
6912 | static tree |
6913 | and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b, |
6914 | enum tree_code code2, tree op2a, tree op2b, |
6915 | basic_block outer_cond_bb) |
6916 | { |
6917 | tree truth_type = truth_type_for (TREE_TYPE (op1a)); |
6918 | |
6919 | /* First check for ((x CODE1 y) AND (x CODE2 y)). */ |
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
6922 | { |
6923 | /* Result will be either NULL_TREE, or a combined comparison. */ |
6924 | tree t = combine_comparisons (UNKNOWN_LOCATION, |
6925 | TRUTH_ANDIF_EXPR, code1, code2, |
6926 | truth_type, op1a, op1b); |
6927 | if (t) |
6928 | return t; |
6929 | } |
6930 | |
6931 | /* Likewise the swapped case of the above. */ |
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
6934 | { |
6935 | /* Result will be either NULL_TREE, or a combined comparison. */ |
6936 | tree t = combine_comparisons (UNKNOWN_LOCATION, |
6937 | TRUTH_ANDIF_EXPR, code1, |
6938 | swap_tree_comparison (code2), |
6939 | truth_type, op1a, op1b); |
6940 | if (t) |
6941 | return t; |
6942 | } |
6943 | |
6944 | /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where |
6945 | NAME's definition is a truth value. See if there are any simplifications |
6946 | that can be done against the NAME's definition. */ |
6947 | if (TREE_CODE (op1a) == SSA_NAME |
6948 | && (code1 == NE_EXPR || code1 == EQ_EXPR) |
6949 | && (integer_zerop (op1b) || integer_onep (op1b))) |
6950 | { |
6951 | bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b)) |
6952 | || (code1 == NE_EXPR && integer_onep (op1b))); |
6953 | gimple *stmt = SSA_NAME_DEF_STMT (op1a); |
      switch (gimple_code (stmt))
6955 | { |
6956 | case GIMPLE_ASSIGN: |
6957 | /* Try to simplify by copy-propagating the definition. */ |
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
6959 | op2b, outer_cond_bb); |
6960 | |
6961 | case GIMPLE_PHI: |
6962 | /* If every argument to the PHI produces the same result when |
6963 | ANDed with the second comparison, we win. |
6964 | Do not do this unless the type is bool since we need a bool |
6965 | result here anyway. */ |
6966 | if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE) |
6967 | { |
6968 | tree result = NULL_TREE; |
6969 | unsigned i; |
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
6978 | continue; |
6979 | else if (TREE_CODE (arg) == INTEGER_CST) |
6980 | { |
6981 | if (invert ? integer_nonzerop (arg) : integer_zerop (arg)) |
6982 | { |
6983 | if (!result) |
6984 | result = boolean_false_node; |
6985 | else if (!integer_zerop (result)) |
6986 | return NULL_TREE; |
6987 | } |
6988 | else if (!result) |
6989 | result = fold_build2 (code2, boolean_type_node, |
6990 | op2a, op2b); |
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
6993 | return NULL_TREE; |
6994 | } |
6995 | else if (TREE_CODE (arg) == SSA_NAME |
6996 | && !SSA_NAME_IS_DEFAULT_DEF (arg)) |
6997 | { |
6998 | tree temp; |
6999 | gimple *def_stmt = SSA_NAME_DEF_STMT (arg); |
7000 | /* In simple cases we can look through PHI nodes, |
7001 | but we have to be careful with loops. |
7002 | See PR49073. */ |
7003 | if (! dom_info_available_p (CDI_DOMINATORS) |
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
7008 | return NULL_TREE; |
		      temp = and_var_with_comparison (type, arg, invert, code2,
7010 | op2a, op2b, |
7011 | outer_cond_bb); |
7012 | if (!temp) |
7013 | return NULL_TREE; |
7014 | else if (!result) |
7015 | result = temp; |
		      else if (!same_bool_result_p (result, temp))
7017 | return NULL_TREE; |
7018 | } |
7019 | else |
7020 | return NULL_TREE; |
7021 | } |
7022 | return result; |
7023 | } |
7024 | |
7025 | default: |
7026 | break; |
7027 | } |
7028 | } |
7029 | return NULL_TREE; |
7030 | } |
7031 | |
7032 | static basic_block fosa_bb; |
7033 | static vec<std::pair<tree, flow_sensitive_info_storage> > *fosa_unwind; |
7034 | static tree |
7035 | follow_outer_ssa_edges (tree val) |
7036 | { |
7037 | if (TREE_CODE (val) == SSA_NAME |
7038 | && !SSA_NAME_IS_DEFAULT_DEF (val)) |
7039 | { |
7040 | basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val)); |
7041 | if (!def_bb |
7042 | || def_bb == fosa_bb |
7043 | || (dom_info_available_p (CDI_DOMINATORS) |
7044 | && (def_bb == fosa_bb |
7045 | || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb)))) |
7046 | return val; |
7047 | /* We cannot temporarily rewrite stmts with undefined overflow |
7048 | behavior, so avoid expanding them. */ |
7049 | if ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (val)) |
7050 | || POINTER_TYPE_P (TREE_TYPE (val))) |
7051 | && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (val))) |
7052 | return NULL_TREE; |
7053 | flow_sensitive_info_storage storage; |
7054 | storage.save_and_clear (val); |
7055 | /* If the definition does not dominate fosa_bb temporarily reset |
7056 | flow-sensitive info. */ |
      fosa_unwind->safe_push (std::make_pair (val, storage));
7058 | return val; |
7059 | } |
7060 | return val; |
7061 | } |
7062 | |
/* Helper function for maybe_fold_and_comparisons and
   maybe_fold_or_comparisons: try to simplify the AND/OR of the ssa variable
   VAR with the comparison specified by (OP2A CODE2 OP2B) from match.pd.
   Return NULL_TREE if we can't simplify this to a single expression.  To
   lower the cost of building SSA names / gimple stmts significantly, we
   allocate them on the stack.  This will cause the code to be a bit ugly.  */
7069 | |
7070 | static tree |
7071 | maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code, |
7072 | enum tree_code code1, |
7073 | tree op1a, tree op1b, |
7074 | enum tree_code code2, tree op2a, |
7075 | tree op2b, |
7076 | basic_block outer_cond_bb) |
7077 | { |
7078 | /* Allocate gimple stmt1 on the stack. */ |
7079 | gassign *stmt1 |
7080 | = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3)); |
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);
7085 | gimple_set_bb (stmt1, NULL); |
7086 | |
7087 | /* Allocate gimple stmt2 on the stack. */ |
7088 | gassign *stmt2 |
7089 | = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3)); |
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);
7094 | gimple_set_bb (stmt2, NULL); |
7095 | |
7096 | /* Allocate SSA names(lhs1) on the stack. */ |
7097 | alignas (tree_node) unsigned char lhs1buf[sizeof (tree_ssa_name)]; |
7098 | tree lhs1 = (tree) &lhs1buf[0]; |
  memset (lhs1, 0, sizeof (tree_ssa_name));
7100 | TREE_SET_CODE (lhs1, SSA_NAME); |
7101 | TREE_TYPE (lhs1) = type; |
7102 | init_ssa_name_imm_use (lhs1); |
7103 | |
7104 | /* Allocate SSA names(lhs2) on the stack. */ |
7105 | alignas (tree_node) unsigned char lhs2buf[sizeof (tree_ssa_name)]; |
7106 | tree lhs2 = (tree) &lhs2buf[0]; |
  memset (lhs2, 0, sizeof (tree_ssa_name));
7108 | TREE_SET_CODE (lhs2, SSA_NAME); |
7109 | TREE_TYPE (lhs2) = type; |
7110 | init_ssa_name_imm_use (lhs2); |
7111 | |
  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);
7114 | |
7115 | gimple_match_op op (gimple_match_cond::UNCOND, code, |
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
7118 | fosa_bb = outer_cond_bb; |
7119 | auto_vec<std::pair<tree, flow_sensitive_info_storage>, 8> unwind_stack; |
7120 | fosa_unwind = &unwind_stack; |
7121 | if (op.resimplify (NULL, (!outer_cond_bb |
7122 | ? follow_all_ssa_edges : follow_outer_ssa_edges))) |
7123 | { |
7124 | fosa_unwind = NULL; |
7125 | for (auto p : unwind_stack) |
7126 | p.second.restore (p.first); |
      if (gimple_simplified_result_is_gimple_val (&op))
7128 | { |
7129 | tree res = op.ops[0]; |
7130 | if (res == lhs1) |
7131 | return build2 (code1, type, op1a, op1b); |
7132 | else if (res == lhs2) |
7133 | return build2 (code2, type, op2a, op2b); |
7134 | else |
7135 | return res; |
7136 | } |
7137 | else if (op.code.is_tree_code () |
7138 | && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison) |
7139 | { |
7140 | tree op0 = op.ops[0]; |
7141 | tree op1 = op.ops[1]; |
7142 | if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2) |
7143 | return NULL_TREE; /* not simple */ |
7144 | |
7145 | return build2 ((enum tree_code)op.code, op.type, op0, op1); |
7146 | } |
7147 | } |
7148 | fosa_unwind = NULL; |
7149 | for (auto p : unwind_stack) |
7150 | p.second.restore (p.first); |
7151 | |
7152 | return NULL_TREE; |
7153 | } |
7154 | |
7155 | /* Try to simplify the AND of two comparisons, specified by |
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7157 | If this can be simplified to a single expression (without requiring |
7158 | introducing more SSA variables to hold intermediate values), |
7159 | return the resulting tree. Otherwise return NULL_TREE. |
7160 | If the result expression is non-null, it has boolean type. */ |
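/* Illustrative example (assuming integer operands): the AND of a_1 < b_2
   and a_1 == b_2 simplifies to false, and the AND of a_1 < b_2 and
   a_1 <= b_2 to a_1 < b_2.  */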
7161 | |
7162 | tree |
7163 | maybe_fold_and_comparisons (tree type, |
7164 | enum tree_code code1, tree op1a, tree op1b, |
7165 | enum tree_code code2, tree op2a, tree op2b, |
7166 | basic_block outer_cond_bb) |
7167 | { |
7168 | if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b, |
7169 | outer_cond_bb)) |
7170 | return t; |
7171 | |
  if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7173 | outer_cond_bb)) |
7174 | return t; |
7175 | |
  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
7177 | op1a, op1b, code2, op2a, |
7178 | op2b, outer_cond_bb)) |
7179 | return t; |
7180 | |
7181 | return NULL_TREE; |
7182 | } |
7183 | |
7184 | /* Helper function for or_comparisons_1: try to simplify the OR of the |
7185 | ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B). |
7186 | If INVERT is true, invert the value of VAR before doing the OR. |
   Return NULL_TREE if we can't simplify this to a single expression.  */
7188 | |
7189 | static tree |
7190 | or_var_with_comparison (tree type, tree var, bool invert, |
7191 | enum tree_code code2, tree op2a, tree op2b, |
7192 | basic_block outer_cond_bb) |
7193 | { |
7194 | tree t; |
7195 | gimple *stmt = SSA_NAME_DEF_STMT (var); |
7196 | |
7197 | /* We can only deal with variables whose definitions are assignments. */ |
  if (!is_gimple_assign (stmt))
7199 | return NULL_TREE; |
7200 | |
7201 | /* If we have an inverted comparison, apply DeMorgan's law and rewrite |
7202 | !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b)) |
7203 | Then we only have to consider the simpler non-inverted cases. */ |
7204 | if (invert) |
7205 | t = and_var_with_comparison_1 (type, stmt, |
				  invert_tree_comparison (code2, false),
7207 | op2a, op2b, outer_cond_bb); |
7208 | else |
7209 | t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b, |
7210 | outer_cond_bb); |
  return canonicalize_bool (t, invert);
7212 | } |
7213 | |
7214 | /* Try to simplify the OR of the ssa variable defined by the assignment |
7215 | STMT with the comparison specified by (OP2A CODE2 OP2B). |
   Return NULL_TREE if we can't simplify this to a single expression.  */
7217 | |
7218 | static tree |
7219 | or_var_with_comparison_1 (tree type, gimple *stmt, |
7220 | enum tree_code code2, tree op2a, tree op2b, |
7221 | basic_block outer_cond_bb) |
7222 | { |
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);
7227 | |
7228 | /* Check for identities like (var OR (var != 0)) => true . */ |
7229 | if (TREE_CODE (op2a) == SSA_NAME |
7230 | && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE) |
7231 | { |
7232 | if ((code2 == NE_EXPR && integer_zerop (op2b)) |
7233 | || (code2 == EQ_EXPR && integer_nonzerop (op2b))) |
7234 | { |
7235 | true_test_var = op2a; |
7236 | if (var == true_test_var) |
7237 | return var; |
7238 | } |
7239 | else if ((code2 == EQ_EXPR && integer_zerop (op2b)) |
7240 | || (code2 == NE_EXPR && integer_nonzerop (op2b))) |
7241 | { |
7242 | false_test_var = op2a; |
7243 | if (var == false_test_var) |
7244 | return boolean_true_node; |
7245 | } |
7246 | } |
7247 | |
7248 | /* If the definition is a comparison, recurse on it. */ |
7249 | if (TREE_CODE_CLASS (innercode) == tcc_comparison) |
7250 | { |
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b, outer_cond_bb);
7255 | if (t) |
7256 | return t; |
7257 | } |
7258 | |
7259 | /* If the definition is an AND or OR expression, we may be able to |
7260 | simplify by reassociating. */ |
7261 | if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE |
7262 | && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)) |
7263 | { |
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
7266 | gimple *s; |
7267 | tree t; |
7268 | tree partial = NULL_TREE; |
7269 | bool is_or = (innercode == BIT_IOR_EXPR); |
7270 | |
7271 | /* Check for boolean identities that don't require recursive examination |
7272 | of inner1/inner2: |
7273 | inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var |
7274 | inner1 OR (inner1 AND inner2) => inner1 |
7275 | !inner1 OR (inner1 OR inner2) => true |
7276 | !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 |
7277 | */ |
7278 | if (inner1 == true_test_var) |
7279 | return (is_or ? var : inner1); |
7280 | else if (inner2 == true_test_var) |
7281 | return (is_or ? var : inner2); |
7282 | else if (inner1 == false_test_var) |
7283 | return (is_or |
7284 | ? boolean_true_node |
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b, outer_cond_bb));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b, outer_cond_bb));
7292 | |
7293 | /* Next, redistribute/reassociate the OR across the inner tests. |
7294 | Compute the first partial result, (inner1 OR (op2a code op2b)) */ |
7295 | if (TREE_CODE (inner1) == SSA_NAME |
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
7301 | code2, op2a, op2b, |
7302 | outer_cond_bb))) |
7303 | { |
7304 | /* Handle the OR case, where we are reassociating: |
7305 | (inner1 OR inner2) OR (op2a code2 op2b) |
7306 | => (t OR inner2) |
7307 | If the partial result t is a constant, we win. Otherwise |
7308 | continue on to try reassociating with the other inner test. */ |
7309 | if (is_or) |
7310 | { |
7311 | if (integer_onep (t)) |
7312 | return boolean_true_node; |
7313 | else if (integer_zerop (t)) |
7314 | return inner2; |
7315 | } |
7316 | |
7317 | /* Handle the AND case, where we are redistributing: |
7318 | (inner1 AND inner2) OR (op2a code2 op2b) |
7319 | => (t AND (inner2 OR (op2a code op2b))) */ |
7320 | else if (integer_zerop (t)) |
7321 | return boolean_false_node; |
7322 | |
7323 | /* Save partial result for later. */ |
7324 | partial = t; |
7325 | } |
7326 | |
7327 | /* Compute the second partial result, (inner2 OR (op2a code op2b)) */ |
7328 | if (TREE_CODE (inner2) == SSA_NAME |
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
7334 | code2, op2a, op2b, |
7335 | outer_cond_bb))) |
7336 | { |
7337 | /* Handle the OR case, where we are reassociating: |
7338 | (inner1 OR inner2) OR (op2a code2 op2b) |
7339 | => (inner1 OR t) |
7340 | => (t OR partial) */ |
7341 | if (is_or) |
7342 | { |
7343 | if (integer_zerop (t)) |
7344 | return inner1; |
7345 | else if (integer_onep (t)) |
7346 | return boolean_true_node; |
7347 | /* If both are the same, we can apply the identity |
7348 | (x OR x) == x. */ |
	      else if (partial && same_bool_result_p (t, partial))
7350 | return t; |
7351 | } |
7352 | |
7353 | /* Handle the AND case, where we are redistributing: |
7354 | (inner1 AND inner2) OR (op2a code2 op2b) |
7355 | => (t AND (inner1 OR (op2a code2 op2b))) |
7356 | => (t AND partial) */ |
7357 | else |
7358 | { |
7359 | if (integer_zerop (t)) |
7360 | return boolean_false_node; |
7361 | else if (partial) |
7362 | { |
7363 | /* We already got a simplification for the other |
7364 | operand to the redistributed AND expression. The |
7365 | interesting case is when at least one is true. |
7366 | Or, if both are the same, we can apply the identity |
7367 | (x AND x) == x. */ |
7368 | if (integer_onep (partial)) |
7369 | return t; |
7370 | else if (integer_onep (t)) |
7371 | return partial; |
		  else if (same_bool_result_p (t, partial))
7373 | return t; |
7374 | } |
7375 | } |
7376 | } |
7377 | } |
7378 | return NULL_TREE; |
7379 | } |
7380 | |
7381 | /* Try to simplify the OR of two comparisons defined by |
7382 | (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively. |
7383 | If this can be done without constructing an intermediate value, |
7384 | return the resulting tree; otherwise NULL_TREE is returned. |
7385 | This function is deliberately asymmetric as it recurses on SSA_DEFs |
7386 | in the first comparison but not the second. */ |
7387 | |
7388 | static tree |
7389 | or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b, |
7390 | enum tree_code code2, tree op2a, tree op2b, |
7391 | basic_block outer_cond_bb) |
7392 | { |
7393 | tree truth_type = truth_type_for (TREE_TYPE (op1a)); |
7394 | |
7395 | /* First check for ((x CODE1 y) OR (x CODE2 y)). */ |
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
7398 | { |
7399 | /* Result will be either NULL_TREE, or a combined comparison. */ |
7400 | tree t = combine_comparisons (UNKNOWN_LOCATION, |
7401 | TRUTH_ORIF_EXPR, code1, code2, |
7402 | truth_type, op1a, op1b); |
7403 | if (t) |
7404 | return t; |
7405 | } |
7406 | |
7407 | /* Likewise the swapped case of the above. */ |
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
7410 | { |
7411 | /* Result will be either NULL_TREE, or a combined comparison. */ |
7412 | tree t = combine_comparisons (UNKNOWN_LOCATION, |
7413 | TRUTH_ORIF_EXPR, code1, |
7414 | swap_tree_comparison (code2), |
7415 | truth_type, op1a, op1b); |
7416 | if (t) |
7417 | return t; |
7418 | } |
7419 | |
7420 | /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where |
7421 | NAME's definition is a truth value. See if there are any simplifications |
7422 | that can be done against the NAME's definition. */ |
7423 | if (TREE_CODE (op1a) == SSA_NAME |
7424 | && (code1 == NE_EXPR || code1 == EQ_EXPR) |
7425 | && (integer_zerop (op1b) || integer_onep (op1b))) |
7426 | { |
7427 | bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b)) |
7428 | || (code1 == NE_EXPR && integer_onep (op1b))); |
7429 | gimple *stmt = SSA_NAME_DEF_STMT (op1a); |
      switch (gimple_code (stmt))
7431 | { |
7432 | case GIMPLE_ASSIGN: |
7433 | /* Try to simplify by copy-propagating the definition. */ |
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
7435 | op2b, outer_cond_bb); |
7436 | |
7437 | case GIMPLE_PHI: |
7438 | /* If every argument to the PHI produces the same result when |
7439 | ORed with the second comparison, we win. |
7440 | Do not do this unless the type is bool since we need a bool |
7441 | result here anyway. */ |
7442 | if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE) |
7443 | { |
7444 | tree result = NULL_TREE; |
7445 | unsigned i; |
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
7454 | continue; |
7455 | else if (TREE_CODE (arg) == INTEGER_CST) |
7456 | { |
7457 | if (invert ? integer_zerop (arg) : integer_nonzerop (arg)) |
7458 | { |
7459 | if (!result) |
7460 | result = boolean_true_node; |
7461 | else if (!integer_onep (result)) |
7462 | return NULL_TREE; |
7463 | } |
7464 | else if (!result) |
7465 | result = fold_build2 (code2, boolean_type_node, |
7466 | op2a, op2b); |
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
7469 | return NULL_TREE; |
7470 | } |
7471 | else if (TREE_CODE (arg) == SSA_NAME |
7472 | && !SSA_NAME_IS_DEFAULT_DEF (arg)) |
7473 | { |
7474 | tree temp; |
7475 | gimple *def_stmt = SSA_NAME_DEF_STMT (arg); |
7476 | /* In simple cases we can look through PHI nodes, |
7477 | but we have to be careful with loops. |
7478 | See PR49073. */ |
7479 | if (! dom_info_available_p (CDI_DOMINATORS) |
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
7484 | return NULL_TREE; |
		      temp = or_var_with_comparison (type, arg, invert, code2,
7486 | op2a, op2b, outer_cond_bb); |
7487 | if (!temp) |
7488 | return NULL_TREE; |
7489 | else if (!result) |
7490 | result = temp; |
		      else if (!same_bool_result_p (result, temp))
7492 | return NULL_TREE; |
7493 | } |
7494 | else |
7495 | return NULL_TREE; |
7496 | } |
7497 | return result; |
7498 | } |
7499 | |
7500 | default: |
7501 | break; |
7502 | } |
7503 | } |
7504 | return NULL_TREE; |
7505 | } |
7506 | |
7507 | /* Try to simplify the OR of two comparisons, specified by |
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7509 | If this can be simplified to a single expression (without requiring |
7510 | introducing more SSA variables to hold intermediate values), |
7511 | return the resulting tree. Otherwise return NULL_TREE. |
7512 | If the result expression is non-null, it has boolean type. */ |
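/* Illustrative example (assuming integer operands): the OR of a_1 < b_2
   and a_1 == b_2 simplifies to a_1 <= b_2.  */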
7513 | |
7514 | tree |
7515 | maybe_fold_or_comparisons (tree type, |
7516 | enum tree_code code1, tree op1a, tree op1b, |
7517 | enum tree_code code2, tree op2a, tree op2b, |
7518 | basic_block outer_cond_bb) |
7519 | { |
7520 | if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b, |
7521 | outer_cond_bb)) |
7522 | return t; |
7523 | |
  if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7525 | outer_cond_bb)) |
7526 | return t; |
7527 | |
  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7529 | op1a, op1b, code2, op2a, |
7530 | op2b, outer_cond_bb)) |
7531 | return t; |
7532 | |
7533 | return NULL_TREE; |
7534 | } |
7535 | |
7536 | /* Fold STMT to a constant using VALUEIZE to valueize SSA names. |
7537 | |
7538 | Either NULL_TREE, a simplified but non-constant or a constant |
7539 | is returned. |
7540 | |
7541 | ??? This should go into a gimple-fold-inline.h file to be eventually |
7542 | privatized with the single valueize function used in the various TUs |
7543 | to avoid the indirect function call overhead. */ |
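/* Illustrative example: for the statement y_2 = x_1 * 2 and a VALUEIZE
   callback mapping x_1 to the constant 4, this returns the constant 8.  */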
7544 | |
7545 | tree |
7546 | gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree), |
7547 | tree (*gvalueize) (tree)) |
7548 | { |
7549 | gimple_match_op res_op; |
7550 | /* ??? The SSA propagators do not correctly deal with following SSA use-def |
7551 | edges if there are intermediate VARYING defs. For this reason |
7552 | do not follow SSA edges here even though SCCVN can technically |
7553 | just deal fine with that. */ |
7554 | if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize)) |
7555 | { |
7556 | tree res = NULL_TREE; |
      if (gimple_simplified_result_is_gimple_val (&res_op))
7558 | res = res_op.ops[0]; |
7559 | else if (mprts_hook) |
7560 | res = mprts_hook (&res_op); |
7561 | if (res) |
7562 | { |
7563 | if (dump_file && dump_flags & TDF_DETAILS) |
7564 | { |
7565 | fprintf (stream: dump_file, format: "Match-and-simplified " ); |
7566 | print_gimple_expr (dump_file, stmt, 0, TDF_SLIM); |
7567 | fprintf (stream: dump_file, format: " to " ); |
7568 | print_generic_expr (dump_file, res); |
7569 | fprintf (stream: dump_file, format: "\n" ); |
7570 | } |
7571 | return res; |
7572 | } |
7573 | } |
7574 | |
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
7577 | { |
7578 | case GIMPLE_ASSIGN: |
7579 | { |
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
7581 | |
	switch (get_gimple_rhs_class (subcode))
7583 | { |
7584 | case GIMPLE_SINGLE_RHS: |
7585 | { |
	      tree rhs = gimple_assign_rhs1 (stmt);
7587 | enum tree_code_class kind = TREE_CODE_CLASS (subcode); |
7588 | |
7589 | if (TREE_CODE (rhs) == SSA_NAME) |
7590 | { |
7591 | /* If the RHS is an SSA_NAME, return its known constant value, |
7592 | if any. */ |
7593 | return (*valueize) (rhs); |
7594 | } |
7595 | /* Handle propagating invariant addresses into address |
7596 | operations. */ |
7597 | else if (TREE_CODE (rhs) == ADDR_EXPR |
7598 | && !is_gimple_min_invariant (rhs)) |
7599 | { |
7600 | poly_int64 offset = 0; |
7601 | tree base; |
7602 | base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0), |
7603 | &offset, |
7604 | valueize); |
7605 | if (base |
7606 | && (CONSTANT_CLASS_P (base) |
7607 | || decl_address_invariant_p (base))) |
7608 | return build_invariant_address (TREE_TYPE (rhs), |
7609 | base, offset); |
7610 | } |
7611 | else if (TREE_CODE (rhs) == CONSTRUCTOR |
7612 | && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE |
7613 | && known_eq (CONSTRUCTOR_NELTS (rhs), |
7614 | TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)))) |
7615 | { |
7616 | unsigned i, nelts; |
7617 | tree val; |
7618 | |
7619 | nelts = CONSTRUCTOR_NELTS (rhs); |
7620 | tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1); |
7621 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val) |
7622 | { |
7623 | val = (*valueize) (val); |
7624 | if (TREE_CODE (val) == INTEGER_CST |
7625 | || TREE_CODE (val) == REAL_CST |
7626 | || TREE_CODE (val) == FIXED_CST) |
		      vec.quick_push (val);
7628 | else |
7629 | return NULL_TREE; |
7630 | } |
7631 | |
7632 | return vec.build (); |
7633 | } |
7634 | if (subcode == OBJ_TYPE_REF) |
7635 | { |
7636 | tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs)); |
7637 | /* If callee is constant, we can fold away the wrapper. */ |
7638 | if (is_gimple_min_invariant (val)) |
7639 | return val; |
7640 | } |
7641 | |
7642 | if (kind == tcc_reference) |
7643 | { |
7644 | if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR |
7645 | || TREE_CODE (rhs) == REALPART_EXPR |
7646 | || TREE_CODE (rhs) == IMAGPART_EXPR) |
7647 | && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) |
7648 | { |
7649 | tree val = (*valueize) (TREE_OPERAND (rhs, 0)); |
7650 | return fold_unary_loc (EXPR_LOCATION (rhs), |
7651 | TREE_CODE (rhs), |
7652 | TREE_TYPE (rhs), val); |
7653 | } |
7654 | else if (TREE_CODE (rhs) == BIT_FIELD_REF |
7655 | && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) |
7656 | { |
7657 | tree val = (*valueize) (TREE_OPERAND (rhs, 0)); |
7658 | return fold_ternary_loc (EXPR_LOCATION (rhs), |
7659 | TREE_CODE (rhs), |
7660 | TREE_TYPE (rhs), val, |
7661 | TREE_OPERAND (rhs, 1), |
7662 | TREE_OPERAND (rhs, 2)); |
7663 | } |
7664 | else if (TREE_CODE (rhs) == MEM_REF |
7665 | && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) |
7666 | { |
7667 | tree val = (*valueize) (TREE_OPERAND (rhs, 0)); |
7668 | if (TREE_CODE (val) == ADDR_EXPR |
7669 | && is_gimple_min_invariant (val)) |
7670 | { |
7671 | tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs), |
7672 | unshare_expr (val), |
7673 | TREE_OPERAND (rhs, 1)); |
7674 | if (tem) |
7675 | rhs = tem; |
7676 | } |
7677 | } |
7678 | return fold_const_aggregate_ref_1 (rhs, valueize); |
7679 | } |
7680 | else if (kind == tcc_declaration) |
	      return get_symbol_constant_value (rhs);
7682 | return rhs; |
7683 | } |
7684 | |
7685 | case GIMPLE_UNARY_RHS: |
7686 | return NULL_TREE; |
7687 | |
7688 | case GIMPLE_BINARY_RHS: |
7689 | /* Translate &x + CST into an invariant form suitable for |
7690 | further propagation. */ |
7691 | if (subcode == POINTER_PLUS_EXPR) |
7692 | { |
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7695 | if (TREE_CODE (op0) == ADDR_EXPR |
7696 | && TREE_CODE (op1) == INTEGER_CST) |
7697 | { |
7698 | tree off = fold_convert (ptr_type_node, op1); |
		  return build1_loc
		    (loc, ADDR_EXPR, TREE_TYPE (op0),
7701 | fold_build2 (MEM_REF, |
7702 | TREE_TYPE (TREE_TYPE (op0)), |
7703 | unshare_expr (op0), off)); |
7704 | } |
7705 | } |
	  /* Canonicalize bool != 0 and bool == 0 appearing after
	     valueization.  While gimple_simplify handles this
	     it can get confused by the ~X == 1 -> X == 0 transform
	     which we can't reduce to an SSA name or a constant
	     (and we have no way to tell gimple_simplify to not
	     consider those transforms in the first place).  */
7712 | else if (subcode == EQ_EXPR |
7713 | || subcode == NE_EXPR) |
7714 | { |
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);
7717 | if (useless_type_conversion_p (TREE_TYPE (lhs), |
7718 | TREE_TYPE (op0))) |
7719 | { |
		  tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		  op0 = (*valueize) (op0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    std::swap (op0, op1);
7724 | if (TREE_CODE (op1) == INTEGER_CST |
7725 | && ((subcode == NE_EXPR && integer_zerop (op1)) |
7726 | || (subcode == EQ_EXPR && integer_onep (op1)))) |
7727 | return op0; |
7728 | } |
7729 | } |
7730 | return NULL_TREE; |
7731 | |
7732 | case GIMPLE_TERNARY_RHS: |
7733 | { |
7734 | /* Handle ternary operators that can appear in GIMPLE form. */ |
	    tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	    tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7738 | return fold_ternary_loc (loc, subcode, |
7739 | TREE_TYPE (gimple_assign_lhs (stmt)), |
7740 | op0, op1, op2); |
7741 | } |
7742 | |
7743 | default: |
7744 | gcc_unreachable (); |
7745 | } |
7746 | } |
7747 | |
7748 | case GIMPLE_CALL: |
7749 | { |
7750 | tree fn; |
	gcall *call_stmt = as_a <gcall *> (stmt);
7752 | |
	if (gimple_call_internal_p (stmt))
7754 | { |
7755 | enum tree_code subcode = ERROR_MARK; |
	    switch (gimple_call_internal_fn (stmt))
7757 | { |
7758 | case IFN_UBSAN_CHECK_ADD: |
7759 | subcode = PLUS_EXPR; |
7760 | break; |
7761 | case IFN_UBSAN_CHECK_SUB: |
7762 | subcode = MINUS_EXPR; |
7763 | break; |
7764 | case IFN_UBSAN_CHECK_MUL: |
7765 | subcode = MULT_EXPR; |
7766 | break; |
7767 | case IFN_BUILTIN_EXPECT: |
7768 | { |
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
7771 | if (TREE_CODE (op0) == INTEGER_CST) |
7772 | return op0; |
7773 | return NULL_TREE; |
7774 | } |
7775 | default: |
7776 | return NULL_TREE; |
7777 | } |
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
7780 | tree op0 = (*valueize) (arg0); |
7781 | tree op1 = (*valueize) (arg1); |
7782 | |
7783 | if (TREE_CODE (op0) != INTEGER_CST |
7784 | || TREE_CODE (op1) != INTEGER_CST) |
7785 | { |
7786 | switch (subcode) |
7787 | { |
7788 | case MULT_EXPR: |
7789 | /* x * 0 = 0 * x = 0 without overflow. */ |
7790 | if (integer_zerop (op0) || integer_zerop (op1)) |
7791 | return build_zero_cst (TREE_TYPE (arg0)); |
7792 | break; |
7793 | case MINUS_EXPR: |
7794 | /* y - y = 0 without overflow. */ |
		    if (operand_equal_p (op0, op1, 0))
7796 | return build_zero_cst (TREE_TYPE (arg0)); |
7797 | break; |
7798 | default: |
7799 | break; |
7800 | } |
7801 | } |
7802 | tree res |
7803 | = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1); |
7804 | if (res |
7805 | && TREE_CODE (res) == INTEGER_CST |
7806 | && !TREE_OVERFLOW (res)) |
7807 | return res; |
7808 | return NULL_TREE; |
7809 | } |
7810 | |
	fn = (*valueize) (gimple_call_fn (stmt));
7812 | if (TREE_CODE (fn) == ADDR_EXPR |
7813 | && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL |
7814 | && fndecl_built_in_p (TREE_OPERAND (fn, 0)) |
7815 | && gimple_builtin_call_types_compatible_p (stmt, |
7816 | TREE_OPERAND (fn, 0))) |
7817 | { |
7818 | tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt)); |
7819 | tree retval; |
7820 | unsigned i; |
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
7826 | if (retval) |
7827 | { |
7828 | /* fold_call_expr wraps the result inside a NOP_EXPR. */ |
7829 | STRIP_NOPS (retval); |
7830 | retval = fold_convert (gimple_call_return_type (call_stmt), |
7831 | retval); |
7832 | } |
7833 | return retval; |
7834 | } |
7835 | return NULL_TREE; |
7836 | } |
7837 | |
7838 | default: |
7839 | return NULL_TREE; |
7840 | } |
7841 | } |
7842 | |
7843 | /* Fold STMT to a constant using VALUEIZE to valueize SSA names. |
7844 | Returns NULL_TREE if folding to a constant is not possible, otherwise |
7845 | returns a constant according to is_gimple_min_invariant. */ |
7846 | |
7847 | tree |
7848 | gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree)) |
7849 | { |
7850 | tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize); |
7851 | if (res && is_gimple_min_invariant (res)) |
7852 | return res; |
7853 | return NULL_TREE; |
7854 | } |
7855 | |
7856 | |
7857 | /* The following set of functions are supposed to fold references using |
7858 | their constant initializers. */ |
7859 | |
/* See if we can find a constructor defining the value of BASE.
   When we know the constructor at a constant offset (such as when
   BASE is array[40] and we know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero,
   such as 'static const int a;'.  */
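
/* For illustration, for an access like MEM_REF[&a + 8] with

     static const int a[40];

   BIT_OFFSET is increased by 64 and, because the object is known to
   be zero-initialized without an explicit CONSTRUCTOR, error_mark_node
   is returned; with an explicit initializer the CONSTRUCTOR itself
   would be returned instead.  */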
7868 | static tree |
7869 | get_base_constructor (tree base, poly_int64 *bit_offset, |
7870 | tree (*valueize)(tree)) |
7871 | { |
7872 | poly_int64 bit_offset2, size, max_size; |
7873 | bool reverse; |
7874 | |
7875 | if (TREE_CODE (base) == MEM_REF) |
7876 | { |
7877 | poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT; |
      if (!boff.to_shwi (bit_offset))
7879 | return NULL_TREE; |
7880 | |
7881 | if (valueize |
7882 | && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME) |
7883 | base = valueize (TREE_OPERAND (base, 0)); |
7884 | if (!base || TREE_CODE (base) != ADDR_EXPR) |
7885 | return NULL_TREE; |
7886 | base = TREE_OPERAND (base, 0); |
7887 | } |
7888 | else if (valueize |
7889 | && TREE_CODE (base) == SSA_NAME) |
7890 | base = valueize (base); |
7891 | |
7892 | /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its |
7893 | DECL_INITIAL. If BASE is a nested reference into another |
7894 | ARRAY_REF or COMPONENT_REF, make a recursive call to resolve |
7895 | the inner reference. */ |
7896 | switch (TREE_CODE (base)) |
7897 | { |
7898 | case VAR_DECL: |
7899 | case CONST_DECL: |
7900 | { |
7901 | tree init = ctor_for_folding (base); |
7902 | |
	/* Our semantics are the exact opposite of ctor_for_folding's:
	   NULL means unknown, while error_mark_node means 0.  */
7905 | if (init == error_mark_node) |
7906 | return NULL_TREE; |
7907 | if (!init) |
7908 | return error_mark_node; |
7909 | return init; |
7910 | } |
7911 | |
7912 | case VIEW_CONVERT_EXPR: |
7913 | return get_base_constructor (TREE_OPERAND (base, 0), |
7914 | bit_offset, valueize); |
7915 | |
7916 | case ARRAY_REF: |
7917 | case COMPONENT_REF: |
7918 | base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size, |
7919 | &reverse); |
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
7921 | return NULL_TREE; |
7922 | *bit_offset += bit_offset2; |
7923 | return get_base_constructor (base, bit_offset, valueize); |
7924 | |
7925 | case CONSTRUCTOR: |
7926 | return base; |
7927 | |
7928 | default: |
7929 | if (CONSTANT_CLASS_P (base)) |
7930 | return base; |
7931 | |
7932 | return NULL_TREE; |
7933 | } |
7934 | } |
7935 | |
7936 | /* CTOR is a CONSTRUCTOR of an array or vector type. Fold a reference of SIZE |
7937 | bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of |
7938 | the reference; otherwise the type of the referenced element is used instead. |
7939 | When SIZE is zero, attempt to fold a reference to the entire element OFFSET |
7940 | refers to. Increment *SUBOFF by the bit offset of the accessed element. */ |
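
/* For illustration, with a CTOR for

     static const int a[3] = { 10, 20, 30 };

   and 32-bit int, a request with OFFSET 32 and SIZE 32 folds to the
   INTEGER_CST 20, while a read past the initialized elements folds
   to zero.  The array here is purely illustrative.  */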
7941 | |
7942 | static tree |
7943 | fold_array_ctor_reference (tree type, tree ctor, |
7944 | unsigned HOST_WIDE_INT offset, |
7945 | unsigned HOST_WIDE_INT size, |
7946 | tree from_decl, |
7947 | unsigned HOST_WIDE_INT *suboff) |
7948 | { |
7949 | offset_int low_bound; |
7950 | offset_int elt_size; |
7951 | offset_int access_index; |
7952 | tree domain_type = NULL_TREE; |
7953 | HOST_WIDE_INT inner_offset; |
7954 | |
7955 | /* Compute low bound and elt size. */ |
7956 | if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE) |
7957 | domain_type = TYPE_DOMAIN (TREE_TYPE (ctor)); |
7958 | if (domain_type && TYPE_MIN_VALUE (domain_type)) |
7959 | { |
7960 | /* Static constructors for variably sized objects make no sense. */ |
7961 | if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST) |
7962 | return NULL_TREE; |
7963 | low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type)); |
7964 | } |
7965 | else |
7966 | low_bound = 0; |
7967 | /* Static constructors for variably sized objects make no sense. */ |
7968 | if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST) |
7969 | return NULL_TREE; |
7970 | elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))); |
7971 | |
7972 | /* When TYPE is non-null, verify that it specifies a constant-sized |
7973 | access of a multiple of the array element size. Avoid division |
7974 | by zero below when ELT_SIZE is zero, such as with the result of |
7975 | an initializer for a zero-length array or an empty struct. */ |
7976 | if (elt_size == 0 |
7977 | || (type |
7978 | && (!TYPE_SIZE_UNIT (type) |
7979 | || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST))) |
7980 | return NULL_TREE; |
7981 | |
7982 | /* Compute the array index we look for. */ |
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
7985 | access_index += low_bound; |
7986 | |
7987 | /* And offset within the access. */ |
7988 | inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT); |
7989 | |
7990 | unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi (); |
7991 | if (size > elt_sz * BITS_PER_UNIT) |
7992 | { |
7993 | /* native_encode_expr constraints. */ |
7994 | if (size > MAX_BITSIZE_MODE_ANY_MODE |
7995 | || size % BITS_PER_UNIT != 0 |
7996 | || inner_offset % BITS_PER_UNIT != 0 |
7997 | || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT) |
7998 | return NULL_TREE; |
7999 | |
8000 | unsigned ctor_idx; |
8001 | tree val = get_array_ctor_element_at_index (ctor, access_index, |
8002 | &ctor_idx); |
8003 | if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor)) |
8004 | return build_zero_cst (type); |
8005 | |
8006 | /* native-encode adjacent ctor elements. */ |
8007 | unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT]; |
8008 | unsigned bufoff = 0; |
8009 | offset_int index = 0; |
8010 | offset_int max_index = access_index; |
8011 | constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx); |
8012 | if (!val) |
8013 | val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor))); |
8014 | else if (!CONSTANT_CLASS_P (val)) |
8015 | return NULL_TREE; |
8016 | if (!elt->index) |
8017 | ; |
8018 | else if (TREE_CODE (elt->index) == RANGE_EXPR) |
8019 | { |
8020 | index = wi::to_offset (TREE_OPERAND (elt->index, 0)); |
8021 | max_index = wi::to_offset (TREE_OPERAND (elt->index, 1)); |
8022 | } |
8023 | else |
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
8026 | do |
8027 | { |
8028 | if (bufoff + elt_sz > sizeof (buf)) |
8029 | elt_sz = sizeof (buf) - bufoff; |
8030 | int len = native_encode_expr (val, buf + bufoff, elt_sz, |
					inner_offset / BITS_PER_UNIT);
8032 | if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT) |
8033 | return NULL_TREE; |
8034 | inner_offset = 0; |
8035 | bufoff += len; |
8036 | |
8037 | access_index += 1; |
	  if (wi::cmpu (access_index, index) == 0)
8039 | val = elt->value; |
	  else if (wi::cmpu (access_index, max_index) > 0)
8041 | { |
8042 | ctor_idx++; |
8043 | if (ctor_idx >= CONSTRUCTOR_NELTS (ctor)) |
8044 | { |
8045 | val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor))); |
8046 | ++max_index; |
8047 | } |
8048 | else |
8049 | { |
8050 | elt = CONSTRUCTOR_ELT (ctor, ctor_idx); |
8051 | index = 0; |
8052 | max_index = access_index; |
8053 | if (!elt->index) |
8054 | ; |
8055 | else if (TREE_CODE (elt->index) == RANGE_EXPR) |
8056 | { |
8057 | index = wi::to_offset (TREE_OPERAND (elt->index, 0)); |
8058 | max_index = wi::to_offset (TREE_OPERAND (elt->index, 1)); |
8059 | } |
8060 | else |
		index = max_index = wi::to_offset (elt->index);
	      index = wi::umax (index, access_index);
	      if (wi::cmpu (access_index, index) == 0)
8064 | val = elt->value; |
8065 | else |
8066 | val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor))); |
8067 | } |
8068 | } |
8069 | } |
8070 | while (bufoff < size / BITS_PER_UNIT); |
8071 | *suboff += size; |
8072 | return native_interpret_expr (type, buf, size / BITS_PER_UNIT); |
8073 | } |
8074 | |
8075 | if (tree val = get_array_ctor_element_at_index (ctor, access_index)) |
8076 | { |
8077 | if (!size && TREE_CODE (val) != CONSTRUCTOR) |
8078 | { |
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
8083 | inner_offset = 0; |
8084 | type = TREE_TYPE (val); |
8085 | size = elt_sz * BITS_PER_UNIT; |
8086 | } |
8087 | else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1 |
8088 | && TREE_CODE (val) == CONSTRUCTOR |
8089 | && (elt_sz * BITS_PER_UNIT - inner_offset) < size) |
	/* If this isn't the last element in the CTOR, is itself a CTOR,
	   and does not cover the whole object we are requesting, give up
	   since we're not set up for combining from multiple CTORs.  */
8093 | return NULL_TREE; |
8094 | |
8095 | *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT; |
8096 | return fold_ctor_reference (type, val, inner_offset, size, from_decl, |
8097 | suboff); |
8098 | } |
8099 | |
8100 | /* Memory not explicitly mentioned in constructor is 0 (or |
8101 | the reference is out of range). */ |
8102 | return type ? build_zero_cst (type) : NULL_TREE; |
8103 | } |
8104 | |
8105 | /* CTOR is a CONSTRUCTOR of a record or union type. Fold a reference of SIZE |
8106 | bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of |
8107 | the reference; otherwise the type of the referenced member is used instead. |
8108 | When SIZE is zero, attempt to fold a reference to the entire member OFFSET |
8109 | refers to. Increment *SUBOFF by the bit offset of the accessed member. */ |
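
/* For illustration, with a CTOR for

     static const struct { int a; int b; } s = { 1, 2 };

   a request whose OFFSET and SIZE match the bit position and bit size
   of 's.b' folds to the INTEGER_CST 2; memory not covered by any
   initialized field folds to zero.  The struct here is purely
   illustrative.  */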
8110 | |
8111 | static tree |
8112 | fold_nonarray_ctor_reference (tree type, tree ctor, |
8113 | unsigned HOST_WIDE_INT offset, |
8114 | unsigned HOST_WIDE_INT size, |
8115 | tree from_decl, |
8116 | unsigned HOST_WIDE_INT *suboff) |
8117 | { |
8118 | unsigned HOST_WIDE_INT cnt; |
8119 | tree cfield, cval; |
8120 | |
8121 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) |
8122 | { |
8123 | tree byte_offset = DECL_FIELD_OFFSET (cfield); |
8124 | tree field_offset = DECL_FIELD_BIT_OFFSET (cfield); |
8125 | tree field_size = DECL_SIZE (cfield); |
8126 | |
8127 | if (!field_size) |
8128 | { |
8129 | /* Determine the size of the flexible array member from |
8130 | the size of the initializer provided for it. */ |
8131 | field_size = TYPE_SIZE (TREE_TYPE (cval)); |
8132 | } |
8133 | |
      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
8136 | gcc_assert (TREE_CODE (field_offset) == INTEGER_CST |
8137 | && TREE_CODE (byte_offset) == INTEGER_CST |
8138 | && (field_size != NULL_TREE |
8139 | ? TREE_CODE (field_size) == INTEGER_CST |
8140 | : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE)); |
8141 | |
8142 | /* Compute bit offset of the field. */ |
8143 | offset_int bitoffset |
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
8146 | /* Compute bit offset where the field ends. */ |
8147 | offset_int bitoffset_end; |
8148 | if (field_size != NULL_TREE) |
	bitoffset_end = bitoffset + wi::to_offset (field_size);
8150 | else |
8151 | bitoffset_end = 0; |
8152 | |
8153 | /* Compute the bit offset of the end of the desired access. |
8154 | As a special case, if the size of the desired access is |
8155 | zero, assume the access is to the entire field (and let |
8156 | the caller make any necessary adjustments by storing |
8157 | the actual bounds of the field in FIELDBOUNDS). */ |
8158 | offset_int access_end = offset_int (offset); |
8159 | if (size) |
8160 | access_end += size; |
8161 | else |
8162 | access_end = bitoffset_end; |
8163 | |
8164 | /* Is there any overlap between the desired access at |
8165 | [OFFSET, OFFSET+SIZE) and the offset of the field within |
8166 | the object at [BITOFFSET, BITOFFSET_END)? */ |
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
8170 | { |
8171 | *suboff += bitoffset.to_uhwi (); |
8172 | |
8173 | if (!size && TREE_CODE (cval) != CONSTRUCTOR) |
8174 | { |
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
8179 | offset = bitoffset.to_uhwi (); |
8180 | type = TREE_TYPE (cval); |
8181 | size = (bitoffset_end - bitoffset).to_uhwi (); |
8182 | } |
8183 | |
8184 | /* We do have overlap. Now see if the field is large enough |
8185 | to cover the access. Give up for accesses that extend |
8186 | beyond the end of the object or that span multiple fields. */ |
	  if (wi::cmps (access_end, bitoffset_end) > 0)
8188 | return NULL_TREE; |
8189 | if (offset < bitoffset) |
8190 | return NULL_TREE; |
8191 | |
8192 | offset_int inner_offset = offset_int (offset) - bitoffset; |
8193 | |
8194 | /* Integral bit-fields are left-justified on big-endian targets, so |
8195 | we must arrange for native_encode_int to start at their MSB. */ |
8196 | if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield))) |
8197 | { |
8198 | if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN) |
8199 | return NULL_TREE; |
8200 | const unsigned int encoding_size |
8201 | = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (cfield))); |
8202 | if (BYTES_BIG_ENDIAN) |
		inner_offset += encoding_size - wi::to_offset (field_size);
8204 | } |
8205 | |
8206 | return fold_ctor_reference (type, cval, |
8207 | inner_offset.to_uhwi (), size, |
8208 | from_decl, suboff); |
8209 | } |
8210 | } |
8211 | |
8212 | if (!type) |
8213 | return NULL_TREE; |
8214 | |
8215 | return build_zero_cst (type); |
8216 | } |
8217 | |
8218 | /* CTOR is a value initializing memory. Fold a reference of TYPE and |
8219 | bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE |
8220 | is zero, attempt to fold a reference to the entire subobject |
8221 | which OFFSET refers to. This is used when folding accesses to |
8222 | string members of aggregates. When non-null, set *SUBOFF to |
8223 | the bit offset of the accessed subobject. */ |
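
/* For illustration, a caller constant-folding a load of 'int' from a
   statically initialized aggregate passes its CONSTRUCTOR as CTOR
   together with the bit offset and bit size of the access; the array
   and non-array helpers above then do the actual work.  */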
8224 | |
8225 | tree |
8226 | fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset, |
8227 | const poly_uint64 &poly_size, tree from_decl, |
8228 | unsigned HOST_WIDE_INT *suboff /* = NULL */) |
8229 | { |
8230 | tree ret; |
8231 | |
  /* We found the field with an exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8237 | |
8238 | /* The remaining optimizations need a constant size and offset. */ |
8239 | unsigned HOST_WIDE_INT size, offset; |
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8241 | return NULL_TREE; |
8242 | |
  /* We are at the end of the walk, see if we can view convert the
     result.  */
8245 | if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset |
8246 | /* VIEW_CONVERT_EXPR is defined only for matching sizes. */ |
8247 | && known_eq (wi::to_poly_widest (TYPE_SIZE (type)), size) |
8248 | && known_eq (wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ctor))), size)) |
8249 | { |
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8251 | if (ret) |
8252 | { |
8253 | ret = fold_unary (VIEW_CONVERT_EXPR, type, ret); |
8254 | if (ret) |
8255 | STRIP_USELESS_TYPE_CONVERSION (ret); |
8256 | } |
8257 | return ret; |
8258 | } |
8259 | |
8260 | /* For constants and byte-aligned/sized reads, try to go through |
8261 | native_encode/interpret. */ |
8262 | if (CONSTANT_CLASS_P (ctor) |
8263 | && BITS_PER_UNIT == 8 |
8264 | && offset % BITS_PER_UNIT == 0 |
8265 | && offset / BITS_PER_UNIT <= INT_MAX |
8266 | && size % BITS_PER_UNIT == 0 |
8267 | && size <= MAX_BITSIZE_MODE_ANY_MODE |
8268 | && can_native_interpret_type_p (type)) |
8269 | { |
8270 | unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT]; |
8271 | int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT, |
				    offset / BITS_PER_UNIT);
8273 | if (len > 0) |
8274 | return native_interpret_expr (type, buf, len); |
8275 | } |
8276 | |
8277 | /* For constructors, try first a recursive local processing, but in any case |
8278 | this requires the native storage order. */ |
8279 | if (TREE_CODE (ctor) == CONSTRUCTOR |
8280 | && !(AGGREGATE_TYPE_P (TREE_TYPE (ctor)) |
8281 | && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor)))) |
8282 | { |
8283 | unsigned HOST_WIDE_INT dummy = 0; |
8284 | if (!suboff) |
8285 | suboff = &dummy; |
8286 | |
8287 | tree ret; |
8288 | if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE |
8289 | || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE) |
8290 | ret = fold_array_ctor_reference (type, ctor, offset, size, |
8291 | from_decl, suboff); |
8292 | else |
8293 | ret = fold_nonarray_ctor_reference (type, ctor, offset, size, |
8294 | from_decl, suboff); |
8295 | |
8296 | /* Otherwise fall back to native_encode_initializer. This may be done |
8297 | only from the outermost fold_ctor_reference call (because it itself |
8298 | recurses into CONSTRUCTORs and doesn't update suboff). */ |
8299 | if (ret == NULL_TREE |
8300 | && suboff == &dummy |
8301 | && BITS_PER_UNIT == 8 |
8302 | && offset % BITS_PER_UNIT == 0 |
8303 | && offset / BITS_PER_UNIT <= INT_MAX |
8304 | && size % BITS_PER_UNIT == 0 |
8305 | && size <= MAX_BITSIZE_MODE_ANY_MODE |
8306 | && can_native_interpret_type_p (type)) |
8307 | { |
8308 | unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT]; |
8309 | int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT, |
					       offset / BITS_PER_UNIT);
8311 | if (len > 0) |
8312 | return native_interpret_expr (type, buf, len); |
8313 | } |
8314 | |
8315 | return ret; |
8316 | } |
8317 | |
8318 | return NULL_TREE; |
8319 | } |
8320 | |
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */
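
/* For illustration, with

     static const int a[3] = { 1, 2, 3 };

   folding the reference a[2] yields the INTEGER_CST 3, and with a
   VALUEIZE hook mapping i_1 to 2 the reference a[i_1] folds the same
   way.  The names here are purely illustrative.  */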
8324 | |
8325 | tree |
8326 | fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree)) |
8327 | { |
8328 | tree ctor, idx, base; |
8329 | poly_int64 offset, size, max_size; |
8330 | tree tem; |
8331 | bool reverse; |
8332 | |
8333 | if (TREE_THIS_VOLATILE (t)) |
8334 | return NULL_TREE; |
8335 | |
8336 | if (DECL_P (t)) |
    return get_symbol_constant_value (t);
8338 | |
8339 | tem = fold_read_from_constant_string (t); |
8340 | if (tem) |
8341 | return tem; |
8342 | |
8343 | switch (TREE_CODE (t)) |
8344 | { |
8345 | case ARRAY_REF: |
8346 | case ARRAY_RANGE_REF: |
8347 | /* Constant indexes are handled well by get_base_constructor. |
8348 | Only special case variable offsets. |
8349 | FIXME: This code can't handle nested references with variable indexes |
8350 | (they will be handled only by iteration of ccp). Perhaps we can bring |
8351 | get_ref_base_and_extent here and make it use a valueize callback. */ |
8352 | if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME |
8353 | && valueize |
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
8356 | { |
8357 | tree low_bound, unit_size; |
8358 | |
8359 | /* If the resulting bit-offset is constant, track it. */ |
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
8362 | && (unit_size = array_ref_element_size (t), |
8363 | tree_fits_uhwi_p (unit_size))) |
8364 | { |
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
8372 | { |
8373 | base = TREE_OPERAND (t, 0); |
		  ctor = get_base_constructor (base, &offset, valueize);
8375 | /* Empty constructor. Always fold to 0. */ |
8376 | if (ctor == error_mark_node) |
8377 | return build_zero_cst (TREE_TYPE (t)); |
8378 | /* Out of bound array access. Value is undefined, |
8379 | but don't fold. */ |
		  if (maybe_lt (offset, 0))
8381 | return NULL_TREE; |
8382 | /* We cannot determine ctor. */ |
8383 | if (!ctor) |
8384 | return NULL_TREE; |
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
8389 | } |
8390 | } |
8391 | } |
8392 | /* Fallthru. */ |
8393 | |
8394 | case COMPONENT_REF: |
8395 | case BIT_FIELD_REF: |
8396 | case TARGET_MEM_REF: |
8397 | case MEM_REF: |
8398 | base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse); |
      ctor = get_base_constructor (base, &offset, valueize);
8400 | |
8401 | /* Empty constructor. Always fold to 0. */ |
8402 | if (ctor == error_mark_node) |
8403 | return build_zero_cst (TREE_TYPE (t)); |
8404 | /* We do not know precise address. */ |
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
8406 | return NULL_TREE; |
8407 | /* We cannot determine ctor. */ |
8408 | if (!ctor) |
8409 | return NULL_TREE; |
8410 | |
8411 | /* Out of bound array access. Value is undefined, but don't fold. */ |
      if (maybe_lt (offset, 0))
8413 | return NULL_TREE; |
8414 | |
      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8416 | if (tem) |
8417 | return tem; |
8418 | |
8419 | /* For bit field reads try to read the representative and |
8420 | adjust. */ |
8421 | if (TREE_CODE (t) == COMPONENT_REF |
8422 | && DECL_BIT_FIELD (TREE_OPERAND (t, 1)) |
8423 | && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1))) |
8424 | { |
8425 | HOST_WIDE_INT csize, coffset; |
8426 | tree field = TREE_OPERAND (t, 1); |
8427 | tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field); |
8428 | if (INTEGRAL_TYPE_P (TREE_TYPE (repr)) |
	      && size.is_constant (&csize)
	      && offset.is_constant (&coffset)
8431 | && (coffset % BITS_PER_UNIT != 0 |
8432 | || csize % BITS_PER_UNIT != 0) |
8433 | && !reverse |
8434 | && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN) |
8435 | { |
8436 | poly_int64 bitoffset; |
8437 | poly_uint64 field_offset, repr_offset; |
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8440 | bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT; |
8441 | else |
8442 | bitoffset = 0; |
8443 | bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) |
8444 | - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))); |
8445 | HOST_WIDE_INT bitoff; |
8446 | int diff = (TYPE_PRECISION (TREE_TYPE (repr)) |
8447 | - TYPE_PRECISION (TREE_TYPE (field))); |
	      if (bitoffset.is_constant (&bitoff)
8449 | && bitoff >= 0 |
8450 | && bitoff <= diff) |
8451 | { |
8452 | offset -= bitoff; |
8453 | size = tree_to_uhwi (DECL_SIZE (repr)); |
8454 | |
		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
					     size, base);
8457 | if (tem && TREE_CODE (tem) == INTEGER_CST) |
8458 | { |
		      if (!BYTES_BIG_ENDIAN)
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     bitoff));
		      else
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     diff - bitoff));
8467 | return tem; |
8468 | } |
8469 | } |
8470 | } |
8471 | } |
8472 | break; |
8473 | |
8474 | case REALPART_EXPR: |
8475 | case IMAGPART_EXPR: |
8476 | { |
8477 | tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize); |
8478 | if (c && TREE_CODE (c) == COMPLEX_CST) |
8479 | return fold_build1_loc (EXPR_LOCATION (t), |
8480 | TREE_CODE (t), TREE_TYPE (t), c); |
8481 | break; |
8482 | } |
8483 | |
8484 | default: |
8485 | break; |
8486 | } |
8487 | |
8488 | return NULL_TREE; |
8489 | } |
8490 | |
8491 | tree |
8492 | fold_const_aggregate_ref (tree t) |
8493 | { |
8494 | return fold_const_aggregate_ref_1 (t, NULL); |
8495 | } |
8496 | |
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as
   rewritten by a non-C++-produced symbol).  Otherwise just return NULL
   in that case.  */
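
/* For illustration, if V is the vtable variable of a class and its
   initializer is a simple array of function pointers, TOKEN selects
   the array element TOKEN slots past OFFSET and the FUNCTION_DECL of
   the overriding method is returned.  The exact vtable layout is
   target and ABI specific; this sketch only shows the intent.  */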
8502 | |
8503 | tree |
8504 | gimple_get_virt_method_for_vtable (HOST_WIDE_INT token, |
8505 | tree v, |
8506 | unsigned HOST_WIDE_INT offset, |
8507 | bool *can_refer) |
8508 | { |
8509 | tree vtable = v, init, fn; |
8510 | unsigned HOST_WIDE_INT size; |
8511 | unsigned HOST_WIDE_INT elt_size, access_index; |
8512 | tree domain_type; |
8513 | |
8514 | if (can_refer) |
8515 | *can_refer = true; |
8516 | |
  /* First of all double-check that we have a virtual table.  */
8518 | if (!VAR_P (v) || !DECL_VIRTUAL_P (v)) |
8519 | { |
8520 | /* Pass down that we lost track of the target. */ |
8521 | if (can_refer) |
8522 | *can_refer = false; |
8523 | return NULL_TREE; |
8524 | } |
8525 | |
8526 | init = ctor_for_folding (v); |
8527 | |
  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
8532 | gcc_assert (init); |
8533 | if (init == error_mark_node) |
8534 | { |
8535 | /* Pass down that we lost track of the target. */ |
8536 | if (can_refer) |
8537 | *can_refer = false; |
8538 | return NULL_TREE; |
8539 | } |
8540 | gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE); |
8541 | size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v)))); |
8542 | offset *= BITS_PER_UNIT; |
8543 | offset += token * size; |
8544 | |
  /* Look up the value in the constructor, which is assumed to be an
     array.  This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */
8551 | |
8552 | gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE); |
8553 | domain_type = TYPE_DOMAIN (TREE_TYPE (init)); |
8554 | gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type))); |
8555 | elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init)))); |
8556 | |
8557 | access_index = offset / BITS_PER_UNIT / elt_size; |
8558 | gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0); |
8559 | |
8560 | /* The C++ FE can now produce indexed fields, and we check if the indexes |
8561 | match. */ |
8562 | if (access_index < CONSTRUCTOR_NELTS (init)) |
8563 | { |
8564 | fn = CONSTRUCTOR_ELT (init, access_index)->value; |
8565 | tree idx = CONSTRUCTOR_ELT (init, access_index)->index; |
8566 | gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index); |
8567 | STRIP_NOPS (fn); |
8568 | } |
8569 | else |
8570 | fn = NULL; |
8571 | |
  /* For a type-inconsistent program we may end up looking up a virtual
     method in a virtual table that does not contain TOKEN entries.
     We may overrun the virtual table and pick up a constant or RTTI
     info pointer.  In any case the call is undefined.  */
8576 | if (!fn |
8577 | || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR) |
8578 | || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL) |
8579 | fn = builtin_decl_unreachable (); |
8580 | else |
8581 | { |
8582 | fn = TREE_OPERAND (fn, 0); |
8583 | |
8584 | /* When cgraph node is missing and function is not public, we cannot |
8585 | devirtualize. This can happen in WHOPR when the actual method |
8586 | ends up in other partition, because we found devirtualization |
8587 | possibility too late. */ |
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
8589 | { |
8590 | if (can_refer) |
8591 | { |
8592 | *can_refer = false; |
8593 | return fn; |
8594 | } |
8595 | return NULL_TREE; |
8596 | } |
8597 | } |
8598 | |
8599 | /* Make sure we create a cgraph node for functions we'll reference. |
8600 | They can be non-existent if the reference comes from an entry |
8601 | of an external vtable for example. */ |
8602 | cgraph_node::get_create (fn); |
8603 | |
8604 | return fn; |
8605 | } |
8606 | |
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as
   rewritten by a non-C++-produced symbol).  Otherwise just return NULL
   in that case.  */
8614 | |
8615 | tree |
8616 | gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo, |
8617 | bool *can_refer) |
8618 | { |
8619 | unsigned HOST_WIDE_INT offset; |
8620 | tree v; |
8621 | |
8622 | v = BINFO_VTABLE (known_binfo); |
8623 | /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */ |
8624 | if (!v) |
8625 | return NULL_TREE; |
8626 | |
8627 | if (!vtable_pointer_value_to_vtable (v, &v, &offset)) |
8628 | { |
8629 | if (can_refer) |
8630 | *can_refer = false; |
8631 | return NULL_TREE; |
8632 | } |
8633 | return gimple_get_virt_method_for_vtable (token, v, offset, can_refer); |
8634 | } |
8635 | |
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may differ from the
   pointed-to type in the sense that it is still compatible
   from the langhooks point of view.  */
8641 | |
8642 | tree |
8643 | gimple_fold_indirect_ref (tree t) |
8644 | { |
8645 | tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype); |
8646 | tree sub = t; |
8647 | tree subtype; |
8648 | |
8649 | STRIP_NOPS (sub); |
8650 | subtype = TREE_TYPE (sub); |
8651 | if (!POINTER_TYPE_P (subtype) |
8652 | || TYPE_REF_CAN_ALIAS_ALL (ptype)) |
8653 | return NULL_TREE; |
8654 | |
8655 | if (TREE_CODE (sub) == ADDR_EXPR) |
8656 | { |
8657 | tree op = TREE_OPERAND (sub, 0); |
8658 | tree optype = TREE_TYPE (op); |
8659 | /* *&p => p */ |
8660 | if (useless_type_conversion_p (type, optype)) |
8661 | return op; |
8662 | |
8663 | /* *(foo *)&fooarray => fooarray[0] */ |
8664 | if (TREE_CODE (optype) == ARRAY_TYPE |
8665 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST |
8666 | && useless_type_conversion_p (type, TREE_TYPE (optype))) |
8667 | { |
8668 | tree type_domain = TYPE_DOMAIN (optype); |
8669 | tree min_val = size_zero_node; |
8670 | if (type_domain && TYPE_MIN_VALUE (type_domain)) |
8671 | min_val = TYPE_MIN_VALUE (type_domain); |
8672 | if (TREE_CODE (min_val) == INTEGER_CST) |
8673 | return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); |
8674 | } |
8675 | /* *(foo *)&complexfoo => __real__ complexfoo */ |
8676 | else if (TREE_CODE (optype) == COMPLEX_TYPE |
8677 | && useless_type_conversion_p (type, TREE_TYPE (optype))) |
8678 | return fold_build1 (REALPART_EXPR, type, op); |
8679 | /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */ |
8680 | else if (TREE_CODE (optype) == VECTOR_TYPE |
8681 | && useless_type_conversion_p (type, TREE_TYPE (optype))) |
8682 | { |
8683 | tree part_width = TYPE_SIZE (type); |
8684 | tree index = bitsize_int (0); |
8685 | return fold_build3 (BIT_FIELD_REF, type, op, part_width, index); |
8686 | } |
8687 | } |
8688 | |
8689 | /* *(p + CST) -> ... */ |
8690 | if (TREE_CODE (sub) == POINTER_PLUS_EXPR |
8691 | && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) |
8692 | { |
8693 | tree addr = TREE_OPERAND (sub, 0); |
8694 | tree off = TREE_OPERAND (sub, 1); |
8695 | tree addrtype; |
8696 | |
8697 | STRIP_NOPS (addr); |
8698 | addrtype = TREE_TYPE (addr); |
8699 | |
8700 | /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */ |
8701 | if (TREE_CODE (addr) == ADDR_EXPR |
8702 | && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE |
8703 | && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))) |
8704 | && tree_fits_uhwi_p (off)) |
8705 | { |
8706 | unsigned HOST_WIDE_INT offset = tree_to_uhwi (off); |
8707 | tree part_width = TYPE_SIZE (type); |
8708 | unsigned HOST_WIDE_INT part_widthi |
8709 | = tree_to_shwi (part_width) / BITS_PER_UNIT; |
8710 | unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT; |
8711 | tree index = bitsize_int (indexi); |
8712 | if (known_lt (offset / part_widthi, |
8713 | TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))) |
8714 | return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0), |
8715 | part_width, index); |
8716 | } |
8717 | |
8718 | /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */ |
8719 | if (TREE_CODE (addr) == ADDR_EXPR |
8720 | && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE |
8721 | && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))) |
8722 | { |
8723 | tree size = TYPE_SIZE_UNIT (type); |
8724 | if (tree_int_cst_equal (size, off)) |
8725 | return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0)); |
8726 | } |
8727 | |
8728 | /* *(p + CST) -> MEM_REF <p, CST>. */ |
8729 | if (TREE_CODE (addr) != ADDR_EXPR |
8730 | || DECL_P (TREE_OPERAND (addr, 0))) |
8731 | return fold_build2 (MEM_REF, type, |
8732 | addr, |
8733 | wide_int_to_tree (ptype, wi::to_wide (off))); |
8734 | } |
8735 | |
8736 | /* *(foo *)fooarrptr => (*fooarrptr)[0] */ |
8737 | if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE |
8738 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST |
8739 | && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype)))) |
8740 | { |
8741 | tree type_domain; |
8742 | tree min_val = size_zero_node; |
8743 | tree osub = sub; |
      sub = gimple_fold_indirect_ref (sub);
8745 | if (! sub) |
8746 | sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub); |
8747 | type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); |
8748 | if (type_domain && TYPE_MIN_VALUE (type_domain)) |
8749 | min_val = TYPE_MIN_VALUE (type_domain); |
8750 | if (TREE_CODE (min_val) == INTEGER_CST) |
8751 | return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); |
8752 | } |
8753 | |
8754 | return NULL_TREE; |
8755 | } |
8756 | |
8757 | /* Return true if CODE is an operation that when operating on signed |
8758 | integer types involves undefined behavior on overflow and the |
8759 | operation can be expressed with unsigned arithmetic. */ |
8760 | |
8761 | bool |
8762 | arith_code_with_undefined_signed_overflow (tree_code code) |
8763 | { |
8764 | switch (code) |
8765 | { |
8766 | case ABS_EXPR: |
8767 | case PLUS_EXPR: |
8768 | case MINUS_EXPR: |
8769 | case MULT_EXPR: |
8770 | case NEGATE_EXPR: |
8771 | case POINTER_PLUS_EXPR: |
8772 | return true; |
8773 | default: |
8774 | return false; |
8775 | } |
8776 | } |
8777 | |
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic, by converting
   its operands, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.
8782 | |
8783 | If IN_PLACE is true, *GSI points to STMT, adjust the stmt in place and |
8784 | return NULL. |
8785 | Otherwise returns a sequence of statements that replace STMT and also |
8786 | contain a modified form of STMT itself. */ |
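
/* For illustration, with 32-bit int the assignment

     c_3 = a_1 + b_2;

   is rewritten to something like

     _4 = (unsigned int) a_1;
     _5 = (unsigned int) b_2;
     _6 = _4 + _5;
     c_3 = (int) _6;

   where the unsigned addition wraps instead of invoking undefined
   behavior on overflow.  The SSA names are purely illustrative.  */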
8787 | |
8788 | static gimple_seq |
8789 | rewrite_to_defined_overflow (gimple_stmt_iterator *gsi, gimple *stmt, |
8790 | bool in_place) |
8791 | { |
8792 | if (dump_file && (dump_flags & TDF_DETAILS)) |
8793 | { |
8794 | fprintf (stream: dump_file, format: "rewriting stmt with undefined signed " |
8795 | "overflow " ); |
8796 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
8797 | } |
8798 | |
  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
8815 | if (in_place) |
8816 | { |
8817 | if (stmts) |
8818 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
8819 | stmts = NULL; |
8820 | } |
8821 | else |
8822 | gimple_seq_add_stmt (&stmts, stmt); |
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8824 | if (in_place) |
8825 | { |
8826 | gsi_insert_after (gsi, cvt, GSI_SAME_STMT); |
      update_stmt (stmt);
8828 | } |
8829 | else |
8830 | gimple_seq_add_stmt (&stmts, cvt); |
8831 | |
8832 | return stmts; |
8833 | } |
8834 | |
8835 | void |
8836 | rewrite_to_defined_overflow (gimple_stmt_iterator *gsi) |
8837 | { |
  rewrite_to_defined_overflow (gsi, gsi_stmt (*gsi), true);
8839 | } |
8840 | |
8841 | gimple_seq |
8842 | rewrite_to_defined_overflow (gimple *stmt) |
8843 | { |
  return rewrite_to_defined_overflow (nullptr, stmt, false);
8845 | } |
8846 | |
8847 | /* The valueization hook we use for the gimple_build API simplification. |
8848 | This makes us match fold_buildN behavior by only combining with |
8849 | statements in the sequence(s) we are currently building. */ |
8850 | |
8851 | static tree |
8852 | gimple_build_valueize (tree op) |
8853 | { |
8854 | if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL) |
8855 | return op; |
8856 | return NULL_TREE; |
8857 | } |
8858 | |
8859 | /* Helper for gimple_build to perform the final insertion of stmts on SEQ. */ |
8860 | |
8861 | static inline void |
8862 | gimple_build_insert_seq (gimple_stmt_iterator *gsi, |
8863 | bool before, gsi_iterator_update update, |
8864 | gimple_seq seq) |
8865 | { |
8866 | if (before) |
8867 | { |
8868 | if (gsi->bb) |
8869 | gsi_insert_seq_before (gsi, seq, update); |
8870 | else |
8871 | gsi_insert_seq_before_without_update (gsi, seq, update); |
8872 | } |
8873 | else |
8874 | { |
8875 | if (gsi->bb) |
8876 | gsi_insert_seq_after (gsi, seq, update); |
8877 | else |
8878 | gsi_insert_seq_after_without_update (gsi, seq, update); |
8879 | } |
8880 | } |
8881 | |
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and inserts statements possibly defining it
   before GSI if BEFORE is true or after GSI if false, and advances
   the iterator accordingly.
   If GSI refers to a basic block, simplification is allowed to look
   at all SSA defs; otherwise it is restricted to SSA defs that are
   not yet associated with a basic block, indicating that they belong
   to the sequence currently being built.  */
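
/* For illustration, a pass that wants the negation of OP0 inserted
   before the statement at GSI could do

     tree res = gimple_build (&gsi, true, GSI_SAME_STMT, loc,
			      NEGATE_EXPR, type, op0);

   where RES is either an existing value if the negation simplifies
   or the LHS of a newly inserted assignment.  This usage sketch
   assumes GSI, LOC, TYPE and OP0 are already set up.  */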
8891 | |
8892 | tree |
8893 | gimple_build (gimple_stmt_iterator *gsi, |
8894 | bool before, gsi_iterator_update update, |
8895 | location_t loc, enum tree_code code, tree type, tree op0) |
8896 | { |
8897 | gimple_seq seq = NULL; |
8898 | tree res |
8899 | = gimple_simplify (code, type, op0, &seq, |
8900 | gsi->bb ? follow_all_ssa_edges : gimple_build_valueize); |
8901 | if (!res) |
8902 | { |
8903 | res = create_tmp_reg_or_ssa_name (type); |
8904 | gimple *stmt; |
8905 | if (code == REALPART_EXPR |
8906 | || code == IMAGPART_EXPR |
8907 | || code == VIEW_CONVERT_EXPR) |
8908 | stmt = gimple_build_assign (res, code, build1 (code, type, op0)); |
8909 | else |
8910 | stmt = gimple_build_assign (res, code, op0); |
      gimple_set_location (stmt, loc);
8912 | gimple_seq_add_stmt_without_update (&seq, stmt); |
8913 | } |
8914 | gimple_build_insert_seq (gsi, before, update, seq); |
8915 | return res; |
8916 | } |
8917 | |
8918 | /* Build the expression OP0 CODE OP1 of type TYPE with location LOC, |
8919 | simplifying it first if possible. Returns the built |
8920 | expression value inserting any new statements at GSI honoring BEFORE |
8921 | and UPDATE. */ |
8922 | |
8923 | tree |
8924 | gimple_build (gimple_stmt_iterator *gsi, |
8925 | bool before, gsi_iterator_update update, |
8926 | location_t loc, enum tree_code code, tree type, |
8927 | tree op0, tree op1) |
8928 | { |
8929 | gimple_seq seq = NULL; |
8930 | tree res |
8931 | = gimple_simplify (code, type, op0, op1, &seq, |
8932 | gsi->bb ? follow_all_ssa_edges : gimple_build_valueize); |
8933 | if (!res) |
8934 | { |
8935 | res = create_tmp_reg_or_ssa_name (type); |
8936 | gimple *stmt = gimple_build_assign (res, code, op0, op1); |
      gimple_set_location (stmt, loc);
8938 | gimple_seq_add_stmt_without_update (&seq, stmt); |
8939 | } |
8940 | gimple_build_insert_seq (gsi, before, update, seq); |
8941 | return res; |
8942 | } |
8943 | |
8944 | /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC, |
8945 | simplifying it first if possible. Returns the built |
8946 | expression value inserting any new statements at GSI honoring BEFORE |
8947 | and UPDATE. */ |
8948 | |
8949 | tree |
8950 | gimple_build (gimple_stmt_iterator *gsi, |
8951 | bool before, gsi_iterator_update update, |
8952 | location_t loc, enum tree_code code, tree type, |
8953 | tree op0, tree op1, tree op2) |
8954 | { |
8955 | |
8956 | gimple_seq seq = NULL; |
8957 | tree res |
8958 | = gimple_simplify (code, type, op0, op1, op2, &seq, |
8959 | gsi->bb ? follow_all_ssa_edges : gimple_build_valueize); |
8960 | if (!res) |
8961 | { |
8962 | res = create_tmp_reg_or_ssa_name (type); |
8963 | gimple *stmt; |
8964 | if (code == BIT_FIELD_REF) |
8965 | stmt = gimple_build_assign (res, code, |
8966 | build3 (code, type, op0, op1, op2)); |
8967 | else |
8968 | stmt = gimple_build_assign (res, code, op0, op1, op2); |
      gimple_set_location (stmt, loc);
8970 | gimple_seq_add_stmt_without_update (&seq, stmt); |
8971 | } |
8972 | gimple_build_insert_seq (gsi, before, update, seq); |
8973 | return res; |
8974 | } |
8975 | |
8976 | /* Build the call FN () with a result of type TYPE (or no result if TYPE is |
8977 | void) with a location LOC. Returns the built expression value (or NULL_TREE |
8978 | if TYPE is void) inserting any new statements at GSI honoring BEFORE |
8979 | and UPDATE. */ |
8980 | |
8981 | tree |
8982 | gimple_build (gimple_stmt_iterator *gsi, |
8983 | bool before, gsi_iterator_update update, |
8984 | location_t loc, combined_fn fn, tree type) |
8985 | { |
8986 | tree res = NULL_TREE; |
8987 | gimple_seq seq = NULL; |
8988 | gcall *stmt; |
  if (internal_fn_p (fn))
    stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
  else
    {
      tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8994 | stmt = gimple_build_call (decl, 0); |
8995 | } |
8996 | if (!VOID_TYPE_P (type)) |
8997 | { |
8998 | res = create_tmp_reg_or_ssa_name (type); |
      gimple_call_set_lhs (stmt, res);
9000 | } |
  gimple_set_location (stmt, loc);
9002 | gimple_seq_add_stmt_without_update (&seq, stmt); |
9003 | gimple_build_insert_seq (gsi, before, update, seq); |
9004 | return res; |
9005 | } |
9006 | |
9007 | /* Build the call FN (ARG0) with a result of type TYPE |
9008 | (or no result if TYPE is void) with location LOC, |
9009 | simplifying it first if possible. Returns the built |
9010 | expression value (or NULL_TREE if TYPE is void) inserting any new |
9011 | statements at GSI honoring BEFORE and UPDATE. */ |
9012 | |
9013 | tree |
9014 | gimple_build (gimple_stmt_iterator *gsi, |
9015 | bool before, gsi_iterator_update update, |
9016 | location_t loc, combined_fn fn, |
9017 | tree type, tree arg0) |
9018 | { |
9019 | gimple_seq seq = NULL; |
9020 | tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize); |
9021 | if (!res) |
9022 | { |
9023 | gcall *stmt; |
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9029 | stmt = gimple_build_call (decl, 1, arg0); |
9030 | } |
9031 | if (!VOID_TYPE_P (type)) |
9032 | { |
9033 | res = create_tmp_reg_or_ssa_name (type); |
	  gimple_call_set_lhs (stmt, res);
9035 | } |
      gimple_set_location (stmt, loc);
9037 | gimple_seq_add_stmt_without_update (&seq, stmt); |
9038 | } |
9039 | gimple_build_insert_seq (gsi, before, update, seq); |
9040 | return res; |
9041 | } |
9042 | |
9043 | /* Build the call FN (ARG0, ARG1) with a result of type TYPE |
9044 | (or no result if TYPE is void) with location LOC, |
9045 | simplifying it first if possible. Returns the built |
9046 | expression value (or NULL_TREE if TYPE is void) inserting any new |
9047 | statements at GSI honoring BEFORE and UPDATE. */ |
9048 | |
9049 | tree |
9050 | gimple_build (gimple_stmt_iterator *gsi, |
9051 | bool before, gsi_iterator_update update, |
9052 | location_t loc, combined_fn fn, |
9053 | tree type, tree arg0, tree arg1) |
9054 | { |
9055 | gimple_seq seq = NULL; |
9056 | tree res = gimple_simplify (fn, type, arg0, arg1, &seq, |
9057 | gimple_build_valueize); |
9058 | if (!res) |
9059 | { |
9060 | gcall *stmt; |
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9066 | stmt = gimple_build_call (decl, 2, arg0, arg1); |
9067 | } |
9068 | if (!VOID_TYPE_P (type)) |
9069 | { |
9070 | res = create_tmp_reg_or_ssa_name (type); |
9071 | gimple_call_set_lhs (gs: stmt, lhs: res); |
9072 | } |
9073 | gimple_set_location (g: stmt, location: loc); |
9074 | gimple_seq_add_stmt_without_update (&seq, stmt); |
9075 | } |
9076 | gimple_build_insert_seq (gsi, before, update, seq); |
9077 | return res; |
9078 | } |
9079 | |
9080 | /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE |
9081 | (or no result if TYPE is void) with location LOC, |
9082 | simplifying it first if possible. Returns the built |
9083 | expression value (or NULL_TREE if TYPE is void) inserting any new |
9084 | statements at GSI honoring BEFORE and UPDATE. */ |
9085 | |
9086 | tree |
9087 | gimple_build (gimple_stmt_iterator *gsi, |
9088 | bool before, gsi_iterator_update update, |
9089 | location_t loc, combined_fn fn, |
9090 | tree type, tree arg0, tree arg1, tree arg2) |
9091 | { |
9092 | gimple_seq seq = NULL; |
9093 | tree res = gimple_simplify (fn, type, arg0, arg1, arg2, |
9094 | &seq, gimple_build_valueize); |
9095 | if (!res) |
9096 | { |
9097 | gcall *stmt; |
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn),
					   3, arg0, arg1, arg2);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
9112 | gimple_seq_add_stmt_without_update (&seq, stmt); |
9113 | } |
9114 | gimple_build_insert_seq (gsi, before, update, seq); |
9115 | return res; |
9116 | } |
9117 | |
9118 | /* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is |
9119 | void) with location LOC, simplifying it first if possible. Returns the |
9120 | built expression value (or NULL_TREE if TYPE is void) inserting any new |
9121 | statements at GSI honoring BEFORE and UPDATE. */ |
9122 | |
9123 | tree |
9124 | gimple_build (gimple_stmt_iterator *gsi, |
9125 | bool before, gsi_iterator_update update, |
9126 | location_t loc, code_helper code, tree type, tree op0) |
9127 | { |
9128 | if (code.is_tree_code ()) |
    return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
  return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
9131 | } |
9132 | |
9133 | /* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is |
9134 | void) with location LOC, simplifying it first if possible. Returns the |
9135 | built expression value (or NULL_TREE if TYPE is void) inserting any new |
9136 | statements at GSI honoring BEFORE and UPDATE. */ |
9137 | |
9138 | tree |
9139 | gimple_build (gimple_stmt_iterator *gsi, |
9140 | bool before, gsi_iterator_update update, |
9141 | location_t loc, code_helper code, tree type, tree op0, tree op1) |
9142 | { |
9143 | if (code.is_tree_code ()) |
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1);
9148 | } |
9149 | |
9150 | /* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE |
9151 | is void) with location LOC, simplifying it first if possible. Returns the |
9152 | built expression value (or NULL_TREE if TYPE is void) inserting any new |
9153 | statements at GSI honoring BEFORE and UPDATE. */ |
9154 | |
9155 | tree |
9156 | gimple_build (gimple_stmt_iterator *gsi, |
9157 | bool before, gsi_iterator_update update, |
9158 | location_t loc, code_helper code, |
9159 | tree type, tree op0, tree op1, tree op2) |
9160 | { |
9161 | if (code.is_tree_code ()) |
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1, op2);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1, op2);
9166 | } |
9167 | |
9168 | /* Build the conversion (TYPE) OP with a result of type TYPE |
   with location LOC if such a conversion is necessary in GIMPLE,
9170 | simplifying it first. |
9171 | Returns the built expression inserting any new statements |
9172 | at GSI honoring BEFORE and UPDATE. */ |
9173 | |
9174 | tree |
9175 | gimple_convert (gimple_stmt_iterator *gsi, |
9176 | bool before, gsi_iterator_update update, |
9177 | location_t loc, tree type, tree op) |
9178 | { |
9179 | if (useless_type_conversion_p (type, TREE_TYPE (op))) |
9180 | return op; |
  return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
9182 | } |
9183 | |
9184 | /* Build the conversion (ptrofftype) OP with a result of a type |
9185 | compatible with ptrofftype with location LOC if such conversion |
   is necessary in GIMPLE, simplifying it first.
9187 | Returns the built expression value inserting any new statements |
9188 | at GSI honoring BEFORE and UPDATE. */ |
9189 | |
9190 | tree |
9191 | gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi, |
9192 | bool before, gsi_iterator_update update, |
9193 | location_t loc, tree op) |
9194 | { |
9195 | if (ptrofftype_p (TREE_TYPE (op))) |
9196 | return op; |
9197 | return gimple_convert (gsi, before, update, loc, sizetype, op); |
9198 | } |
9199 | |
9200 | /* Build a vector of type TYPE in which each element has the value OP. |
9201 | Return a gimple value for the result, inserting any new statements |
9202 | at GSI honoring BEFORE and UPDATE. */ |
9203 | |
9204 | tree |
9205 | gimple_build_vector_from_val (gimple_stmt_iterator *gsi, |
9206 | bool before, gsi_iterator_update update, |
9207 | location_t loc, tree type, tree op) |
9208 | { |
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (gsi, before, update,
			 loc, VEC_DUPLICATE_EXPR, type, op);
9213 | |
9214 | tree res, vec = build_vector_from_val (type, op); |
9215 | if (is_gimple_val (vec)) |
9216 | return vec; |
9217 | if (gimple_in_ssa_p (cfun)) |
    res = make_ssa_name (type);
9219 | else |
9220 | res = create_tmp_reg (type); |
9221 | gimple_seq seq = NULL; |
9222 | gimple *stmt = gimple_build_assign (res, vec); |
  gimple_set_location (stmt, loc);
9224 | gimple_seq_add_stmt_without_update (&seq, stmt); |
9225 | gimple_build_insert_seq (gsi, before, update, seq); |
9226 | return res; |
9227 | } |
9228 | |
9229 | /* Build a vector from BUILDER, handling the case in which some elements |
9230 | are non-constant. Return a gimple value for the result, inserting |
   any new statements at GSI honoring BEFORE and UPDATE.
9232 | |
9233 | BUILDER must not have a stepped encoding on entry. This is because |
9234 | the function is not geared up to handle the arithmetic that would |
9235 | be needed in the variable case, and any code building a vector that |
9236 | is known to be constant should use BUILDER->build () directly. */ |
9237 | |
9238 | tree |
9239 | gimple_build_vector (gimple_stmt_iterator *gsi, |
9240 | bool before, gsi_iterator_update update, |
9241 | location_t loc, tree_vector_builder *builder) |
9242 | { |
9243 | gcc_assert (builder->nelts_per_pattern () <= 2); |
9244 | unsigned int encoded_nelts = builder->encoded_nelts (); |
9245 | for (unsigned int i = 0; i < encoded_nelts; ++i) |
9246 | if (!CONSTANT_CLASS_P ((*builder)[i])) |
9247 | { |
9248 | gimple_seq seq = NULL; |
9249 | tree type = builder->type (); |
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
9253 | for (i = 0; i < nelts; ++i) |
9254 | CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i)); |
9255 | |
9256 | tree res; |
9257 | if (gimple_in_ssa_p (cfun)) |
	  res = make_ssa_name (type);
9259 | else |
9260 | res = create_tmp_reg (type); |
9261 | gimple *stmt = gimple_build_assign (res, build_constructor (type, v)); |
	gimple_set_location (stmt, loc);
9263 | gimple_seq_add_stmt_without_update (&seq, stmt); |
9264 | gimple_build_insert_seq (gsi, before, update, seq); |
9265 | return res; |
9266 | } |
9267 | return builder->build (); |
9268 | } |
9269 | |
/* Emit gimple statements at GSI, honoring BEFORE and UPDATE, that take
   the value given in OLD_SIZE and compute a value guaranteed to be
   rounded upwards to ALIGN.

   Return the tree node representing this size; it is of type TYPE.  */
9274 | |
9275 | tree |
9276 | gimple_build_round_up (gimple_stmt_iterator *gsi, |
9277 | bool before, gsi_iterator_update update, |
9278 | location_t loc, tree type, |
9279 | tree old_size, unsigned HOST_WIDE_INT align) |
9280 | { |
9281 | unsigned HOST_WIDE_INT tg_mask = align - 1; |
9282 | /* tree new_size = (old_size + tg_mask) & ~tg_mask; */ |
9283 | gcc_assert (INTEGRAL_TYPE_P (type)); |
9284 | tree tree_mask = build_int_cst (type, tg_mask); |
  tree oversize = gimple_build (gsi, before, update,
				loc, PLUS_EXPR, type, old_size, tree_mask);
9287 | |
9288 | tree mask = build_int_cst (type, -align); |
  return gimple_build (gsi, before, update,
		       loc, BIT_AND_EXPR, type, oversize, mask);
9291 | } |
9292 | |
9293 | /* Return true if the result of assignment STMT is known to be non-negative. |
9294 | If the return value is based on the assumption that signed overflow is |
9295 | undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change |
9296 | *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */ |
9297 | |
9298 | static bool |
9299 | gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p, |
9300 | int depth) |
9301 | { |
  enum tree_code code = gimple_assign_rhs_code (stmt);
9303 | tree type = TREE_TYPE (gimple_assign_lhs (stmt)); |
9304 | switch (get_gimple_rhs_class (code)) |
9305 | { |
9306 | case GIMPLE_UNARY_RHS: |
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
9311 | case GIMPLE_BINARY_RHS: |
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
9317 | case GIMPLE_TERNARY_RHS: |
9318 | return false; |
9319 | case GIMPLE_SINGLE_RHS: |
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
9322 | case GIMPLE_INVALID_RHS: |
9323 | break; |
9324 | } |
9325 | gcc_unreachable (); |
9326 | } |
9327 | |
9328 | /* Return true if return value of call STMT is known to be non-negative. |
9329 | If the return value is based on the assumption that signed overflow is |
9330 | undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change |
9331 | *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */ |
9332 | |
9333 | static bool |
9334 | gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p, |
9335 | int depth) |
9336 | { |
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;
  tree lhs = gimple_call_lhs (stmt);
9342 | return (lhs |
9343 | && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs), |
9344 | gimple_call_combined_fn (stmt), |
9345 | arg0, arg1, |
9346 | strict_overflow_p, depth)); |
9347 | } |
9348 | |
/* Return true if the result of PHI node STMT is known to be non-negative.
9350 | If the return value is based on the assumption that signed overflow is |
9351 | undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change |
9352 | *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */ |
9353 | |
9354 | static bool |
9355 | gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p, |
9356 | int depth) |
9357 | { |
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
9361 | if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1)) |
9362 | return false; |
9363 | } |
9364 | return true; |
9365 | } |
9366 | |
9367 | /* Return true if STMT is known to compute a non-negative value. |
9368 | If the return value is based on the assumption that signed overflow is |
9369 | undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change |
9370 | *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */ |
9371 | |
9372 | bool |
9373 | gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p, |
9374 | int depth) |
9375 | { |
  tree type = gimple_range_type (stmt);
9377 | if (type && frange::supports_p (type)) |
9378 | { |
9379 | frange r; |
9380 | bool sign; |
9381 | if (get_global_range_query ()->range_of_stmt (r, stmt) |
	  && r.signbit_p (sign))
9383 | return !sign; |
9384 | } |
  switch (gimple_code (stmt))
9386 | { |
9387 | case GIMPLE_ASSIGN: |
9388 | return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p, |
9389 | depth); |
9390 | case GIMPLE_CALL: |
9391 | return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p, |
9392 | depth); |
9393 | case GIMPLE_PHI: |
9394 | return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p, |
9395 | depth); |
9396 | default: |
9397 | return false; |
9398 | } |
9399 | } |
9400 | |
9401 | /* Return true if the floating-point value computed by assignment STMT |
9402 | is known to have an integer value. We also allow +Inf, -Inf and NaN |
9403 | to be considered integer values. Return false for signaling NaN. |
9404 | |
9405 | DEPTH is the current nesting depth of the query. */ |
9406 | |
9407 | static bool |
9408 | gimple_assign_integer_valued_real_p (gimple *stmt, int depth) |
9409 | { |
  enum tree_code code = gimple_assign_rhs_code (stmt);
9411 | switch (get_gimple_rhs_class (code)) |
9412 | { |
9413 | case GIMPLE_UNARY_RHS: |
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
9416 | case GIMPLE_BINARY_RHS: |
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
9420 | case GIMPLE_TERNARY_RHS: |
9421 | return false; |
9422 | case GIMPLE_SINGLE_RHS: |
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
9424 | case GIMPLE_INVALID_RHS: |
9425 | break; |
9426 | } |
9427 | gcc_unreachable (); |
9428 | } |
9429 | |
9430 | /* Return true if the floating-point value computed by call STMT is known |
9431 | to have an integer value. We also allow +Inf, -Inf and NaN to be |
9432 | considered integer values. Return false for signaling NaN. |
9433 | |
9434 | DEPTH is the current nesting depth of the query. */ |
9435 | |
9436 | static bool |
9437 | gimple_call_integer_valued_real_p (gimple *stmt, int depth) |
9438 | { |
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
9445 | return integer_valued_real_call_p (gimple_call_combined_fn (stmt), |
9446 | arg0, arg1, depth); |
9447 | } |
9448 | |
9449 | /* Return true if the floating-point result of phi STMT is known to have |
9450 | an integer value. We also allow +Inf, -Inf and NaN to be considered |
9451 | integer values. Return false for signaling NaN. |
9452 | |
9453 | DEPTH is the current nesting depth of the query. */ |
9454 | |
9455 | static bool |
9456 | gimple_phi_integer_valued_real_p (gimple *stmt, int depth) |
9457 | { |
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
9461 | if (!integer_valued_real_single_p (arg, depth + 1)) |
9462 | return false; |
9463 | } |
9464 | return true; |
9465 | } |
9466 | |
9467 | /* Return true if the floating-point value computed by STMT is known |
9468 | to have an integer value. We also allow +Inf, -Inf and NaN to be |
9469 | considered integer values. Return false for signaling NaN. |
9470 | |
9471 | DEPTH is the current nesting depth of the query. */ |
9472 | |
9473 | bool |
9474 | gimple_stmt_integer_valued_real_p (gimple *stmt, int depth) |
9475 | { |
  switch (gimple_code (stmt))
9477 | { |
9478 | case GIMPLE_ASSIGN: |
9479 | return gimple_assign_integer_valued_real_p (stmt, depth); |
9480 | case GIMPLE_CALL: |
9481 | return gimple_call_integer_valued_real_p (stmt, depth); |
9482 | case GIMPLE_PHI: |
9483 | return gimple_phi_integer_valued_real_p (stmt, depth); |
9484 | default: |
9485 | return false; |
9486 | } |
9487 | } |
9488 | |