1/* Alias analysis for trees.
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "backend.h"
25#include "target.h"
26#include "rtl.h"
27#include "tree.h"
28#include "gimple.h"
29#include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30#include "ssa.h"
31#include "cgraph.h"
32#include "tree-pretty-print.h"
33#include "alias.h"
34#include "fold-const.h"
35#include "langhooks.h"
36#include "dumpfile.h"
37#include "tree-eh.h"
38#include "tree-dfa.h"
39#include "ipa-reference.h"
40#include "varasm.h"
41#include "ipa-modref-tree.h"
42#include "ipa-modref.h"
43#include "attr-fnspec.h"
44#include "errors.h"
45#include "dbgcnt.h"
46#include "gimple-pretty-print.h"
47#include "print-tree.h"
48#include "tree-ssa-alias-compare.h"
49#include "builtins.h"
50#include "internal-fn.h"
51
52/* Broad overview of how alias analysis on gimple works:
53
54 Statements clobbering or using memory are linked through the
55 virtual operand factored use-def chain. The virtual operand
   is unique per function; its symbol is accessible via gimple_vop (cfun).
57 Virtual operands are used for efficiently walking memory statements
58 in the gimple IL and are useful for things like value-numbering as
59 a generation count for memory references.
60
61 SSA_NAME pointers may have associated points-to information
62 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
63 points-to information is (re-)computed by the TODO_rebuild_alias
64 pass manager todo. Points-to information is also used for more
65 precise tracking of call-clobbered and call-used variables and
66 related disambiguations.
67
68 This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.
70
71 The main alias-oracle entry-points are
72
73 bool stmt_may_clobber_ref_p (gimple *, tree)
74
75 This function queries if a statement may invalidate (parts of)
76 the memory designated by the reference tree argument.
77
78 bool ref_maybe_used_by_stmt_p (gimple *, tree)
79
80 This function queries if a statement may need (parts of) the
81 memory designated by the reference tree argument.
82
83 There are variants of these functions that only handle the call
84 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
85 Note that these do not disambiguate against a possible call lhs.
86
87 bool refs_may_alias_p (tree, tree)
88
89 This function tries to disambiguate two reference trees.
90
91 bool ptr_deref_may_alias_global_p (tree, bool)
92
93 This function queries if dereferencing a pointer variable may
     alias global memory.  If the bool argument is true, global memory
     is considered to also include function-local memory that escaped.
96
97 More low-level disambiguators are available and documented in
98 this file. Low-level disambiguators dealing with points-to
99 information are in tree-ssa-structalias.cc. */
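
/* As an illustration (a hypothetical caller, not part of this file), a pass
   that wants to know whether statement STMT interferes with a tracked
   memory reference REF would typically do

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       ...

   in which case STMT neither writes nor reads the memory designated by REF.
   For repeated queries it is cheaper to build an ao_ref once with
   ao_ref_init and use the ao_ref based variants of these predicates.  */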
100
101static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
102static bool nonoverlapping_component_refs_p (const_tree, const_tree);
103
104/* Query statistics for the different low-level disambiguators.
105 A high-level query may trigger multiple of them. */
106
107static struct {
108 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
109 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
110 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
111 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
112 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
113 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
114 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
115 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
116 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
117 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
118 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
119 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
120 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
121 unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
122 unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
123 unsigned HOST_WIDE_INT modref_use_may_alias;
124 unsigned HOST_WIDE_INT modref_use_no_alias;
125 unsigned HOST_WIDE_INT modref_clobber_may_alias;
126 unsigned HOST_WIDE_INT modref_clobber_no_alias;
127 unsigned HOST_WIDE_INT modref_kill_no;
128 unsigned HOST_WIDE_INT modref_kill_yes;
129 unsigned HOST_WIDE_INT modref_tests;
130 unsigned HOST_WIDE_INT modref_baseptr_tests;
131} alias_stats;
132
133void
134dump_alias_stats (FILE *s)
135{
136 fprintf (stream: s, format: "\nAlias oracle query stats:\n");
137 fprintf (stream: s, format: " refs_may_alias_p: "
138 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
139 HOST_WIDE_INT_PRINT_DEC" queries\n",
140 alias_stats.refs_may_alias_p_no_alias,
141 alias_stats.refs_may_alias_p_no_alias
142 + alias_stats.refs_may_alias_p_may_alias);
143 fprintf (stream: s, format: " ref_maybe_used_by_call_p: "
144 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
145 HOST_WIDE_INT_PRINT_DEC" queries\n",
146 alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
148 + alias_stats.ref_maybe_used_by_call_p_may_alias);
149 fprintf (stream: s, format: " call_may_clobber_ref_p: "
150 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
151 HOST_WIDE_INT_PRINT_DEC" queries\n",
152 alias_stats.call_may_clobber_ref_p_no_alias,
153 alias_stats.call_may_clobber_ref_p_no_alias
154 + alias_stats.call_may_clobber_ref_p_may_alias);
155 fprintf (stream: s, format: " stmt_kills_ref_p: "
156 HOST_WIDE_INT_PRINT_DEC" kills, "
157 HOST_WIDE_INT_PRINT_DEC" queries\n",
158 alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
159 alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
160 + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
161 fprintf (stream: s, format: " nonoverlapping_component_refs_p: "
162 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
163 HOST_WIDE_INT_PRINT_DEC" queries\n",
164 alias_stats.nonoverlapping_component_refs_p_no_alias,
165 alias_stats.nonoverlapping_component_refs_p_no_alias
166 + alias_stats.nonoverlapping_component_refs_p_may_alias);
167 fprintf (stream: s, format: " nonoverlapping_refs_since_match_p: "
168 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
169 HOST_WIDE_INT_PRINT_DEC" must overlaps, "
170 HOST_WIDE_INT_PRINT_DEC" queries\n",
171 alias_stats.nonoverlapping_refs_since_match_p_no_alias,
172 alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
173 alias_stats.nonoverlapping_refs_since_match_p_no_alias
174 + alias_stats.nonoverlapping_refs_since_match_p_may_alias
175 + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
176 fprintf (stream: s, format: " aliasing_component_refs_p: "
177 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
178 HOST_WIDE_INT_PRINT_DEC" queries\n",
179 alias_stats.aliasing_component_refs_p_no_alias,
180 alias_stats.aliasing_component_refs_p_no_alias
181 + alias_stats.aliasing_component_refs_p_may_alias);
182 dump_alias_stats_in_alias_c (s);
183 fprintf (stream: s, format: "\nModref stats:\n");
184 fprintf (stream: s, format: " modref kill: "
185 HOST_WIDE_INT_PRINT_DEC" kills, "
186 HOST_WIDE_INT_PRINT_DEC" queries\n",
187 alias_stats.modref_kill_yes,
188 alias_stats.modref_kill_yes
189 + alias_stats.modref_kill_no);
190 fprintf (stream: s, format: " modref use: "
191 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
192 HOST_WIDE_INT_PRINT_DEC" queries\n",
193 alias_stats.modref_use_no_alias,
194 alias_stats.modref_use_no_alias
195 + alias_stats.modref_use_may_alias);
196 fprintf (stream: s, format: " modref clobber: "
197 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
198 HOST_WIDE_INT_PRINT_DEC" queries\n"
199 " " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
200 " " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
201 alias_stats.modref_clobber_no_alias,
202 alias_stats.modref_clobber_no_alias
203 + alias_stats.modref_clobber_may_alias,
204 alias_stats.modref_tests,
205 ((double)alias_stats.modref_tests)
206 / (alias_stats.modref_clobber_no_alias
207 + alias_stats.modref_clobber_may_alias),
208 alias_stats.modref_baseptr_tests,
209 ((double)alias_stats.modref_baseptr_tests)
210 / (alias_stats.modref_clobber_no_alias
211 + alias_stats.modref_clobber_may_alias));
212}
213
214
/* Return true if dereferencing PTR may alias a global variable.
   When ESCAPED_LOCAL_P is true, escaped local memory is also considered
217 global. */
218
219bool
220ptr_deref_may_alias_global_p (tree ptr, bool escaped_local_p)
221{
222 struct ptr_info_def *pi;
223
  /* If we end up with a pointer constant here, it may point
     to global memory.  */
226 if (TREE_CODE (ptr) != SSA_NAME)
227 return true;
228
229 pi = SSA_NAME_PTR_INFO (ptr);
230
231 /* If we do not have points-to information for this variable,
232 we have to punt. */
233 if (!pi)
234 return true;
235
236 /* ??? This does not use TBAA to prune globals ptr may not access. */
237 return pt_solution_includes_global (&pi->pt, escaped_local_p);
238}
239
240/* Return true if dereferencing PTR may alias DECL.
241 The caller is responsible for applying TBAA to see if PTR
242 may access DECL at all. */
243
244static bool
245ptr_deref_may_alias_decl_p (tree ptr, tree decl)
246{
247 struct ptr_info_def *pi;
248
249 /* Conversions are irrelevant for points-to information and
250 data-dependence analysis can feed us those. */
251 STRIP_NOPS (ptr);
252
  /* Anything we do not explicitly handle aliases.  */
254 if ((TREE_CODE (ptr) != SSA_NAME
255 && TREE_CODE (ptr) != ADDR_EXPR
256 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
257 || !POINTER_TYPE_P (TREE_TYPE (ptr))
258 || (!VAR_P (decl)
259 && TREE_CODE (decl) != PARM_DECL
260 && TREE_CODE (decl) != RESULT_DECL))
261 return true;
262
263 /* Disregard pointer offsetting. */
264 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
265 {
266 do
267 {
268 ptr = TREE_OPERAND (ptr, 0);
269 }
270 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
271 return ptr_deref_may_alias_decl_p (ptr, decl);
272 }
273
274 /* ADDR_EXPR pointers either just offset another pointer or directly
275 specify the pointed-to set. */
276 if (TREE_CODE (ptr) == ADDR_EXPR)
277 {
278 tree base = get_base_address (TREE_OPERAND (ptr, 0));
279 if (base
280 && (TREE_CODE (base) == MEM_REF
281 || TREE_CODE (base) == TARGET_MEM_REF))
282 ptr = TREE_OPERAND (base, 0);
283 else if (base
284 && DECL_P (base))
285 return compare_base_decls (base, decl) != 0;
286 else if (base
287 && CONSTANT_CLASS_P (base))
288 return false;
289 else
290 return true;
291 }
292
293 /* Non-aliased variables cannot be pointed to. */
294 if (!may_be_aliased (var: decl))
295 return false;
296
297 /* If we do not have useful points-to information for this pointer
298 we cannot disambiguate anything else. */
299 pi = SSA_NAME_PTR_INFO (ptr);
300 if (!pi)
301 return true;
302
303 return pt_solution_includes (&pi->pt, decl);
304}
305
306/* Return true if dereferenced PTR1 and PTR2 may alias.
307 The caller is responsible for applying TBAA to see if accesses
308 through PTR1 and PTR2 may conflict at all. */
309
310bool
311ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
312{
313 struct ptr_info_def *pi1, *pi2;
314
315 /* Conversions are irrelevant for points-to information and
316 data-dependence analysis can feed us those. */
317 STRIP_NOPS (ptr1);
318 STRIP_NOPS (ptr2);
319
320 /* Disregard pointer offsetting. */
321 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
322 {
323 do
324 {
325 ptr1 = TREE_OPERAND (ptr1, 0);
326 }
327 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
328 return ptr_derefs_may_alias_p (ptr1, ptr2);
329 }
330 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
331 {
332 do
333 {
334 ptr2 = TREE_OPERAND (ptr2, 0);
335 }
336 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
337 return ptr_derefs_may_alias_p (ptr1, ptr2);
338 }
339
340 /* ADDR_EXPR pointers either just offset another pointer or directly
341 specify the pointed-to set. */
342 if (TREE_CODE (ptr1) == ADDR_EXPR)
343 {
344 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
345 if (base
346 && (TREE_CODE (base) == MEM_REF
347 || TREE_CODE (base) == TARGET_MEM_REF))
348 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
349 else if (base
350 && DECL_P (base))
351 return ptr_deref_may_alias_decl_p (ptr: ptr2, decl: base);
352 /* Try ptr2 when ptr1 points to a constant. */
353 else if (base
354 && !CONSTANT_CLASS_P (base))
355 return true;
356 }
357 if (TREE_CODE (ptr2) == ADDR_EXPR)
358 {
359 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
360 if (base
361 && (TREE_CODE (base) == MEM_REF
362 || TREE_CODE (base) == TARGET_MEM_REF))
363 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
364 else if (base
365 && DECL_P (base))
366 return ptr_deref_may_alias_decl_p (ptr: ptr1, decl: base);
367 else
368 return true;
369 }
370
371 /* From here we require SSA name pointers. Anything else aliases. */
372 if (TREE_CODE (ptr1) != SSA_NAME
373 || TREE_CODE (ptr2) != SSA_NAME
374 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
375 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
376 return true;
377
  /* We may end up with two empty points-to solutions for two identical
     pointers.  In this case we still want to say both pointers alias, so
     shortcut that here.  */
381 if (ptr1 == ptr2)
382 return true;
383
384 /* If we do not have useful points-to information for either pointer
385 we cannot disambiguate anything else. */
386 pi1 = SSA_NAME_PTR_INFO (ptr1);
387 pi2 = SSA_NAME_PTR_INFO (ptr2);
388 if (!pi1 || !pi2)
389 return true;
390
391 /* ??? This does not use TBAA to prune decls from the intersection
392 that not both pointers may access. */
393 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
394}
395
396/* Return true if dereferencing PTR may alias *REF.
397 The caller is responsible for applying TBAA to see if PTR
398 may access *REF at all. */
399
400static bool
401ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
402{
403 tree base = ao_ref_base (ref);
404
405 if (TREE_CODE (base) == MEM_REF
406 || TREE_CODE (base) == TARGET_MEM_REF)
407 return ptr_derefs_may_alias_p (ptr1: ptr, TREE_OPERAND (base, 0));
408 else if (DECL_P (base))
409 return ptr_deref_may_alias_decl_p (ptr, decl: base);
410
411 return true;
412}
413
414/* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
415
416bool
417ptrs_compare_unequal (tree ptr1, tree ptr2)
418{
419 /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs, which would need points-to adjustments to track them
     in the points-to sets.  */
424 tree obj1 = NULL_TREE;
425 tree obj2 = NULL_TREE;
426 if (TREE_CODE (ptr1) == ADDR_EXPR)
427 {
428 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
429 if (! tem)
430 return false;
431 if (VAR_P (tem)
432 || TREE_CODE (tem) == PARM_DECL
433 || TREE_CODE (tem) == RESULT_DECL)
434 obj1 = tem;
435 else if (TREE_CODE (tem) == MEM_REF)
436 ptr1 = TREE_OPERAND (tem, 0);
437 }
438 if (TREE_CODE (ptr2) == ADDR_EXPR)
439 {
440 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
441 if (! tem)
442 return false;
443 if (VAR_P (tem)
444 || TREE_CODE (tem) == PARM_DECL
445 || TREE_CODE (tem) == RESULT_DECL)
446 obj2 = tem;
447 else if (TREE_CODE (tem) == MEM_REF)
448 ptr2 = TREE_OPERAND (tem, 0);
449 }
450
451 /* Canonicalize ptr vs. object. */
452 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
453 {
454 std::swap (a&: ptr1, b&: ptr2);
455 std::swap (a&: obj1, b&: obj2);
456 }
457
458 if (obj1 && obj2)
459 /* Other code handles this correctly, no need to duplicate it here. */;
460 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
461 {
462 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
463 /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that the restrict-pointed-to
	 memory may in fact be obj1.  */
466 if (!pi
467 || pi->pt.vars_contains_restrict
468 || pi->pt.vars_contains_interposable)
469 return false;
470 if (VAR_P (obj1)
471 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
472 {
473 varpool_node *node = varpool_node::get (decl: obj1);
474 /* If obj1 may bind to NULL give up (see below). */
475 if (! node
476 || ! node->nonzero_address ()
477 || ! decl_binds_to_current_def_p (obj1))
478 return false;
479 }
480 return !pt_solution_includes (&pi->pt, obj1);
481 }
482
483 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
484 but those require pt.null to be conservatively correct. */
485
486 return false;
487}
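
/* As an illustrative example (not from the original sources): for

     int a;
     int *p = ...;

   where P is an SSA name whose points-to set provably does not contain A,
   ptrs_compare_unequal (&a, p) can return true, allowing the comparison
   &a == p to be folded to false.  For a static or external A the address
   must additionally be known to be non-NULL and to bind to the current
   definition.  */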
488
/* Returns whether a reference with base BASE may refer to global memory.
   When ESCAPED_LOCAL_P is true, escaped local memory is also considered
491 global. */
492
493static bool
494ref_may_alias_global_p_1 (tree base, bool escaped_local_p)
495{
496 if (DECL_P (base))
497 return (is_global_var (t: base)
498 || (escaped_local_p
499 && pt_solution_includes (&cfun->gimple_df->escaped_return,
500 base)));
501 else if (TREE_CODE (base) == MEM_REF
502 || TREE_CODE (base) == TARGET_MEM_REF)
503 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0),
504 escaped_local_p);
505 return true;
506}
507
508bool
509ref_may_alias_global_p (ao_ref *ref, bool escaped_local_p)
510{
511 tree base = ao_ref_base (ref);
512 return ref_may_alias_global_p_1 (base, escaped_local_p);
513}
514
515bool
516ref_may_alias_global_p (tree ref, bool escaped_local_p)
517{
518 tree base = get_base_address (t: ref);
519 return ref_may_alias_global_p_1 (base, escaped_local_p);
520}
521
/* Return true if STMT may clobber global memory.
523 When ESCAPED_LOCAL_P is true escaped local memory is also considered
524 global. */
525
526bool
527stmt_may_clobber_global_p (gimple *stmt, bool escaped_local_p)
528{
529 tree lhs;
530
531 if (!gimple_vdef (g: stmt))
532 return false;
533
  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
540 ??? We could make a NULL ao_ref argument to the various
541 predicates special, meaning any global memory. */
542
543 switch (gimple_code (g: stmt))
544 {
545 case GIMPLE_ASSIGN:
546 lhs = gimple_assign_lhs (gs: stmt);
547 return (TREE_CODE (lhs) != SSA_NAME
548 && ref_may_alias_global_p (ref: lhs, escaped_local_p));
549 case GIMPLE_CALL:
550 return true;
551 default:
552 return true;
553 }
554}
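
/* For instance (illustrative only): an assignment to a file-scope variable,
   say  g = x;  is reported as clobbering global memory; a store to a local
   variable is not, unless ESCAPED_LOCAL_P is true and the local has escaped;
   and any call statement with a virtual definition is conservatively
   reported as clobbering.  */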
555
556
557/* Dump alias information on FILE. */
558
559void
560dump_alias_info (FILE *file)
561{
562 unsigned i;
563 tree ptr;
564 const char *funcname
565 = lang_hooks.decl_printable_name (current_function_decl, 2);
566 tree var;
567
568 fprintf (stream: file, format: "\n\nAlias information for %s\n\n", funcname);
569
570 fprintf (stream: file, format: "Aliased symbols\n\n");
571
572 FOR_EACH_LOCAL_DECL (cfun, i, var)
573 {
574 if (may_be_aliased (var))
575 dump_variable (file, var);
576 }
577
578 fprintf (stream: file, format: "\nCall clobber information\n");
579
580 fprintf (stream: file, format: "\nESCAPED");
581 dump_points_to_solution (file, &cfun->gimple_df->escaped);
582
583 fprintf (stream: file, format: "\nESCAPED_RETURN");
584 dump_points_to_solution (file, &cfun->gimple_df->escaped_return);
585
586 fprintf (stream: file, format: "\n\nFlow-insensitive points-to information\n\n");
587
588 FOR_EACH_SSA_NAME (i, ptr, cfun)
589 {
590 struct ptr_info_def *pi;
591
592 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
593 || SSA_NAME_IN_FREE_LIST (ptr))
594 continue;
595
596 pi = SSA_NAME_PTR_INFO (ptr);
597 if (pi)
598 dump_points_to_info_for (file, ptr);
599 }
600
601 fprintf (stream: file, format: "\n");
602}
603
604
605/* Dump alias information on stderr. */
606
607DEBUG_FUNCTION void
608debug_alias_info (void)
609{
610 dump_alias_info (stderr);
611}
612
613
614/* Dump the points-to set *PT into FILE. */
615
616void
617dump_points_to_solution (FILE *file, struct pt_solution *pt)
618{
619 if (pt->anything)
620 fprintf (stream: file, format: ", points-to anything");
621
622 if (pt->nonlocal)
623 fprintf (stream: file, format: ", points-to non-local");
624
625 if (pt->escaped)
626 fprintf (stream: file, format: ", points-to escaped");
627
628 if (pt->ipa_escaped)
629 fprintf (stream: file, format: ", points-to unit escaped");
630
631 if (pt->null)
632 fprintf (stream: file, format: ", points-to NULL");
633
634 if (pt->vars)
635 {
636 fprintf (stream: file, format: ", points-to vars: ");
637 dump_decl_set (file, pt->vars);
638 if (pt->vars_contains_nonlocal
639 || pt->vars_contains_escaped
640 || pt->vars_contains_escaped_heap
641 || pt->vars_contains_restrict)
642 {
643 const char *comma = "";
644 fprintf (stream: file, format: " (");
645 if (pt->vars_contains_nonlocal)
646 {
647 fprintf (stream: file, format: "nonlocal");
648 comma = ", ";
649 }
650 if (pt->vars_contains_escaped)
651 {
652 fprintf (stream: file, format: "%sescaped", comma);
653 comma = ", ";
654 }
655 if (pt->vars_contains_escaped_heap)
656 {
657 fprintf (stream: file, format: "%sescaped heap", comma);
658 comma = ", ";
659 }
660 if (pt->vars_contains_restrict)
661 {
662 fprintf (stream: file, format: "%srestrict", comma);
663 comma = ", ";
664 }
665 if (pt->vars_contains_interposable)
666 fprintf (stream: file, format: "%sinterposable", comma);
667 fprintf (stream: file, format: ")");
668 }
669 }
670}
671
672
673/* Unified dump function for pt_solution. */
674
675DEBUG_FUNCTION void
676debug (pt_solution &ref)
677{
678 dump_points_to_solution (stderr, pt: &ref);
679}
680
681DEBUG_FUNCTION void
682debug (pt_solution *ptr)
683{
684 if (ptr)
685 debug (ref&: *ptr);
686 else
687 fprintf (stderr, format: "<nil>\n");
688}
689
690
691/* Dump points-to information for SSA_NAME PTR into FILE. */
692
693void
694dump_points_to_info_for (FILE *file, tree ptr)
695{
696 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
697
698 print_generic_expr (file, ptr, dump_flags);
699
700 if (pi)
701 dump_points_to_solution (file, pt: &pi->pt);
702 else
703 fprintf (stream: file, format: ", points-to anything");
704
705 fprintf (stream: file, format: "\n");
706}
707
708
709/* Dump points-to information for VAR into stderr. */
710
711DEBUG_FUNCTION void
712debug_points_to_info_for (tree var)
713{
714 dump_points_to_info_for (stderr, ptr: var);
715}
716
717
718/* Initializes the alias-oracle reference representation *R from REF. */
719
720void
721ao_ref_init (ao_ref *r, tree ref)
722{
723 r->ref = ref;
724 r->base = NULL_TREE;
725 r->offset = 0;
726 r->size = -1;
727 r->max_size = -1;
728 r->ref_alias_set = -1;
729 r->base_alias_set = -1;
730 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
731}
732
733/* Returns the base object of the memory reference *REF. */
734
735tree
736ao_ref_base (ao_ref *ref)
737{
738 bool reverse;
739
740 if (ref->base)
741 return ref->base;
742 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
743 &ref->max_size, &reverse);
744 return ref->base;
745}
746
747/* Returns the base object alias set of the memory reference *REF. */
748
749alias_set_type
750ao_ref_base_alias_set (ao_ref *ref)
751{
752 tree base_ref;
753 if (ref->base_alias_set != -1)
754 return ref->base_alias_set;
755 if (!ref->ref)
756 return 0;
757 base_ref = ref->ref;
758 if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
759 base_ref = TREE_OPERAND (base_ref, 0);
760 while (handled_component_p (t: base_ref))
761 base_ref = TREE_OPERAND (base_ref, 0);
762 ref->base_alias_set = get_alias_set (base_ref);
763 return ref->base_alias_set;
764}
765
766/* Returns the reference alias set of the memory reference *REF. */
767
768alias_set_type
769ao_ref_alias_set (ao_ref *ref)
770{
771 if (ref->ref_alias_set != -1)
772 return ref->ref_alias_set;
773 if (!ref->ref)
774 return 0;
775 ref->ref_alias_set = get_alias_set (ref->ref);
776 return ref->ref_alias_set;
777}
778
779/* Returns a type satisfying
780 get_deref_alias_set (type) == ao_ref_base_alias_set (REF). */
781
782tree
783ao_ref_base_alias_ptr_type (ao_ref *ref)
784{
785 tree base_ref;
786
787 if (!ref->ref)
788 return NULL_TREE;
789 base_ref = ref->ref;
790 if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
791 base_ref = TREE_OPERAND (base_ref, 0);
792 while (handled_component_p (t: base_ref))
793 base_ref = TREE_OPERAND (base_ref, 0);
794 tree ret = reference_alias_ptr_type (base_ref);
795 return ret;
796}
797
798/* Returns a type satisfying
799 get_deref_alias_set (type) == ao_ref_alias_set (REF). */
800
801tree
802ao_ref_alias_ptr_type (ao_ref *ref)
803{
804 if (!ref->ref)
805 return NULL_TREE;
806 tree ret = reference_alias_ptr_type (ref->ref);
807 return ret;
808}
809
810/* Return the alignment of the access *REF and store it in the *ALIGN
   and *BITPOS pair.  Returns false if no alignment could be determined.
812 See get_object_alignment_2 for details. */
813
814bool
815ao_ref_alignment (ao_ref *ref, unsigned int *align,
816 unsigned HOST_WIDE_INT *bitpos)
817{
818 if (ref->ref)
819 return get_object_alignment_1 (ref->ref, align, bitpos);
820
821 /* When we just have ref->base we cannot use get_object_alignment since
     that will eventually use the type of the apparent access while for
823 example ao_ref_init_from_ptr_and_range is not careful to adjust that. */
824 *align = BITS_PER_UNIT;
825 HOST_WIDE_INT offset;
826 if (!ref->offset.is_constant (const_value: &offset)
827 || !get_object_alignment_2 (ref->base, align, bitpos, true))
828 return false;
829 *bitpos += (unsigned HOST_WIDE_INT)offset * BITS_PER_UNIT;
830 *bitpos = *bitpos & (*align - 1);
831 return true;
832}
833
834/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a range specified by OFFSET, SIZE and MAX_SIZE, under the assumption
836 that RANGE_KNOWN is set.
837
   The access is assumed to be only to or after the pointer target adjusted
839 by the offset, not before it (even in the case RANGE_KNOWN is false). */
840
841void
842ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
843 bool range_known,
844 poly_int64 offset,
845 poly_int64 size,
846 poly_int64 max_size)
847{
848 poly_int64 t, extra_offset = 0;
849
850 ref->ref = NULL_TREE;
851 if (TREE_CODE (ptr) == SSA_NAME)
852 {
853 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
854 if (gimple_assign_single_p (gs: stmt)
855 && gimple_assign_rhs_code (gs: stmt) == ADDR_EXPR)
856 ptr = gimple_assign_rhs1 (gs: stmt);
857 else if (is_gimple_assign (gs: stmt)
858 && gimple_assign_rhs_code (gs: stmt) == POINTER_PLUS_EXPR
859 && ptrdiff_tree_p (gimple_assign_rhs2 (gs: stmt), &extra_offset))
860 {
861 ptr = gimple_assign_rhs1 (gs: stmt);
862 extra_offset *= BITS_PER_UNIT;
863 }
864 }
865
866 if (TREE_CODE (ptr) == ADDR_EXPR)
867 {
868 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
869 if (ref->base)
870 ref->offset = BITS_PER_UNIT * t;
871 else
872 {
873 range_known = false;
874 ref->offset = 0;
875 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
876 }
877 }
878 else
879 {
880 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
881 ref->base = build2 (MEM_REF, char_type_node,
882 ptr, null_pointer_node);
883 ref->offset = 0;
884 }
885 ref->offset += extra_offset + offset;
886 if (range_known)
887 {
888 ref->max_size = max_size;
889 ref->size = size;
890 }
891 else
892 ref->max_size = ref->size = -1;
893 ref->ref_alias_set = 0;
894 ref->base_alias_set = 0;
895 ref->volatile_p = false;
896}
897
898/* Init an alias-oracle reference representation from a gimple pointer
899 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
900 size is assumed to be unknown. The access is assumed to be only
   to or after the pointer target, not before it.  */
902
903void
904ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
905{
906 poly_int64 size_hwi;
907 if (size
908 && poly_int_tree_p (t: size, value: &size_hwi)
909 && coeffs_in_range_p (a: size_hwi, b: 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
910 {
911 size_hwi = size_hwi * BITS_PER_UNIT;
912 ao_ref_init_from_ptr_and_range (ref, ptr, range_known: true, offset: 0, size: size_hwi, max_size: size_hwi);
913 }
914 else
915 ao_ref_init_from_ptr_and_range (ref, ptr, range_known: false, offset: 0, size: -1, max_size: -1);
916}
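
/* A minimal usage sketch (hypothetical, not from the original sources):
   to test whether a memset-like destination PTR of SIZE bytes may conflict
   with an already initialized ao_ref REF, one could do

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, ptr, size);
     if (refs_may_alias_p (&dref, &ref))
       ...

   using the ao_ref overload of refs_may_alias_p.  */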
917
918/* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
919 Return -1 if S1 < S2
920 Return 1 if S1 > S2
921 Return 0 if equal or incomparable. */
922
923static int
924compare_sizes (tree s1, tree s2)
925{
926 if (!s1 || !s2)
927 return 0;
928
929 poly_uint64 size1;
930 poly_uint64 size2;
931
932 if (!poly_int_tree_p (t: s1, value: &size1) || !poly_int_tree_p (t: s2, value: &size2))
933 return 0;
934 if (known_lt (size1, size2))
935 return -1;
936 if (known_lt (size2, size1))
937 return 1;
938 return 0;
939}
940
/* Compare TYPE1 and TYPE2 by their sizes.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */
945
946static int
947compare_type_sizes (tree type1, tree type2)
948{
949 /* Be conservative for arrays and vectors. We want to support partial
950 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
951 while (TREE_CODE (type1) == ARRAY_TYPE
952 || VECTOR_TYPE_P (type1))
953 type1 = TREE_TYPE (type1);
954 while (TREE_CODE (type2) == ARRAY_TYPE
955 || VECTOR_TYPE_P (type2))
956 type2 = TREE_TYPE (type2);
957 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
958}
959
960/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
961 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
962 decide. */
963
964static inline int
965same_type_for_tbaa (tree type1, tree type2)
966{
967 type1 = TYPE_MAIN_VARIANT (type1);
968 type2 = TYPE_MAIN_VARIANT (type2);
969
970 /* Handle the most common case first. */
971 if (type1 == type2)
972 return 1;
973
974 /* If we would have to do structural comparison bail out. */
975 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
976 || TYPE_STRUCTURAL_EQUALITY_P (type2))
977 return -1;
978
979 /* Compare the canonical types. */
980 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
981 return 1;
982
983 /* ??? Array types are not properly unified in all cases as we have
984 spurious changes in the index types for example. Removing this
985 causes all sorts of problems with the Fortran frontend. */
986 if (TREE_CODE (type1) == ARRAY_TYPE
987 && TREE_CODE (type2) == ARRAY_TYPE)
988 return -1;
989
990 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
991 object of one of its constrained subtypes, e.g. when a function with an
992 unconstrained parameter passed by reference is called on an object and
993 inlined. But, even in the case of a fixed size, type and subtypes are
994 not equivalent enough as to share the same TYPE_CANONICAL, since this
995 would mean that conversions between them are useless, whereas they are
996 not (e.g. type and subtypes can have different modes). So, in the end,
997 they are only guaranteed to have the same alias set. */
998 alias_set_type set1 = get_alias_set (type1);
999 alias_set_type set2 = get_alias_set (type2);
1000 if (set1 == set2)
1001 return -1;
1002
1003 /* Pointers to void are considered compatible with all other pointers,
1004 so for two pointers see what the alias set resolution thinks. */
1005 if (POINTER_TYPE_P (type1)
1006 && POINTER_TYPE_P (type2)
1007 && alias_sets_conflict_p (set1, set2))
1008 return -1;
1009
1010 /* The types are known to be not equal. */
1011 return 0;
1012}
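
/* Illustrative examples (not from the original sources): int and const int
   share a main variant and compare as 1; int and float get different alias
   sets and compare as 0; two distinct array types are conservatively
   answered with -1, as is any query involving types that only have
   structural equality.  */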
1013
/* Return true if TYPE is a composite type (i.e. we may apply one of the
   handled components to it).  */
1016
1017static bool
1018type_has_components_p (tree type)
1019{
1020 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
1021 || TREE_CODE (type) == COMPLEX_TYPE;
1022}
1023
/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that the outermost arrays
   may just partly overlap.
1028
1029 Try to disambiguate using the access path starting from the match
1030 and return false if there is no conflict.
1031
1032 Helper for aliasing_component_refs_p. */
1033
1034static bool
1035aliasing_matching_component_refs_p (tree match1, tree ref1,
1036 poly_int64 offset1, poly_int64 max_size1,
1037 tree match2, tree ref2,
1038 poly_int64 offset2, poly_int64 max_size2,
1039 bool partial_overlap)
1040{
1041 poly_int64 offadj, sztmp, msztmp;
1042 bool reverse;
1043
1044 if (!partial_overlap)
1045 {
1046 get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
1047 offset2 -= offadj;
1048 get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
1049 offset1 -= offadj;
1050 if (!ranges_maybe_overlap_p (pos1: offset1, size1: max_size1, pos2: offset2, size2: max_size2))
1051 {
1052 ++alias_stats.aliasing_component_refs_p_no_alias;
1053 return false;
1054 }
1055 }
1056
1057 int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
1058 partial_overlap);
1059 if (cmp == 1
1060 || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
1061 {
1062 ++alias_stats.aliasing_component_refs_p_no_alias;
1063 return false;
1064 }
1065 ++alias_stats.aliasing_component_refs_p_may_alias;
1066 return true;
1067}
1068
/* Return true if REF is a reference to a zero-sized trailing array.  I.e.
1070 struct foo {int bar; int array[0];} *fooptr;
1071 fooptr->array. */
1072
1073static bool
1074component_ref_to_zero_sized_trailing_array_p (tree ref)
1075{
1076 return (TREE_CODE (ref) == COMPONENT_REF
1077 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
1078 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
1079 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
1080 && array_ref_flexible_size_p (ref));
1081}
1082
1083/* Worker for aliasing_component_refs_p. Most parameters match parameters of
1084 aliasing_component_refs_p.
1085
1086 Walk access path REF2 and try to find type matching TYPE1
1087 (which is a start of possibly aliasing access path REF1).
1088 If match is found, try to disambiguate.
1089
   Return 0 for successful disambiguation.
   Return 1 if a match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
   in the access path of REF2 and -1 if we are not sure.  */
1095
1096static int
1097aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
1098 poly_int64 offset1, poly_int64 max_size1,
1099 tree end_struct_ref1,
1100 tree ref2, tree base2,
1101 poly_int64 offset2, poly_int64 max_size2,
1102 bool *maybe_match)
1103{
1104 tree ref = ref2;
1105 int same_p = 0;
1106
1107 while (true)
1108 {
1109 /* We walk from inner type to the outer types. If type we see is
1110 already too large to be part of type1, terminate the search. */
1111 int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
1112
1113 if (cmp < 0
1114 && (!end_struct_ref1
1115 || compare_type_sizes (TREE_TYPE (end_struct_ref1),
1116 TREE_TYPE (ref)) < 0))
1117 break;
1118 /* If types may be of same size, see if we can decide about their
1119 equality. */
1120 if (cmp == 0)
1121 {
1122 same_p = same_type_for_tbaa (TREE_TYPE (ref), type2: type1);
1123 if (same_p == 1)
1124 break;
	  /* In case we can't decide whether the types are the same, try to
	     continue looking for an exact match.
	     Remember however that we possibly saw a match, in order
	     to bypass the access path continuation tests we do later.  */
1129 if (same_p == -1)
1130 *maybe_match = true;
1131 }
1132 if (!handled_component_p (t: ref))
1133 break;
1134 ref = TREE_OPERAND (ref, 0);
1135 }
1136 if (same_p == 1)
1137 {
1138 bool partial_overlap = false;
1139
      /* We assume that arrays can overlap by a multiple of their element
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happens only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
1145 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
1146 && (!TYPE_SIZE (TREE_TYPE (base1))
1147 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
1148 || ref == base2))
1149 {
1150 /* Setting maybe_match to true triggers
1151 nonoverlapping_component_refs_p test later that still may do
1152 useful disambiguation. */
1153 *maybe_match = true;
1154 partial_overlap = true;
1155 }
1156 return aliasing_matching_component_refs_p (match1: base1, ref1,
1157 offset1, max_size1,
1158 match2: ref, ref2,
1159 offset2, max_size2,
1160 partial_overlap);
1161 }
1162 return -1;
1163}
1164
1165/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed into a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
1170 a trailing array access after REF1 in the non-TBAA part of the access.
1171 REF1_ALIAS_SET is the alias set of REF1.
1172
1173 BASE_TYPE2 is type of base2. END_STRUCT_REF2 is non-NULL if there is
1174 a trailing array access in the TBAA part of access path2.
1175 BASE2_ALIAS_SET is the alias set of base2. */
1176
1177bool
1178access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
1179 alias_set_type ref1_alias_set,
1180 tree base_type2, tree end_struct_ref2,
1181 alias_set_type base2_alias_set)
1182{
  /* Access paths cannot continue past types with no components.  */
1184 if (!type_has_components_p (type: ref_type1))
1185 return false;
1186
  /* If the first access path ends with a type too small to hold the base of
     the second access path, the paths typically cannot continue.

     Punt if end_struct_past_end1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to a union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
1194 if (!end_struct_past_end1)
1195 {
1196 if (compare_type_sizes (type1: ref_type1, type2: base_type2) < 0)
1197 return false;
      /* If path2 contains a trailing array access, we can strengthen the
	 check to verify that the size of the element of the trailing array
	 fits as well.  In fact we could check for offset + type_size, but we
	 do not track offsets and this is quite a corner case.  */
1202 if (end_struct_ref2
1203 && compare_type_sizes (type1: ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
1204 return false;
1205 }
1206 return (base2_alias_set == ref1_alias_set
1207 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
1208}
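
/* As an illustration (not from the original sources): with

     struct A { int i; };
     struct B { struct A a; int k; };

   an access path whose TBAA part ends in a struct B reference may continue
   into a path based on struct A, since the alias set of A is a subset of
   the alias set of B, whereas a path ending in a plain int cannot continue
   at all because int has no components.  */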
1209
1210/* Determine if the two component references REF1 and REF2 which are
1211 based on access types TYPE1 and TYPE2 and of which at least one is based
1212 on an indirect reference may alias.
1213 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1214 are the respective alias sets. */
1215
1216static bool
1217aliasing_component_refs_p (tree ref1,
1218 alias_set_type ref1_alias_set,
1219 alias_set_type base1_alias_set,
1220 poly_int64 offset1, poly_int64 max_size1,
1221 tree ref2,
1222 alias_set_type ref2_alias_set,
1223 alias_set_type base2_alias_set,
1224 poly_int64 offset2, poly_int64 max_size2)
1225{
  /* If one reference is a component reference through pointers, try to find a
1227 common base and apply offset based disambiguation. This handles
1228 for example
1229 struct A { int i; int j; } *q;
1230 struct B { struct A a; int k; } *p;
1231 disambiguating q->i and p->a.j. */
1232 tree base1, base2;
1233 tree type1, type2;
1234 bool maybe_match = false;
1235 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
1236 bool end_struct_past_end1 = false;
1237 bool end_struct_past_end2 = false;
1238
1239 /* Choose bases and base types to search for.
1240 The access path is as follows:
1241 base....end_of_tbaa_ref...actual_ref
     At one place in the access path there may be a reference to a zero-sized
     or trailing array.

     We generally discard the segment after end_of_tbaa_ref; however,
     we need to be careful in case it contains a zero-sized or trailing array.
     These may happen after a reference to a union and in this case we must
     not disambiguate type punning scenarios.
1249
1250 We set:
1251 base1 to point to base
1252
1253 ref1 to point to end_of_tbaa_ref
1254
     end_struct_ref1 to point to the trailing reference (if it exists
     in the range base....end_of_tbaa_ref)
1257
1258 end_struct_past_end1 is true if this trailing reference occurs in
1259 end_of_tbaa_ref...actual_ref. */
1260 base1 = ref1;
1261 while (handled_component_p (t: base1))
1262 {
      /* Generally access paths are monotone in the size of the object.  The
1264 exception are trailing arrays of structures. I.e.
1265 struct a {int array[0];};
1266 or
1267 struct a {int array1[0]; int array[];};
1268 Such struct has size 0 but accesses to a.array may have non-zero size.
1269 In this case the size of TREE_TYPE (base1) is smaller than
1270 size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1271
1272 Because we compare sizes of arrays just by sizes of their elements,
1273 we only need to care about zero sized array fields here. */
1274 if (component_ref_to_zero_sized_trailing_array_p (ref: base1))
1275 {
1276 gcc_checking_assert (!end_struct_ref1);
1277 end_struct_ref1 = base1;
1278 }
1279 if (ends_tbaa_access_path_p (base1))
1280 {
1281 ref1 = TREE_OPERAND (base1, 0);
1282 if (end_struct_ref1)
1283 {
1284 end_struct_past_end1 = true;
1285 end_struct_ref1 = NULL;
1286 }
1287 }
1288 base1 = TREE_OPERAND (base1, 0);
1289 }
1290 type1 = TREE_TYPE (base1);
1291 base2 = ref2;
1292 while (handled_component_p (t: base2))
1293 {
1294 if (component_ref_to_zero_sized_trailing_array_p (ref: base2))
1295 {
1296 gcc_checking_assert (!end_struct_ref2);
1297 end_struct_ref2 = base2;
1298 }
1299 if (ends_tbaa_access_path_p (base2))
1300 {
1301 ref2 = TREE_OPERAND (base2, 0);
1302 if (end_struct_ref2)
1303 {
1304 end_struct_past_end2 = true;
1305 end_struct_ref2 = NULL;
1306 }
1307 }
1308 base2 = TREE_OPERAND (base2, 0);
1309 }
1310 type2 = TREE_TYPE (base2);
1311
1312 /* Now search for the type1 in the access path of ref2. This
1313 would be a common base for doing offset based disambiguation on.
1314 This however only makes sense if type2 is big enough to hold type1. */
1315 int cmp_outer = compare_type_sizes (type1: type2, type2: type1);
1316
1317 /* If type2 is big enough to contain type1 walk its access path.
     We also need to take care of arrays at the end of structs that may
     extend beyond the end of the structure.  If this occurs in the TBAA
     part of the access path, we need to consider the increased type as
     well.  */
1321 if (cmp_outer >= 0
1322 || (end_struct_ref2
1323 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type2: type1) >= 0))
1324 {
1325 int res = aliasing_component_refs_walk (ref1, type1, base1,
1326 offset1, max_size1,
1327 end_struct_ref1,
1328 ref2, base2, offset2, max_size2,
1329 maybe_match: &maybe_match);
1330 if (res != -1)
1331 return res;
1332 }
1333
1334 /* If we didn't find a common base, try the other way around. */
1335 if (cmp_outer <= 0
1336 || (end_struct_ref1
1337 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type2) <= 0))
1338 {
1339 int res = aliasing_component_refs_walk (ref1: ref2, type1: type2, base1: base2,
1340 offset1: offset2, max_size1: max_size2,
1341 end_struct_ref1: end_struct_ref2,
1342 ref2: ref1, base2: base1, offset2: offset1, max_size2: max_size1,
1343 maybe_match: &maybe_match);
1344 if (res != -1)
1345 return res;
1346 }
1347
  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be a
     continuation of another.  If we were not able to decide about
     equivalence, we need to give up.  */
1352 if (maybe_match)
1353 {
1354 if (!nonoverlapping_component_refs_p (ref1, ref2))
1355 {
1356 ++alias_stats.aliasing_component_refs_p_may_alias;
1357 return true;
1358 }
1359 ++alias_stats.aliasing_component_refs_p_no_alias;
1360 return false;
1361 }
1362
1363 if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
1364 ref1_alias_set,
1365 base_type2: type2, end_struct_ref2,
1366 base2_alias_set)
1367 || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end1: end_struct_past_end2,
1368 ref1_alias_set: ref2_alias_set,
1369 base_type2: type1, end_struct_ref2: end_struct_ref1,
1370 base2_alias_set: base1_alias_set))
1371 {
1372 ++alias_stats.aliasing_component_refs_p_may_alias;
1373 return true;
1374 }
1375 ++alias_stats.aliasing_component_refs_p_no_alias;
1376 return false;
1377}
1378
1379/* FIELD1 and FIELD2 are two fields of component refs. We assume
1380 that bases of both component refs are either equivalent or nonoverlapping.
1381 We do not assume that the containers of FIELD1 and FIELD2 are of the
1382 same type or size.
1383
   Return 0 if, in the case the base addresses of the component refs are
   the same, FIELD1 and FIELD2 have the same address as well.  Note that
   FIELD1 and FIELD2 may not be of the same type or size.
1387
1388 Return 1 if FIELD1 and FIELD2 are non-overlapping.
1389
1390 Return -1 otherwise.
1391
   The main difference between 0 and -1 is to let
   nonoverlapping_refs_since_match_p discover the semantically
   equivalent part of the access path.
1395
1396 Note that this function is used even with -fno-strict-aliasing
1397 and makes use of no TBAA assumptions. */
1398
1399static int
1400nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
1401{
1402 /* If both fields are of the same type, we could save hard work of
1403 comparing offsets. */
1404 tree type1 = DECL_CONTEXT (field1);
1405 tree type2 = DECL_CONTEXT (field2);
1406
1407 if (TREE_CODE (type1) == RECORD_TYPE
1408 && DECL_BIT_FIELD_REPRESENTATIVE (field1))
1409 field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
1410 if (TREE_CODE (type2) == RECORD_TYPE
1411 && DECL_BIT_FIELD_REPRESENTATIVE (field2))
1412 field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
1413
1414 /* ??? Bitfields can overlap at RTL level so punt on them.
1415 FIXME: RTL expansion should be fixed by adjusting the access path
1416 when producing MEM_ATTRs for MEMs which are wider than
1417 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1418 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1419 return -1;
1420
1421 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1422 if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
1423 return field1 != field2;
1424
  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
1430 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
1431 DECL_FIELD_OFFSET (field2))
1432 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
1433 DECL_FIELD_BIT_OFFSET (field2)))
1434 return 0;
1435
1436 /* Note that it may be possible to use component_ref_field_offset
1437 which would provide offsets as trees. However constructing and folding
1438 trees is expensive and does not seem to be worth the compile time
1439 cost. */
1440
1441 poly_uint64 offset1, offset2;
1442 poly_uint64 bit_offset1, bit_offset2;
1443
1444 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), value: &offset1)
1445 && poly_int_tree_p (DECL_FIELD_OFFSET (field2), value: &offset2)
1446 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), value: &bit_offset1)
1447 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), value: &bit_offset2))
1448 {
1449 offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
1450 offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
1451
1452 if (known_eq (offset1, offset2))
1453 return 0;
1454
1455 poly_uint64 size1, size2;
1456
1457 if (poly_int_tree_p (DECL_SIZE (field1), value: &size1)
1458 && poly_int_tree_p (DECL_SIZE (field2), value: &size2)
1459 && !ranges_maybe_overlap_p (pos1: offset1, size1, pos2: offset2, size2))
1460 return 1;
1461 }
1462 /* Resort to slower overlap checking by looking for matching types in
1463 the middle of access path. */
1464 return -1;
1465}
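
/* For example (illustrative only): given

     struct S { int a; int b; } *p, *q;

   the FIELD_DECLs accessed by p->a and q->b belong to the same RECORD_TYPE
   and differ, so the function returns 1 (no overlap provided the bases are
   equal or disjoint), while p->a and q->a return 0 (the same address
   whenever the bases are the same).  */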
1466
/* Return the low bound of an ARRAY_REF.  Do not produce new trees
   and thus do not care about the particular type of integer constants
   and placeholder exprs.  */
1470
1471static tree
1472cheap_array_ref_low_bound (tree ref)
1473{
1474 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
1475
  /* Avoid the expensive array_ref_low_bound.
     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE of
     the domain type, or it is zero.  */
1479 if (TREE_OPERAND (ref, 2))
1480 return TREE_OPERAND (ref, 2);
1481 else if (domain_type && TYPE_MIN_VALUE (domain_type))
1482 return TYPE_MIN_VALUE (domain_type);
1483 else
1484 return integer_zero_node;
1485}
1486
/* REF1 and REF2 are ARRAY_REFs whose bases either have the same address or
   are completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping, but if so the overlap again
   starts at the same address.
1493 Return -1 otherwise. */
1494
1495int
1496nonoverlapping_array_refs_p (tree ref1, tree ref2)
1497{
1498 tree index1 = TREE_OPERAND (ref1, 1);
1499 tree index2 = TREE_OPERAND (ref2, 1);
1500 tree low_bound1 = cheap_array_ref_low_bound (ref: ref1);
1501 tree low_bound2 = cheap_array_ref_low_bound (ref: ref2);
1502
1503 /* Handle zero offsets first: we do not need to match type size in this
1504 case. */
1505 if (operand_equal_p (index1, low_bound1, flags: 0)
1506 && operand_equal_p (index2, low_bound2, flags: 0))
1507 return 0;
1508
1509 /* If type sizes are different, give up.
1510
1511 Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes the size in the alignment units.
1513 Otherwise size is TYPE_SIZE of the element type.
1514 Handle only common cases where types are of the same "kind". */
1515 if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
1516 return -1;
1517
1518 tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
1519 tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
1520
1521 if (TREE_OPERAND (ref1, 3))
1522 {
1523 if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
1524 || !operand_equal_p (TREE_OPERAND (ref1, 3),
1525 TREE_OPERAND (ref2, 3), flags: 0))
1526 return -1;
1527 }
1528 else
1529 {
1530 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
1531 TYPE_SIZE_UNIT (elmt_type2), flags: 0))
1532 return -1;
1533 }
1534
1535 /* Since we know that type sizes are the same, there is no need to return
     -1 after this point.  Partial overlap cannot be introduced.  */
1537
1538 /* We may need to fold trees in this case.
1539 TODO: Handle integer constant case at least. */
1540 if (!operand_equal_p (low_bound1, low_bound2, flags: 0))
1541 return 0;
1542
1543 if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
1544 {
1545 if (tree_int_cst_equal (index1, index2))
1546 return 0;
1547 return 1;
1548 }
1549 /* TODO: We can use VRP to further disambiguate here. */
1550 return 0;
1551}
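
/* For example (illustrative only): with int a[10], the refs a[1] and a[2]
   have matching element sizes and different constant indexes, so the
   function returns 1; a[1] and a[1] return 0; and a[i] vs. a[j] also
   returns 0, since any overlap would again start at the same address.  */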
1552
1553/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1554 MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
   REF2 respectively, or NULL in the case we established equivalence of bases.
   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
   overlap by an exact multiple of their element size.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA; thus it is safe for !flag_strict_aliasing if
   the match was determined without use of the TBAA oracle.

   Return 1 if we can determine that the component references REF1 and REF2,
   which are within a common DECL, cannot overlap.

   Return 0 if the paths are the same and thus there is nothing more to
   disambiguate (i.e. there is must-alias assuming there is must-alias
   between MATCH1 and MATCH2).

   Return -1 if we cannot determine 0 or 1 - this happens when a
   non-matching type was met in the path.
   In this case it may make sense to continue by other disambiguation
   oracles.  */
1575
1576static int
1577nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1578 tree match2, tree ref2,
1579 bool partial_overlap)
1580{
1581 int ntbaa1 = 0, ntbaa2 = 0;
  /* Early return if there are no references to match; we do not need
     to walk the access paths.

     Do not consider this as may-alias for the stats - it is more useful
     to know how many disambiguations happened provided that
     the query was meaningful.  */
1588
1589 if (match1 == ref1 || !handled_component_p (t: ref1)
1590 || match2 == ref2 || !handled_component_p (t: ref2))
1591 return -1;
1592
1593 auto_vec<tree, 16> component_refs1;
1594 auto_vec<tree, 16> component_refs2;
1595
1596 /* Create the stack of handled components for REF1. */
1597 while (handled_component_p (t: ref1) && ref1 != match1)
1598 {
1599 /* We use TBAA only to re-synchronize after mismatched refs. So we
1600 do not need to truncate access path after TBAA part ends. */
1601 if (ends_tbaa_access_path_p (ref1))
1602 ntbaa1 = 0;
1603 else
1604 ntbaa1++;
1605 component_refs1.safe_push (obj: ref1);
1606 ref1 = TREE_OPERAND (ref1, 0);
1607 }
1608
1609 /* Create the stack of handled components for REF2. */
1610 while (handled_component_p (t: ref2) && ref2 != match2)
1611 {
1612 if (ends_tbaa_access_path_p (ref2))
1613 ntbaa2 = 0;
1614 else
1615 ntbaa2++;
1616 component_refs2.safe_push (obj: ref2);
1617 ref2 = TREE_OPERAND (ref2, 0);
1618 }
1619
1620 if (!flag_strict_aliasing)
1621 {
1622 ntbaa1 = 0;
1623 ntbaa2 = 0;
1624 }
1625
1626 bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1627 bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1628
  /* If only one of the access paths starts with a MEM_REF, check that the
     offset is 0 so the addresses stay the same after stripping it.
     TODO: In this case we may walk the other access path until we get the
     same offset.

     If both start with a MEM_REF, the offsets have to be the same.  */
1635 if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1636 || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1637 || (mem_ref1 && mem_ref2
1638 && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1639 TREE_OPERAND (ref2, 1))))
1640 {
1641 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1642 return -1;
1643 }
1644
1645 /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1646 to handle them here at all. */
1647 gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1648 && TREE_CODE (ref2) != TARGET_MEM_REF);
1649
1650 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1651 rank. This is sufficient because we start from the same DECL and you
1652 cannot reference several fields at a time with COMPONENT_REFs (unlike
1653 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1654 of them to access a sub-component, unless you're in a union, in which
1655 case the return value will precisely be false. */
1656 while (true)
1657 {
      /* Track whether we have seen an unmatched ref with non-zero offset.
	 In this case we must look for partial overlaps.  */
1660 bool seen_unmatched_ref_p = false;
1661
      /* First match ARRAY_REFs and try to disambiguate.  */
1663 if (!component_refs1.is_empty ()
1664 && !component_refs2.is_empty ())
1665 {
1666 unsigned int narray_refs1=0, narray_refs2=0;
1667
	  /* We generally assume that both access paths start with the same
	     sequence of refs.  However if the number of array refs is not
	     in sync, try to recover and pop elts until the numbers match.
	     This helps the case where one access path starts with an array
	     and the other with an element.  */
1672 for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1673 narray_refs1++)
1674 if (TREE_CODE (component_refs1 [component_refs1.length()
1675 - 1 - narray_refs1]) != ARRAY_REF)
1676 break;
1677
1678 for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1679 narray_refs2++)
1680 if (TREE_CODE (component_refs2 [component_refs2.length()
1681 - 1 - narray_refs2]) != ARRAY_REF)
1682 break;
1683 for (; narray_refs1 > narray_refs2; narray_refs1--)
1684 {
1685 ref1 = component_refs1.pop ();
1686 ntbaa1--;
1687
1688	      /* If the index is non-zero we need to check whether the reference
1689		 does not break the main invariant that bases are either
1690		 disjoint or equal. Consider the example:
1691
1692		 unsigned char out[][1];
1693		 out[1]="a";
1694		 out[i][0];
1695
1696		 Here the bases out and out are the same, but after removing the
1697		 [i] index this invariant no longer holds, because
1698		 out[i] points into the middle of the array out.
1699
1700		 TODO: If the size of the type of the skipped reference is an
1701		 integer multiple of the size of the type of the other reference,
1702		 this invariant can be verified, but even then it is not completely
1703		 safe with !flag_strict_aliasing if the other reference contains
1704		 unbounded array accesses.
1705 See */
1706
1707 if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1708 cheap_array_ref_low_bound (ref: ref1), flags: 0))
1709 return 0;
1710 }
1711 for (; narray_refs2 > narray_refs1; narray_refs2--)
1712 {
1713 ref2 = component_refs2.pop ();
1714 ntbaa2--;
1715 if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1716 cheap_array_ref_low_bound (ref: ref2), flags: 0))
1717 return 0;
1718 }
1719 /* Try to disambiguate matched arrays. */
1720 for (unsigned int i = 0; i < narray_refs1; i++)
1721 {
1722 int cmp = nonoverlapping_array_refs_p (ref1: component_refs1.pop (),
1723 ref2: component_refs2.pop ());
1724 ntbaa1--;
1725 ntbaa2--;
1726 if (cmp == 1 && !partial_overlap)
1727 {
1728 ++alias_stats
1729 .nonoverlapping_refs_since_match_p_no_alias;
1730 return 1;
1731 }
1732 if (cmp == -1)
1733 {
1734 seen_unmatched_ref_p = true;
1735		  /* We cannot maintain the invariant that bases are either
1736		     the same or completely disjoint. However we can still
1737		     recover via type-based alias analysis if we reach
1738		     references of the same size. We do not attempt to match
1739		     array sizes, so just finish the array walk and look for component refs.  */
1740 if (ntbaa1 < 0 || ntbaa2 < 0)
1741 {
1742 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1743 return -1;
1744 }
1745 for (i++; i < narray_refs1; i++)
1746 {
1747 component_refs1.pop ();
1748 component_refs2.pop ();
1749 ntbaa1--;
1750 ntbaa2--;
1751 }
1752 break;
1753 }
1754 partial_overlap = false;
1755 }
1756 }
1757
1758 /* Next look for component_refs. */
1759 do
1760 {
1761 if (component_refs1.is_empty ())
1762 {
1763 ++alias_stats
1764 .nonoverlapping_refs_since_match_p_must_overlap;
1765 return 0;
1766 }
1767 ref1 = component_refs1.pop ();
1768 ntbaa1--;
1769 if (TREE_CODE (ref1) != COMPONENT_REF)
1770 {
1771 seen_unmatched_ref_p = true;
1772 if (ntbaa1 < 0 || ntbaa2 < 0)
1773 {
1774 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1775 return -1;
1776 }
1777 }
1778 }
1779 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1780
1781 do
1782 {
1783 if (component_refs2.is_empty ())
1784 {
1785 ++alias_stats
1786 .nonoverlapping_refs_since_match_p_must_overlap;
1787 return 0;
1788 }
1789 ref2 = component_refs2.pop ();
1790 ntbaa2--;
1791 if (TREE_CODE (ref2) != COMPONENT_REF)
1792 {
1793 if (ntbaa1 < 0 || ntbaa2 < 0)
1794 {
1795 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1796 return -1;
1797 }
1798 seen_unmatched_ref_p = true;
1799 }
1800 }
1801 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1802
1803 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1804 earlier. */
1805 gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1806 && TREE_CODE (ref2) == COMPONENT_REF);
1807
1808 tree field1 = TREE_OPERAND (ref1, 1);
1809 tree field2 = TREE_OPERAND (ref2, 1);
1810
1811 /* ??? We cannot simply use the type of operand #0 of the refs here
1812 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1813 for common blocks instead of using unions like everyone else. */
1814 tree type1 = DECL_CONTEXT (field1);
1815 tree type2 = DECL_CONTEXT (field2);
1816
1817 partial_overlap = false;
1818
1819      /* If we skipped array refs on types of different sizes, we can
1820	 no longer be sure that there are no partial overlaps.  */
1821 if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1822 && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), flags: 0))
1823 {
1824 ++alias_stats
1825 .nonoverlapping_refs_since_match_p_may_alias;
1826 return -1;
1827 }
1828
1829 int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1830 if (cmp == -1)
1831 {
1832 ++alias_stats
1833 .nonoverlapping_refs_since_match_p_may_alias;
1834 return -1;
1835 }
1836 else if (cmp == 1)
1837 {
1838 ++alias_stats
1839 .nonoverlapping_refs_since_match_p_no_alias;
1840 return 1;
1841 }
1842 }
1843}
1844
1845/* Return a TYPE_UID that can be used to match record types we consider
1846   the same for TBAA purposes.  */
1847
1848static inline int
1849ncr_type_uid (const_tree field)
1850{
1851 /* ??? We cannot simply use the type of operand #0 of the refs here
1852 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1853 for common blocks instead of using unions like everyone else. */
1854 tree type = DECL_FIELD_CONTEXT (field);
1855  /* With LTO, types considered the same by same_type_for_tbaa_p
1856     but coming from different translation units may not have the same
1857     main variant. They do, however, have the same TYPE_CANONICAL.  */
1858 if (TYPE_CANONICAL (type))
1859 return TYPE_UID (TYPE_CANONICAL (type));
1860 return TYPE_UID (type);
1861}
1862
1863/* qsort compare function to sort FIELD_DECLs by the TYPE_UID of their
1864   DECL_FIELD_CONTEXT.  */
1865
1866static inline int
1867ncr_compar (const void *field1_, const void *field2_)
1868{
1869 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1870 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1871 unsigned int uid1 = ncr_type_uid (field: field1);
1872 unsigned int uid2 = ncr_type_uid (field: field2);
1873
1874 if (uid1 < uid2)
1875 return -1;
1876 else if (uid1 > uid2)
1877 return 1;
1878 return 0;
1879}
1880
1881/* Return true if we can determine that the fields referenced cannot
1882 overlap for any pair of objects. This relies on TBAA. */
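/* Purely illustrative example (not from the original sources): given

     struct S { int f; int g; };
     struct S *p, *q;

   the accesses p->f and q->g cannot overlap: under TBAA two objects of
   type S either coincide or are disjoint, and within a single S the
   fields f and g occupy disjoint ranges.  */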
1883
1884static bool
1885nonoverlapping_component_refs_p (const_tree x, const_tree y)
1886{
1887  /* Early return if we have nothing to do.
1888
1889     Do not consider this as may-alias for stats - it is more useful
1890     to have information about how many disambiguations happened provided
1891     that the query was meaningful.  */
1892 if (!flag_strict_aliasing
1893 || !x || !y
1894 || !handled_component_p (t: x)
1895 || !handled_component_p (t: y))
1896 return false;
1897
1898 auto_vec<const_tree, 16> fieldsx;
1899 while (handled_component_p (t: x))
1900 {
1901 if (TREE_CODE (x) == COMPONENT_REF)
1902 {
1903 tree field = TREE_OPERAND (x, 1);
1904 tree type = DECL_FIELD_CONTEXT (field);
1905 if (TREE_CODE (type) == RECORD_TYPE)
1906 fieldsx.safe_push (obj: field);
1907 }
1908 else if (ends_tbaa_access_path_p (x))
1909 fieldsx.truncate (size: 0);
1910 x = TREE_OPERAND (x, 0);
1911 }
1912 if (fieldsx.length () == 0)
1913 return false;
1914 auto_vec<const_tree, 16> fieldsy;
1915 while (handled_component_p (t: y))
1916 {
1917 if (TREE_CODE (y) == COMPONENT_REF)
1918 {
1919 tree field = TREE_OPERAND (y, 1);
1920 tree type = DECL_FIELD_CONTEXT (field);
1921 if (TREE_CODE (type) == RECORD_TYPE)
1922 fieldsy.safe_push (TREE_OPERAND (y, 1));
1923 }
1924 else if (ends_tbaa_access_path_p (y))
1925 fieldsy.truncate (size: 0);
1926 y = TREE_OPERAND (y, 0);
1927 }
1928 if (fieldsy.length () == 0)
1929 {
1930 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1931 return false;
1932 }
1933
1934 /* Most common case first. */
1935 if (fieldsx.length () == 1
1936 && fieldsy.length () == 1)
1937 {
1938 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1939 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1940 && nonoverlapping_component_refs_p_1 (field1: fieldsx[0], field2: fieldsy[0]) == 1)
1941 {
1942 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1943 return true;
1944 }
1945 else
1946 {
1947 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1948 return false;
1949 }
1950 }
1951
1952 if (fieldsx.length () == 2)
1953 {
1954 if (ncr_compar (field1_: &fieldsx[0], field2_: &fieldsx[1]) == 1)
1955 std::swap (a&: fieldsx[0], b&: fieldsx[1]);
1956 }
1957 else
1958 fieldsx.qsort (ncr_compar);
1959
1960 if (fieldsy.length () == 2)
1961 {
1962 if (ncr_compar (field1_: &fieldsy[0], field2_: &fieldsy[1]) == 1)
1963 std::swap (a&: fieldsy[0], b&: fieldsy[1]);
1964 }
1965 else
1966 fieldsy.qsort (ncr_compar);
1967
1968 unsigned i = 0, j = 0;
1969 do
1970 {
1971 const_tree fieldx = fieldsx[i];
1972 const_tree fieldy = fieldsy[j];
1973
1974      /* We're left with accessing different fields of a structure;
1975	 no possible overlap.  */
1976 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1977 DECL_FIELD_CONTEXT (fieldy)) == 1
1978 && nonoverlapping_component_refs_p_1 (field1: fieldx, field2: fieldy) == 1)
1979 {
1980 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1981 return true;
1982 }
1983
1984 if (ncr_type_uid (field: fieldx) < ncr_type_uid (field: fieldy))
1985 {
1986 i++;
1987 if (i == fieldsx.length ())
1988 break;
1989 }
1990 else
1991 {
1992 j++;
1993 if (j == fieldsy.length ())
1994 break;
1995 }
1996 }
1997 while (1);
1998
1999 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
2000 return false;
2001}
2002
2003
2004/* Return true if two memory references based on the variables BASE1
2005 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2006 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
2007 if non-NULL are the complete memory reference trees. */
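/* For illustration (an assumed example, not from the sources): with
   'int a, b;' references to a and b are based on different decls and
   never alias; with 'struct S { int f; int g; } s;' the references s.f
   and s.g share the base decl s but have non-overlapping offset ranges
   and therefore do not alias either.  */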
2008
2009static bool
2010decl_refs_may_alias_p (tree ref1, tree base1,
2011 poly_int64 offset1, poly_int64 max_size1,
2012 poly_int64 size1,
2013 tree ref2, tree base2,
2014 poly_int64 offset2, poly_int64 max_size2,
2015 poly_int64 size2)
2016{
2017 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
2018
2019  /* If the two references are based on different variables, they cannot alias.  */
2020 if (compare_base_decls (base1, base2) == 0)
2021 return false;
2022
2023 /* If both references are based on the same variable, they cannot alias if
2024 the accesses do not overlap. */
2025 if (!ranges_maybe_overlap_p (pos1: offset1, size1: max_size1, pos2: offset2, size2: max_size2))
2026 return false;
2027
2028  /* If there is a must-alias, there is no point in disambiguating further.  */
2029 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2030 return true;
2031
2032 /* For components with variable position, the above test isn't sufficient,
2033 so we disambiguate component references manually. */
2034 if (ref1 && ref2
2035 && handled_component_p (t: ref1) && handled_component_p (t: ref2)
2036 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, partial_overlap: false) == 1)
2037 return false;
2038
2039 return true;
2040}
2041
2042/* Return true if the access with base BASE is view-converted.
2043   BASE must not have its inner MEM_REF (&decl) stripped
2044   (which is what ao_ref_base does), so one extra walk
2045   of the handled components is needed.  */
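/* For illustration (an assumed example, not from the sources): a
   reference like MEM <int> [(struct S *)p_1] is view-converted because
   the access type (int) differs from the type the pointer claims to
   point to (struct S), while MEM <int> [(int *)p_1] is not.  */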
2046
2047static bool
2048view_converted_memref_p (tree base)
2049{
2050 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
2051 return false;
2052 return same_type_for_tbaa (TREE_TYPE (base),
2053 TREE_TYPE (TREE_OPERAND (base, 1))) != 1;
2054}
2055
2056/* Return true if an indirect reference based on *PTR1 constrained
2057 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2058 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2059 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2060 in which case they are computed on-demand. REF1 and REF2
2061 if non-NULL are the complete memory reference trees. */
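/* For illustration (an assumed example, not from the sources): given
   'int g;' and a parameter 'long *p', the access *p cannot alias g
   under -fstrict-aliasing because the alias sets of 'long' and 'int'
   do not conflict; independently of TBAA, *p cannot alias g if
   points-to analysis shows that p never points to g.  */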
2062
2063static bool
2064indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2065 poly_int64 offset1, poly_int64 max_size1,
2066 poly_int64 size1,
2067 alias_set_type ref1_alias_set,
2068 alias_set_type base1_alias_set,
2069 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2070 poly_int64 offset2, poly_int64 max_size2,
2071 poly_int64 size2,
2072 alias_set_type ref2_alias_set,
2073 alias_set_type base2_alias_set, bool tbaa_p)
2074{
2075 tree ptr1;
2076 tree ptrtype1, dbase2;
2077
2078 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2079 || TREE_CODE (base1) == TARGET_MEM_REF)
2080 && DECL_P (base2));
2081
2082 ptr1 = TREE_OPERAND (base1, 0);
2083 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2084
2085 /* If only one reference is based on a variable, they cannot alias if
2086 the pointer access is beyond the extent of the variable access.
2087 (the pointer base cannot validly point to an offset less than zero
2088 of the variable).
2089 ??? IVOPTs creates bases that do not honor this restriction,
2090 so do not apply this optimization for TARGET_MEM_REFs. */
2091 if (TREE_CODE (base1) != TARGET_MEM_REF
2092 && !ranges_maybe_overlap_p (pos1: offset1 + moff, size1: -1, pos2: offset2, size2: max_size2))
2093 return false;
2094
2095  /* If the pointer-based access is bigger than the variable, they cannot
2096     alias. This is similar to the check below where we use TBAA to
2097     increase the size of the pointer-based access based on the dynamic
2098     type of a containing object we can infer from it.  */
2099 poly_int64 dsize2;
2100 if (known_size_p (a: size1)
2101 && poly_int_tree_p (DECL_SIZE (base2), value: &dsize2)
2102 && known_lt (dsize2, size1))
2103 return false;
2104
2105 /* They also cannot alias if the pointer may not point to the decl. */
2106 if (!ptr_deref_may_alias_decl_p (ptr: ptr1, decl: base2))
2107 return false;
2108
2109 /* Disambiguations that rely on strict aliasing rules follow. */
2110 if (!flag_strict_aliasing || !tbaa_p)
2111 return true;
2112
2113 /* If the alias set for a pointer access is zero all bets are off. */
2114 if (base1_alias_set == 0 || base2_alias_set == 0)
2115 return true;
2116
2117 /* When we are trying to disambiguate an access with a pointer dereference
2118 as base versus one with a decl as base we can use both the size
2119 of the decl and its dynamic type for extra disambiguation.
2120 ??? We do not know anything about the dynamic type of the decl
2121 other than that its alias-set contains base2_alias_set as a subset
2122 which does not help us here. */
2123 /* As we know nothing useful about the dynamic type of the decl just
2124 use the usual conflict check rather than a subset test.
2125 ??? We could introduce -fvery-strict-aliasing when the language
2126 does not allow decls to have a dynamic type that differs from their
2127 static type. Then we can check
2128 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2129 if (base1_alias_set != base2_alias_set
2130 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2131 return false;
2132
2133 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2134
2135 /* If the size of the access relevant for TBAA through the pointer
2136 is bigger than the size of the decl we can't possibly access the
2137 decl via that pointer. */
2138 if (/* ??? This in turn may run afoul when a decl of type T which is
2139 a member of union type U is accessed through a pointer to
2140 type U and sizeof T is smaller than sizeof U. */
2141 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2142 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2143 && compare_sizes (DECL_SIZE (base2),
2144 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2145 return false;
2146
2147 if (!ref2)
2148 return true;
2149
2150 /* If the decl is accessed via a MEM_REF, reconstruct the base
2151 we can use for TBAA and an appropriately adjusted offset. */
2152 dbase2 = ref2;
2153 while (handled_component_p (t: dbase2))
2154 dbase2 = TREE_OPERAND (dbase2, 0);
2155 poly_int64 doffset1 = offset1;
2156 poly_offset_int doffset2 = offset2;
2157 if (TREE_CODE (dbase2) == MEM_REF
2158 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2159 {
2160 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2161 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2162      /* If the second reference is view-converted, give up now.  */
2163 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2164 return true;
2165 }
2166
2167  /* If the first reference is view-converted, give up now.  */
2168 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2169 return true;
2170
2171 /* If both references are through the same type, they do not alias
2172 if the accesses do not overlap. This does extra disambiguation
2173 for mixed/pointer accesses but requires strict aliasing.
2174 For MEM_REFs we require that the component-ref offset we computed
2175 is relative to the start of the type which we ensure by
2176 comparing rvalue and access type and disregarding the constant
2177 pointer offset.
2178
2179     But avoid treating variable-length arrays as "objects"; instead assume they
2180     can overlap by an exact multiple of their element size.
2181     See gcc.dg/torture/alias-2.c.  */
2182 if (((TREE_CODE (base1) != TARGET_MEM_REF
2183 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2184 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2185 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2186 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2187 {
2188 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2189 && (TYPE_SIZE (TREE_TYPE (base1))
2190 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2191 != INTEGER_CST));
2192 if (!partial_overlap
2193 && !ranges_maybe_overlap_p (pos1: doffset1, size1: max_size1, pos2: doffset2, size2: max_size2))
2194 return false;
2195 if (!ref1 || !ref2
2196	 /* If there is a must-alias, there is no point in disambiguating further.  */
2197 || (!partial_overlap
2198 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2199 return true;
2200 int res = nonoverlapping_refs_since_match_p (match1: base1, ref1, match2: base2, ref2,
2201 partial_overlap);
2202 if (res == -1)
2203 return !nonoverlapping_component_refs_p (x: ref1, y: ref2);
2204 return !res;
2205 }
2206
2207 /* Do access-path based disambiguation. */
2208 if (ref1 && ref2
2209 && (handled_component_p (t: ref1) || handled_component_p (t: ref2)))
2210 return aliasing_component_refs_p (ref1,
2211 ref1_alias_set, base1_alias_set,
2212 offset1, max_size1,
2213 ref2,
2214 ref2_alias_set, base2_alias_set,
2215 offset2, max_size2);
2216
2217 return true;
2218}
2219
2220/* Return true if two indirect references based on *PTR1
2221 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2222 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2223 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2224 in which case they are computed on-demand. REF1 and REF2
2225 if non-NULL are the complete memory reference trees. */
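/* For illustration (an assumed example, not from the sources): with
   parameters 'int *p, float *q' the accesses *p and *q do not alias
   under -fstrict-aliasing since the alias sets of int and float do not
   conflict; with 'int *p, *q' they may alias unless points-to
   information proves the pointers reference disjoint objects, or both
   accesses go through the same pointer with provably disjoint offsets.  */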
2226
2227static bool
2228indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2229 poly_int64 offset1, poly_int64 max_size1,
2230 poly_int64 size1,
2231 alias_set_type ref1_alias_set,
2232 alias_set_type base1_alias_set,
2233 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2234 poly_int64 offset2, poly_int64 max_size2,
2235 poly_int64 size2,
2236 alias_set_type ref2_alias_set,
2237 alias_set_type base2_alias_set, bool tbaa_p)
2238{
2239 tree ptr1;
2240 tree ptr2;
2241 tree ptrtype1, ptrtype2;
2242
2243 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2244 || TREE_CODE (base1) == TARGET_MEM_REF)
2245 && (TREE_CODE (base2) == MEM_REF
2246 || TREE_CODE (base2) == TARGET_MEM_REF));
2247
2248 ptr1 = TREE_OPERAND (base1, 0);
2249 ptr2 = TREE_OPERAND (base2, 0);
2250
2251 /* If both bases are based on pointers they cannot alias if they may not
2252 point to the same memory object or if they point to the same object
2253 and the accesses do not overlap. */
2254 if ((!cfun || gimple_in_ssa_p (cfun))
2255 && operand_equal_p (ptr1, ptr2, flags: 0)
2256 && (((TREE_CODE (base1) != TARGET_MEM_REF
2257 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2258 && (TREE_CODE (base2) != TARGET_MEM_REF
2259 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2260 || (TREE_CODE (base1) == TARGET_MEM_REF
2261 && TREE_CODE (base2) == TARGET_MEM_REF
2262 && (TMR_STEP (base1) == TMR_STEP (base2)
2263 || (TMR_STEP (base1) && TMR_STEP (base2)
2264 && operand_equal_p (TMR_STEP (base1),
2265 TMR_STEP (base2), flags: 0)))
2266 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2267 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2268 && operand_equal_p (TMR_INDEX (base1),
2269 TMR_INDEX (base2), flags: 0)))
2270 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2271 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2272 && operand_equal_p (TMR_INDEX2 (base1),
2273 TMR_INDEX2 (base2), flags: 0))))))
2274 {
2275 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2276 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2277 if (!ranges_maybe_overlap_p (pos1: offset1 + moff1, size1: max_size1,
2278 pos2: offset2 + moff2, size2: max_size2))
2279 return false;
2280      /* If there is a must-alias, there is no point in disambiguating further.  */
2281 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2282 return true;
2283 if (ref1 && ref2)
2284 {
2285 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2286 partial_overlap: false);
2287 if (res != -1)
2288 return !res;
2289 }
2290 }
2291 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2292 return false;
2293
2294 /* Disambiguations that rely on strict aliasing rules follow. */
2295 if (!flag_strict_aliasing || !tbaa_p)
2296 return true;
2297
2298 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2299 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2300
2301 /* If the alias set for a pointer access is zero all bets are off. */
2302 if (base1_alias_set == 0
2303 || base2_alias_set == 0)
2304 return true;
2305
2306 /* Do type-based disambiguation. */
2307 if (base1_alias_set != base2_alias_set
2308 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2309 return false;
2310
2311 /* If either reference is view-converted, give up now. */
2312 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2313 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2314 return true;
2315
2316 /* If both references are through the same type, they do not alias
2317 if the accesses do not overlap. This does extra disambiguation
2318 for mixed/pointer accesses but requires strict aliasing. */
2319 if ((TREE_CODE (base1) != TARGET_MEM_REF
2320 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2321 && (TREE_CODE (base2) != TARGET_MEM_REF
2322 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2323 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2324 TREE_TYPE (ptrtype2)) == 1)
2325 {
2326      /* But avoid treating arrays as "objects"; instead assume they
2327	 can overlap by an exact multiple of their element size.
2328	 See gcc.dg/torture/alias-2.c.  */
2329 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2330
2331 if (!partial_overlap
2332 && !ranges_maybe_overlap_p (pos1: offset1, size1: max_size1, pos2: offset2, size2: max_size2))
2333 return false;
2334 if (!ref1 || !ref2
2335 || (!partial_overlap
2336 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2337 return true;
2338 int res = nonoverlapping_refs_since_match_p (match1: base1, ref1, match2: base2, ref2,
2339 partial_overlap);
2340 if (res == -1)
2341 return !nonoverlapping_component_refs_p (x: ref1, y: ref2);
2342 return !res;
2343 }
2344
2345 /* Do access-path based disambiguation. */
2346 if (ref1 && ref2
2347 && (handled_component_p (t: ref1) || handled_component_p (t: ref2)))
2348 return aliasing_component_refs_p (ref1,
2349 ref1_alias_set, base1_alias_set,
2350 offset1, max_size1,
2351 ref2,
2352 ref2_alias_set, base2_alias_set,
2353 offset2, max_size2);
2354
2355 return true;
2356}
2357
2358/* Return true if the two memory references REF1 and REF2 may alias.  */
2359
2360static bool
2361refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2362{
2363 tree base1, base2;
2364 poly_int64 offset1 = 0, offset2 = 0;
2365 poly_int64 max_size1 = -1, max_size2 = -1;
2366 bool var1_p, var2_p, ind1_p, ind2_p;
2367
2368 gcc_checking_assert ((!ref1->ref
2369 || TREE_CODE (ref1->ref) == SSA_NAME
2370 || DECL_P (ref1->ref)
2371 || TREE_CODE (ref1->ref) == STRING_CST
2372 || handled_component_p (ref1->ref)
2373 || TREE_CODE (ref1->ref) == MEM_REF
2374 || TREE_CODE (ref1->ref) == TARGET_MEM_REF
2375 || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
2376 && (!ref2->ref
2377 || TREE_CODE (ref2->ref) == SSA_NAME
2378 || DECL_P (ref2->ref)
2379 || TREE_CODE (ref2->ref) == STRING_CST
2380 || handled_component_p (ref2->ref)
2381 || TREE_CODE (ref2->ref) == MEM_REF
2382 || TREE_CODE (ref2->ref) == TARGET_MEM_REF
2383 || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
2384
2385 /* Decompose the references into their base objects and the access. */
2386 base1 = ao_ref_base (ref: ref1);
2387 offset1 = ref1->offset;
2388 max_size1 = ref1->max_size;
2389 base2 = ao_ref_base (ref: ref2);
2390 offset2 = ref2->offset;
2391 max_size2 = ref2->max_size;
2392
2393 /* We can end up with registers or constants as bases for example from
2394 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2395 which is seen as a struct copy. */
2396 if (TREE_CODE (base1) == SSA_NAME
2397 || TREE_CODE (base1) == CONST_DECL
2398 || TREE_CODE (base1) == CONSTRUCTOR
2399 || TREE_CODE (base1) == ADDR_EXPR
2400 || CONSTANT_CLASS_P (base1)
2401 || TREE_CODE (base2) == SSA_NAME
2402 || TREE_CODE (base2) == CONST_DECL
2403 || TREE_CODE (base2) == CONSTRUCTOR
2404 || TREE_CODE (base2) == ADDR_EXPR
2405 || CONSTANT_CLASS_P (base2))
2406 return false;
2407
2408 /* Two volatile accesses always conflict. */
2409 if (ref1->volatile_p
2410 && ref2->volatile_p)
2411 return true;
2412
2413  /* refN->ref may convey size information; do not confuse our workers
2414     with that but strip it - ao_ref_base took it into account already.  */
2415 tree ref1ref = ref1->ref;
2416 if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
2417 ref1ref = TREE_OPERAND (ref1ref, 0);
2418 tree ref2ref = ref2->ref;
2419 if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
2420 ref2ref = TREE_OPERAND (ref2ref, 0);
2421
2422  /* Defer to simple offset-based disambiguation if we have
2423     references based on two decls. Do this before deferring to
2424     TBAA to handle must-alias cases in conformance with the
2425     GCC extension of allowing type-punning through unions.  */
2426 var1_p = DECL_P (base1);
2427 var2_p = DECL_P (base2);
2428 if (var1_p && var2_p)
2429 return decl_refs_may_alias_p (ref1: ref1ref, base1, offset1, max_size1,
2430 size1: ref1->size,
2431 ref2: ref2ref, base2, offset2, max_size2,
2432 size2: ref2->size);
2433
2434  /* We can end up referring to code via function and label decls.
2435     As we likely do not properly track code aliases, conservatively
2436     bail out.  */
2437 if (TREE_CODE (base1) == FUNCTION_DECL
2438 || TREE_CODE (base1) == LABEL_DECL
2439 || TREE_CODE (base2) == FUNCTION_DECL
2440 || TREE_CODE (base2) == LABEL_DECL)
2441 return true;
2442
2443  /* Handle restrict-based accesses.
2444     ??? ao_ref_base strips the inner MEM_REF [&decl]; recover from that
2445     here.  */
2446 tree rbase1 = base1;
2447 tree rbase2 = base2;
2448 if (var1_p)
2449 {
2450 rbase1 = ref1ref;
2451 if (rbase1)
2452 while (handled_component_p (t: rbase1))
2453 rbase1 = TREE_OPERAND (rbase1, 0);
2454 }
2455 if (var2_p)
2456 {
2457 rbase2 = ref2ref;
2458 if (rbase2)
2459 while (handled_component_p (t: rbase2))
2460 rbase2 = TREE_OPERAND (rbase2, 0);
2461 }
2462 if (rbase1 && rbase2
2463 && (TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
2464 && (TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
2465 /* If the accesses are in the same restrict clique... */
2466 && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
2467 /* But based on different pointers they do not alias. */
2468 && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
2469 return false;
2470
2471 ind1_p = (TREE_CODE (base1) == MEM_REF
2472 || TREE_CODE (base1) == TARGET_MEM_REF);
2473 ind2_p = (TREE_CODE (base2) == MEM_REF
2474 || TREE_CODE (base2) == TARGET_MEM_REF);
2475
2476 /* Canonicalize the pointer-vs-decl case. */
2477 if (ind1_p && var2_p)
2478 {
2479 std::swap (a&: offset1, b&: offset2);
2480 std::swap (a&: max_size1, b&: max_size2);
2481 std::swap (a&: base1, b&: base2);
2482 std::swap (a&: ref1, b&: ref2);
2483 std::swap (a&: ref1ref, b&: ref2ref);
2484 var1_p = true;
2485 ind1_p = false;
2486 var2_p = false;
2487 ind2_p = true;
2488 }
2489
2490 /* First defer to TBAA if possible. */
2491 if (tbaa_p
2492 && flag_strict_aliasing
2493 && !alias_sets_conflict_p (ao_ref_alias_set (ref: ref1),
2494 ao_ref_alias_set (ref: ref2)))
2495 return false;
2496
2497 /* If the reference is based on a pointer that points to memory
2498 that may not be written to then the other reference cannot possibly
2499 clobber it. */
2500 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2501 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2502 || (ind1_p
2503 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2504 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2505 return false;
2506
2507 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2508 if (var1_p && ind2_p)
2509 return indirect_ref_may_alias_decl_p (ref1: ref2ref, base1: base2,
2510 offset1: offset2, max_size1: max_size2, size1: ref2->size,
2511 ref1_alias_set: ao_ref_alias_set (ref: ref2),
2512 base1_alias_set: ao_ref_base_alias_set (ref: ref2),
2513 ref2: ref1ref, base2: base1,
2514 offset2: offset1, max_size2: max_size1, size2: ref1->size,
2515 ref2_alias_set: ao_ref_alias_set (ref: ref1),
2516 base2_alias_set: ao_ref_base_alias_set (ref: ref1),
2517 tbaa_p);
2518 else if (ind1_p && ind2_p)
2519 return indirect_refs_may_alias_p (ref1: ref1ref, base1,
2520 offset1, max_size1, size1: ref1->size,
2521 ref1_alias_set: ao_ref_alias_set (ref: ref1),
2522 base1_alias_set: ao_ref_base_alias_set (ref: ref1),
2523 ref2: ref2ref, base2,
2524 offset2, max_size2, size2: ref2->size,
2525 ref2_alias_set: ao_ref_alias_set (ref: ref2),
2526 base2_alias_set: ao_ref_base_alias_set (ref: ref2),
2527 tbaa_p);
2528
2529 gcc_unreachable ();
2530}
2531
2532/* Return true if the two memory references REF1 and REF2 may alias,
2533   and update statistics.  */
2534
2535bool
2536refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2537{
2538 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2539 if (res)
2540 ++alias_stats.refs_may_alias_p_may_alias;
2541 else
2542 ++alias_stats.refs_may_alias_p_no_alias;
2543 return res;
2544}
2545
2546static bool
2547refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2548{
2549 ao_ref r1;
2550 ao_ref_init (r: &r1, ref: ref1);
2551 return refs_may_alias_p_1 (ref1: &r1, ref2, tbaa_p);
2552}
2553
2554bool
2555refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2556{
2557 ao_ref r1, r2;
2558 ao_ref_init (r: &r1, ref: ref1);
2559 ao_ref_init (r: &r2, ref: ref2);
2560 return refs_may_alias_p_1 (ref1: &r1, ref2: &r2, tbaa_p);
2561}
2562
2563/* Returns true if there is an anti-dependence for the STORE that
2564   executes after the LOAD.  */
2565
2566bool
2567refs_anti_dependent_p (tree load, tree store)
2568{
2569 ao_ref r1, r2;
2570 ao_ref_init (r: &r1, ref: load);
2571 ao_ref_init (r: &r2, ref: store);
2572 return refs_may_alias_p_1 (ref1: &r1, ref2: &r2, tbaa_p: false);
2573}
2574
2575/* Returns true if there is an output dependence for the stores
2576   STORE1 and STORE2.  */
2577
2578bool
2579refs_output_dependent_p (tree store1, tree store2)
2580{
2581 ao_ref r1, r2;
2582 ao_ref_init (r: &r1, ref: store1);
2583 ao_ref_init (r: &r2, ref: store2);
2584 return refs_may_alias_p_1 (ref1: &r1, ref2: &r2, tbaa_p: false);
2585}
2586
2587/* Returns true if and only if REF may alias any access stored in TT.
2588   If TBAA_P is true, use the TBAA oracle.  */
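/* Illustrative sketch of a typical query (an assumed example, not from
   the sources): for a call foo (p) whose modref summary records a single
   store with alias set 'int' based on parameter 0, a query for a 'float'
   reference is rejected by the alias-set tests below, and an 'int'
   reference still does not conflict if points-to information shows that
   p cannot point into the queried object.  */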
2589
2590static bool
2591modref_may_conflict (const gcall *stmt,
2592 modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2593{
2594 alias_set_type base_set, ref_set;
2595 bool global_memory_ok = false;
2596
2597 if (tt->every_base)
2598 return true;
2599
2600 if (!dbg_cnt (index: ipa_mod_ref))
2601 return true;
2602
2603 base_set = ao_ref_base_alias_set (ref);
2604
2605 ref_set = ao_ref_alias_set (ref);
2606
2607 int num_tests = 0, max_tests = param_modref_max_tests;
2608 for (auto base_node : tt->bases)
2609 {
2610 if (tbaa_p && flag_strict_aliasing)
2611 {
2612 if (num_tests >= max_tests)
2613 return true;
2614 alias_stats.modref_tests++;
2615 if (!alias_sets_conflict_p (base_set, base_node->base))
2616 continue;
2617 num_tests++;
2618 }
2619
2620 if (base_node->every_ref)
2621 return true;
2622
2623 for (auto ref_node : base_node->refs)
2624 {
2625	  /* Do not repeat the same test as before.  */
2626 if ((ref_set != base_set || base_node->base != ref_node->ref)
2627 && tbaa_p && flag_strict_aliasing)
2628 {
2629 if (num_tests >= max_tests)
2630 return true;
2631 alias_stats.modref_tests++;
2632 if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2633 continue;
2634 num_tests++;
2635 }
2636
2637 if (ref_node->every_access)
2638 return true;
2639
2640	  /* TBAA checks did not disambiguate; try individual accesses.  */
2641 for (auto access_node : ref_node->accesses)
2642 {
2643 if (num_tests >= max_tests)
2644 return true;
2645
2646 if (access_node.parm_index == MODREF_GLOBAL_MEMORY_PARM)
2647 {
2648 if (global_memory_ok)
2649 continue;
2650 if (ref_may_alias_global_p (ref, escaped_local_p: true))
2651 return true;
2652 global_memory_ok = true;
2653 num_tests++;
2654 continue;
2655 }
2656
2657 tree arg = access_node.get_call_arg (stmt);
2658 if (!arg)
2659 return true;
2660
2661 alias_stats.modref_baseptr_tests++;
2662
2663 if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2664 continue;
2665
2666	      /* The PTA oracle will be unhappy if ARG is not a pointer.  */
2667 if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2668 return true;
2669
2670	      /* If we don't have a base pointer, give up.  */
2671 if (!ref->ref && !ref->base)
2672 continue;
2673
2674 ao_ref ref2;
2675 if (access_node.get_ao_ref (stmt, ref: &ref2))
2676 {
2677 ref2.ref_alias_set = ref_node->ref;
2678 ref2.base_alias_set = base_node->base;
2679 if (refs_may_alias_p_1 (ref1: &ref2, ref2: ref, tbaa_p))
2680 return true;
2681 }
2682 else if (ptr_deref_may_alias_ref_p_1 (ptr: arg, ref))
2683 return true;
2684
2685 num_tests++;
2686 }
2687 }
2688 }
2689 return false;
2690}
2691
2692/* Check if REF conflicts with a call using the "fn spec" attribute.
2693   If CLOBBER is true we are checking for writes, otherwise check for loads.
2694
2695   Return 0 if there are no conflicts (except for possible function call
2696   argument reads), 1 if there are conflicts, and -1 if we cannot decide by
2697   fn spec.  */
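/* Illustrative example (an assumption about a typical fnspec, not from
   the sources): for a call with memset-like semantics whose fnspec says
   argument 0 may be written with a size given by argument 2 and that no
   global memory is written, a clobber query conflicts with REF only if
   REF may alias the range [arg0, arg0 + arg2) built below via
   ao_ref_init_from_ptr_and_range.  */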
2698
2699static int
2700check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2701{
2702 attr_fnspec fnspec = gimple_call_fnspec (stmt: call);
2703 if (fnspec.known_p ())
2704 {
2705 if (clobber
2706 ? !fnspec.global_memory_written_p ()
2707 : !fnspec.global_memory_read_p ())
2708 {
2709 for (unsigned int i = 0; i < gimple_call_num_args (gs: call); i++)
2710 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2711 && (!fnspec.arg_specified_p (i)
2712 || (clobber ? fnspec.arg_maybe_written_p (i)
2713 : fnspec.arg_maybe_read_p (i))))
2714 {
2715 ao_ref dref;
2716 tree size = NULL_TREE;
2717 unsigned int size_arg;
2718
2719 if (!fnspec.arg_specified_p (i))
2720 ;
2721 else if (fnspec.arg_max_access_size_given_by_arg_p
2722 (i, arg: &size_arg))
2723 size = gimple_call_arg (gs: call, index: size_arg);
2724 else if (fnspec.arg_access_size_given_by_type_p (i))
2725 {
2726 tree callee = gimple_call_fndecl (gs: call);
2727 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2728
2729 for (unsigned int p = 0; p < i; p++)
2730 t = TREE_CHAIN (t);
2731 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2732 }
2733 poly_int64 size_hwi;
2734 if (size
2735 && poly_int_tree_p (t: size, value: &size_hwi)
2736 && coeffs_in_range_p (a: size_hwi, b: 0,
2737 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
2738 {
2739 size_hwi = size_hwi * BITS_PER_UNIT;
2740 ao_ref_init_from_ptr_and_range (ref: &dref,
2741 ptr: gimple_call_arg (gs: call, index: i),
2742 range_known: true, offset: 0, size: -1, max_size: size_hwi);
2743 }
2744 else
2745 ao_ref_init_from_ptr_and_range (ref: &dref,
2746 ptr: gimple_call_arg (gs: call, index: i),
2747 range_known: false, offset: 0, size: -1, max_size: -1);
2748 if (refs_may_alias_p_1 (ref1: &dref, ref2: ref, tbaa_p: false))
2749 return 1;
2750 }
2751 if (clobber
2752 && fnspec.errno_maybe_written_p ()
2753 && flag_errno_math
2754 && targetm.ref_may_alias_errno (ref))
2755 return 1;
2756 return 0;
2757 }
2758 }
2759
2760 /* FIXME: we should handle barriers more consistently, but for now leave the
2761 check here. */
2762 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2763 switch (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: call)))
2764 {
2765 /* __sync_* builtins and some OpenMP builtins act as threading
2766 barriers. */
2767#undef DEF_SYNC_BUILTIN
2768#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2769#include "sync-builtins.def"
2770#undef DEF_SYNC_BUILTIN
2771 case BUILT_IN_GOMP_ATOMIC_START:
2772 case BUILT_IN_GOMP_ATOMIC_END:
2773 case BUILT_IN_GOMP_BARRIER:
2774 case BUILT_IN_GOMP_BARRIER_CANCEL:
2775 case BUILT_IN_GOMP_TASKWAIT:
2776 case BUILT_IN_GOMP_TASKGROUP_END:
2777 case BUILT_IN_GOMP_CRITICAL_START:
2778 case BUILT_IN_GOMP_CRITICAL_END:
2779 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2780 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2781 case BUILT_IN_GOMP_LOOP_END:
2782 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2783 case BUILT_IN_GOMP_ORDERED_START:
2784 case BUILT_IN_GOMP_ORDERED_END:
2785 case BUILT_IN_GOMP_SECTIONS_END:
2786 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2787 case BUILT_IN_GOMP_SINGLE_COPY_START:
2788 case BUILT_IN_GOMP_SINGLE_COPY_END:
2789 return 1;
2790
2791 default:
2792 return -1;
2793 }
2794 return -1;
2795}
2796
2797/* If the call CALL may use the memory reference REF return true,
2798 otherwise return false. */
2799
2800static bool
2801ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2802{
2803 tree base, callee;
2804 unsigned i;
2805 int flags = gimple_call_flags (call);
2806
2807 if (flags & (ECF_CONST|ECF_NOVOPS))
2808 goto process_args;
2809
2810 /* A call that is not without side-effects might involve volatile
2811 accesses and thus conflicts with all other volatile accesses. */
2812 if (ref->volatile_p)
2813 return true;
2814
2815 if (gimple_call_internal_p (gs: call))
2816 switch (gimple_call_internal_fn (gs: call))
2817 {
2818 case IFN_MASK_STORE:
2819 case IFN_SCATTER_STORE:
2820 case IFN_MASK_SCATTER_STORE:
2821 case IFN_LEN_STORE:
2822 case IFN_MASK_LEN_STORE:
2823 return false;
2824 case IFN_MASK_STORE_LANES:
2825 case IFN_MASK_LEN_STORE_LANES:
2826 goto process_args;
2827 case IFN_MASK_LOAD:
2828 case IFN_LEN_LOAD:
2829 case IFN_MASK_LEN_LOAD:
2830 case IFN_MASK_LOAD_LANES:
2831 case IFN_MASK_LEN_LOAD_LANES:
2832 {
2833 ao_ref rhs_ref;
2834 tree lhs = gimple_call_lhs (gs: call);
2835 if (lhs)
2836 {
2837 ao_ref_init_from_ptr_and_size (ref: &rhs_ref,
2838 ptr: gimple_call_arg (gs: call, index: 0),
2839 TYPE_SIZE_UNIT (TREE_TYPE (lhs)));
2840 /* We cannot make this a known-size access since otherwise
2841 we disambiguate against refs to decls that are smaller. */
2842 rhs_ref.size = -1;
2843 rhs_ref.ref_alias_set = rhs_ref.base_alias_set
2844 = tbaa_p ? get_deref_alias_set (TREE_TYPE
2845 (gimple_call_arg (call, 1))) : 0;
2846 return refs_may_alias_p_1 (ref1: ref, ref2: &rhs_ref, tbaa_p);
2847 }
2848 break;
2849 }
2850 default:;
2851 }
2852
2853 callee = gimple_call_fndecl (gs: call);
2854 if (callee != NULL_TREE)
2855 {
2856 struct cgraph_node *node = cgraph_node::get (decl: callee);
2857      /* We cannot safely optimize based on the summary of the callee if it
2858	 does not always bind to the current def: it is possible that a memory
2859	 load was optimized out earlier and the interposed variant may not be
2860	 optimized this way.  */
2861 if (node && node->binds_to_current_def_p ())
2862 {
2863 modref_summary *summary = get_modref_function_summary (func: node);
2864 if (summary && !summary->calls_interposable)
2865 {
2866 if (!modref_may_conflict (stmt: call, tt: summary->loads, ref, tbaa_p))
2867 {
2868 alias_stats.modref_use_no_alias++;
2869 if (dump_file && (dump_flags & TDF_DETAILS))
2870 {
2871 fprintf (stream: dump_file,
2872 format: "ipa-modref: call stmt ");
2873 print_gimple_stmt (dump_file, call, 0);
2874 fprintf (stream: dump_file,
2875 format: "ipa-modref: call to %s does not use ",
2876 node->dump_name ());
2877 if (!ref->ref && ref->base)
2878 {
2879 fprintf (stream: dump_file, format: "base: ");
2880 print_generic_expr (dump_file, ref->base);
2881 }
2882 else if (ref->ref)
2883 {
2884 fprintf (stream: dump_file, format: "ref: ");
2885 print_generic_expr (dump_file, ref->ref);
2886 }
2887 fprintf (stream: dump_file, format: " alias sets: %i->%i\n",
2888 ao_ref_base_alias_set (ref),
2889 ao_ref_alias_set (ref));
2890 }
2891 goto process_args;
2892 }
2893 alias_stats.modref_use_may_alias++;
2894 }
2895 }
2896 }
2897
2898 base = ao_ref_base (ref);
2899 if (!base)
2900 return true;
2901
2902 /* If the reference is based on a decl that is not aliased the call
2903 cannot possibly use it. */
2904 if (DECL_P (base)
2905 && !may_be_aliased (var: base)
2906 /* But local statics can be used through recursion. */
2907 && !is_global_var (t: base))
2908 goto process_args;
2909
2910 if (int res = check_fnspec (call, ref, clobber: false))
2911 {
2912 if (res == 1)
2913 return true;
2914 }
2915 else
2916 goto process_args;
2917
2918 /* Check if base is a global static variable that is not read
2919 by the function. */
2920 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2921 {
2922 struct cgraph_node *node = cgraph_node::get (decl: callee);
2923 bitmap read;
2924 int id;
2925
2926 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2927 node yet. We should enforce that there are nodes for all decls in the
2928 IL and remove this check instead. */
2929 if (node
2930 && (id = ipa_reference_var_uid (t: base)) != -1
2931 && (read = ipa_reference_get_read_global (fn: node))
2932 && !bitmap_bit_p (read, id))
2933 goto process_args;
2934 }
2935
2936 /* Check if the base variable is call-used. */
2937 if (DECL_P (base))
2938 {
2939 if (pt_solution_includes (gimple_call_use_set (call_stmt: call), base))
2940 return true;
2941 }
2942 else if ((TREE_CODE (base) == MEM_REF
2943 || TREE_CODE (base) == TARGET_MEM_REF)
2944 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2945 {
2946 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2947 if (!pi)
2948 return true;
2949
2950 if (pt_solutions_intersect (gimple_call_use_set (call_stmt: call), &pi->pt))
2951 return true;
2952 }
2953 else
2954 return true;
2955
2956 /* Inspect call arguments for passed-by-value aliases. */
2957process_args:
2958 for (i = 0; i < gimple_call_num_args (gs: call); ++i)
2959 {
2960 tree op = gimple_call_arg (gs: call, index: i);
2961 int flags = gimple_call_arg_flags (call, i);
2962
2963 if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
2964 continue;
2965
2966 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2967 op = TREE_OPERAND (op, 0);
2968
2969 if (TREE_CODE (op) != SSA_NAME
2970 && !is_gimple_min_invariant (op))
2971 {
2972 ao_ref r;
2973 ao_ref_init (r: &r, ref: op);
2974 if (refs_may_alias_p_1 (ref1: &r, ref2: ref, tbaa_p))
2975 return true;
2976 }
2977 }
2978
2979 return false;
2980}
2981
2982static bool
2983ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2984{
2985 bool res;
2986 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2987 if (res)
2988 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2989 else
2990 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2991 return res;
2992}
2993
2994
2995/* If the statement STMT may use the memory reference REF return
2996 true, otherwise return false. */
2997
2998bool
2999ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
3000{
3001 if (is_gimple_assign (gs: stmt))
3002 {
3003 tree rhs;
3004
3005      /* All assign statements that reference memory are single assignments.  */
3006 if (!gimple_assign_single_p (gs: stmt))
3007 return false;
3008
3009 rhs = gimple_assign_rhs1 (gs: stmt);
3010 if (is_gimple_reg (rhs)
3011 || is_gimple_min_invariant (rhs)
3012 || gimple_assign_rhs_code (gs: stmt) == CONSTRUCTOR)
3013 return false;
3014
3015 return refs_may_alias_p (ref1: rhs, ref2: ref, tbaa_p);
3016 }
3017 else if (is_gimple_call (gs: stmt))
3018 return ref_maybe_used_by_call_p (call: as_a <gcall *> (p: stmt), ref, tbaa_p);
3019 else if (greturn *return_stmt = dyn_cast <greturn *> (p: stmt))
3020 {
3021 tree retval = gimple_return_retval (gs: return_stmt);
3022 if (retval
3023 && TREE_CODE (retval) != SSA_NAME
3024 && !is_gimple_min_invariant (retval)
3025 && refs_may_alias_p (ref1: retval, ref2: ref, tbaa_p))
3026 return true;
3027 /* If ref escapes the function then the return acts as a use. */
3028 tree base = ao_ref_base (ref);
3029 if (!base)
3030 ;
3031 else if (DECL_P (base))
3032 return is_global_var (t: base);
3033 else if (TREE_CODE (base) == MEM_REF
3034 || TREE_CODE (base) == TARGET_MEM_REF)
3035 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0), escaped_local_p: false);
3036 return false;
3037 }
3038
3039 return true;
3040}
3041
3042bool
3043ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
3044{
3045 ao_ref r;
3046 ao_ref_init (r: &r, ref);
3047 return ref_maybe_used_by_stmt_p (stmt, ref: &r, tbaa_p);
3048}
3049
3050/* If the call in statement CALL may clobber the memory reference REF
3051 return true, otherwise return false. */
3052
3053bool
3054call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
3055{
3056 tree base;
3057 tree callee;
3058
3059 /* If the call is pure or const it cannot clobber anything. */
3060 if (gimple_call_flags (call)
3061 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
3062 return false;
3063 if (gimple_call_internal_p (gs: call))
3064 switch (auto fn = gimple_call_internal_fn (gs: call))
3065 {
3066      /* Treat these internal calls like ECF_PURE for aliasing;
3067	 they don't write to any memory the program should care about.
3068	 They have other important side-effects, and read memory,
3069	 so they can't be ECF_NOVOPS.  */
3070 case IFN_UBSAN_NULL:
3071 case IFN_UBSAN_BOUNDS:
3072 case IFN_UBSAN_VPTR:
3073 case IFN_UBSAN_OBJECT_SIZE:
3074 case IFN_UBSAN_PTR:
3075 case IFN_ASAN_CHECK:
3076 return false;
3077 case IFN_MASK_STORE:
3078 case IFN_LEN_STORE:
3079 case IFN_MASK_LEN_STORE:
3080 case IFN_MASK_STORE_LANES:
3081 case IFN_MASK_LEN_STORE_LANES:
3082 {
3083 tree rhs = gimple_call_arg (gs: call,
3084 index: internal_fn_stored_value_index (fn));
3085 ao_ref lhs_ref;
3086 ao_ref_init_from_ptr_and_size (ref: &lhs_ref, ptr: gimple_call_arg (gs: call, index: 0),
3087 TYPE_SIZE_UNIT (TREE_TYPE (rhs)));
3088 /* We cannot make this a known-size access since otherwise
3089 we disambiguate against refs to decls that are smaller. */
3090 lhs_ref.size = -1;
3091 lhs_ref.ref_alias_set = lhs_ref.base_alias_set
3092 = tbaa_p ? get_deref_alias_set
3093 (TREE_TYPE (gimple_call_arg (call, 1))) : 0;
3094 return refs_may_alias_p_1 (ref1: ref, ref2: &lhs_ref, tbaa_p);
3095 }
3096 default:
3097 break;
3098 }
3099
3100 callee = gimple_call_fndecl (gs: call);
3101
3102 if (callee != NULL_TREE && !ref->volatile_p)
3103 {
3104 struct cgraph_node *node = cgraph_node::get (decl: callee);
3105 if (node)
3106 {
3107 modref_summary *summary = get_modref_function_summary (func: node);
3108 if (summary)
3109 {
3110 if (!modref_may_conflict (stmt: call, tt: summary->stores, ref, tbaa_p)
3111 && (!summary->writes_errno
3112 || !targetm.ref_may_alias_errno (ref)))
3113 {
3114 alias_stats.modref_clobber_no_alias++;
3115 if (dump_file && (dump_flags & TDF_DETAILS))
3116 {
3117 fprintf (stream: dump_file,
3118 format: "ipa-modref: call stmt ");
3119 print_gimple_stmt (dump_file, call, 0);
3120 fprintf (stream: dump_file,
3121 format: "ipa-modref: call to %s does not clobber ",
3122 node->dump_name ());
3123 if (!ref->ref && ref->base)
3124 {
3125 fprintf (stream: dump_file, format: "base: ");
3126 print_generic_expr (dump_file, ref->base);
3127 }
3128 else if (ref->ref)
3129 {
3130 fprintf (stream: dump_file, format: "ref: ");
3131 print_generic_expr (dump_file, ref->ref);
3132 }
3133 fprintf (stream: dump_file, format: " alias sets: %i->%i\n",
3134 ao_ref_base_alias_set (ref),
3135 ao_ref_alias_set (ref));
3136 }
3137 return false;
3138 }
3139 alias_stats.modref_clobber_may_alias++;
3140 }
3141 }
3142 }
3143
3144 base = ao_ref_base (ref);
3145 if (!base)
3146 return true;
3147
3148 if (TREE_CODE (base) == SSA_NAME
3149 || CONSTANT_CLASS_P (base))
3150 return false;
3151
3152 /* A call that is not without side-effects might involve volatile
3153 accesses and thus conflicts with all other volatile accesses. */
3154 if (ref->volatile_p)
3155 return true;
3156
3157 /* If the reference is based on a decl that is not aliased the call
3158 cannot possibly clobber it. */
3159 if (DECL_P (base)
3160 && !may_be_aliased (var: base)
3161 /* But local non-readonly statics can be modified through recursion
3162 or the call may implement a threading barrier which we must
3163 treat as may-def. */
3164 && (TREE_READONLY (base)
3165 || !is_global_var (t: base)))
3166 return false;
3167
3168 /* If the reference is based on a pointer that points to memory
3169 that may not be written to then the call cannot possibly clobber it. */
3170 if ((TREE_CODE (base) == MEM_REF
3171 || TREE_CODE (base) == TARGET_MEM_REF)
3172 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3173 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3174 return false;
3175
3176 if (int res = check_fnspec (call, ref, clobber: true))
3177 {
3178 if (res == 1)
3179 return true;
3180 }
3181 else
3182 return false;
3183
3184 /* Check if base is a global static variable that is not written
3185 by the function. */
3186 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3187 {
3188 struct cgraph_node *node = cgraph_node::get (decl: callee);
3189 bitmap written;
3190 int id;
3191
3192 if (node
3193 && (id = ipa_reference_var_uid (t: base)) != -1
3194 && (written = ipa_reference_get_written_global (fn: node))
3195 && !bitmap_bit_p (written, id))
3196 return false;
3197 }
3198
3199 /* Check if the base variable is call-clobbered. */
3200 if (DECL_P (base))
3201 return pt_solution_includes (gimple_call_clobber_set (call_stmt: call), base);
3202 else if ((TREE_CODE (base) == MEM_REF
3203 || TREE_CODE (base) == TARGET_MEM_REF)
3204 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3205 {
3206 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3207 if (!pi)
3208 return true;
3209
3210 return pt_solutions_intersect (gimple_call_clobber_set (call_stmt: call), &pi->pt);
3211 }
3212
3213 return true;
3214}
3215
3216/* If the call in statement CALL may clobber the memory reference REF
3217 return true, otherwise return false. */
3218
3219bool
3220call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3221{
3222 bool res;
3223 ao_ref r;
3224 ao_ref_init (r: &r, ref);
3225 res = call_may_clobber_ref_p_1 (call, ref: &r, tbaa_p);
3226 if (res)
3227 ++alias_stats.call_may_clobber_ref_p_may_alias;
3228 else
3229 ++alias_stats.call_may_clobber_ref_p_no_alias;
3230 return res;
3231}
3232
3233
3234/* If the statement STMT may clobber the memory reference REF return true,
3235 otherwise return false. */
3236
3237bool
3238stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3239{
3240 if (is_gimple_call (gs: stmt))
3241 {
3242 tree lhs = gimple_call_lhs (gs: stmt);
3243 if (lhs
3244 && TREE_CODE (lhs) != SSA_NAME)
3245 {
3246 ao_ref r;
3247 ao_ref_init (r: &r, ref: lhs);
3248 if (refs_may_alias_p_1 (ref1: ref, ref2: &r, tbaa_p))
3249 return true;
3250 }
3251
3252 return call_may_clobber_ref_p_1 (call: as_a <gcall *> (p: stmt), ref, tbaa_p);
3253 }
3254 else if (gimple_assign_single_p (gs: stmt))
3255 {
3256 tree lhs = gimple_assign_lhs (gs: stmt);
3257 if (TREE_CODE (lhs) != SSA_NAME)
3258 {
3259 ao_ref r;
3260 ao_ref_init (r: &r, ref: lhs);
3261 return refs_may_alias_p_1 (ref1: ref, ref2: &r, tbaa_p);
3262 }
3263 }
3264 else if (gimple_code (g: stmt) == GIMPLE_ASM)
3265 return true;
3266
3267 return false;
3268}
3269
3270bool
3271stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3272{
3273 ao_ref r;
3274 ao_ref_init (r: &r, ref);
3275 return stmt_may_clobber_ref_p_1 (stmt, ref: &r, tbaa_p);
3276}
3277
3278/* Return true if store1 and store2 described by corresponding tuples
3279 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3280 address. */
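/* Illustrative example (not from the sources): for

     struct S s;
     struct S *p = &s;   // points-to of p is the singleton {s}
     *p = ...;           // store 1: base MEM[p], offset 0
     s = ...;            // store 2: base s, offset 0

   both stores have offset 0 and a size equal to the size of s, so they
   are known to store to the same address.  */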
3281
3282static bool
3283same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3284 poly_int64 max_size1,
3285 tree base2, poly_int64 offset2, poly_int64 size2,
3286 poly_int64 max_size2)
3287{
3288 /* Offsets need to be 0. */
3289 if (maybe_ne (a: offset1, b: 0)
3290 || maybe_ne (a: offset2, b: 0))
3291 return false;
3292
3293 bool base1_obj_p = SSA_VAR_P (base1);
3294 bool base2_obj_p = SSA_VAR_P (base2);
3295
3296 /* We need one object. */
3297 if (base1_obj_p == base2_obj_p)
3298 return false;
3299 tree obj = base1_obj_p ? base1 : base2;
3300
3301 /* And we need one MEM_REF. */
3302 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3303 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3304 if (base1_memref_p == base2_memref_p)
3305 return false;
3306 tree memref = base1_memref_p ? base1 : base2;
3307
3308 /* Sizes need to be valid. */
3309 if (!known_size_p (a: max_size1)
3310 || !known_size_p (a: max_size2)
3311 || !known_size_p (a: size1)
3312 || !known_size_p (a: size2))
3313 return false;
3314
3315 /* Max_size needs to match size. */
3316 if (maybe_ne (a: max_size1, b: size1)
3317 || maybe_ne (a: max_size2, b: size2))
3318 return false;
3319
3320 /* Sizes need to match. */
3321 if (maybe_ne (a: size1, b: size2))
3322 return false;
3323
3324
3325  /* Check that memref is a store through a pointer with singleton points-to info.  */
3326 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3327 return false;
3328 tree ptr = TREE_OPERAND (memref, 0);
3329 if (TREE_CODE (ptr) != SSA_NAME)
3330 return false;
3331 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3332 unsigned int pt_uid;
3333 if (pi == NULL
3334 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3335 return false;
3336
3337 /* Be conservative with non-call exceptions when the address might
3338 be NULL. */
3339 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3340 return false;
3341
3342 /* Check that ptr points relative to obj. */
3343 unsigned int obj_uid = DECL_PT_UID (obj);
3344 if (obj_uid != pt_uid)
3345 return false;
3346
3347  /* Check that the object size is the same as the store size. That ensures
3348     that ptr points to the start of obj.  */
3349 return (DECL_SIZE (obj)
3350 && poly_int_tree_p (DECL_SIZE (obj))
3351 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3352}
3353
3354/* Return true if REF is killed by a store described by
3355   BASE, OFFSET, SIZE and MAX_SIZE.  */
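/* Illustrative example (not from the sources): a store described by
   <s, 0, 128, 128> (all of a 16-byte struct s) kills a reference to a
   field of s described by <s, 32, 32, 32>, because the store size
   equals its max size and the referenced range [32, 64) is a subrange
   of the stored range [0, 128) (all values in bits).  */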
3356
3357static bool
3358store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
3359 poly_int64 max_size, ao_ref *ref)
3360{
3361 poly_int64 ref_offset = ref->offset;
3362 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3363 so base == ref->base does not always hold. */
3364 if (base != ref->base)
3365 {
3366 /* Try using points-to info. */
3367 if (same_addr_size_stores_p (base1: base, offset1: offset, size1: size, max_size1: max_size, base2: ref->base,
3368 offset2: ref->offset, size2: ref->size, max_size2: ref->max_size))
3369 return true;
3370
3371      /* If both base and ref->base are MEM_REFs, only compare the
3372	 first operand, and if the second operands aren't equal constants,
3373	 try to add the offsets into offset and ref_offset.  */
3374 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3375 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3376 {
3377 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3378 TREE_OPERAND (ref->base, 1)))
3379 {
3380 poly_offset_int off1 = mem_ref_offset (base);
3381 off1 <<= LOG2_BITS_PER_UNIT;
3382 off1 += offset;
3383 poly_offset_int off2 = mem_ref_offset (ref->base);
3384 off2 <<= LOG2_BITS_PER_UNIT;
3385 off2 += ref_offset;
3386 if (!off1.to_shwi (r: &offset) || !off2.to_shwi (r: &ref_offset))
3387 size = -1;
3388 }
3389 }
3390 else
3391 size = -1;
3392 }
3393 /* For a must-alias check we need to be able to constrain
3394 the access properly. */
3395 return (known_eq (size, max_size)
3396 && known_subrange_p (pos1: ref_offset, size1: ref->max_size, pos2: offset, size2: size));
3397}
3398
3399/* If STMT kills the memory reference REF return true, otherwise
3400 return false. */
3401
3402bool
3403stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3404{
3405 if (!ao_ref_base (ref))
3406 return false;
3407
3408 if (gimple_has_lhs (stmt)
3409 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3410 /* The assignment is not necessarily carried out if it can throw
3411 and we can catch it in the current function where we could inspect
3412 the previous value. Similarly if the function can throw externally
3413 and the ref does not die on the function return.
3414 ??? We only need to care about the RHS throwing. For aggregate
3415 assignments or similar calls and non-call exceptions the LHS
3416 might throw as well.
3417 ??? We also should care about possible longjmp, but since we
3418 do not understand that longjmp is not using global memory we will
3419 not consider a kill here since the function call will be considered
3420 as possibly using REF. */
3421 && !stmt_can_throw_internal (cfun, stmt)
3422 && (!stmt_can_throw_external (cfun, stmt)
3423 || !ref_may_alias_global_p (ref, escaped_local_p: false)))
3424 {
3425 tree lhs = gimple_get_lhs (stmt);
3426 /* If LHS is literally a base of the access we are done. */
3427 if (ref->ref)
3428 {
3429 tree base = ref->ref;
3430 tree innermost_dropped_array_ref = NULL_TREE;
3431 if (handled_component_p (t: base))
3432 {
3433 tree saved_lhs0 = NULL_TREE;
3434 if (handled_component_p (t: lhs))
3435 {
3436 saved_lhs0 = TREE_OPERAND (lhs, 0);
3437 TREE_OPERAND (lhs, 0) = integer_zero_node;
3438 }
3439 do
3440 {
3441 /* Just compare the outermost handled component, if
3442 they are equal we have found a possible common
3443 base. */
3444 tree saved_base0 = TREE_OPERAND (base, 0);
3445 TREE_OPERAND (base, 0) = integer_zero_node;
3446 bool res = operand_equal_p (lhs, base, flags: 0);
3447 TREE_OPERAND (base, 0) = saved_base0;
3448 if (res)
3449 break;
3450 /* Remember if we drop an array-ref that we need to
3451 double-check not being at struct end. */
3452 if (TREE_CODE (base) == ARRAY_REF
3453 || TREE_CODE (base) == ARRAY_RANGE_REF)
3454 innermost_dropped_array_ref = base;
3455 /* Otherwise drop handled components of the access. */
3456 base = saved_base0;
3457 }
3458 while (handled_component_p (t: base));
3459 if (saved_lhs0)
3460 TREE_OPERAND (lhs, 0) = saved_lhs0;
3461 }
3462 /* Finally check if the lhs has the same address and size as the
3463 base candidate of the access. Watch out if we have dropped
3464 an array-ref that might have flexible size, this means ref->ref
3465 may be outside of the TYPE_SIZE of its base. */
3466 if ((! innermost_dropped_array_ref
3467 || ! array_ref_flexible_size_p (innermost_dropped_array_ref))
3468 && (lhs == base
3469 || (((TYPE_SIZE (TREE_TYPE (lhs))
3470 == TYPE_SIZE (TREE_TYPE (base)))
3471 || (TYPE_SIZE (TREE_TYPE (lhs))
3472 && TYPE_SIZE (TREE_TYPE (base))
3473 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3474 TYPE_SIZE (TREE_TYPE (base)),
3475 flags: 0)))
3476 && operand_equal_p (lhs, base,
3477 flags: OEP_ADDRESS_OF
3478 | OEP_MATCH_SIDE_EFFECTS))))
3479 {
3480 ++alias_stats.stmt_kills_ref_p_yes;
3481 return true;
3482 }
3483 }
3484
3485 /* Now look for non-literal equal bases with the restriction of
3486 handling constant offset and size. */
3487 /* For a must-alias check we need to be able to constrain
3488 the access properly. */
3489 if (!ref->max_size_known_p ())
3490 {
3491 ++alias_stats.stmt_kills_ref_p_no;
3492 return false;
3493 }
3494 poly_int64 size, offset, max_size;
3495 bool reverse;
3496 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3497 &reverse);
3498 if (store_kills_ref_p (base, offset, size, max_size, ref))
3499 {
3500 ++alias_stats.stmt_kills_ref_p_yes;
3501 return true;
3502 }
3503 }
3504
3505 if (is_gimple_call (gs: stmt))
3506 {
3507 tree callee = gimple_call_fndecl (gs: stmt);
3508 struct cgraph_node *node;
3509 modref_summary *summary;
3510
3511 /* Try to disambiguate using modref summary. Modref records a vector
3512 of stores with known offsets relative to function parameters that must
3513 happen every execution of function. Find if we have a matching
3514 store and verify that function can not use the value. */
3515 if (callee != NULL_TREE
3516 && (node = cgraph_node::get (decl: callee)) != NULL
3517 && node->binds_to_current_def_p ()
3518 && (summary = get_modref_function_summary (func: node)) != NULL
3519 && summary->kills.length ()
3520 /* Check that we can not trap while evaulating function
3521 parameters. This check is overly conservative. */
3522 && (!cfun->can_throw_non_call_exceptions
3523 || (!stmt_can_throw_internal (cfun, stmt)
3524 && (!stmt_can_throw_external (cfun, stmt)
3525 || !ref_may_alias_global_p (ref, escaped_local_p: false)))))
3526 {
3527 for (auto kill : summary->kills)
3528 {
3529 ao_ref dref;
3530
3531 /* We only can do useful compares if we know the access range
3532 precisely. */
3533 if (!kill.get_ao_ref (stmt: as_a <gcall *> (p: stmt), ref: &dref))
3534 continue;
3535 if (store_kills_ref_p (base: ao_ref_base (ref: &dref), offset: dref.offset,
3536 size: dref.size, max_size: dref.max_size, ref))
3537 {
3538 /* For store to be killed it needs to not be used
3539 earlier. */
3540 if (ref_maybe_used_by_call_p_1 (call: as_a <gcall *> (p: stmt), ref,
3541 tbaa_p: true)
3542 || !dbg_cnt (index: ipa_mod_ref))
3543 break;
3544 if (dump_file && (dump_flags & TDF_DETAILS))
3545 {
3546 fprintf (stream: dump_file,
3547 format: "ipa-modref: call stmt ");
3548 print_gimple_stmt (dump_file, stmt, 0);
3549 fprintf (stream: dump_file,
3550 format: "ipa-modref: call to %s kills ",
3551 node->dump_name ());
3552 print_generic_expr (dump_file, ref->base);
3553 fprintf (stream: dump_file, format: "\n");
3554 }
3555 ++alias_stats.modref_kill_yes;
3556 return true;
3557 }
3558 }
3559 ++alias_stats.modref_kill_no;
3560 }
3561 if (callee != NULL_TREE
3562 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3563 switch (DECL_FUNCTION_CODE (decl: callee))
3564 {
3565 case BUILT_IN_FREE:
3566 {
3567 tree ptr = gimple_call_arg (gs: stmt, index: 0);
3568 tree base = ao_ref_base (ref);
3569 if (base && TREE_CODE (base) == MEM_REF
3570 && TREE_OPERAND (base, 0) == ptr)
3571 {
3572 ++alias_stats.stmt_kills_ref_p_yes;
3573 return true;
3574 }
3575 break;
3576 }
3577
3578 case BUILT_IN_MEMCPY:
3579 case BUILT_IN_MEMPCPY:
3580 case BUILT_IN_MEMMOVE:
3581 case BUILT_IN_MEMSET:
3582 case BUILT_IN_MEMCPY_CHK:
3583 case BUILT_IN_MEMPCPY_CHK:
3584 case BUILT_IN_MEMMOVE_CHK:
3585 case BUILT_IN_MEMSET_CHK:
3586 case BUILT_IN_STRNCPY:
3587 case BUILT_IN_STPNCPY:
3588 case BUILT_IN_CALLOC:
3589 {
3590 /* For a must-alias check we need to be able to constrain
3591 the access properly. */
3592 if (!ref->max_size_known_p ())
3593 {
3594 ++alias_stats.stmt_kills_ref_p_no;
3595 return false;
3596 }
3597 tree dest;
3598 tree len;
3599
3600 /* In execution order a calloc call will never kill
3601 anything. However, DSE will (ab)use this interface
3602 to ask if a calloc call writes the same memory locations
3603 as a later assignment, memset, etc. So handle calloc
3604 in the expected way. */
3605 if (DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_CALLOC)
3606 {
3607 tree arg0 = gimple_call_arg (gs: stmt, index: 0);
3608 tree arg1 = gimple_call_arg (gs: stmt, index: 1);
3609 if (TREE_CODE (arg0) != INTEGER_CST
3610 || TREE_CODE (arg1) != INTEGER_CST)
3611 {
3612 ++alias_stats.stmt_kills_ref_p_no;
3613 return false;
3614 }
3615
3616 dest = gimple_call_lhs (gs: stmt);
3617 if (!dest)
3618 {
3619 ++alias_stats.stmt_kills_ref_p_no;
3620 return false;
3621 }
3622 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3623 }
3624 else
3625 {
3626 dest = gimple_call_arg (gs: stmt, index: 0);
3627 len = gimple_call_arg (gs: stmt, index: 2);
3628 }
3629 if (!poly_int_tree_p (t: len))
3630 return false;
3631 ao_ref dref;
3632 ao_ref_init_from_ptr_and_size (ref: &dref, ptr: dest, size: len);
3633 if (store_kills_ref_p (base: ao_ref_base (ref: &dref), offset: dref.offset,
3634 size: dref.size, max_size: dref.max_size, ref))
3635 {
3636 ++alias_stats.stmt_kills_ref_p_yes;
3637 return true;
3638 }
3639 break;
3640 }
3641
3642 case BUILT_IN_VA_END:
3643 {
3644 tree ptr = gimple_call_arg (gs: stmt, index: 0);
3645 if (TREE_CODE (ptr) == ADDR_EXPR)
3646 {
3647 tree base = ao_ref_base (ref);
3648 if (TREE_OPERAND (ptr, 0) == base)
3649 {
3650 ++alias_stats.stmt_kills_ref_p_yes;
3651 return true;
3652 }
3653 }
3654 break;
3655 }
3656
3657 default:;
3658 }
3659 }
3660 ++alias_stats.stmt_kills_ref_p_no;
3661 return false;
3662}
3663
3664bool
3665stmt_kills_ref_p (gimple *stmt, tree ref)
3666{
3667 ao_ref r;
3668 ao_ref_init (r: &r, ref);
3669 return stmt_kills_ref_p (stmt, ref: &r);
3670}
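
/* A minimal usage sketch (hypothetical, not from a real caller): a
   DSE-style client that wants to know whether LATER_STMT overwrites
   everything stored to EARLIER_LHS could ask

     ao_ref r;
     ao_ref_init (&r, earlier_lhs);
     if (stmt_kills_ref_p (later_stmt, &r))
       ;  /-* the earlier store is dead with respect to LATER_STMT *-/

   where earlier_lhs and later_stmt are variables of the caller.  */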


/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF, in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
		  ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
		  bitmap *visited, bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *, translate_flags *),
		  translate_flags disambiguate_only,
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    {
      *visited = BITMAP_ALLOC (NULL);
      bitmap_tree_view (*visited);
    }

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* If we are searching for the target VUSE by walking up to
	 TARGET_BB dominating the original PHI we are finished once
	 we reach a default def or a definition in a block dominating
	 that block.  Update TARGET and return.  */
      if (!target
	  && (gimple_nop_p (def_stmt)
	      || dominated_by_p (CDI_DOMINATORS,
				 target_bb, gimple_bb (def_stmt))))
	{
	  target = vuse;
	  return true;
	}

      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					   visited, abort_on_visited,
					   translate, data, disambiguate_only);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends the walk
	     with failure.  */
	  if ((int)limit <= 0)
	    return false;
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      translate_flags tf = disambiguate_only;
	      if (translate
		  && (*translate) (ref, vuse, data, &tf) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}


/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows the walk of statements
   dominating PHI to continue, skipping only statements that cannot possibly
   clobber REF.  Decrements LIMIT for each alias disambiguation done
   and aborts the walk, returning NULL_TREE, if it reaches zero.
   Returns NULL_TREE if no suitable virtual operand can be found.  */
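
/* As an illustration (a made-up fragment, not from a real dump): given a
   PHI merging two paths,

     # .MEM_5 = PHI <.MEM_2, .MEM_4>

   where .MEM_4 is defined by a store "b = 1" on one incoming path and
   the definition of .MEM_2 dominates the PHI, a query for *p_1 can
   continue from .MEM_2 provided the store to b can be disambiguated
   against *p_1 (costing one unit of LIMIT).  */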

tree
get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
			  unsigned int &limit, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *,
					     translate_flags *),
			  void *data,
			  translate_flags disambiguate_only)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* A single-argument PHI can simply be looked through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  basic_block phi_bb = gimple_bb (phi);
  tree arg0, arg1;
  unsigned i;

  /* Find a candidate for the virtual operand whose definition
     dominates those of all others.  */
  /* First look if any of the args themselves satisfy this.  */
  for (i = 0; i < nargs; ++i)
    {
      arg0 = PHI_ARG_DEF (phi, i);
      if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	break;
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
      if (def_bb != phi_bb
	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
	break;
      arg0 = NULL_TREE;
    }
  /* If not, look if we can reach such a candidate by walking defs
     until we hit the immediate dominator.  maybe_skip_until will
     do that for us.  */
  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);

  /* Then check against the (to be) found candidate.  */
  for (i = 0; i < nargs; ++i)
    {
      arg1 = PHI_ARG_DEF (phi, i);
      if (arg1 == arg0)
	;
      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
				   limit, visited,
				   abort_on_visited,
				   translate,
				   /* Do not valueize when walking over
				      backedges.  */
				   dominated_by_p
				     (CDI_DOMINATORS,
				      gimple_bb (SSA_NAME_DEF_STMT (arg1)),
				      phi_bb)
				   ? TR_DISAMBIGUATE
				   : disambiguate_only, data))
	return NULL_TREE;
    }

  return arg0;
}

/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use whose defining statement does
   not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use whose definition is a statement that may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and its return value is substituted for that.  This can be used to
   implement optimistic value-numbering for example.  Note that the
   VUSE argument is assumed to be valueized already.

   LIMIT specifies the number of alias queries we are allowed to do,
   the walk stops when it reaches zero and NULL is returned.  LIMIT
   is decremented by the number of alias queries (plus adjustments
   done by the callbacks) upon return.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
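
/* A minimal sketch of a caller (hypothetical, for illustration only):

     static void *
     find_def (ao_ref *, tree vuse, void *data)
     {
       /-* Stop successfully once we reach the VUSE recorded in DATA.  *-/
       return vuse == (tree) data ? data : NULL;
     }

     unsigned limit = 100;	/-* arbitrary alias-query budget *-/
     void *res = walk_non_aliased_vuses (&r, vuse, true, find_def,
					 NULL, NULL, limit,
					 (void *) wanted_vuse);

   R, VUSE and WANTED_VUSE are assumed to be set up by the caller.  */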

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
			void *(*walker)(ao_ref *, tree, void *),
			void *(*translate)(ao_ref *, tree, void *,
					   translate_flags *),
			tree (*valueize)(tree),
			unsigned &limit, void *data)
{
  bitmap visited = NULL;
  void *res;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple *def_stmt;

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	{
	  vuse = valueize (vuse);
	  if (!vuse)
	    {
	      res = NULL;
	      break;
	    }
	}
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					 &visited, translated, translate, data);
      else
	{
	  if ((int)limit <= 0)
	    {
	      res = NULL;
	      break;
	    }
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      if (!translate)
		break;
	      translate_flags disambiguate_only = TR_TRANSLATE;
	      res = (*translate) (ref, vuse, data, &disambiguate_only);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = translated || disambiguate_only == TR_TRANSLATE;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}


/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues at merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked or -1 if
   LIMIT stmts were walked and the walk was aborted at this point.
   If LIMIT is zero the walk is not aborted.  */
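
/* A minimal sketch of a caller (hypothetical): to learn whether any
   statement reachable through the vdef chain may clobber R, one could use

     static bool
     note_clobber (ao_ref *, tree, void *data)
     {
       *(bool *) data = true;	/-* a may-clobbering definition was seen *-/
       return true;		/-* stop the walk *-/
     }

     bool clobbered = false;
     walk_aliased_vdefs (&r, gimple_vuse (use_stmt), note_clobber,
			 &clobbered, NULL, NULL, 100);

   with USE_STMT provided by the caller and 100 an arbitrary limit.  */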

static int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached, unsigned limit)
{
  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    {
	      *visited = BITMAP_ALLOC (NULL);
	      bitmap_tree_view (*visited);
	    }
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    {
	      int res = walk_aliased_vdefs_1 (ref,
					      gimple_phi_arg_def (def_stmt, i),
					      walker, data, visited, cnt,
					      function_entry_reached, limit);
	      if (res == -1)
		return -1;
	      cnt = res;
	    }
	  return cnt;
	}

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if (cnt == limit)
	return -1;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached, unsigned int limit)
{
  bitmap local_visited = NULL;
  int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached, limit);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}

/* Verify validity of the fnspec string.
   See attr-fnspec.h for details.  */
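
/* As an illustrative example (not taken from a particular builtin): a
   string like ". r " has two characters describing the return value
   followed by two characters per argument.  Here the first pair says
   nothing is known about the return value or global side effects, and
   the "r " pair says the memory pointed to by the first argument is
   only read; see attr-fnspec.h for the precise meaning of each
   letter.  */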

void
attr_fnspec::verify ()
{
  bool err = false;
  if (!len)
    return;

  /* Check return value specifier.  */
  if (len < return_desc_size)
    err = true;
  else if ((len - return_desc_size) % arg_desc_size)
    err = true;
  else if ((str[0] < '1' || str[0] > '4')
	   && str[0] != '.' && str[0] != 'm')
    err = true;

  switch (str[1])
    {
    case ' ':
    case 'p':
    case 'P':
    case 'c':
    case 'C':
      break;
    default:
      err = true;
    }
  if (err)
    internal_error ("invalid fn spec attribute \"%s\"", str);

  /* Now check all parameters.  */
  for (unsigned int i = 0; arg_specified_p (i); i++)
    {
      unsigned int idx = arg_idx (i);
      switch (str[idx])
	{
	case 'x':
	case 'X':
	case 'r':
	case 'R':
	case 'o':
	case 'O':
	case 'w':
	case 'W':
	case '.':
	  if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
	      || str[idx + 1] == 't')
	    {
	      if (str[idx] != 'r' && str[idx] != 'R'
		  && str[idx] != 'w' && str[idx] != 'W'
		  && str[idx] != 'o' && str[idx] != 'O')
		err = true;
	      if (str[idx + 1] != 't'
		  /* Size specified is scalar, so it should be described
		     by ". " if specified at all.  */
		  && (arg_specified_p (str[idx + 1] - '1')
		      && str[arg_idx (str[idx + 1] - '1')] != '.'))
		err = true;
	    }
	  else if (str[idx + 1] != ' ')
	    err = true;
	  break;
	default:
	  if (str[idx] < '1' || str[idx] > '9')
	    err = true;
	}
      if (err)
	internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
    }
}

/* Return true if TYPE1 and TYPE2 will always give the same answer
   when compared with other types using same_type_for_tbaa_p.  */

static bool
types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
				      bool lto_streaming_safe)
{
  /* We use same_type_for_tbaa_p to match types in the access path.
     This check is overly conservative.  */
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      != TYPE_STRUCTURAL_EQUALITY_P (type2))
    return false;
  if (TYPE_STRUCTURAL_EQUALITY_P (type1))
    return true;

  if (lto_streaming_safe)
    return type1 == type2;
  else
    return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
}

/* Compare REF1 and REF2 and return flags specifying their differences.
   If LTO_STREAMING_SAFE is true do not use alias sets and canonical
   types that are going to be recomputed.
   If TBAA is true also compare TBAA metadata.  */

int
ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
			     bool lto_streaming_safe,
			     bool tbaa)
{
  if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
    return SEMANTICS;
  tree base1 = ao_ref_base (ref1);
  tree base2 = ao_ref_base (ref2);

  if (!known_eq (ref1->offset, ref2->offset)
      || !known_eq (ref1->size, ref2->size)
      || !known_eq (ref1->max_size, ref2->max_size))
    return SEMANTICS;

  /* For variable accesses we need to compare the actual paths to check
     that both refs access the same address with the same access size.  */
  if (!known_eq (ref1->size, ref1->max_size))
    {
      if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
			    TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
	return SEMANTICS;
      tree r1 = ref1->ref;
      tree r2 = ref2->ref;

      /* Handle toplevel COMPONENT_REFs of bitfields.
	 Those are special since they are not allowed in
	 ADDR_EXPR.  */
      if (TREE_CODE (r1) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
	{
	  if (TREE_CODE (r2) != COMPONENT_REF
	      || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
	    return SEMANTICS;
	  tree field1 = TREE_OPERAND (r1, 1);
	  tree field2 = TREE_OPERAND (r2, 1);
	  if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
				DECL_FIELD_OFFSET (field2), 0)
	      || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
				   DECL_FIELD_BIT_OFFSET (field2), 0)
	      || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
	      || !types_compatible_p (TREE_TYPE (r1),
				      TREE_TYPE (r2)))
	    return SEMANTICS;
	  r1 = TREE_OPERAND (r1, 0);
	  r2 = TREE_OPERAND (r2, 0);
	}
      else if (TREE_CODE (r2) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
	return SEMANTICS;

      /* Similarly for bit field refs.  */
      if (TREE_CODE (r1) == BIT_FIELD_REF)
	{
	  if (TREE_CODE (r2) != BIT_FIELD_REF
	      || !operand_equal_p (TREE_OPERAND (r1, 1),
				   TREE_OPERAND (r2, 1), 0)
	      || !operand_equal_p (TREE_OPERAND (r1, 2),
				   TREE_OPERAND (r2, 2), 0)
	      || !types_compatible_p (TREE_TYPE (r1),
				      TREE_TYPE (r2)))
	    return SEMANTICS;
	  r1 = TREE_OPERAND (r1, 0);
	  r2 = TREE_OPERAND (r2, 0);
	}
      else if (TREE_CODE (r2) == BIT_FIELD_REF)
	return SEMANTICS;

      /* Now we can compare the address of the actual memory access.  */
      if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
	return SEMANTICS;
    }
  /* For constant accesses we get more matches by comparing offset only.  */
  else if (!operand_equal_p (base1, base2,
			     OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
    return SEMANTICS;

  /* We can't simply use get_object_alignment_1 on the full
     reference as for accesses with variable indexes this reports
     too conservative alignment.  */
  unsigned int align1, align2;
  unsigned HOST_WIDE_INT bitpos1, bitpos2;
  bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
  bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
  /* ??? For MEMREF get_object_alignment_1 determines the alignment from
     TYPE_ALIGN but still returns false.  This seems to contradict
     its description.  So compare even if alignment is unknown.  */
  if (known1 != known2
      || (bitpos1 != bitpos2 || align1 != align2))
    return SEMANTICS;

  /* Now we know that the accesses are semantically the same.  */
  int flags = 0;

  /* ao_ref_base strips inner MEM_REF [&decl], recover from that here.  */
  tree rbase1 = ref1->ref;
  if (rbase1)
    while (handled_component_p (rbase1))
      rbase1 = TREE_OPERAND (rbase1, 0);
  tree rbase2 = ref2->ref;
  while (handled_component_p (rbase2))
    rbase2 = TREE_OPERAND (rbase2, 0);

  /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
     implement restrict pointers.  MR_DEPENDENCE_CLIQUE 0 means no information.
     Otherwise we need to match bases and cliques.  */
  if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
	&& MR_DEPENDENCE_CLIQUE (rbase1))
       || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
	   && MR_DEPENDENCE_CLIQUE (rbase2)))
      && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
	  || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
	  || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
    flags |= DEPENDENCE_CLIQUE;

  if (!tbaa)
    return flags;

  /* Alias sets are not stable across LTO streaming; be conservative here
     and compare types the alias sets are ultimately based on.  */
  if (lto_streaming_safe)
    {
      tree t1 = ao_ref_alias_ptr_type (ref1);
      tree t2 = ao_ref_alias_ptr_type (ref2);
      if (!alias_ptr_types_compatible_p (t1, t2))
	flags |= REF_ALIAS_SET;

      t1 = ao_ref_base_alias_ptr_type (ref1);
      t2 = ao_ref_base_alias_ptr_type (ref2);
      if (!alias_ptr_types_compatible_p (t1, t2))
	flags |= BASE_ALIAS_SET;
    }
  else
    {
      if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
	flags |= REF_ALIAS_SET;
      if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
	flags |= BASE_ALIAS_SET;
    }

  /* The access path is used only on non-view-converted references.  */
  bool view_converted = view_converted_memref_p (rbase1);
  if (view_converted_memref_p (rbase2) != view_converted)
    return flags | ACCESS_PATH;
  else if (view_converted)
    return flags;


  /* Find the start of the access paths and look for trailing arrays.  */
  tree c1 = ref1->ref, c2 = ref2->ref;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  int nskipped1 = 0, nskipped2 = 0;
  int i = 0;

  for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
    {
      if (component_ref_to_zero_sized_trailing_array_p (p1))
	end_struct_ref1 = p1;
      if (ends_tbaa_access_path_p (p1))
	c1 = p1, nskipped1 = i;
      i++;
    }
  for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
    {
      if (component_ref_to_zero_sized_trailing_array_p (p2))
	end_struct_ref2 = p2;
      if (ends_tbaa_access_path_p (p2))
	c2 = p2, nskipped2 = i;
      i++;
    }

  /* For variable accesses we cannot rely on the offset match below.
     We know that the paths are structurally the same, so only check that
     the starts of the TBAA paths did not diverge.  */
  if (!known_eq (ref1->size, ref1->max_size)
      && nskipped1 != nskipped2)
    return flags | ACCESS_PATH;

  /* Information about trailing refs is used by
     aliasing_component_refs_p, which is applied only if the paths
     have handled components.  */
  if (!handled_component_p (c1) && !handled_component_p (c2))
    ;
  else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
    return flags | ACCESS_PATH;
  if (end_struct_ref1
      && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
	 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
    return flags | ACCESS_PATH;

  /* Now compare all handled components of the access path.
     We have three oracles that care about access paths:
     - aliasing_component_refs_p
     - nonoverlapping_refs_since_match_p
     - nonoverlapping_component_refs_p
     We need to match the things these oracles compare.

     It is only necessary to check types for compatibility
     and offsets.  The rest of what the oracles compare are actual
     addresses.  Those are already known to be the same:
     - for constant accesses we check offsets
     - for variable accesses we already matched
       the path lexically with operand_equal_p.  */
  while (true)
    {
      bool comp1 = handled_component_p (c1);
      bool comp2 = handled_component_p (c2);

      if (comp1 != comp2)
	return flags | ACCESS_PATH;
      if (!comp1)
	break;

      if (TREE_CODE (c1) != TREE_CODE (c2))
	return flags | ACCESS_PATH;

      /* aliasing_component_refs_p attempts to find a type match within
	 the paths.  For that reason both types need to be equal
	 with respect to same_type_for_tbaa_p.  */
      if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
						 TREE_TYPE (c2),
						 lto_streaming_safe))
	return flags | ACCESS_PATH;
      if (component_ref_to_zero_sized_trailing_array_p (c1)
	  != component_ref_to_zero_sized_trailing_array_p (c2))
	return flags | ACCESS_PATH;

      /* aliasing_matching_component_refs_p compares
	 offsets within the path.  Other properties are ignored.
	 Do not bother to verify offsets in variable accesses.  Here we
	 already compared them by operand_equal_p so they are
	 structurally the same.  */
      if (!known_eq (ref1->size, ref1->max_size))
	{
	  poly_int64 offadj1, sztmc1, msztmc1;
	  bool reverse1;
	  get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
	  poly_int64 offadj2, sztmc2, msztmc2;
	  bool reverse2;
	  get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
	  if (!known_eq (offadj1, offadj2))
	    return flags | ACCESS_PATH;
	}
      c1 = TREE_OPERAND (c1, 0);
      c2 = TREE_OPERAND (c2, 0);
    }
  /* Finally test the access type.  */
  if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
					     TREE_TYPE (c2),
					     lto_streaming_safe))
    return flags | ACCESS_PATH;
  return flags;
}

/* Hash REF to HSTATE.  If LTO_STREAMING_SAFE do not use alias sets
   and canonical types.  */
void
ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
			 inchash::hash &hstate)
{
  tree base = ao_ref_base (ref);
  tree tbase = base;

  if (!known_eq (ref->size, ref->max_size))
    {
      tree r = ref->ref;
      if (TREE_CODE (r) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
	{
	  tree field = TREE_OPERAND (r, 1);
	  hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
	  hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
	  hash_operand (DECL_SIZE (field), hstate, 0);
	  r = TREE_OPERAND (r, 0);
	}
      if (TREE_CODE (r) == BIT_FIELD_REF)
	{
	  hash_operand (TREE_OPERAND (r, 1), hstate, 0);
	  hash_operand (TREE_OPERAND (r, 2), hstate, 0);
	  r = TREE_OPERAND (r, 0);
	}
      hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
      hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
    }
  else
    {
      hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
      hstate.add_poly_int (ref->offset);
      hstate.add_poly_int (ref->size);
      hstate.add_poly_int (ref->max_size);
    }
  if (!lto_streaming_safe && tbaa)
    {
      hstate.add_int (ao_ref_alias_set (ref));
      hstate.add_int (ao_ref_base_alias_set (ref));
    }
}