1 | /* Tree based points-to analysis |
2 | Copyright (C) 2005-2024 Free Software Foundation, Inc. |
3 | Contributed by Daniel Berlin <dberlin@dberlin.org> |
4 | |
5 | This file is part of GCC. |
6 | |
7 | GCC is free software; you can redistribute it and/or modify |
it under the terms of the GNU General Public License as published by
9 | the Free Software Foundation; either version 3 of the License, or |
10 | (at your option) any later version. |
11 | |
12 | GCC is distributed in the hope that it will be useful, |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
15 | GNU General Public License for more details. |
16 | |
17 | You should have received a copy of the GNU General Public License |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ |
20 | |
21 | #include "config.h" |
22 | #include "system.h" |
23 | #include "coretypes.h" |
24 | #include "backend.h" |
25 | #include "rtl.h" |
26 | #include "tree.h" |
27 | #include "gimple.h" |
28 | #include "alloc-pool.h" |
29 | #include "tree-pass.h" |
30 | #include "ssa.h" |
31 | #include "cgraph.h" |
32 | #include "tree-pretty-print.h" |
33 | #include "diagnostic-core.h" |
34 | #include "fold-const.h" |
35 | #include "stor-layout.h" |
36 | #include "stmt.h" |
37 | #include "gimple-iterator.h" |
38 | #include "tree-into-ssa.h" |
39 | #include "tree-dfa.h" |
40 | #include "gimple-walk.h" |
41 | #include "varasm.h" |
42 | #include "stringpool.h" |
43 | #include "attribs.h" |
44 | #include "tree-ssa.h" |
45 | #include "tree-cfg.h" |
46 | #include "gimple-range.h" |
47 | #include "ipa-modref-tree.h" |
48 | #include "ipa-modref.h" |
49 | #include "attr-fnspec.h" |
50 | |
51 | /* The idea behind this analyzer is to generate set constraints from the |
52 | program, then solve the resulting constraints in order to generate the |
53 | points-to sets. |
54 | |
55 | Set constraints are a way of modeling program analysis problems that |
56 | involve sets. They consist of an inclusion constraint language, |
57 | describing the variables (each variable is a set) and operations that |
58 | are involved on the variables, and a set of rules that derive facts |
59 | from these operations. To solve a system of set constraints, you derive |
60 | all possible facts under the rules, which gives you the correct sets |
61 | as a consequence. |
62 | |
63 | See "Efficient Field-sensitive pointer analysis for C" by "David |
64 | J. Pearce and Paul H. J. Kelly and Chris Hankin", at |
65 | http://citeseer.ist.psu.edu/pearce04efficient.html |
66 | |
67 | Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines |
68 | of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at |
69 | http://citeseer.ist.psu.edu/heintze01ultrafast.html |
70 | |
71 | There are three types of real constraint expressions, DEREF, |
72 | ADDRESSOF, and SCALAR. Each constraint expression consists |
73 | of a constraint type, a variable, and an offset. |
74 | |
75 | SCALAR is a constraint expression type used to represent x, whether |
76 | it appears on the LHS or the RHS of a statement. |
77 | DEREF is a constraint expression type used to represent *x, whether |
78 | it appears on the LHS or the RHS of a statement. |
79 | ADDRESSOF is a constraint expression used to represent &x, whether |
80 | it appears on the LHS or the RHS of a statement. |
81 | |
82 | Each pointer variable in the program is assigned an integer id, and |
83 | each field of a structure variable is assigned an integer id as well. |
84 | |
85 | Structure variables are linked to their list of fields through a "next |
86 | field" in each variable that points to the next field in offset |
87 | order. |
88 | Each variable for a structure field has |
89 | |
90 | 1. "size", that tells the size in bits of that field. |
91 | 2. "fullsize", that tells the size in bits of the entire structure. |
92 | 3. "offset", that tells the offset in bits from the beginning of the |
93 | structure to this field. |
94 | |
95 | Thus, |
96 | struct f |
97 | { |
98 | int a; |
99 | int b; |
100 | } foo; |
101 | int *bar; |
102 | |
103 | looks like |
104 | |
105 | foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b |
106 | foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL |
107 | bar -> id 3, size 32, offset 0, fullsize 32, next NULL |
108 | |
109 | |
110 | In order to solve the system of set constraints, the following is |
111 | done: |
112 | |
113 | 1. Each constraint variable x has a solution set associated with it, |
114 | Sol(x). |
115 | |
116 | 2. Constraints are separated into direct, copy, and complex. |
117 | Direct constraints are ADDRESSOF constraints that require no extra |
118 | processing, such as P = &Q |
119 | Copy constraints are those of the form P = Q. |
120 | Complex constraints are all the constraints involving dereferences |
121 | and offsets (including offsetted copies). |
122 | |
123 | 3. All direct constraints of the form P = &Q are processed, such |
124 | that Q is added to Sol(P) |
125 | |
126 | 4. All complex constraints for a given constraint variable are stored in a |
127 | linked list attached to that variable's node. |
128 | |
129 | 5. A directed graph is built out of the copy constraints. Each |
130 | constraint variable is a node in the graph, and an edge from |
131 | Q to P is added for each copy constraint of the form P = Q |
132 | |
133 | 6. The graph is then walked, and solution sets are |
134 | propagated along the copy edges, such that an edge from Q to P |
135 | causes Sol(P) <- Sol(P) union Sol(Q). |
136 | |
137 | 7. As we visit each node, all complex constraints associated with |
138 | that node are processed by adding appropriate copy edges to the graph, or the |
139 | appropriate variables to the solution set. |
140 | |
141 | 8. The process of walking the graph is iterated until no solution |
142 | sets change. |
143 | |
   Prior to walking the graph in steps 6 and 7, we perform static
145 | cycle elimination on the constraint graph, as well |
146 | as off-line variable substitution. |
147 | |
148 | TODO: Adding offsets to pointer-to-structures can be handled (IE not punted |
149 | on and turned into anything), but isn't. You can just see what offset |
150 | inside the pointed-to struct it's going to access. |
151 | |
152 | TODO: Constant bounded arrays can be handled as if they were structs of the |
153 | same number of elements. |
154 | |
155 | TODO: Modeling heap and incoming pointers becomes much better if we |
156 | add fields to them as we discover them, which we could do. |
157 | |
158 | TODO: We could handle unions, but to be honest, it's probably not |
159 | worth the pain or slowdown. */ |
160 | |
161 | /* IPA-PTA optimizations possible. |
162 | |
163 | When the indirect function called is ANYTHING we can add disambiguation |
164 | based on the function signatures (or simply the parameter count which |
165 | is the varinfo size). We also do not need to consider functions that |
166 | do not have their address taken. |
167 | |
168 | The is_global_var bit which marks escape points is overly conservative |
169 | in IPA mode. Split it to is_escape_point and is_global_var - only |
170 | externally visible globals are escape points in IPA mode. |
171 | There is now is_ipa_escape_point but this is only used in a few |
172 | selected places. |
173 | |
174 | The way we introduce DECL_PT_UID to avoid fixing up all points-to |
175 | sets in the translation unit when we copy a DECL during inlining |
176 | pessimizes precision. The advantage is that the DECL_PT_UID keeps |
177 | compile-time and memory usage overhead low - the points-to sets |
178 | do not grow or get unshared as they would during a fixup phase. |
179 | An alternative solution is to delay IPA PTA until after all |
180 | inlining transformations have been applied. |
181 | |
182 | The way we propagate clobber/use information isn't optimized. |
183 | It should use a new complex constraint that properly filters |
184 | out local variables of the callee (though that would make |
185 | the sets invalid after inlining). OTOH we might as well |
186 | admit defeat to WHOPR and simply do all the clobber/use analysis |
187 | and propagation after PTA finished but before we threw away |
188 | points-to information for memory variables. WHOPR and PTA |
189 | do not play along well anyway - the whole constraint solving |
190 | would need to be done in WPA phase and it will be very interesting |
191 | to apply the results to local SSA names during LTRANS phase. |
192 | |
193 | We probably should compute a per-function unit-ESCAPE solution |
194 | propagating it simply like the clobber / uses solutions. The |
195 | solution can go alongside the non-IPA escaped solution and be |
196 | used to query which vars escape the unit through a function. |
197 | This is also required to make the escaped-HEAP trick work in IPA mode. |
198 | |
199 | We never put function decls in points-to sets so we do not |
200 | keep the set of called functions for indirect calls. |
201 | |
202 | And probably more. */ |
203 | |
/* Whether the analysis distinguishes individual structure fields
   (field-sensitive mode, see the overview comment above).  */
static bool use_field_sensitive = true;

/* Nonzero while the whole-program (IPA-PTA) variant of the analysis
   is running.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps.  */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets.  */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables.  */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps.  */
static bitmap_obstack iteration_obstack;

/* Forward declarations.  */
static unsigned int create_variable_info_for (tree, const char *, bool);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


/* Like EXECUTE_IF_SET_IN_BITMAP, but does nothing when bitmap A is
   NULL instead of crashing.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)

/* Statistics gathered while building and solving the constraints;
   counters are bumped throughout the pass and can be dumped at the
   end.  */
static struct constraint_stats
{
  /* Number of constraint variables created (see new_var_info).  */
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int num_avoided_edges;
  unsigned int points_to_sets_created;
} stats;
243 | |
/* Description of one constraint variable.  Every pointer variable and
   every structure field the analysis tracks gets one of these; the
   solver refers to them by ID.  */

struct variable_info
{
  /* ID of this variable.  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this is a register variable.  */
  unsigned int is_reg_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer.  */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a module escape point for IPA analysis.  */
  unsigned int is_ipa_escape_point : 1;

  /* True if this represents an IPA function info.  */
  unsigned int is_fn_info : 1;

  /* True if this appears as RHS in an ADDRESSOF constraint.  */
  unsigned int address_taken : 1;

  /* ??? Store somewhere better.  */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable.  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* In IPA mode the shadow UID in case the variable needs to be duplicated in
     the final points-to solution because it reaches its containing
     function recursively.  Zero if none is needed.  */
  unsigned int shadow_var_uid;

  /* Name of this variable.  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
328 | |
/* Forward declarations of operations on variable_info structures.  */
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);
static void make_param_constraints (varinfo_t);

/* Pool of variable info structures.  */
static object_allocator<variable_info> variable_info_pool
  ("Variable info pool");

/* Map varinfo to final pt_solution.  */
static hash_map<varinfo_t, pt_solution *> *final_solutions;
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;
347 | |
348 | /* Return the varmap element N */ |
349 | |
350 | static inline varinfo_t |
351 | get_varinfo (unsigned int n) |
352 | { |
353 | return varmap[n]; |
354 | } |
355 | |
356 | /* Return the next variable in the list of sub-variables of VI |
357 | or NULL if VI is the last sub-variable. */ |
358 | |
359 | static inline varinfo_t |
360 | vi_next (varinfo_t vi) |
361 | { |
362 | return get_varinfo (n: vi->next); |
363 | } |
364 | |
/* Static IDs for the special artificial variables.  Variable ID zero
   is unused and serves as the terminator for sub-variable chains.  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5, escaped_return_id = 6,
       storedanything_id = 7, integer_id = 8 };
370 | |
371 | /* Return a new variable info structure consisting for a variable |
372 | named NAME, and using constraint graph node NODE. Append it |
373 | to the vector of variable info structures. */ |
374 | |
375 | static varinfo_t |
376 | new_var_info (tree t, const char *name, bool add_id) |
377 | { |
378 | unsigned index = varmap.length (); |
379 | varinfo_t ret = variable_info_pool.allocate (); |
380 | |
381 | if (dump_file && add_id) |
382 | { |
383 | char *tempname = xasprintf ("%s(%d)" , name, index); |
384 | name = ggc_strdup (tempname); |
385 | free (ptr: tempname); |
386 | } |
387 | |
388 | ret->id = index; |
389 | ret->name = name; |
390 | ret->decl = t; |
391 | /* Vars without decl are artificial and do not have sub-variables. */ |
392 | ret->is_artificial_var = (t == NULL_TREE); |
393 | ret->is_special_var = false; |
394 | ret->is_unknown_size_var = false; |
395 | ret->is_full_var = (t == NULL_TREE); |
396 | ret->is_heap_var = false; |
397 | ret->may_have_pointers = true; |
398 | ret->only_restrict_pointers = false; |
399 | ret->is_restrict_var = false; |
400 | ret->ruid = 0; |
401 | ret->is_global_var = (t == NULL_TREE); |
402 | ret->is_ipa_escape_point = false; |
403 | ret->is_fn_info = false; |
404 | ret->address_taken = false; |
405 | if (t && DECL_P (t)) |
406 | ret->is_global_var = (is_global_var (t) |
407 | /* We have to treat even local register variables |
408 | as escape points. */ |
409 | || (VAR_P (t) && DECL_HARD_REGISTER (t))); |
410 | ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME); |
411 | ret->solution = BITMAP_ALLOC (obstack: &pta_obstack); |
412 | ret->oldsolution = NULL; |
413 | ret->next = 0; |
414 | ret->shadow_var_uid = 0; |
415 | ret->head = ret->id; |
416 | |
417 | stats.total_vars++; |
418 | |
419 | varmap.safe_push (obj: ret); |
420 | |
421 | return ret; |
422 | } |
423 | |
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  Populated lazily by
   get_call_vi.  */
static hash_map<gimple *, varinfo_t> *call_stmt_vars;
427 | |
428 | /* Lookup or create the variable for the call statement CALL. */ |
429 | |
430 | static varinfo_t |
431 | get_call_vi (gcall *call) |
432 | { |
433 | varinfo_t vi, vi2; |
434 | |
435 | bool existed; |
436 | varinfo_t *slot_p = &call_stmt_vars->get_or_insert (k: call, existed: &existed); |
437 | if (existed) |
438 | return *slot_p; |
439 | |
440 | vi = new_var_info (NULL_TREE, name: "CALLUSED" , add_id: true); |
441 | vi->offset = 0; |
442 | vi->size = 1; |
443 | vi->fullsize = 2; |
444 | vi->is_full_var = true; |
445 | vi->is_reg_var = true; |
446 | |
447 | vi2 = new_var_info (NULL_TREE, name: "CALLCLOBBERED" , add_id: true); |
448 | vi2->offset = 1; |
449 | vi2->size = 1; |
450 | vi2->fullsize = 2; |
451 | vi2->is_full_var = true; |
452 | vi2->is_reg_var = true; |
453 | |
454 | vi->next = vi2->id; |
455 | |
456 | *slot_p = vi; |
457 | return vi; |
458 | } |
459 | |
460 | /* Lookup the variable for the call statement CALL representing |
461 | the uses. Returns NULL if there is nothing special about this call. */ |
462 | |
463 | static varinfo_t |
464 | lookup_call_use_vi (gcall *call) |
465 | { |
466 | varinfo_t *slot_p = call_stmt_vars->get (k: call); |
467 | if (slot_p) |
468 | return *slot_p; |
469 | |
470 | return NULL; |
471 | } |
472 | |
473 | /* Lookup the variable for the call statement CALL representing |
474 | the clobbers. Returns NULL if there is nothing special about this call. */ |
475 | |
476 | static varinfo_t |
477 | lookup_call_clobber_vi (gcall *call) |
478 | { |
479 | varinfo_t uses = lookup_call_use_vi (call); |
480 | if (!uses) |
481 | return NULL; |
482 | |
483 | return vi_next (vi: uses); |
484 | } |
485 | |
/* Lookup or create the variable for the call statement CALL representing
   the uses.  The use variable is the head of the pair created by
   get_call_vi.  */

static varinfo_t
get_call_use_vi (gcall *call)
{
  return get_call_vi (call);
}
494 | |
495 | /* Lookup or create the variable for the call statement CALL representing |
496 | the clobbers. */ |
497 | |
498 | static varinfo_t ATTRIBUTE_UNUSED |
499 | get_call_clobber_vi (gcall *call) |
500 | { |
501 | return vi_next (vi: get_call_vi (call)); |
502 | } |
503 | |
504 | |
/* Kind of a constraint expression: x (SCALAR), *x (DEREF) or
   &x (ADDRESSOF).  */
enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.  */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;

/* Forward declarations of the constraint-building workers.  */
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
533 | |
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;

/* Allocation pool for constraint structures.  */
static object_allocator<constraint> constraint_pool ("Constraint pool");
550 | |
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node.  */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution).  */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};

/* The single constraint graph the solver operates on.  */
static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  Node ids at and above FIRST_REF_NODE denote the "*x" node
   for the variable with id (node - FIRST_REF_NODE).  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
633 | |
634 | /* Return the representative node for NODE, if NODE has been unioned |
635 | with another NODE. |
636 | This function performs path compression along the way to finding |
637 | the representative. */ |
638 | |
639 | static unsigned int |
640 | find (unsigned int node) |
641 | { |
642 | gcc_checking_assert (node < graph->size); |
643 | if (graph->rep[node] != node) |
644 | return graph->rep[node] = find (node: graph->rep[node]); |
645 | return node; |
646 | } |
647 | |
648 | /* Union the TO and FROM nodes to the TO nodes. |
649 | Note that at some point in the future, we may want to do |
650 | union-by-rank, in which case we are going to have to return the |
651 | node we unified to. */ |
652 | |
653 | static bool |
654 | unite (unsigned int to, unsigned int from) |
655 | { |
656 | gcc_checking_assert (to < graph->size && from < graph->size); |
657 | if (to != from && graph->rep[from] != to) |
658 | { |
659 | graph->rep[from] = to; |
660 | return true; |
661 | } |
662 | return false; |
663 | } |
664 | |
665 | /* Create a new constraint consisting of LHS and RHS expressions. */ |
666 | |
667 | static constraint_t |
668 | new_constraint (const struct constraint_expr lhs, |
669 | const struct constraint_expr rhs) |
670 | { |
671 | constraint_t ret = constraint_pool.allocate (); |
672 | ret->lhs = lhs; |
673 | ret->rhs = rhs; |
674 | return ret; |
675 | } |
676 | |
677 | /* Print out constraint C to FILE. */ |
678 | |
679 | static void |
680 | dump_constraint (FILE *file, constraint_t c) |
681 | { |
682 | if (c->lhs.type == ADDRESSOF) |
683 | fprintf (stream: file, format: "&" ); |
684 | else if (c->lhs.type == DEREF) |
685 | fprintf (stream: file, format: "*" ); |
686 | if (dump_file) |
687 | fprintf (stream: file, format: "%s" , get_varinfo (n: c->lhs.var)->name); |
688 | else |
689 | fprintf (stream: file, format: "V%d" , c->lhs.var); |
690 | if (c->lhs.offset == UNKNOWN_OFFSET) |
691 | fprintf (stream: file, format: " + UNKNOWN" ); |
692 | else if (c->lhs.offset != 0) |
693 | fprintf (stream: file, format: " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset); |
694 | fprintf (stream: file, format: " = " ); |
695 | if (c->rhs.type == ADDRESSOF) |
696 | fprintf (stream: file, format: "&" ); |
697 | else if (c->rhs.type == DEREF) |
698 | fprintf (stream: file, format: "*" ); |
699 | if (dump_file) |
700 | fprintf (stream: file, format: "%s" , get_varinfo (n: c->rhs.var)->name); |
701 | else |
702 | fprintf (stream: file, format: "V%d" , c->rhs.var); |
703 | if (c->rhs.offset == UNKNOWN_OFFSET) |
704 | fprintf (stream: file, format: " + UNKNOWN" ); |
705 | else if (c->rhs.offset != 0) |
706 | fprintf (stream: file, format: " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset); |
707 | } |
708 | |
/* Prototypes of the dump-to-stderr entry points; they have external
   linkage so they can be called from a debugger.  */
void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);
void debug_varinfo (varinfo_t);
void debug_varmap (void);

/* Print out constraint C to stderr.  */

DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
  fprintf (stderr, "\n");
}
726 | |
727 | /* Print out all constraints to FILE */ |
728 | |
729 | static void |
730 | dump_constraints (FILE *file, int from) |
731 | { |
732 | int i; |
733 | constraint_t c; |
734 | for (i = from; constraints.iterate (ix: i, ptr: &c); i++) |
735 | if (c) |
736 | { |
737 | dump_constraint (file, c); |
738 | fprintf (stream: file, format: "\n" ); |
739 | } |
740 | } |
741 | |
/* Print out all constraints to stderr.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
749 | |
/* Print the constraint graph in dot format to FILE.  Each remaining
   (representative, non-REF) node becomes a box labeled with its name
   and complex constraints; each successor edge between representatives
   becomes a directed edge.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  Unified nodes (find (i)
     != i) and the unused FIRST_REF_NODE slot are skipped; ids above
     FIRST_REF_NODE denote dereference ("*x") nodes.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Attach the complex constraints as extra label lines.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges after unification are not interesting.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
824 | |
/* Print out the constraint graph to stderr.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
832 | |
833 | /* SOLVER FUNCTIONS |
834 | |
835 | The solver is a simple worklist solver, that works on the following |
836 | algorithm: |
837 | |
838 | sbitmap changed_nodes = all zeroes; |
839 | changed_count = 0; |
840 | For each node that is not already collapsed: |
841 | changed_count++; |
842 | set bit in changed nodes |
843 | |
844 | while (changed_count > 0) |
845 | { |
846 | compute topological ordering for constraint graph |
847 | |
848 | find and collapse cycles in the constraint graph (updating |
849 | changed if necessary) |
850 | |
851 | for each node (n) in the graph in topological order: |
852 | changed_count--; |
853 | |
854 | Process each complex constraint associated with the node, |
855 | updating changed if necessary. |
856 | |
857 | For each outgoing edge from n, propagate the solution from n to |
858 | the destination of the edge, updating changed as necessary. |
859 | |
860 | } */ |
861 | |
862 | /* Return true if two constraint expressions A and B are equal. */ |
863 | |
864 | static bool |
865 | constraint_expr_equal (struct constraint_expr a, struct constraint_expr b) |
866 | { |
867 | return a.type == b.type && a.var == b.var && a.offset == b.offset; |
868 | } |
869 | |
870 | /* Return true if constraint expression A is less than constraint expression |
871 | B. This is just arbitrary, but consistent, in order to give them an |
872 | ordering. */ |
873 | |
874 | static bool |
875 | constraint_expr_less (struct constraint_expr a, struct constraint_expr b) |
876 | { |
877 | if (a.type == b.type) |
878 | { |
879 | if (a.var == b.var) |
880 | return a.offset < b.offset; |
881 | else |
882 | return a.var < b.var; |
883 | } |
884 | else |
885 | return a.type < b.type; |
886 | } |
887 | |
888 | /* Return true if constraint A is less than constraint B. This is just |
889 | arbitrary, but consistent, in order to give them an ordering. */ |
890 | |
891 | static bool |
892 | constraint_less (const constraint_t &a, const constraint_t &b) |
893 | { |
894 | if (constraint_expr_less (a: a->lhs, b: b->lhs)) |
895 | return true; |
896 | else if (constraint_expr_less (a: b->lhs, b: a->lhs)) |
897 | return false; |
898 | else |
899 | return constraint_expr_less (a: a->rhs, b: b->rhs); |
900 | } |
901 | |
902 | /* Return true if two constraints A and B are equal. */ |
903 | |
904 | static bool |
905 | constraint_equal (struct constraint a, struct constraint b) |
906 | { |
907 | return constraint_expr_equal (a: a.lhs, b: b.lhs) |
908 | && constraint_expr_equal (a: a.rhs, b: b.rhs); |
909 | } |
910 | |
911 | |
/* Find a constraint LOOKFOR in the sorted constraint vector VEC.  */
913 | |
914 | static constraint_t |
915 | constraint_vec_find (vec<constraint_t> vec, |
916 | struct constraint lookfor) |
917 | { |
918 | unsigned int place; |
919 | constraint_t found; |
920 | |
921 | if (!vec.exists ()) |
922 | return NULL; |
923 | |
924 | place = vec.lower_bound (obj: &lookfor, lessthan: constraint_less); |
925 | if (place >= vec.length ()) |
926 | return NULL; |
927 | found = vec[place]; |
928 | if (!constraint_equal (a: *found, b: lookfor)) |
929 | return NULL; |
930 | return found; |
931 | } |
932 | |
/* Union two constraint vectors, TO and FROM.  Put the result in TO.
   Returns true if the TO set is changed.  */
935 | |
936 | static bool |
937 | constraint_set_union (vec<constraint_t> *to, |
938 | vec<constraint_t> *from) |
939 | { |
940 | int i; |
941 | constraint_t c; |
942 | bool any_change = false; |
943 | |
944 | FOR_EACH_VEC_ELT (*from, i, c) |
945 | { |
946 | if (constraint_vec_find (vec: *to, lookfor: *c) == NULL) |
947 | { |
948 | unsigned int place = to->lower_bound (obj: c, lessthan: constraint_less); |
949 | to->safe_insert (ix: place, obj: c); |
950 | any_change = true; |
951 | } |
952 | } |
953 | return any_change; |
954 | } |
955 | |
956 | /* Expands the solution in SET to all sub-fields of variables included. */ |
957 | |
static bitmap
solution_set_expand (bitmap set, bitmap *expanded)
{
  bitmap_iterator bi;
  unsigned j;

  /* The expansion is cached in *EXPANDED; compute it at most once.  */
  if (*expanded)
    return *expanded;

  *expanded = BITMAP_ALLOC (obstack: &iteration_obstack);

  /* In a first pass expand variables, once for each head to avoid
     quadratic behavior, to include all sub-fields.  */
  unsigned prev_head = 0;
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (n: j);
      /* Artificial and single-field variables have no sub-fields to add.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      if (v->head != prev_head)
	{
	  varinfo_t head = get_varinfo (n: v->head);
	  /* Accumulate maximal runs of consecutive sub-field ids so each
	     run can be set with a single bitmap_set_range call.  */
	  unsigned num = 1;
	  for (varinfo_t n = vi_next (vi: head); n != NULL; n = vi_next (vi: n))
	    {
	      if (n->id != head->id + num)
		{
		  /* Usually sub variables are adjacent but since we
		     create pointed-to restrict representatives there
		     can be gaps as well.  */
		  bitmap_set_range (*expanded, head->id, num);
		  head = n;
		  num = 1;
		}
	      else
		num++;
	    }

	  /* Flush the final run of this variable's sub-fields.  */
	  bitmap_set_range (*expanded, head->id, num);
	  prev_head = v->head;
	}
    }

  /* And finally set the rest of the bits from SET in an efficient way.  */
  bitmap_ior_into (*expanded, set);

  return *expanded;
}
1007 | |
1008 | /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the |
1009 | process. */ |
1010 | |
static bool
set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			  bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (set: delta, expanded: expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (n: i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (n: vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  /* Walk forward from the found field, adding every field that
	     overlaps [fieldoffset, fieldoffset + size).  */
	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
1073 | |
1074 | /* Insert constraint C into the list of complex constraints for graph |
1075 | node VAR. */ |
1076 | |
1077 | static void |
1078 | insert_into_complex (constraint_graph_t graph, |
1079 | unsigned int var, constraint_t c) |
1080 | { |
1081 | vec<constraint_t> complex = graph->complex[var]; |
1082 | unsigned int place = complex.lower_bound (obj: c, lessthan: constraint_less); |
1083 | |
1084 | /* Only insert constraints that do not already exist. */ |
1085 | if (place >= complex.length () |
1086 | || !constraint_equal (a: *c, b: *complex[place])) |
1087 | graph->complex[var].safe_insert (ix: place, obj: c); |
1088 | } |
1089 | |
1090 | |
1091 | /* Condense two variable nodes into a single variable node, by moving |
1092 | all associated info from FROM to TO. Returns true if TO node's |
1093 | constraint set changes after the merge. */ |
1094 | |
static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  /* FROM must already be represented by TO in the union-find.  */
  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node.  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM, and *FROM = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      /* Rewrite the constraint in place so it now refers to TO.  */
      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;

    }
  /* Merge the rewritten constraints into TO's sorted list and release
     FROM's now-unused vector.  */
  any_change = constraint_set_union (to: &graph->complex[to],
				     from: &graph->complex[from]);
  graph->complex[from].release ();
  return any_change;
}
1125 | |
1126 | |
1127 | /* Remove edges involving NODE from GRAPH. */ |
1128 | |
1129 | static void |
1130 | clear_edges_for_node (constraint_graph_t graph, unsigned int node) |
1131 | { |
1132 | if (graph->succs[node]) |
1133 | BITMAP_FREE (graph->succs[node]); |
1134 | } |
1135 | |
1136 | /* Merge GRAPH nodes FROM and TO into node TO. */ |
1137 | |
static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (obstack: &pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  /* FROM's own edges are no longer needed after the merge.  */
  clear_edges_for_node (graph, node: from);
}
1165 | |
1166 | |
1167 | /* Add an indirect graph edge to GRAPH, going from TO to FROM if |
1168 | it doesn't exist in the graph already. */ |
1169 | |
1170 | static void |
1171 | add_implicit_graph_edge (constraint_graph_t graph, unsigned int to, |
1172 | unsigned int from) |
1173 | { |
1174 | if (to == from) |
1175 | return; |
1176 | |
1177 | if (!graph->implicit_preds[to]) |
1178 | graph->implicit_preds[to] = BITMAP_ALLOC (obstack: &predbitmap_obstack); |
1179 | |
1180 | if (bitmap_set_bit (graph->implicit_preds[to], from)) |
1181 | stats.num_implicit_edges++; |
1182 | } |
1183 | |
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */
1187 | |
1188 | static void |
1189 | add_pred_graph_edge (constraint_graph_t graph, unsigned int to, |
1190 | unsigned int from) |
1191 | { |
1192 | if (!graph->preds[to]) |
1193 | graph->preds[to] = BITMAP_ALLOC (obstack: &predbitmap_obstack); |
1194 | bitmap_set_bit (graph->preds[to], from); |
1195 | } |
1196 | |
1197 | /* Add a graph edge to GRAPH, going from FROM to TO if |
1198 | it doesn't exist in the graph already. |
1199 | Return false if the edge already existed, true otherwise. */ |
1200 | |
static bool
add_graph_edge (constraint_graph_t graph, unsigned int to,
		unsigned int from)
{
  if (to == from)
    {
      /* Self edges are useless for propagation.  */
      return false;
    }
  else
    {
      bool r = false;

      if (!graph->succs[from])
	graph->succs[from] = BITMAP_ALLOC (obstack: &pta_obstack);

      /* The graph solving process does not avoid "triangles", thus
	 there can be multiple paths from a node to another involving
	 intermediate other nodes.  That causes extra copying which is
	 most difficult to avoid when the intermediate node is ESCAPED
	 because there are no edges added from ESCAPED.  Avoid
	 adding the direct edge FROM -> TO when we have FROM -> ESCAPED
	 and TO contains ESCAPED.
	 ??? Note this is only a heuristic, it does not prevent the
	 situation from occurring.  The heuristic helps PR38474 and
	 PR99912 significantly.  */
      if (to < FIRST_REF_NODE
	  && bitmap_bit_p (graph->succs[from], find (node: escaped_id))
	  && bitmap_bit_p (get_varinfo (n: find (node: to))->solution, escaped_id))
	{
	  stats.num_avoided_edges++;
	  return false;
	}

      if (bitmap_set_bit (graph->succs[from], to))
	{
	  r = true;
	  /* Only edges between non-REF nodes are counted in the stats.  */
	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
	    stats.num_edges++;
	}
      return r;
    }
}
1243 | |
1244 | |
1245 | /* Initialize the constraint graph structure to contain SIZE nodes. */ |
1246 | |
1247 | static void |
1248 | init_graph (unsigned int size) |
1249 | { |
1250 | unsigned int j; |
1251 | |
1252 | graph = XCNEW (struct constraint_graph); |
1253 | graph->size = size; |
1254 | graph->succs = XCNEWVEC (bitmap, graph->size); |
1255 | graph->indirect_cycles = XNEWVEC (int, graph->size); |
1256 | graph->rep = XNEWVEC (unsigned int, graph->size); |
1257 | /* ??? Macros do not support template types with multiple arguments, |
1258 | so we use a typedef to work around it. */ |
1259 | typedef vec<constraint_t> vec_constraint_t_heap; |
1260 | graph->complex = XCNEWVEC (vec_constraint_t_heap, size); |
1261 | graph->pe = XCNEWVEC (unsigned int, graph->size); |
1262 | graph->pe_rep = XNEWVEC (int, graph->size); |
1263 | |
1264 | for (j = 0; j < graph->size; j++) |
1265 | { |
1266 | graph->rep[j] = j; |
1267 | graph->pe_rep[j] = -1; |
1268 | graph->indirect_cycles[j] = -1; |
1269 | } |
1270 | } |
1271 | |
1272 | /* Build the constraint graph, adding only predecessor edges right now. */ |
1273 | |
static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  /* Allocate the per-node auxiliary arrays used by the offline
     variable-substitution phase.  */
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (obstack: &predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (n: j)->is_special_var)
	bitmap_set_bit (map: graph->direct_nodes, bitno: j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Translate each constraint into predecessor edges and the
     points-to/pointed-by side tables.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, from: rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, to: lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (map: graph->direct_nodes, bitno: lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (obstack: &predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (obstack: &predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, from: rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (map: graph->direct_nodes, bitno: rhsvar);
	  v = get_varinfo (n: rhsvar);
	  if (!v->is_full_var)
	    {
	      /* Taking the address of any field makes every sub-field of
		 the variable non-direct.  */
	      v = get_varinfo (n: v->head);
	      do
		{
		  bitmap_clear_bit (map: graph->direct_nodes, bitno: v->id);
		  v = vi_next (vi: v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, to: lhsvar, from: rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* A non-zero offset makes the affected endpoint non-direct.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (map: graph->direct_nodes, bitno: lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (map: graph->direct_nodes, bitno: rhs.var);
	}
    }
}
1374 | |
1375 | /* Build the constraint graph, adding successor edges. */ |
1376 | |
static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Some constraint slots may be NULL; skip them.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      /* Work on the current union-find representatives.  */
      lhsvar = find (node: lhs.var);
      rhsvar = find (node: rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, from: rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, to: lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y: seed the solution directly, no edge needed.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (n: lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y: plain copy edge.  */
	  add_graph_edge (graph, to: lhsvar, from: rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (node: storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (map: graph->direct_nodes, bitno: i)
	  && get_varinfo (n: i)->may_have_pointers)
	add_graph_edge (graph, to: find (node: i), from: t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, to: find (node: escaped_id), from: t);
}
1434 | |
1435 | |
/* Set of variables whose points-to solution changed on the last
   solver iteration and therefore need reprocessing.  */
static bitmap changed;
1438 | |
1439 | /* Strongly Connected Component visitation info. */ |
1440 | |
class scc_info
{
public:
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes already visited by the DFS.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been fully processed (collapsed).  */
  auto_sbitmap deleted;
  /* Per-node DFS number, also used as the Tarjan lowlink value.  */
  unsigned int *dfs;
  /* Per-node representative mapping; initialized to the identity.  */
  unsigned int *node_mapping;
  /* Next DFS number to assign.  */
  int current_index;
  /* Stack of nodes that may belong to a not-yet-closed SCC.  */
  auto_vec<unsigned> scc_stack;
};
1454 | |
1455 | |
1456 | /* Recursive routine to find strongly connected components in GRAPH. |
1457 | SI is the SCC info to store the information in, and N is the id of current |
1458 | graph node we are processing. |
1459 | |
1460 | This is Tarjan's strongly connected component finding algorithm, as |
1461 | modified by Nuutila to keep only non-root nodes on the stack. |
1462 | The algorithm can be found in "On finding the strongly connected |
1463 | connected components in a directed graph" by Esko Nuutila and Eljas |
1464 | Soisalon-Soininen, in Information Processing Letters volume 49, |
1465 | number 1, pages 9-14. */ |
1466 | |
static void
scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (map: si->visited, bitno: n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (node: i);
      if (bitmap_bit_p (map: si->deleted, bitno: w))
	continue;

      if (!bitmap_bit_p (map: si->visited, bitno: w))
	scc_visit (graph, si, n: w);

      /* Propagate the minimum DFS number (lowlink) from the successor's
	 current representative.  */
      unsigned int t = find (node: w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is an SCC root; pop all members with a DFS number >= ours.  */
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  /* NOTE(review): this bitmap does not appear to be explicitly
	     freed in this function — verify its lifetime.  */
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (to: lowest_node, from: i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes record the cycle in indirect_cycles instead
		     of being unified eagerly.  */
		  unite (to: lowest_node, from: i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (map: si->deleted, bitno: n);
    }
  else
    /* Not a root: N may still be part of an enclosing SCC.  */
    si->scc_stack.safe_push (obj: n);
}
1543 | |
1544 | /* Unify node FROM into node TO, updating the changed count if |
1545 | necessary when UPDATE_CHANGED is true. */ |
1546 | |
static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  /* TO must already be the representative of the union.  */
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (stream: dump_file, format: "Unifying %s to %s\n" ,
	     get_varinfo (n: from)->name,
	     get_varinfo (n: to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Fold FROM's edges and complex constraints into TO.  */
  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (n: from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (n: to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solution is now folded into TO's; release its bitmaps.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* TO's cached old solution is also stale after merging.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Remove any self edge the merge may have produced.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1599 | |
1600 | /* Add a copy edge FROM -> TO, optimizing special cases. Returns TRUE |
1601 | if the solution of TO changed. */ |
1602 | |
1603 | static bool |
1604 | solve_add_graph_edge (constraint_graph_t graph, unsigned int to, |
1605 | unsigned int from) |
1606 | { |
1607 | /* Adding edges from the special vars is pointless. |
1608 | They don't have sets that can change. */ |
1609 | if (get_varinfo (n: from)->is_special_var) |
1610 | return bitmap_ior_into (get_varinfo (n: to)->solution, |
1611 | get_varinfo (n: from)->solution); |
1612 | /* Merging the solution from ESCAPED needlessly increases |
1613 | the set. Use ESCAPED as representative instead. */ |
1614 | else if (from == find (node: escaped_id)) |
1615 | return bitmap_set_bit (get_varinfo (n: to)->solution, escaped_id); |
1616 | else if (get_varinfo (n: from)->may_have_pointers |
1617 | && add_graph_edge (graph, to, from)) |
1618 | return bitmap_ior_into (get_varinfo (n: to)->solution, |
1619 | get_varinfo (n: from)->solution); |
1620 | return false; |
1621 | } |
1622 | |
1623 | /* Process a constraint C that represents x = *(y + off), using DELTA as the |
1624 | starting solution for y. */ |
1625 | |
static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (n: lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (set: delta, expanded: expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (n: j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* A pointer pointing before the variable is looked up at
	     offset zero.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (n: v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (node: v->id);

	  flag |= solve_add_graph_edge (graph, to: lhs, from: t);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (vi: v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    bitmap_set_bit (changed, lhs);
}
1699 | |
1700 | /* Process a constraint C that represents *(x + off) = y using DELTA |
1701 | as the starting solution for x. */ |
1702 | |
static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (n: rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (n: find (node: anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (node: storedanything_id);
      if (solve_add_graph_edge (graph, to: t, from: rhs))
	bitmap_set_bit (changed, t);
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (set: delta, expanded: expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (n: j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* A pointer pointing before the variable is looked up at
	     offset zero.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (n: v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (node: escaped_id);
		  if (add_graph_edge (graph, to: t, from: rhs)
		      && bitmap_ior_into (get_varinfo (n: t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (node: v->id);

	      if (solve_add_graph_edge (graph, to: t, from: rhs))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (vi: v);
	}
      while (v->offset < fieldoffset + size);
    }
}
1796 | |
1797 | /* Handle a non-simple (simple meaning requires no iteration), |
1798 | constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */ |
1799 | |
1800 | static void |
1801 | do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta, |
1802 | bitmap *expanded_delta) |
1803 | { |
1804 | if (c->lhs.type == DEREF) |
1805 | { |
1806 | if (c->rhs.type == ADDRESSOF) |
1807 | { |
1808 | gcc_unreachable (); |
1809 | } |
1810 | else |
1811 | { |
1812 | /* *x = y */ |
1813 | do_ds_constraint (c, delta, expanded_delta); |
1814 | } |
1815 | } |
1816 | else if (c->rhs.type == DEREF) |
1817 | { |
1818 | /* x = *y */ |
1819 | if (!(get_varinfo (n: c->lhs.var)->is_special_var)) |
1820 | do_sd_constraint (graph, c, delta, expanded_delta); |
1821 | } |
1822 | else |
1823 | { |
1824 | bitmap tmp; |
1825 | bool flag = false; |
1826 | |
1827 | gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR |
1828 | && c->rhs.offset != 0 && c->lhs.offset == 0); |
1829 | tmp = get_varinfo (n: c->lhs.var)->solution; |
1830 | |
1831 | flag = set_union_with_increment (to: tmp, delta, inc: c->rhs.offset, |
1832 | expanded_delta); |
1833 | |
1834 | if (flag) |
1835 | bitmap_set_bit (changed, c->lhs.var); |
1836 | } |
1837 | } |
1838 | |
/* Initialize and return a new SCC info structure of SIZE nodes.  */

scc_info::scc_info (size_t size) :
  visited (size), deleted (size), current_index (0), scc_stack (1)
{
  bitmap_clear (visited);
  bitmap_clear (deleted);
  /* node_mapping is the union-find representative array; dfs holds the
     DFS discovery numbers (zero-initialized by XCNEWVEC).  */
  node_mapping = XNEWVEC (unsigned int, size);
  dfs = XCNEWVEC (unsigned int, size);

  /* Initially every node is its own representative.  */
  for (size_t i = 0; i < size; i++)
    node_mapping[i] = i;
}
1852 | |
/* Free an SCC info structure pointed to by SI.  The sbitmap and stack
   members are released by their own destructors; only the plain arrays
   need explicit freeing.  */

scc_info::~scc_info ()
{
  free (ptr: node_mapping);
  free (ptr: dfs);
}
1860 | |
1861 | |
1862 | /* Find indirect cycles in GRAPH that occur, using strongly connected |
1863 | components, and note them in the indirect cycles map. |
1864 | |
1865 | This technique comes from Ben Hardekopf and Calvin Lin, |
1866 | "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of |
1867 | Lines of Code", submitted to PLDI 2007. */ |
1868 | |
1869 | static void |
1870 | find_indirect_cycles (constraint_graph_t graph) |
1871 | { |
1872 | unsigned int i; |
1873 | unsigned int size = graph->size; |
1874 | scc_info si (size); |
1875 | |
1876 | for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ ) |
1877 | if (!bitmap_bit_p (map: si.visited, bitno: i) && find (node: i) == i) |
1878 | scc_visit (graph, si: &si, n: i); |
1879 | } |
1880 | |
1881 | /* Visit the graph in topological order starting at node N, and store the |
1882 | order in TOPO_ORDER using VISITED to indicate visited nodes. */ |
1883 | |
1884 | static void |
1885 | topo_visit (constraint_graph_t graph, vec<unsigned> &topo_order, |
1886 | sbitmap visited, unsigned int n) |
1887 | { |
1888 | bitmap_iterator bi; |
1889 | unsigned int j; |
1890 | |
1891 | bitmap_set_bit (map: visited, bitno: n); |
1892 | |
1893 | if (graph->succs[n]) |
1894 | EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi) |
1895 | { |
1896 | unsigned k = find (node: j); |
1897 | if (!bitmap_bit_p (map: visited, bitno: k)) |
1898 | topo_visit (graph, topo_order, visited, n: k); |
1899 | } |
1900 | |
1901 | topo_order.quick_push (obj: n); |
1902 | } |
1903 | |
1904 | /* Compute a topological ordering for GRAPH, and return the result. */ |
1905 | |
1906 | static auto_vec<unsigned> |
1907 | compute_topo_order (constraint_graph_t graph) |
1908 | { |
1909 | unsigned int i; |
1910 | unsigned int size = graph->size; |
1911 | |
1912 | auto_sbitmap visited (size); |
1913 | bitmap_clear (visited); |
1914 | |
1915 | /* For the heuristic in add_graph_edge to work optimally make sure to |
1916 | first visit the connected component of the graph containing |
1917 | ESCAPED. Do this by extracting the connected component |
1918 | with ESCAPED and append that to all other components as solve_graph |
1919 | pops from the order. */ |
1920 | auto_vec<unsigned> tail (size); |
1921 | topo_visit (graph, topo_order&: tail, visited, n: find (node: escaped_id)); |
1922 | |
1923 | auto_vec<unsigned> topo_order (size); |
1924 | |
1925 | for (i = 0; i != size; ++i) |
1926 | if (!bitmap_bit_p (map: visited, bitno: i) && find (node: i) == i) |
1927 | topo_visit (graph, topo_order, visited, n: i); |
1928 | |
1929 | topo_order.splice (src: tail); |
1930 | return topo_order; |
1931 | } |
1932 | |
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  hashval_t hashcode;		  /* Cached hash of LABELS.  */
  unsigned int equivalence_class; /* Assigned class id; 0 means none yet.  */
  bitmap labels;		  /* The set of labels this entry stands for.  */
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1943 | |
/* Equiv_class_label hashtable helpers.  Entries are obstack-allocated,
   hence the nofree base.  */

struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label>
{
  static inline hashval_t hash (const equiv_class_label *);
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};
1952 | |
/* Hash function for an equiv_class_label_t.  Returns the hash cached
   when the entry was created.  */

inline hashval_t
equiv_class_hasher::hash (const equiv_class_label *ecl)
{
  return ecl->hashcode;
}
1960 | |
/* Equality function for two equiv_class_label_t's.  The cheap hash
   comparison filters out most mismatches before the bitmap walk.  */

inline bool
equiv_class_hasher::equal (const equiv_class_label *eql1,
			   const equiv_class_label *eql2)
{
  return (eql1->hashcode == eql2->hashcode
	  && bitmap_equal_p (eql1->labels, eql2->labels));
}
1970 | |
/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static hash_table<equiv_class_hasher> *pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static hash_table<equiv_class_hasher> *location_equiv_class_table;

/* Obstack on which the equiv_class_label entries of both tables above
   are allocated; released wholesale in free_var_substitution_info.  */
struct obstack equiv_class_obstack;
1980 | |
/* Lookup the equivalence class entry in TABLE for the bitmap of LABELS.
   If none exists yet, insert a fresh entry with equivalence_class 0
   (meaning "not yet assigned").  The stored entry shares LABELS, the
   bitmap is not copied.  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
			   bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table->find_slot (value: &ecl, insert: INSERT);
  if (!*slot)
    {
      /* Allocate on the equiv class obstack; everything is freed at
	 once in free_var_substitution_info.  */
      *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
2005 | |
2006 | /* Perform offline variable substitution. |
2007 | |
2008 | This is a worst case quadratic time way of identifying variables |
2009 | that must have equivalent points-to sets, including those caused by |
2010 | static cycles, and single entry subgraphs, in the constraint graph. |
2011 | |
   The technique is described in "Exploiting Pointer and Location
   Equivalence to Optimize Pointer Analysis", in the 14th International
   Static Analysis Symposium (SAS), August 2007.  It is known as the
2015 | "HU" algorithm, and is equivalent to value numbering the collapsed |
2016 | constraint graph including evaluating unions. |
2017 | |
2018 | The general method of finding equivalence classes is as follows: |
2019 | Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints. |
2020 | Initialize all non-REF nodes to be direct nodes. |
2021 | For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh |
2022 | variable} |
2023 | For each constraint containing the dereference, we also do the same |
2024 | thing. |
2025 | |
2026 | We then compute SCC's in the graph and unify nodes in the same SCC, |
2027 | including pts sets. |
2028 | |
2029 | For each non-collapsed node x: |
2030 | Visit all unvisited explicit incoming edges. |
2031 | Ignoring all non-pointers, set pts(x) = Union of pts(a) for y |
2032 | where y->x. |
2033 | Lookup the equivalence class for pts(x). |
2034 | If we found one, equivalence_class(x) = found class. |
2035 | Otherwise, equivalence_class(x) = new class, and new_class is |
2036 | added to the lookup table. |
2037 | |
2038 | All direct nodes with the same equivalence class can be replaced |
2039 | with a single representative node. |
2040 | All unlabeled nodes (label == 0) are not pointers and all edges |
2041 | involving them can be eliminated. |
2042 | We perform these optimizations during rewrite_constraints |
2043 | |
2044 | In addition to pointer equivalence class finding, we also perform |
2045 | location equivalence class finding. This is the set of variables |
2046 | that always appear together in points-to sets. We use this to |
2047 | compress the size of the points-to sets. */ |
2048 | |
/* Current maximum pointer equivalence class id.  Class id 0 is
   reserved to mean "non-pointer"; counting starts at 1.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id (0 likewise means
   "unassigned"; counting starts at 1).  */
static int location_equiv_class;
2054 | |
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Works on the predecessor
   graph (explicit plus implicit edges); each SCC found is collapsed
   onto its root N via SI->node_mapping, merging the members' pred
   and points-to bitmaps into the root.  */

static void
condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  /* N must still be its own representative when we start.  */
  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (map: si->visited, bitno: n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the explicit predecessor edges.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      /* Nodes already assigned to a completed SCC are skipped.  */
      if (bitmap_bit_p (map: si->deleted, bitno: w))
	continue;

      if (!bitmap_bit_p (map: si->visited, bitno: w))
	condense_visit (graph, si, n: w);

      /* Standard Tarjan lowlink update through the representative.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (map: si->deleted, bitno: w))
	continue;

      if (!bitmap_bit_p (map: si->visited, bitno: w))
	condense_visit (graph, si, n: w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is an SCC root; everything still on the stack with a DFS
	 number >= my_dfs belongs to its SCC.  */
      if (si->scc_stack.length () != 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  /* Find the first node of the SCC and do non-bitmap work.  */
	  bool direct_p = true;
	  unsigned first = si->scc_stack.length ();
	  do
	    {
	      --first;
	      unsigned int w = si->scc_stack[first];
	      si->node_mapping[w] = n;
	      if (!bitmap_bit_p (map: graph->direct_nodes, bitno: w))
		direct_p = false;
	    }
	  while (first > 0
		 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
	  /* One indirect member makes the whole collapsed SCC
	     indirect.  */
	  if (!direct_p)
	    bitmap_clear_bit (map: graph->direct_nodes, bitno: n);

	  /* Want to reduce to node n, push that first.  */
	  si->scc_stack.reserve (nelems: 1);
	  si->scc_stack.quick_push (obj: si->scc_stack[first]);
	  si->scc_stack[first] = n;

	  /* Merge the members' bitmaps pairwise, halving the number of
	     live sets each round, so the unions stay balanced instead
	     of accumulating serially into one ever-growing set.  */
	  unsigned scc_size = si->scc_stack.length () - first;
	  unsigned split = scc_size / 2;
	  unsigned carry = scc_size - split * 2;
	  while (split > 0)
	    {
	      for (unsigned i = 0; i < split; ++i)
		{
		  unsigned a = si->scc_stack[first + i];
		  unsigned b = si->scc_stack[first + split + carry + i];

		  /* Unify our nodes.  */
		  if (graph->preds[b])
		    {
		      if (!graph->preds[a])
			std::swap (a&: graph->preds[a], b&: graph->preds[b]);
		      else
			bitmap_ior_into_and_free (graph->preds[a],
						  &graph->preds[b]);
		    }
		  if (graph->implicit_preds[b])
		    {
		      if (!graph->implicit_preds[a])
			std::swap (a&: graph->implicit_preds[a],
				   b&: graph->implicit_preds[b]);
		      else
			bitmap_ior_into_and_free (graph->implicit_preds[a],
						  &graph->implicit_preds[b]);
		    }
		  if (graph->points_to[b])
		    {
		      if (!graph->points_to[a])
			std::swap (a&: graph->points_to[a], b&: graph->points_to[b]);
		      else
			bitmap_ior_into_and_free (graph->points_to[a],
						  &graph->points_to[b]);
		    }
		}
	      unsigned remain = split + carry;
	      split = remain / 2;
	      carry = remain - split * 2;
	    }
	  /* Actually pop the SCC.  */
	  si->scc_stack.truncate (size: first);
	}
      bitmap_set_bit (map: si->deleted, bitno: n);
    }
  else
    si->scc_stack.safe_push (obj: n);
}
2180 | |
/* Label pointer equivalences.

   This performs a value numbering of the constraint graph to
   discover which variables will always have the same points-to sets
   under the current set of constraints.

   The way it value numbers is to store the set of points-to bits
   generated by the constraints and graph edges.  This is just used as a
   hash and equality comparison.  The *actual set of points-to bits* is
   completely irrelevant, in that we don't care about being able to
   extract them later.

   The equality values (currently bitmaps) just have to satisfy a few
   constraints, the main ones being:
   1. The combining operation must be order independent.
   2. The end result of a given set of operations must be unique iff the
      combination of input values is unique
   3. Hashable.  */

static void
label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (map: si->visited, bitno: n);

  /* Label and union our incoming edges's points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (map: si->visited, bitno: w))
	label_visit (graph, si, n: w);

      /* Skip unused edges (self-edges and non-pointer preds).  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Delay allocating a set for N until a second non-empty
		 predecessor shows up; with exactly one we may be able
		 to share its bitmap (see below).  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (obstack: &predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (map: graph->direct_nodes, bitno: n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (obstack: &predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* The fresh variable is N's own REF node, which makes the set
	 unique to N.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (table: pointer_equiv_class_table,
				       labels: graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      /* Otherwise N's label is left alone — label 0 is treated as
	 "non-pointer" by rewrite_constraints and
	 perform_var_substitution.  */
      return;
    }

  if (!bitmap_empty_p (map: graph->points_to[n]))
    {
      /* Hash-cons the points-to set: nodes with equal sets share one
	 equivalence class (and, after the free below, one bitmap).  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (table: pointer_equiv_class_table,
				       labels: graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2284 | |
/* Print the pred graph in dot format, using SI's node_mapping to show
   only SCC representatives.  REF nodes are printed with a '*' prefix.  */

static void
dump_pred_graph (class scc_info *si, FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized: */
  if (!graph)
    return;

  /* Prints the header of the dot file: */
  fprintf (stream: file, format: "strict digraph {\n" );
  fprintf (stream: file, format: "  node [\n    shape = box\n  ]\n" );
  fprintf (stream: file, format: "  edge [\n    fontsize = \"12\"\n  ]\n" );
  fprintf (stream: file, format: "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n" );

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      /* Skip non-representative (collapsed) nodes.  */
      if (si->node_mapping[i] != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: i)->name);
      else
	fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: i - FIRST_REF_NODE)->name);
      if (graph->points_to[i]
	  && !bitmap_empty_p (map: graph->points_to[i]))
	{
	  if (i < FIRST_REF_NODE)
	    fprintf (stream: file, format: "[label=\"%s = {" , get_varinfo (n: i)->name);
	  else
	    fprintf (stream: file, format: "[label=\"*%s = {" ,
		     get_varinfo (n: i - FIRST_REF_NODE)->name);
	  unsigned j;
	  bitmap_iterator bi;
	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
	    fprintf (stream: file, format: " %d" , j);
	  fprintf (stream: file, format: " }\"]" );
	}
      fprintf (stream: file, format: ";\n" );
    }

  /* Go over the edges.  */
  fprintf (stream: file, format: "\n  // Edges in the constraint graph:\n" );
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (si->node_mapping[i] != i)
	continue;
      /* The pred graph edges are printed source -> sink.  */
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
	{
	  unsigned from = si->node_mapping[j];
	  if (from < FIRST_REF_NODE)
	    fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: from)->name);
	  else
	    fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: from - FIRST_REF_NODE)->name);
	  fprintf (stream: file, format: " -> " );
	  if (i < FIRST_REF_NODE)
	    fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: i)->name);
	  else
	    fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: i - FIRST_REF_NODE)->name);
	  fprintf (stream: file, format: ";\n" );
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (stream: file, format: "}\n" );
}
2359 | |
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  Returns the
   scc_info to be passed to rewrite_constraints and later freed by
   free_var_substitution_info.  */

static class scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info *si = new scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  gcc_obstack_init (&equiv_class_obstack);
  pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
  location_equiv_class_table
    = new hash_table<equiv_class_hasher> (511);
  /* Class id 0 is reserved ("non-pointer" / "unassigned").  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (map: si->visited, bitno: si->node_mapping[i]))
      condense_visit (graph, si, n: si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (stream: dump_file, format: "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n" );
      dump_pred_graph (si, file: dump_file);
      fprintf (stream: dump_file, format: "\n\n" );
    }

  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (map: si->visited, bitno: si->node_mapping[i]))
      label_visit (graph, si, n: si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (obstack: &iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (table: location_equiv_class_table, labels: pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (stream: dump_file, format: "Found location equivalence for node %s\n" ,
		     get_varinfo (n: i)->name);
	  /* An equal set already exists; this copy is redundant.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (stream: dump_file, format: "%s node id %d " ,
		     bitmap_bit_p (map: graph->direct_nodes, bitno: i)
		     ? "Direct" : "Indirect" , i);
	    if (i < FIRST_REF_NODE)
	      fprintf (stream: dump_file, format: "\"%s\"" , get_varinfo (n: i)->name);
	    else
	      fprintf (stream: dump_file, format: "\"*%s\"" ,
		       get_varinfo (n: i - FIRST_REF_NODE)->name);
	    fprintf (stream: dump_file, format: " mapped to SCC leader node id %d " , j);
	    if (j < FIRST_REF_NODE)
	      fprintf (stream: dump_file, format: "\"%s\"\n" , get_varinfo (n: j)->name);
	    else
	      fprintf (stream: dump_file, format: "\"*%s\"\n" ,
		       get_varinfo (n: j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (stream: dump_file,
		     format: "Equivalence classes for %s node id %d " ,
		     bitmap_bit_p (map: graph->direct_nodes, bitno: i)
		     ? "direct" : "indirect" , i);
	    if (i < FIRST_REF_NODE)
	      fprintf (stream: dump_file, format: "\"%s\"" , get_varinfo (n: i)->name);
	    else
	      fprintf (stream: dump_file, format: "\"*%s\"" ,
		       get_varinfo (n: i - FIRST_REF_NODE)->name);
	    fprintf (stream: dump_file,
		     format: ": pointer %d, location %d\n" ,
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer label 0 marks a non-pointer (see label_visit).  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (stream: dump_file,
		     format: "%s is a non-pointer variable, eliminating edges.\n" ,
		     get_varinfo (n: node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2493 | |
/* Free information that was only necessary for variable
   substitution.  */

static void
free_var_substitution_info (class scc_info *si)
{
  delete si;
  free (ptr: graph->pointer_label);
  free (ptr: graph->loc_label);
  free (ptr: graph->pointed_by);
  free (ptr: graph->points_to);
  free (ptr: graph->eq_rep);
  sbitmap_free (map: graph->direct_nodes);
  delete pointer_equiv_class_table;
  pointer_equiv_class_table = NULL;
  delete location_equiv_class_table;
  location_equiv_class_table = NULL;
  /* Releasing the obstack frees every equiv_class_label that
     equiv_class_lookup_or_add allocated, in one go.  */
  obstack_free (&equiv_class_obstack, NULL);
  bitmap_obstack_release (&iteration_obstack);
}
2514 | |
/* Return an existing node that is equivalent to NODE, which has
   equivalence class LABEL, if one exists.  Return NODE otherwise.  */

static unsigned int
find_equivalent_node (constraint_graph_t graph,
		      unsigned int node, unsigned int label)
{
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
    {
      gcc_checking_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
	{
	  /* Unify the two variables since we know they are equivalent.  */
	  if (unite (to: graph->eq_rep[label], from: node))
	    unify_nodes (graph, to: graph->eq_rep[label], from: node, update_changed: false);
	  return graph->eq_rep[label];
	}
      else
	{
	  /* First node seen with LABEL: it becomes both the full
	     equivalence representative and the pointer-equivalence
	     representative.  */
	  graph->eq_rep[label] = node;
	  graph->pe_rep[label] = node;
	}
    }
  else
    {
      /* Address-taken: only record the pointer-equivalence label;
	 unite_pointer_equivalences merges these after substitution.  */
      gcc_checking_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
	graph->pe_rep[label] = node;
    }

  return node;
}
2554 | |
2555 | /* Unite pointer equivalent but not location equivalent nodes in |
2556 | GRAPH. This may only be performed once variable substitution is |
2557 | finished. */ |
2558 | |
2559 | static void |
2560 | unite_pointer_equivalences (constraint_graph_t graph) |
2561 | { |
2562 | unsigned int i; |
2563 | |
2564 | /* Go through the pointer equivalences and unite them to their |
2565 | representative, if they aren't already. */ |
2566 | for (i = 1; i < FIRST_REF_NODE; i++) |
2567 | { |
2568 | unsigned int label = graph->pe[i]; |
2569 | if (label) |
2570 | { |
2571 | int label_rep = graph->pe_rep[label]; |
2572 | |
2573 | if (label_rep == -1) |
2574 | continue; |
2575 | |
2576 | label_rep = find (node: label_rep); |
2577 | if (label_rep >= 0 && unite (to: label_rep, from: find (node: i))) |
2578 | unify_nodes (graph, to: label_rep, from: i, update_changed: false); |
2579 | } |
2580 | } |
2581 | } |
2582 | |
2583 | /* Move complex constraints to the GRAPH nodes they belong to. */ |
2584 | |
2585 | static void |
2586 | move_complex_constraints (constraint_graph_t graph) |
2587 | { |
2588 | int i; |
2589 | constraint_t c; |
2590 | |
2591 | FOR_EACH_VEC_ELT (constraints, i, c) |
2592 | { |
2593 | if (c) |
2594 | { |
2595 | struct constraint_expr lhs = c->lhs; |
2596 | struct constraint_expr rhs = c->rhs; |
2597 | |
2598 | if (lhs.type == DEREF) |
2599 | { |
2600 | insert_into_complex (graph, var: lhs.var, c); |
2601 | } |
2602 | else if (rhs.type == DEREF) |
2603 | { |
2604 | if (!(get_varinfo (n: lhs.var)->is_special_var)) |
2605 | insert_into_complex (graph, var: rhs.var, c); |
2606 | } |
2607 | else if (rhs.type != ADDRESSOF && lhs.var > anything_id |
2608 | && (lhs.offset != 0 || rhs.offset != 0)) |
2609 | { |
2610 | insert_into_complex (graph, var: rhs.var, c); |
2611 | } |
2612 | } |
2613 | } |
2614 | } |
2615 | |
2616 | |
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_var_substitution.  Constraints involving
   non-pointer variables (pointer label 0) are dropped by NULLing
   their slot in the constraints vector.  */

static void
rewrite_constraints (constraint_graph_t graph,
		     class scc_info *si)
{
  int i;
  constraint_t c;

  if (flag_checking)
    {
      /* At this point no node may have been united yet.  */
      for (unsigned int j = 0; j < graph->size; j++)
	gcc_assert (find (j) == j);
    }

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (node: lhs.var);
      unsigned int rhsvar = find (node: rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (stream: dump_file, format: "%s is a non-pointer variable, "
		       "ignoring constraint:" ,
		       get_varinfo (n: lhs.var)->name);
	      dump_constraint (file: dump_file, c);
	      fprintf (stream: dump_file, format: "\n" );
	    }
	  constraints[i] = NULL;
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (stream: dump_file, format: "%s is a non-pointer variable, "
		       "ignoring constraint:" ,
		       get_varinfo (n: rhs.var)->name);
	      dump_constraint (file: dump_file, c);
	      fprintf (stream: dump_file, format: "\n" );
	    }
	  constraints[i] = NULL;
	  continue;
	}

      /* Replace both sides by their equivalence-class representatives.  */
      lhsvar = find_equivalent_node (graph, node: lhsvar, label: lhslabel);
      rhsvar = find_equivalent_node (graph, node: rhsvar, label: rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;
    }
}
2686 | |
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  /* indirect_cycles[node] names the representative NODE collapses
     into when an indirect cycle was recorded; -1 means none.  */
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (map: get_varinfo (n: node)->solution))
    {
      unsigned int i;
      auto_vec<unsigned> queue;
      int queuepos;
      unsigned int to = find (node: graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it.  */

      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (node: i) == i && i != to)
	    {
	      if (unite (to, from: i))
		queue.safe_push (obj: i);
	    }
	}

      /* Iteration over the solution is done; now perform the deferred
	 unifications.  */
      for (queuepos = 0;
	   queue.iterate (ix: queuepos, ptr: &i);
	   queuepos++)
	{
	  unify_nodes (graph, to, from: i, update_changed: true);
	}
      return true;
    }
  return false;
}
2725 | |
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.

   Uses the global CHANGED bitmap as the worklist; a node's "delta" (the
   bits newly added since the last visit) is computed against its
   oldsolution so complex constraints and successors only see new bits.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (n: i);
      /* Only representatives with a non-empty solution and either
	 outgoing copy edges or complex constraints can contribute.  */
      if (find (node: i) == i && !bitmap_empty_p (map: ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (map: graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (obstack: &pta_obstack);

  while (!bitmap_empty_p (map: changed))
    {
      unsigned int i;
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Visit nodes in (reverse) topological order of the copy graph
	 to maximize propagation per iteration.  */
      auto_vec<unsigned> topo_order = compute_topo_order (graph);
      while (topo_order.length () != 0)
	{
	  i = topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (node: i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (node: i) && find (node: i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  For complex
	     constraints that modify i itself, like the common group of
	       callarg = callarg + UNKNOWN;
	       callarg = *callarg + UNKNOWN;
	       *callarg = callescape;
	     make sure to iterate immediately because that maximizes
	     cache reuse and expands the graph quickest, leading to
	     better visitation order in the next iteration.  */
	  while (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (n: i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
	         is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    break;
		  bitmap_copy (pts, get_varinfo (n: find (node: anything_id))->solution);
		}
	      else if (vi->oldsolution)
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      /* An empty delta means nothing new to propagate.  */
	      if (bitmap_empty_p (map: pts))
		break;

	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (obstack: &oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (map: solution);

	      /* Process the complex constraints */
	      bitmap expanded_pts = NULL;
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (node: c->lhs.var);
		  c->rhs.var = find (node: c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, delta: pts, expanded_delta: &expanded_pts);
		}
	      BITMAP_FREE (expanded_pts);

	      solution_empty = bitmap_empty_p (map: solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (node: escaped_id);

		  /* Propagate solution to all successors.  TO_REMOVE
		     defers edge deletion to the next iteration step since
		     we may not clear bits while iterating the bitmap.  */
		  unsigned to_remove = ~0U;
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      if (to_remove != ~0U)
			{
			  bitmap_clear_bit (graph->succs[i], to_remove);
			  to_remove = ~0U;
			}
		      unsigned int to = find (node: j);
		      if (to != j)
			{
			  /* Update the succ graph, avoiding duplicate
			     work.  */
			  to_remove = j;
			  if (! bitmap_set_bit (graph->succs[i], to))
			    continue;
			  /* We eventually end up processing 'to' twice
			     as it is undefined whether bitmap iteration
			     iterates over bits set during iteration.
			     Play safe instead of doing tricks.  */
			}
		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			{
			  to_remove = j;
			  continue;
			}
		      /* Early node unification can lead to edges from
			 escaped - remove them.  */
		      if (i == eff_escaped_id)
			{
			  to_remove = j;
			  if (bitmap_set_bit (get_varinfo (n: to)->solution,
					      escaped_id))
			    bitmap_set_bit (changed, to);
			  continue;
			}

		      if (bitmap_ior_into (get_varinfo (n: to)->solution, pts))
			bitmap_set_bit (changed, to);
		    }
		  if (to_remove != ~0U)
		    bitmap_clear_bit (graph->succs[i], to_remove);
		}
	    }
	}
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2907 | |
/* Map from trees to variable infos.  Populated by insert_vi_for_tree,
   queried by lookup_vi_for_tree / get_vi_for_tree below.  */
static hash_map<tree, varinfo_t> *vi_for_tree;
2910 | |
2911 | |
2912 | /* Insert ID as the variable id for tree T in the vi_for_tree map. */ |
2913 | |
2914 | static void |
2915 | insert_vi_for_tree (tree t, varinfo_t vi) |
2916 | { |
2917 | gcc_assert (vi); |
2918 | gcc_assert (!vi_for_tree->put (t, vi)); |
2919 | } |
2920 | |
2921 | /* Find the variable info for tree T in VI_FOR_TREE. If T does not |
2922 | exist in the map, return NULL, otherwise, return the varinfo we found. */ |
2923 | |
2924 | static varinfo_t |
2925 | lookup_vi_for_tree (tree t) |
2926 | { |
2927 | varinfo_t *slot = vi_for_tree->get (k: t); |
2928 | if (slot == NULL) |
2929 | return NULL; |
2930 | |
2931 | return *slot; |
2932 | } |
2933 | |
2934 | /* Return a printable name for DECL */ |
2935 | |
2936 | static const char * |
2937 | alias_get_name (tree decl) |
2938 | { |
2939 | const char *res = "NULL" ; |
2940 | if (dump_file) |
2941 | { |
2942 | char *temp = NULL; |
2943 | if (TREE_CODE (decl) == SSA_NAME) |
2944 | { |
2945 | res = get_name (decl); |
2946 | temp = xasprintf ("%s_%u" , res ? res : "" , SSA_NAME_VERSION (decl)); |
2947 | } |
2948 | else if (HAS_DECL_ASSEMBLER_NAME_P (decl) |
2949 | && DECL_ASSEMBLER_NAME_SET_P (decl)) |
2950 | res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl)); |
2951 | else if (DECL_P (decl)) |
2952 | { |
2953 | res = get_name (decl); |
2954 | if (!res) |
2955 | temp = xasprintf ("D.%u" , DECL_UID (decl)); |
2956 | } |
2957 | |
2958 | if (temp) |
2959 | { |
2960 | res = ggc_strdup (temp); |
2961 | free (ptr: temp); |
2962 | } |
2963 | } |
2964 | |
2965 | return res; |
2966 | } |
2967 | |
2968 | /* Find the variable id for tree T in the map. |
2969 | If T doesn't exist in the map, create an entry for it and return it. */ |
2970 | |
2971 | static varinfo_t |
2972 | get_vi_for_tree (tree t) |
2973 | { |
2974 | varinfo_t *slot = vi_for_tree->get (k: t); |
2975 | if (slot == NULL) |
2976 | { |
2977 | unsigned int id = create_variable_info_for (t, alias_get_name (decl: t), false); |
2978 | return get_varinfo (n: id); |
2979 | } |
2980 | |
2981 | return *slot; |
2982 | } |
2983 | |
2984 | /* Get a scalar constraint expression for a new temporary variable. */ |
2985 | |
2986 | static struct constraint_expr |
2987 | new_scalar_tmp_constraint_exp (const char *name, bool add_id) |
2988 | { |
2989 | struct constraint_expr tmp; |
2990 | varinfo_t vi; |
2991 | |
2992 | vi = new_var_info (NULL_TREE, name, add_id); |
2993 | vi->offset = 0; |
2994 | vi->size = -1; |
2995 | vi->fullsize = -1; |
2996 | vi->is_full_var = 1; |
2997 | vi->is_reg_var = 1; |
2998 | |
2999 | tmp.var = vi->id; |
3000 | tmp.type = SCALAR; |
3001 | tmp.offset = 0; |
3002 | |
3003 | return tmp; |
3004 | } |
3005 | |
/* Get a constraint expression vector from an SSA_VAR_P node T,
   pushing the resulting expressions onto RESULTS.
   If address_p is true, the result will be taken its address of.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t))
    {
      /* For parameters, get at the points-to set for the actual parm
	 decl.  */
      if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
	{
	  /* Recurse on the underlying decl instead of the SSA name.  */
	  get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
	  return;
	}
      /* For undefined SSA names return nothing.  */
      else if (!ssa_defined_default_def_p (t))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (obj: cexpr);
	  return;
	}
    }

  /* For global variables resort to the alias target.  */
  if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *node = varpool_node::get (decl: t);
      if (node && node->alias && node->analyzed)
	{
	  node = node->ultimate_alias_target ();
	  /* Canonicalize the PT uid of all aliases to the ultimate target.
	     ??? Hopefully the set of aliases can't change in a way that
	     changes the ultimate alias target.  */
	  gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
		       || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
		      && (! DECL_PT_UID_SET_P (t)
			  || DECL_PT_UID (t) == DECL_UID (node->decl)));
	  DECL_PT_UID (t) = DECL_UID (node->decl);
	  t = node->decl;
	}

      /* If this is decl may bind to NULL note that.  Pushed in addition
	 to the variable itself below, so the address may also be NULL.  */
      if (address_p
	  && (! node || ! node->nonzero_address ()))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (obj: cexpr);
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      /* Walk the field chain starting at VI and push one expression
	 per sub-field.  */
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (obj: cexpr);
	}
      return;
    }

  results->safe_push (obj: cexpr);
}
3089 | |
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.  Constraints that need
   no further normalization end up in the global CONSTRAINTS vector;
   double-dereferences and complex rhs forms are split via temporaries
   by recursive calls.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (n: rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (n: lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp (name: "doubledereftmp", add_id: true);
      process_constraint (t: new_constraint (lhs: tmplhs, rhs));
      process_constraint (t: new_constraint (lhs, rhs: tmplhs));
    }
  else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp (name: "derefaddrtmp", add_id: true);
      process_constraint (t: new_constraint (lhs: tmplhs, rhs));
      process_constraint (t: new_constraint (lhs, rhs: tmplhs));
    }
  else
    {
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      /* Record that the pointed-to variable had its address taken so
	 later phases know it escapes into pointer values.  */
      if (rhs.type == ADDRESSOF)
	get_varinfo (n: get_varinfo (n: rhs.var)->head)->address_taken = true;
      constraints.safe_push (obj: t);
    }
}
3147 | |
3148 | |
3149 | /* Return the position, in bits, of FIELD_DECL from the beginning of its |
3150 | structure. */ |
3151 | |
3152 | static HOST_WIDE_INT |
3153 | bitpos_of_field (const tree fdecl) |
3154 | { |
3155 | if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl)) |
3156 | || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl))) |
3157 | return -1; |
3158 | |
3159 | return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT |
3160 | + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl))); |
3161 | } |
3162 | |
3163 | |
3164 | /* Get constraint expressions for offsetting PTR by OFFSET. Stores the |
3165 | resulting constraint expressions in *RESULTS. */ |
3166 | |
3167 | static void |
3168 | get_constraint_for_ptr_offset (tree ptr, tree offset, |
3169 | vec<ce_s> *results) |
3170 | { |
3171 | struct constraint_expr c; |
3172 | unsigned int j, n; |
3173 | HOST_WIDE_INT rhsoffset; |
3174 | |
3175 | /* If we do not do field-sensitive PTA adding offsets to pointers |
3176 | does not change the points-to solution. */ |
3177 | if (!use_field_sensitive) |
3178 | { |
3179 | get_constraint_for_rhs (ptr, results); |
3180 | return; |
3181 | } |
3182 | |
3183 | /* If the offset is not a non-negative integer constant that fits |
3184 | in a HOST_WIDE_INT, we have to fall back to a conservative |
3185 | solution which includes all sub-fields of all pointed-to |
3186 | variables of ptr. */ |
3187 | if (offset == NULL_TREE |
3188 | || TREE_CODE (offset) != INTEGER_CST) |
3189 | rhsoffset = UNKNOWN_OFFSET; |
3190 | else |
3191 | { |
3192 | /* Sign-extend the offset. */ |
3193 | offset_int soffset = offset_int::from (x: wi::to_wide (t: offset), sgn: SIGNED); |
3194 | if (!wi::fits_shwi_p (x: soffset)) |
3195 | rhsoffset = UNKNOWN_OFFSET; |
3196 | else |
3197 | { |
3198 | /* Make sure the bit-offset also fits. */ |
3199 | HOST_WIDE_INT rhsunitoffset = soffset.to_shwi (); |
3200 | rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT; |
3201 | if (rhsunitoffset != rhsoffset / BITS_PER_UNIT) |
3202 | rhsoffset = UNKNOWN_OFFSET; |
3203 | } |
3204 | } |
3205 | |
3206 | get_constraint_for_rhs (ptr, results); |
3207 | if (rhsoffset == 0) |
3208 | return; |
3209 | |
3210 | /* As we are eventually appending to the solution do not use |
3211 | vec::iterate here. */ |
3212 | n = results->length (); |
3213 | for (j = 0; j < n; j++) |
3214 | { |
3215 | varinfo_t curr; |
3216 | c = (*results)[j]; |
3217 | curr = get_varinfo (n: c.var); |
3218 | |
3219 | if (c.type == ADDRESSOF |
3220 | /* If this varinfo represents a full variable just use it. */ |
3221 | && curr->is_full_var) |
3222 | ; |
3223 | else if (c.type == ADDRESSOF |
3224 | /* If we do not know the offset add all subfields. */ |
3225 | && rhsoffset == UNKNOWN_OFFSET) |
3226 | { |
3227 | varinfo_t temp = get_varinfo (n: curr->head); |
3228 | do |
3229 | { |
3230 | struct constraint_expr c2; |
3231 | c2.var = temp->id; |
3232 | c2.type = ADDRESSOF; |
3233 | c2.offset = 0; |
3234 | if (c2.var != c.var) |
3235 | results->safe_push (obj: c2); |
3236 | temp = vi_next (vi: temp); |
3237 | } |
3238 | while (temp); |
3239 | } |
3240 | else if (c.type == ADDRESSOF) |
3241 | { |
3242 | varinfo_t temp; |
3243 | unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset; |
3244 | |
3245 | /* If curr->offset + rhsoffset is less than zero adjust it. */ |
3246 | if (rhsoffset < 0 |
3247 | && curr->offset < offset) |
3248 | offset = 0; |
3249 | |
3250 | /* We have to include all fields that overlap the current |
3251 | field shifted by rhsoffset. And we include at least |
3252 | the last or the first field of the variable to represent |
3253 | reachability of off-bound addresses, in particular &object + 1, |
3254 | conservatively correct. */ |
3255 | temp = first_or_preceding_vi_for_offset (curr, offset); |
3256 | c.var = temp->id; |
3257 | c.offset = 0; |
3258 | temp = vi_next (vi: temp); |
3259 | while (temp |
3260 | && temp->offset < offset + curr->size) |
3261 | { |
3262 | struct constraint_expr c2; |
3263 | c2.var = temp->id; |
3264 | c2.type = ADDRESSOF; |
3265 | c2.offset = 0; |
3266 | results->safe_push (obj: c2); |
3267 | temp = vi_next (vi: temp); |
3268 | } |
3269 | } |
3270 | else if (c.type == SCALAR) |
3271 | { |
3272 | gcc_assert (c.offset == 0); |
3273 | c.offset = rhsoffset; |
3274 | } |
3275 | else |
3276 | /* We shouldn't get any DEREFs here. */ |
3277 | gcc_unreachable (); |
3278 | |
3279 | (*results)[j] = c; |
3280 | } |
3281 | } |
3282 | |
3283 | |
3284 | /* Given a COMPONENT_REF T, return the constraint_expr vector for it. |
3285 | If address_p is true the result will be taken its address of. |
3286 | If lhs_p is true then the constraint expression is assumed to be used |
3287 | as the lhs. */ |
3288 | |
3289 | static void |
3290 | get_constraint_for_component_ref (tree t, vec<ce_s> *results, |
3291 | bool address_p, bool lhs_p) |
3292 | { |
3293 | tree orig_t = t; |
3294 | poly_int64 bitsize = -1; |
3295 | poly_int64 bitmaxsize = -1; |
3296 | poly_int64 bitpos; |
3297 | bool reverse; |
3298 | tree forzero; |
3299 | |
3300 | /* Some people like to do cute things like take the address of |
3301 | &0->a.b */ |
3302 | forzero = t; |
3303 | while (handled_component_p (t: forzero) |
3304 | || INDIRECT_REF_P (forzero) |
3305 | || TREE_CODE (forzero) == MEM_REF) |
3306 | forzero = TREE_OPERAND (forzero, 0); |
3307 | |
3308 | if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero)) |
3309 | { |
3310 | struct constraint_expr temp; |
3311 | |
3312 | temp.offset = 0; |
3313 | temp.var = integer_id; |
3314 | temp.type = SCALAR; |
3315 | results->safe_push (obj: temp); |
3316 | return; |
3317 | } |
3318 | |
3319 | t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse); |
3320 | |
3321 | /* We can end up here for component references on a |
3322 | VIEW_CONVERT_EXPR <>(&foobar) or things like a |
3323 | BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for |
3324 | symbolic constants simply give up. */ |
3325 | if (TREE_CODE (t) == ADDR_EXPR) |
3326 | { |
3327 | constraint_expr result; |
3328 | result.type = SCALAR; |
3329 | result.var = anything_id; |
3330 | result.offset = 0; |
3331 | results->safe_push (obj: result); |
3332 | return; |
3333 | } |
3334 | |
3335 | /* Avoid creating pointer-offset constraints, so handle MEM_REF |
3336 | offsets directly. Pretend to take the address of the base, |
3337 | we'll take care of adding the required subset of sub-fields below. */ |
3338 | if (TREE_CODE (t) == MEM_REF |
3339 | && !integer_zerop (TREE_OPERAND (t, 0))) |
3340 | { |
3341 | poly_offset_int off = mem_ref_offset (t); |
3342 | off <<= LOG2_BITS_PER_UNIT; |
3343 | off += bitpos; |
3344 | poly_int64 off_hwi; |
3345 | if (off.to_shwi (r: &off_hwi)) |
3346 | bitpos = off_hwi; |
3347 | else |
3348 | { |
3349 | bitpos = 0; |
3350 | bitmaxsize = -1; |
3351 | } |
3352 | get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p); |
3353 | do_deref (results); |
3354 | } |
3355 | else |
3356 | get_constraint_for_1 (t, results, true, lhs_p); |
3357 | |
3358 | /* Strip off nothing_id. */ |
3359 | if (results->length () == 2) |
3360 | { |
3361 | gcc_assert ((*results)[0].var == nothing_id); |
3362 | results->unordered_remove (ix: 0); |
3363 | } |
3364 | gcc_assert (results->length () == 1); |
3365 | struct constraint_expr &result = results->last (); |
3366 | |
3367 | if (result.type == SCALAR |
3368 | && get_varinfo (n: result.var)->is_full_var) |
3369 | /* For single-field vars do not bother about the offset. */ |
3370 | result.offset = 0; |
3371 | else if (result.type == SCALAR) |
3372 | { |
3373 | /* In languages like C, you can access one past the end of an |
3374 | array. You aren't allowed to dereference it, so we can |
3375 | ignore this constraint. When we handle pointer subtraction, |
3376 | we may have to do something cute here. */ |
3377 | |
3378 | if (maybe_lt (a: poly_uint64 (bitpos), b: get_varinfo (n: result.var)->fullsize) |
3379 | && maybe_ne (a: bitmaxsize, b: 0)) |
3380 | { |
3381 | /* It's also not true that the constraint will actually start at the |
3382 | right offset, it may start in some padding. We only care about |
3383 | setting the constraint to the first actual field it touches, so |
3384 | walk to find it. */ |
3385 | struct constraint_expr cexpr = result; |
3386 | varinfo_t curr; |
3387 | results->pop (); |
3388 | cexpr.offset = 0; |
3389 | for (curr = get_varinfo (n: cexpr.var); curr; curr = vi_next (vi: curr)) |
3390 | { |
3391 | if (ranges_maybe_overlap_p (pos1: poly_int64 (curr->offset), |
3392 | size1: curr->size, pos2: bitpos, size2: bitmaxsize)) |
3393 | { |
3394 | cexpr.var = curr->id; |
3395 | results->safe_push (obj: cexpr); |
3396 | if (address_p) |
3397 | break; |
3398 | } |
3399 | } |
3400 | /* If we are going to take the address of this field then |
3401 | to be able to compute reachability correctly add at least |
3402 | the last field of the variable. */ |
3403 | if (address_p && results->length () == 0) |
3404 | { |
3405 | curr = get_varinfo (n: cexpr.var); |
3406 | while (curr->next != 0) |
3407 | curr = vi_next (vi: curr); |
3408 | cexpr.var = curr->id; |
3409 | results->safe_push (obj: cexpr); |
3410 | } |
3411 | else if (results->length () == 0) |
3412 | /* Assert that we found *some* field there. The user couldn't be |
3413 | accessing *only* padding. */ |
3414 | /* Still the user could access one past the end of an array |
3415 | embedded in a struct resulting in accessing *only* padding. */ |
3416 | /* Or accessing only padding via type-punning to a type |
3417 | that has a filed just in padding space. */ |
3418 | { |
3419 | cexpr.type = SCALAR; |
3420 | cexpr.var = anything_id; |
3421 | cexpr.offset = 0; |
3422 | results->safe_push (obj: cexpr); |
3423 | } |
3424 | } |
3425 | else if (known_eq (bitmaxsize, 0)) |
3426 | { |
3427 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3428 | fprintf (stream: dump_file, format: "Access to zero-sized part of variable, " |
3429 | "ignoring\n" ); |
3430 | } |
3431 | else |
3432 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3433 | fprintf (stream: dump_file, format: "Access to past the end of variable, ignoring\n" ); |
3434 | } |
3435 | else if (result.type == DEREF) |
3436 | { |
3437 | /* If we do not know exactly where the access goes say so. Note |
3438 | that only for non-structure accesses we know that we access |
3439 | at most one subfiled of any variable. */ |
3440 | HOST_WIDE_INT const_bitpos; |
3441 | if (!bitpos.is_constant (const_value: &const_bitpos) |
3442 | || const_bitpos == -1 |
3443 | || maybe_ne (a: bitsize, b: bitmaxsize) |
3444 | || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)) |
3445 | || result.offset == UNKNOWN_OFFSET) |
3446 | result.offset = UNKNOWN_OFFSET; |
3447 | else |
3448 | result.offset += const_bitpos; |
3449 | } |
3450 | else if (result.type == ADDRESSOF) |
3451 | { |
3452 | /* We can end up here for component references on constants like |
3453 | VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */ |
3454 | result.type = SCALAR; |
3455 | result.var = anything_id; |
3456 | result.offset = 0; |
3457 | } |
3458 | else |
3459 | gcc_unreachable (); |
3460 | } |
3461 | |
3462 | |
3463 | /* Dereference the constraint expression CONS, and return the result. |
3464 | DEREF (ADDRESSOF) = SCALAR |
3465 | DEREF (SCALAR) = DEREF |
3466 | DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp)) |
3467 | This is needed so that we can handle dereferencing DEREF constraints. */ |
3468 | |
3469 | static void |
3470 | do_deref (vec<ce_s> *constraints) |
3471 | { |
3472 | struct constraint_expr *c; |
3473 | unsigned int i = 0; |
3474 | |
3475 | FOR_EACH_VEC_ELT (*constraints, i, c) |
3476 | { |
3477 | if (c->type == SCALAR) |
3478 | c->type = DEREF; |
3479 | else if (c->type == ADDRESSOF) |
3480 | c->type = SCALAR; |
3481 | else if (c->type == DEREF) |
3482 | { |
3483 | struct constraint_expr tmplhs; |
3484 | tmplhs = new_scalar_tmp_constraint_exp (name: "dereftmp" , add_id: true); |
3485 | process_constraint (t: new_constraint (lhs: tmplhs, rhs: *c)); |
3486 | c->var = tmplhs.var; |
3487 | } |
3488 | else |
3489 | gcc_unreachable (); |
3490 | } |
3491 | } |
3492 | |
3493 | /* Given a tree T, return the constraint expression for taking the |
3494 | address of it. */ |
3495 | |
3496 | static void |
3497 | get_constraint_for_address_of (tree t, vec<ce_s> *results) |
3498 | { |
3499 | struct constraint_expr *c; |
3500 | unsigned int i; |
3501 | |
3502 | get_constraint_for_1 (t, results, true, true); |
3503 | |
3504 | FOR_EACH_VEC_ELT (*results, i, c) |
3505 | { |
3506 | if (c->type == DEREF) |
3507 | c->type = SCALAR; |
3508 | else |
3509 | c->type = ADDRESSOF; |
3510 | } |
3511 | } |
3512 | |
/* Given a tree T, return the constraint expression for it, pushing the
   result(s) onto RESULTS.  ADDRESS_P says the caller will take the
   address of the result; LHS_P says it is used as an assignment lhs.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (obj: temp);
      return;
    }

  /* String constants are read-only, ideally we'd have a CONST_DECL
     for those.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = string_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (obj: temp);
      return;
    }

  /* Dispatch on the tree code class, then on the code itself.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (constraints: results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* Add all sub-fields the access may touch, bounded by the
		 size of the accessed type.  */
	      vi = get_varinfo (n: cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (vi: curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (obj: cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	  case IMAGPART_EXPR:
	  case REALPART_EXPR:
	  case BIT_FIELD_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    /* A view-convert does not change what is pointed to.  */
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      /* Collect the union of constraints of all initializer
		 elements.  */
	      unsigned int i;
	      tree val;
	      auto_vec<ce_s> tmp;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (t: val, results: &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (obj: *rhsp);
		  tmp.truncate (size: 0);
		}
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (obj: temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (obj: temp);
}
3700 | |
3701 | /* Given a gimple tree T, return the constraint expression vector for it. */ |
3702 | |
3703 | static void |
3704 | get_constraint_for (tree t, vec<ce_s> *results) |
3705 | { |
3706 | gcc_assert (results->length () == 0); |
3707 | |
3708 | get_constraint_for_1 (t, results, address_p: false, lhs_p: true); |
3709 | } |
3710 | |
3711 | /* Given a gimple tree T, return the constraint expression vector for it |
3712 | to be used as the rhs of a constraint. */ |
3713 | |
3714 | static void |
3715 | get_constraint_for_rhs (tree t, vec<ce_s> *results) |
3716 | { |
3717 | gcc_assert (results->length () == 0); |
3718 | |
3719 | get_constraint_for_1 (t, results, address_p: false, lhs_p: false); |
3720 | } |
3721 | |
3722 | |
3723 | /* Efficiently generates constraints from all entries in *RHSC to all |
3724 | entries in *LHSC. */ |
3725 | |
3726 | static void |
3727 | process_all_all_constraints (const vec<ce_s> &lhsc, |
3728 | const vec<ce_s> &rhsc) |
3729 | { |
3730 | struct constraint_expr *lhsp, *rhsp; |
3731 | unsigned i, j; |
3732 | |
3733 | if (lhsc.length () <= 1 || rhsc.length () <= 1) |
3734 | { |
3735 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
3736 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
3737 | process_constraint (t: new_constraint (lhs: *lhsp, rhs: *rhsp)); |
3738 | } |
3739 | else |
3740 | { |
3741 | struct constraint_expr tmp; |
3742 | tmp = new_scalar_tmp_constraint_exp (name: "allalltmp" , add_id: true); |
3743 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
3744 | process_constraint (t: new_constraint (lhs: tmp, rhs: *rhsp)); |
3745 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
3746 | process_constraint (t: new_constraint (lhs: *lhsp, rhs: tmp)); |
3747 | } |
3748 | } |
3749 | |
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  auto_vec<ce_s> lhsc;
  auto_vec<ce_s> rhsc;
  unsigned j;

  /* Collect constraints for both sides; the LHS in lhs position, the
     RHS in rhs position.  */
  get_constraint_for (t: lhsop, results: &lhsc);
  get_constraint_for_rhs (t: rhsop, results: &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is a dereference (or the LHS is unknown) we cannot
     match fields pairwise; fall back to an all-to-all copy with
     unknown offsets resolved at solving time.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsoffset;
      bool reverse;
      unsigned k = 0;
      /* Without known constant extents for both accesses we cannot
	 pair up subfields; again fall back to all-to-all.  */
      if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
	  || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
					   &reverse))
	{
	  process_all_all_constraints (lhsc, rhsc);
	  return;
	}
      /* Walk both subfield lists in lockstep (j over LHS fields, k over
	 RHS fields), emitting a copy whenever the field ranges overlap
	 after adjusting for the access offsets.  */
      for (j = 0; lhsc.iterate (ix: j, ptr: &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (n: lhsp->var);
	  rhsv = get_varinfo (n: rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (pos1: lhsv->offset + rhsoffset, size1: lhsv->size,
				       pos2: rhsv->offset + lhsoffset, size2: rhsv->size)))
	    process_constraint (t: new_constraint (lhs: *lhsp, rhs: *rhsp));
	  /* Advance whichever cursor's current field ends first.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();
}
3824 | |
3825 | /* Create constraints ID = { rhsc }. */ |
3826 | |
3827 | static void |
3828 | make_constraints_to (unsigned id, const vec<ce_s> &rhsc) |
3829 | { |
3830 | struct constraint_expr *c; |
3831 | struct constraint_expr includes; |
3832 | unsigned int j; |
3833 | |
3834 | includes.var = id; |
3835 | includes.offset = 0; |
3836 | includes.type = SCALAR; |
3837 | |
3838 | FOR_EACH_VEC_ELT (rhsc, j, c) |
3839 | process_constraint (t: new_constraint (lhs: includes, rhs: *c)); |
3840 | } |
3841 | |
3842 | /* Create a constraint ID = OP. */ |
3843 | |
3844 | static void |
3845 | make_constraint_to (unsigned id, tree op) |
3846 | { |
3847 | auto_vec<ce_s> rhsc; |
3848 | get_constraint_for_rhs (t: op, results: &rhsc); |
3849 | make_constraints_to (id, rhsc); |
3850 | } |
3851 | |
3852 | /* Create a constraint ID = &FROM. */ |
3853 | |
3854 | static void |
3855 | make_constraint_from (varinfo_t vi, int from) |
3856 | { |
3857 | struct constraint_expr lhs, rhs; |
3858 | |
3859 | lhs.var = vi->id; |
3860 | lhs.offset = 0; |
3861 | lhs.type = SCALAR; |
3862 | |
3863 | rhs.var = from; |
3864 | rhs.offset = 0; |
3865 | rhs.type = ADDRESSOF; |
3866 | process_constraint (t: new_constraint (lhs, rhs)); |
3867 | } |
3868 | |
3869 | /* Create a constraint ID = FROM. */ |
3870 | |
3871 | static void |
3872 | make_copy_constraint (varinfo_t vi, int from) |
3873 | { |
3874 | struct constraint_expr lhs, rhs; |
3875 | |
3876 | lhs.var = vi->id; |
3877 | lhs.offset = 0; |
3878 | lhs.type = SCALAR; |
3879 | |
3880 | rhs.var = from; |
3881 | rhs.offset = 0; |
3882 | rhs.type = SCALAR; |
3883 | process_constraint (t: new_constraint (lhs, rhs)); |
3884 | } |
3885 | |
3886 | /* Make constraints necessary to make OP escape. */ |
3887 | |
3888 | static void |
3889 | make_escape_constraint (tree op) |
3890 | { |
3891 | make_constraint_to (id: escaped_id, op); |
3892 | } |
3893 | |
3894 | /* Make constraint necessary to make all indirect references |
3895 | from VI escape. */ |
3896 | |
3897 | static void |
3898 | make_indirect_escape_constraint (varinfo_t vi) |
3899 | { |
3900 | struct constraint_expr lhs, rhs; |
3901 | /* escaped = *(VAR + UNKNOWN); */ |
3902 | lhs.type = SCALAR; |
3903 | lhs.var = escaped_id; |
3904 | lhs.offset = 0; |
3905 | rhs.type = DEREF; |
3906 | rhs.var = vi->id; |
3907 | rhs.offset = UNKNOWN_OFFSET; |
3908 | process_constraint (t: new_constraint (lhs, rhs)); |
3909 | } |
3910 | |
3911 | /* Add constraints to that the solution of VI is transitively closed. */ |
3912 | |
3913 | static void |
3914 | make_transitive_closure_constraints (varinfo_t vi) |
3915 | { |
3916 | struct constraint_expr lhs, rhs; |
3917 | |
3918 | /* VAR = *(VAR + UNKNOWN); */ |
3919 | lhs.type = SCALAR; |
3920 | lhs.var = vi->id; |
3921 | lhs.offset = 0; |
3922 | rhs.type = DEREF; |
3923 | rhs.var = vi->id; |
3924 | rhs.offset = UNKNOWN_OFFSET; |
3925 | process_constraint (t: new_constraint (lhs, rhs)); |
3926 | } |
3927 | |
3928 | /* Add constraints to that the solution of VI has all subvariables added. */ |
3929 | |
3930 | static void |
3931 | make_any_offset_constraints (varinfo_t vi) |
3932 | { |
3933 | struct constraint_expr lhs, rhs; |
3934 | |
3935 | /* VAR = VAR + UNKNOWN; */ |
3936 | lhs.type = SCALAR; |
3937 | lhs.var = vi->id; |
3938 | lhs.offset = 0; |
3939 | rhs.type = SCALAR; |
3940 | rhs.var = vi->id; |
3941 | rhs.offset = UNKNOWN_OFFSET; |
3942 | process_constraint (t: new_constraint (lhs, rhs)); |
3943 | } |
3944 | |
/* Temporary storage for fake var decls.  Released wholesale when the
   points-to analysis is torn down.  */
struct obstack fake_var_decl_obstack;

/* Build a fake VAR_DECL acting as referrer to a DECL_UID.  */

static tree
build_fake_var_decl (tree type)
{
  /* Allocate raw storage from the obstack rather than through the
     normal tree allocator so these decls stay out of GC.  */
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (s: decl, c: 0, n: sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* PT_UID of -1 marks the decl as not having a canonical PT_UID.  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3962 | |
3963 | /* Create a new artificial heap variable with NAME. |
3964 | Return the created variable. */ |
3965 | |
3966 | static varinfo_t |
3967 | make_heapvar (const char *name, bool add_id) |
3968 | { |
3969 | varinfo_t vi; |
3970 | tree heapvar; |
3971 | |
3972 | heapvar = build_fake_var_decl (ptr_type_node); |
3973 | DECL_EXTERNAL (heapvar) = 1; |
3974 | |
3975 | vi = new_var_info (t: heapvar, name, add_id); |
3976 | vi->is_heap_var = true; |
3977 | vi->is_unknown_size_var = true; |
3978 | vi->offset = 0; |
3979 | vi->fullsize = ~0; |
3980 | vi->size = ~0; |
3981 | vi->is_full_var = true; |
3982 | insert_vi_for_tree (t: heapvar, vi); |
3983 | |
3984 | return vi; |
3985 | } |
3986 | |
3987 | /* Create a new artificial heap variable with NAME and make a |
3988 | constraint from it to LHS. Set flags according to a tag used |
3989 | for tracking restrict pointers. */ |
3990 | |
3991 | static varinfo_t |
3992 | make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id) |
3993 | { |
3994 | varinfo_t vi = make_heapvar (name, add_id); |
3995 | vi->is_restrict_var = 1; |
3996 | vi->is_global_var = 1; |
3997 | vi->may_have_pointers = 1; |
3998 | make_constraint_from (vi: lhs, from: vi->id); |
3999 | return vi; |
4000 | } |
4001 | |
4002 | /* Create a new artificial heap variable with NAME and make a |
4003 | constraint from it to LHS. Set flags according to a tag used |
4004 | for tracking restrict pointers and make the artificial heap |
4005 | point to global memory. */ |
4006 | |
4007 | static varinfo_t |
4008 | make_constraint_from_global_restrict (varinfo_t lhs, const char *name, |
4009 | bool add_id) |
4010 | { |
4011 | varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id); |
4012 | make_copy_constraint (vi, from: nonlocal_id); |
4013 | return vi; |
4014 | } |
4015 | |
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  These constants are the
   offsets of the respective parts from the function varinfo.  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
4024 | |
4025 | /* Get a constraint for the requested part of a function designator FI |
4026 | when operating in IPA mode. */ |
4027 | |
4028 | static struct constraint_expr |
4029 | get_function_part_constraint (varinfo_t fi, unsigned part) |
4030 | { |
4031 | struct constraint_expr c; |
4032 | |
4033 | gcc_assert (in_ipa_mode); |
4034 | |
4035 | if (fi->id == anything_id) |
4036 | { |
4037 | /* ??? We probably should have a ANYFN special variable. */ |
4038 | c.var = anything_id; |
4039 | c.offset = 0; |
4040 | c.type = SCALAR; |
4041 | } |
4042 | else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL) |
4043 | { |
4044 | varinfo_t ai = first_vi_for_offset (fi, part); |
4045 | if (ai) |
4046 | c.var = ai->id; |
4047 | else |
4048 | c.var = anything_id; |
4049 | c.offset = 0; |
4050 | c.type = SCALAR; |
4051 | } |
4052 | else |
4053 | { |
4054 | c.var = fi->id; |
4055 | c.offset = part; |
4056 | c.type = DEREF; |
4057 | } |
4058 | |
4059 | return c; |
4060 | } |
4061 | |
4062 | /* Produce constraints for argument ARG of call STMT with eaf flags |
4063 | FLAGS. RESULTS is array holding constraints for return value. |
4064 | CALLESCAPE_ID is variable where call loocal escapes are added. |
4065 | WRITES_GLOVEL_MEMORY is true if callee may write global memory. */ |
4066 | |
4067 | static void |
4068 | handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags, |
4069 | int callescape_id, bool writes_global_memory) |
4070 | { |
4071 | int relevant_indirect_flags = EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_READ |
4072 | | EAF_NO_INDIRECT_ESCAPE; |
4073 | int relevant_flags = relevant_indirect_flags |
4074 | | EAF_NO_DIRECT_CLOBBER |
4075 | | EAF_NO_DIRECT_READ |
4076 | | EAF_NO_DIRECT_ESCAPE; |
4077 | if (gimple_call_lhs (gs: stmt)) |
4078 | { |
4079 | relevant_flags |= EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY; |
4080 | relevant_indirect_flags |= EAF_NOT_RETURNED_INDIRECTLY; |
4081 | |
4082 | /* If value is never read from it can not be returned indirectly |
4083 | (except through the escape solution). |
4084 | For all flags we get these implications right except for |
4085 | not_returned because we miss return functions in ipa-prop. */ |
4086 | |
4087 | if (flags & EAF_NO_DIRECT_READ) |
4088 | flags |= EAF_NOT_RETURNED_INDIRECTLY; |
4089 | } |
4090 | |
4091 | /* If the argument is not used we can ignore it. |
4092 | Similarly argument is invisile for us if it not clobbered, does not |
4093 | escape, is not read and can not be returned. */ |
4094 | if ((flags & EAF_UNUSED) || ((flags & relevant_flags) == relevant_flags)) |
4095 | return; |
4096 | |
4097 | /* Produce varinfo for direct accesses to ARG. */ |
4098 | varinfo_t tem = new_var_info (NULL_TREE, name: "callarg" , add_id: true); |
4099 | tem->is_reg_var = true; |
4100 | make_constraint_to (id: tem->id, op: arg); |
4101 | make_any_offset_constraints (vi: tem); |
4102 | |
4103 | bool callarg_transitive = false; |
4104 | |
4105 | /* As an compile time optimization if we make no difference between |
4106 | direct and indirect accesses make arg transitively closed. |
4107 | This avoids the need to build indir arg and do everything twice. */ |
4108 | if (((flags & EAF_NO_INDIRECT_CLOBBER) != 0) |
4109 | == ((flags & EAF_NO_DIRECT_CLOBBER) != 0) |
4110 | && (((flags & EAF_NO_INDIRECT_READ) != 0) |
4111 | == ((flags & EAF_NO_DIRECT_READ) != 0)) |
4112 | && (((flags & EAF_NO_INDIRECT_ESCAPE) != 0) |
4113 | == ((flags & EAF_NO_DIRECT_ESCAPE) != 0)) |
4114 | && (((flags & EAF_NOT_RETURNED_INDIRECTLY) != 0) |
4115 | == ((flags & EAF_NOT_RETURNED_DIRECTLY) != 0))) |
4116 | { |
4117 | make_transitive_closure_constraints (vi: tem); |
4118 | callarg_transitive = true; |
4119 | gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ)); |
4120 | } |
4121 | |
4122 | /* If necessary, produce varinfo for indirect accesses to ARG. */ |
4123 | varinfo_t indir_tem = NULL; |
4124 | if (!callarg_transitive |
4125 | && (flags & relevant_indirect_flags) != relevant_indirect_flags) |
4126 | { |
4127 | struct constraint_expr lhs, rhs; |
4128 | indir_tem = new_var_info (NULL_TREE, name: "indircallarg" , add_id: true); |
4129 | indir_tem->is_reg_var = true; |
4130 | |
4131 | /* indir_term = *tem. */ |
4132 | lhs.type = SCALAR; |
4133 | lhs.var = indir_tem->id; |
4134 | lhs.offset = 0; |
4135 | |
4136 | rhs.type = DEREF; |
4137 | rhs.var = tem->id; |
4138 | rhs.offset = UNKNOWN_OFFSET; |
4139 | process_constraint (t: new_constraint (lhs, rhs)); |
4140 | |
4141 | make_any_offset_constraints (vi: indir_tem); |
4142 | |
4143 | /* If we do not read indirectly there is no need for transitive closure. |
4144 | We know there is only one level of indirection. */ |
4145 | if (!(flags & EAF_NO_INDIRECT_READ)) |
4146 | make_transitive_closure_constraints (vi: indir_tem); |
4147 | gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ)); |
4148 | } |
4149 | |
4150 | if (gimple_call_lhs (gs: stmt)) |
4151 | { |
4152 | if (!(flags & EAF_NOT_RETURNED_DIRECTLY)) |
4153 | { |
4154 | struct constraint_expr cexpr; |
4155 | cexpr.var = tem->id; |
4156 | cexpr.type = SCALAR; |
4157 | cexpr.offset = 0; |
4158 | results->safe_push (obj: cexpr); |
4159 | } |
4160 | if (!callarg_transitive & !(flags & EAF_NOT_RETURNED_INDIRECTLY)) |
4161 | { |
4162 | struct constraint_expr cexpr; |
4163 | cexpr.var = indir_tem->id; |
4164 | cexpr.type = SCALAR; |
4165 | cexpr.offset = 0; |
4166 | results->safe_push (obj: cexpr); |
4167 | } |
4168 | } |
4169 | |
4170 | if (!(flags & EAF_NO_DIRECT_READ)) |
4171 | { |
4172 | varinfo_t uses = get_call_use_vi (call: stmt); |
4173 | make_copy_constraint (vi: uses, from: tem->id); |
4174 | if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_READ)) |
4175 | make_copy_constraint (vi: uses, from: indir_tem->id); |
4176 | } |
4177 | else |
4178 | /* To read indirectly we need to read directly. */ |
4179 | gcc_checking_assert (flags & EAF_NO_INDIRECT_READ); |
4180 | |
4181 | if (!(flags & EAF_NO_DIRECT_CLOBBER)) |
4182 | { |
4183 | struct constraint_expr lhs, rhs; |
4184 | |
4185 | /* *arg = callescape. */ |
4186 | lhs.type = DEREF; |
4187 | lhs.var = tem->id; |
4188 | lhs.offset = 0; |
4189 | |
4190 | rhs.type = SCALAR; |
4191 | rhs.var = callescape_id; |
4192 | rhs.offset = 0; |
4193 | process_constraint (t: new_constraint (lhs, rhs)); |
4194 | |
4195 | /* callclobbered = arg. */ |
4196 | make_copy_constraint (vi: get_call_clobber_vi (call: stmt), from: tem->id); |
4197 | } |
4198 | if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_CLOBBER)) |
4199 | { |
4200 | struct constraint_expr lhs, rhs; |
4201 | |
4202 | /* *indir_arg = callescape. */ |
4203 | lhs.type = DEREF; |
4204 | lhs.var = indir_tem->id; |
4205 | lhs.offset = 0; |
4206 | |
4207 | rhs.type = SCALAR; |
4208 | rhs.var = callescape_id; |
4209 | rhs.offset = 0; |
4210 | process_constraint (t: new_constraint (lhs, rhs)); |
4211 | |
4212 | /* callclobbered = indir_arg. */ |
4213 | make_copy_constraint (vi: get_call_clobber_vi (call: stmt), from: indir_tem->id); |
4214 | } |
4215 | |
4216 | if (!(flags & (EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE))) |
4217 | { |
4218 | struct constraint_expr lhs, rhs; |
4219 | |
4220 | /* callescape = arg; */ |
4221 | lhs.var = callescape_id; |
4222 | lhs.offset = 0; |
4223 | lhs.type = SCALAR; |
4224 | |
4225 | rhs.var = tem->id; |
4226 | rhs.offset = 0; |
4227 | rhs.type = SCALAR; |
4228 | process_constraint (t: new_constraint (lhs, rhs)); |
4229 | |
4230 | if (writes_global_memory) |
4231 | make_escape_constraint (op: arg); |
4232 | } |
4233 | else if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_ESCAPE)) |
4234 | { |
4235 | struct constraint_expr lhs, rhs; |
4236 | |
4237 | /* callescape = *(indir_arg + UNKNOWN); */ |
4238 | lhs.var = callescape_id; |
4239 | lhs.offset = 0; |
4240 | lhs.type = SCALAR; |
4241 | |
4242 | rhs.var = indir_tem->id; |
4243 | rhs.offset = 0; |
4244 | rhs.type = SCALAR; |
4245 | process_constraint (t: new_constraint (lhs, rhs)); |
4246 | |
4247 | if (writes_global_memory) |
4248 | make_indirect_escape_constraint (vi: tem); |
4249 | } |
4250 | } |
4251 | |
/* Determine global memory access of call STMT and update
   WRITES_GLOBAL_MEMORY, READS_GLOBAL_MEMORY and USES_GLOBAL_MEMORY.
   Each pointer may be NULL when the caller is not interested in that
   aspect; the pointed-to flags are only ever narrowed (set to false),
   never widened.  */

static void
determine_global_memory_access (gcall *stmt,
				bool *writes_global_memory,
				bool *reads_global_memory,
				bool *uses_global_memory)
{
  tree callee;
  cgraph_node *node;
  modref_summary *summary;

  /* We need to determine reads to set uses.  */
  gcc_assert (!uses_global_memory || reads_global_memory);

  /* First consult the modref summary of a statically known callee.  */
  if ((callee = gimple_call_fndecl (gs: stmt)) != NULL_TREE
      && (node = cgraph_node::get (decl: callee)) != NULL
      && (summary = get_modref_function_summary (func: node)))
    {
      if (writes_global_memory && *writes_global_memory)
	*writes_global_memory = summary->global_memory_written;
      if (reads_global_memory && *reads_global_memory)
	*reads_global_memory = summary->global_memory_read;
      /* USES can only be cleared when the callee neither reads global
	 memory nor may be interposed by a different implementation.  */
      if (reads_global_memory && uses_global_memory
	  && !summary->calls_interposable
	  && !*reads_global_memory && node->binds_to_current_def_p ())
	*uses_global_memory = false;
    }
  /* Then try to narrow the remaining flags further via fnspec.  */
  if ((writes_global_memory && *writes_global_memory)
      || (uses_global_memory && *uses_global_memory)
      || (reads_global_memory && *reads_global_memory))
    {
      attr_fnspec fnspec = gimple_call_fnspec (stmt);
      if (fnspec.known_p ())
	{
	  if (writes_global_memory
	      && !fnspec.global_memory_written_p ())
	    *writes_global_memory = false;
	  if (reads_global_memory && !fnspec.global_memory_read_p ())
	    {
	      *reads_global_memory = false;
	      if (uses_global_memory)
		*uses_global_memory = false;
	    }
	}
    }
}
4300 | |
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS and collect return value constraint to RESULTS to be used later in
   handle_lhs_call.

   IMPLICIT_EAF_FLAGS are added to each function argument.  If
   WRITES_GLOBAL_MEMORY is true function is assumed to possibly write to
   global memory.  Similar for READS_GLOBAL_MEMORY.  */

static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results,
		 int implicit_eaf_flags,
		 bool writes_global_memory,
		 bool reads_global_memory)
{
  /* Narrow the global memory flags using modref/fnspec information.  */
  determine_global_memory_access (stmt, writes_global_memory: &writes_global_memory,
				  reads_global_memory: &reads_global_memory,
				  NULL);

  /* CALLESCAPE collects everything that escapes locally to this call.  */
  varinfo_t callescape = new_var_info (NULL_TREE, name: "callescape" , add_id: true);

  /* If function can use global memory, add it to callescape
     and to possible return values.  If not we can still use/return addresses
     of global symbols.  */
  struct constraint_expr lhs, rhs;

  lhs.type = SCALAR;
  lhs.var = callescape->id;
  lhs.offset = 0;

  /* SCALAR (copy NONLOCAL's solution) if global memory may be read;
     otherwise only the addresses of globals themselves.  */
  rhs.type = reads_global_memory ? SCALAR : ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;

  process_constraint (t: new_constraint (lhs, rhs));
  results->safe_push (obj: rhs);

  /* Everything in callescape is also used by the call.  */
  varinfo_t uses = get_call_use_vi (call: stmt);
  make_copy_constraint (vi: uses, from: callescape->id);

  /* Constrain each argument according to its EAF flags.  */
  for (unsigned i = 0; i < gimple_call_num_args (gs: stmt); ++i)
    {
      tree arg = gimple_call_arg (gs: stmt, index: i);
      int flags = gimple_call_arg_flags (stmt, i);
      handle_call_arg (stmt, arg, results,
		       flags: flags | implicit_eaf_flags,
		       callescape_id: callescape->id, writes_global_memory);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (gs: stmt))
    handle_call_arg (stmt, arg: gimple_call_chain (gs: stmt), results,
		     flags: implicit_eaf_flags
		     | gimple_call_static_chain_flags (stmt),
		     callescape_id: callescape->id, writes_global_memory);

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (s: stmt)
      && gimple_call_lhs (gs: stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      int flags = gimple_call_retslot_flags (stmt);
      const int relevant_flags = EAF_NO_DIRECT_ESCAPE
				 | EAF_NOT_RETURNED_DIRECTLY;

      /* Only bother if the slot can actually escape or be returned.  */
      if (!(flags & EAF_UNUSED) && (flags & relevant_flags) != relevant_flags)
	{
	  auto_vec<ce_s> tmpc;

	  get_constraint_for_address_of (t: gimple_call_lhs (gs: stmt), results: &tmpc);

	  if (!(flags & EAF_NO_DIRECT_ESCAPE))
	    {
	      make_constraints_to (id: callescape->id, rhsc: tmpc);
	      if (writes_global_memory)
		make_constraints_to (id: escaped_id, rhsc: tmpc);
	    }
	  if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
	    {
	      struct constraint_expr *c;
	      unsigned i;
	      FOR_EACH_VEC_ELT (tmpc, i, c)
		results->safe_push (obj: *c);
	    }
	}
    }
}
4387 | |
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.  FLAGS are the
   ERF_* return flags of the call; RHSC holds the constraints
   collected by handle_rhs_call and may be overridden here.  */

static void
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
		 tree fndecl)
{
  auto_vec<ce_s> lhsc;

  get_constraint_for (t: lhs, results: &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (t: lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (t: lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (obj: tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (gs: stmt))
    {
      tree arg;
      rhsc.truncate (size: 0);
      arg = gimple_call_arg (gs: stmt, index: flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (t: arg, results: &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.truncate (size: 0);
    }
  else if (flags & ERF_NOALIAS)
    {
      /* A noalias return points to fresh heap memory only.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.truncate (size: 0);
      vi = make_heapvar (name: "HEAP" , add_id: true);
      /* We are marking allocated storage local, we deal with it becoming
	 global by escaping and setting of vars_contains_escaped_heap.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || !fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL))
	make_constraint_from (vi, from: nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (obj: tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.truncate (size: 0);
    }
  else
    process_all_all_constraints (lhsc, rhsc);
}
4451 | |
4452 | |
4453 | /* Return the varinfo for the callee of CALL. */ |
4454 | |
4455 | static varinfo_t |
4456 | get_fi_for_callee (gcall *call) |
4457 | { |
4458 | tree decl, fn = gimple_call_fn (gs: call); |
4459 | |
4460 | if (fn && TREE_CODE (fn) == OBJ_TYPE_REF) |
4461 | fn = OBJ_TYPE_REF_EXPR (fn); |
4462 | |
4463 | /* If we can directly resolve the function being called, do so. |
4464 | Otherwise, it must be some sort of indirect expression that |
4465 | we should still be able to handle. */ |
4466 | decl = gimple_call_addr_fndecl (fn); |
4467 | if (decl) |
4468 | return get_vi_for_tree (t: decl); |
4469 | |
4470 | /* If the function is anything other than a SSA name pointer we have no |
4471 | clue and should be getting ANYFN (well, ANYTHING for now). */ |
4472 | if (!fn || TREE_CODE (fn) != SSA_NAME) |
4473 | return get_varinfo (n: anything_id); |
4474 | |
4475 | if (SSA_NAME_IS_DEFAULT_DEF (fn) |
4476 | && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL |
4477 | || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL)) |
4478 | fn = SSA_NAME_VAR (fn); |
4479 | |
4480 | return get_vi_for_tree (t: fn); |
4481 | } |
4482 | |
4483 | /* Create constraints for assigning call argument ARG to the incoming parameter |
4484 | INDEX of function FI. */ |
4485 | |
4486 | static void |
4487 | find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg) |
4488 | { |
4489 | struct constraint_expr lhs; |
4490 | lhs = get_function_part_constraint (fi, part: fi_parm_base + index); |
4491 | |
4492 | auto_vec<ce_s, 2> rhsc; |
4493 | get_constraint_for_rhs (t: arg, results: &rhsc); |
4494 | |
4495 | unsigned j; |
4496 | struct constraint_expr *rhsp; |
4497 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
4498 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
4499 | } |
4500 | |
4501 | /* Return true if FNDECL may be part of another lto partition. */ |
4502 | |
4503 | static bool |
4504 | fndecl_maybe_in_other_partition (tree fndecl) |
4505 | { |
4506 | cgraph_node *fn_node = cgraph_node::get (decl: fndecl); |
4507 | if (fn_node == NULL) |
4508 | return true; |
4509 | |
4510 | return fn_node->in_other_partition; |
4511 | } |
4512 | |
/* Create constraints for the builtin call T appearing in function FN.
   Return true if the call was recognized and handled here; return
   false so the caller falls back to the generic, conservative call
   handling.  */

static bool
find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (gs: t);
  auto_vec<ce_s, 2> lhsc;
  auto_vec<ce_s, 4> rhsc;
  varinfo_t fi;

  if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
    /* ??? All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly! */
    switch (DECL_FUNCTION_CODE (decl: fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to. The functions do not add
	 to the ESCAPED solution. The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to. */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (gs: t);
	  /* bcopy takes (src, dest); all the others take (dest, src).  */
	  tree dest = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl: fndecl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl: fndecl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (t: res, results: &lhsc);
	      /* The mempcpy/stp*cpy family return a pointer somewhere
		 into DEST rather than DEST itself, so model the result
		 as DEST plus an unknown offset.  */
	      if (DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &rhsc);
	      else
		get_constraint_for (t: dest, results: &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (size: 0);
	      rhsc.truncate (size: 0);
	    }
	  /* Model the copy itself: *DEST = *SRC for the pointed-to
	     memory, at unknown offsets on both sides.  */
	  get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc);
	  get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc);
	  do_deref (constraints: &lhsc);
	  do_deref (constraints: &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (gs: t);
	  tree dest = gimple_call_arg (gs: t, index: 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  if (res != NULL_TREE)
	    {
	      /* The result points to the same object as DEST.  */
	      get_constraint_for (t: res, results: &lhsc);
	      get_constraint_for (t: dest, results: &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (size: 0);
	    }
	  get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc);
	  do_deref (constraints: &lhsc);
	  /* Storing zero bytes makes the destination point to nothing
	     when NULL is known not to be a valid pointer value,
	     otherwise conservatively treat the stored value as an
	     arbitrary integer.  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (gs: t, index: 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (t: new_constraint (lhs: *lhsp, rhs: ac));
	  return true;
	}
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Nothing interesting happens. */
	return true;
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
	{
	  tree ptr = gimple_call_lhs (gs: t);
	  if (ptr == NULL_TREE)
	    return true;
	  get_constraint_for (t: ptr, results: &lhsc);
	  varinfo_t vi = make_heapvar (name: "HEAP" , add_id: true);
	  /* Alloca storage is never global. To exempt it from escaped
	     handling make it a non-heap var. */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  vi->is_heap_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (obj: tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_POSIX_MEMALIGN:
	{
	  /* posix_memalign stores the allocated pointer through its
	     first argument, so the store target is *ptrptr.  */
	  tree ptrptr = gimple_call_arg (gs: t, index: 0);
	  get_constraint_for (t: ptrptr, results: &lhsc);
	  do_deref (constraints: &lhsc);
	  varinfo_t vi = make_heapvar (name: "HEAP" , add_id: true);
	  /* We are marking allocated storage local, we deal with it becoming
	     global by escaping and setting of vars_contains_escaped_heap. */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (obj: tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* The result aliases the first argument; alignment does not
	     change what is pointed to.  */
	  tree res = gimple_call_lhs (gs: t);
	  tree dest = gimple_call_arg (gs: t, index: 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (t: res, results: &lhsc);
	      get_constraint_for (t: dest, results: &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution. */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
      case BUILT_IN_REALLOC:
	if (gimple_call_lhs (gs: t))
	  {
	    /* Model the allocation of the result (ERF_NOALIAS), then
	       make the new storage's contents point to what the source
	       string's/block's contents point to.  */
	    auto_vec<ce_s> rhsc;
	    handle_lhs_call (stmt: t, lhs: gimple_call_lhs (gs: t),
			     flags: gimple_call_return_flags (t) | ERF_NOALIAS,
			     rhsc, fndecl);
	    get_constraint_for_ptr_offset (ptr: gimple_call_lhs (gs: t),
					   NULL_TREE, results: &lhsc);
	    get_constraint_for_ptr_offset (ptr: gimple_call_arg (gs: t, index: 0),
					   NULL_TREE, results: &rhsc);
	    do_deref (constraints: &lhsc);
	    do_deref (constraints: &rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.truncate (size: 0);
	    rhsc.truncate (size: 0);
	    /* For realloc the resulting pointer can be equal to the
	       argument as well. But only doing this wouldn't be
	       correct because with ptr == 0 realloc behaves like malloc. */
	    if (DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_REALLOC)
	      {
		get_constraint_for (t: gimple_call_lhs (gs: t), results: &lhsc);
		get_constraint_for (t: gimple_call_arg (gs: t, index: 0), results: &rhsc);
		process_all_all_constraints (lhsc, rhsc);
	      }
	    return true;
	  }
	break;
      /* String / character search functions return a pointer into the
	 source string or NULL. */
      case BUILT_IN_INDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
      case BUILT_IN_MEMCHR:
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
	if (gimple_call_lhs (gs: t))
	  {
	    tree src = gimple_call_arg (gs: t, index: 0);
	    get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc);
	    /* Also account for the possible NULL return value.  */
	    constraint_expr nul;
	    nul.var = nothing_id;
	    nul.offset = 0;
	    nul.type = ADDRESSOF;
	    rhsc.safe_push (obj: nul);
	    get_constraint_for (t: gimple_call_lhs (gs: t), results: &lhsc);
	    process_all_all_constraints (lhsc, rhsc);
	  }
	return true;
      /* Pure functions that return something not based on any object and
	 that use the memory pointed to by their arguments (but not
	 transitively). */
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRNCASECMP:
      case BUILT_IN_MEMCMP:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCSPN:
	{
	  /* Record both pointer arguments in the call's use set.  */
	  varinfo_t uses = get_call_use_vi (call: t);
	  make_any_offset_constraints (vi: uses);
	  make_constraint_to (id: uses->id, op: gimple_call_arg (gs: t, index: 0));
	  make_constraint_to (id: uses->id, op: gimple_call_arg (gs: t, index: 1));
	  /* No constraints are necessary for the return value. */
	  return true;
	}
      case BUILT_IN_STRLEN:
	{
	  varinfo_t uses = get_call_use_vi (call: t);
	  make_any_offset_constraints (vi: uses);
	  make_constraint_to (id: uses->id, op: gimple_call_arg (gs: t, index: 0));
	  /* No constraints are necessary for the return value. */
	  return true;
	}
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_CONSTANT_P:
	{
	  /* No constraints are necessary for the return value or the
	     arguments. */
	  return true;
	}
      /* Trampolines are special - they set up passing the static
	 frame. */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (gs: t, index: 0);
	  tree nfunc = gimple_call_arg (gs: t, index: 1);
	  tree frame = gimple_call_arg (gs: t, index: 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  /* The nested function receives FRAME as its static
		     chain.  */
		  lhs = get_function_part_constraint (fi: nfi, part: fi_static_chain);
		  get_constraint_for (t: frame, results: &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (t: new_constraint (lhs, rhs: *rhsp));
		  rhsc.truncate (size: 0);

		  /* Make the frame point to the function for
		     the trampoline adjustment call. */
		  get_constraint_for (t: tramp, results: &lhsc);
		  do_deref (constraints: &lhsc);
		  get_constraint_for (t: nfunc, results: &rhsc);
		  process_all_all_constraints (lhsc, rhsc);

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape. */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (gs: t, index: 0);
	  tree res = gimple_call_lhs (gs: t);
	  if (in_ipa_mode && res)
	    {
	      /* The result points to the function stored in the
		 trampoline (see BUILT_IN_INIT_TRAMPOLINE above).  */
	      get_constraint_for (t: res, results: &lhsc);
	      get_constraint_for (t: tramp, results: &rhsc);
	      do_deref (constraints: &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* A transactional store: *ADDR = SRC.  */
	  tree addr = gimple_call_arg (gs: t, index: 0);
	  tree src = gimple_call_arg (gs: t, index: 1);

	  get_constraint_for (t: addr, results: &lhsc);
	  do_deref (constraints: &lhsc);
	  get_constraint_for (t: src, results: &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* A transactional load: DEST = *ADDR.  */
	  tree dest = gimple_call_lhs (gs: t);
	  tree addr = gimple_call_arg (gs: t, index: 0);

	  get_constraint_for (t: dest, results: &lhsc);
	  get_constraint_for (t: addr, results: &rhsc);
	  do_deref (constraints: &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well. */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (gs: t, index: 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for_ptr_offset (ptr: valist, NULL_TREE, results: &lhsc);
	  do_deref (constraints: &lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments. Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables. */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (t: fn->decl);
	      rhs = get_function_part_constraint (fi, part: ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (t: new_constraint (lhs: *lhsp, rhs));
	  /* va_list is clobbered. */
	  make_constraint_to (id: get_call_clobber_vi (call: t)->id, op: valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters. */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return. Simply give up for now. */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (t: fn->decl)))
	    make_constraint_from (vi: get_varinfo (n: escaped_id), from: anything_id);
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, part: fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (t: new_constraint (lhs, rhs));
	    }
	  return true;
	}
      case BUILT_IN_GOMP_PARALLEL:
      case BUILT_IN_GOACC_PARALLEL:
	{
	  if (in_ipa_mode)
	    {
	      unsigned int fnpos, argpos;
	      switch (DECL_FUNCTION_CODE (decl: fndecl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
		     sizes, kinds, ...). */
		  fnpos = 1;
		  argpos = 3;
		  break;
		default:
		  gcc_unreachable ();
		}

	      tree fnarg = gimple_call_arg (gs: t, index: fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling. */
		break;

	      tree arg = gimple_call_arg (gs: t, index: argpos);

	      /* Model the outlined-region call: ARG is passed as the
		 first (and only) argument of the outlined function.  */
	      varinfo_t fi = get_vi_for_tree (t: fndecl);
	      find_func_aliases_for_call_arg (fi, index: 0, arg);
	      return true;
	    }
	  /* Else fallthru to generic call handling. */
	  break;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string. Leave them
	 for a later exercise... */
      default:
	/* Fallthru to general call handling. */;
      }

  /* Not recognized above (or deliberately fell through); let the
     generic, conservative call handling take over.  */
  return false;
}
4973 | |
/* Create constraints for the call T appearing in function FN.
   Builtins with known semantics are handled precisely; otherwise the
   call is modeled either conservatively through its ECF/EAF flags or,
   in IPA mode with function info available, by wiring arguments,
   result and static chain to the callee's function-info variables.  */

static void
find_func_aliases_for_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (gs: t);
  varinfo_t fi;

  /* Try the precise builtin modeling first; if it handled the call
     we are done.  */
  if (fndecl != NULL_TREE
      && fndecl_built_in_p (node: fndecl)
      && find_func_aliases_for_builtin_call (fn, t))
    return;

  /* Calls to IFN_DEFERRED_INIT generate no points-to constraints.  */
  if (gimple_call_internal_p (gs: t, fn: IFN_DEFERRED_INIT))
    return;

  fi = get_fi_for_callee (call: t);
  if (!in_ipa_mode
      || (fi->decl && fndecl && !fi->is_fn_info))
    {
      /* No usable function info: model the call through its flags.  */
      auto_vec<ce_s, 16> rhsc;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory. */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (gs: t))
	    handle_rhs_call (stmt: t, results: &rhsc, implicit_eaf_flags: implicit_const_eaf_flags, writes_global_memory: false, reads_global_memory: false);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments. */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_rhs_call (stmt: t, results: &rhsc, implicit_eaf_flags: implicit_pure_eaf_flags, writes_global_memory: false, reads_global_memory: true);
      /* If the call is to a replaceable operator delete and results
	 from a delete expression as opposed to a direct call to
	 such operator, then the effects for PTA (in particular
	 the escaping of the pointer) can be ignored. */
      else if (fndecl
	       && DECL_IS_OPERATOR_DELETE_P (fndecl)
	       && gimple_call_from_new_or_delete (s: t))
	;
      else
	handle_rhs_call (stmt: t, results: &rhsc, implicit_eaf_flags: 0, writes_global_memory: true, reads_global_memory: true);
      if (gimple_call_lhs (gs: t))
	handle_lhs_call (stmt: t, lhs: gimple_call_lhs (gs: t),
			 flags: gimple_call_return_flags (t), rhsc, fndecl);
    }
  else
    {
      /* IPA mode with function info available for the callee.  */
      auto_vec<ce_s, 2> rhsc;
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function. */
      for (j = 0; j < gimple_call_num_args (gs: t); j++)
	{
	  tree arg = gimple_call_arg (gs: t, index: j);
	  find_func_aliases_for_call_arg (fi, index: j, arg);
	}

      /* If we are returning a value, assign it to the result. */
      lhsop = gimple_call_lhs (gs: t);
      if (lhsop)
	{
	  auto_vec<ce_s, 2> lhsc;
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (gs: t));

	  get_constraint_for (t: lhsop, results: &lhsc);
	  rhs = get_function_part_constraint (fi, part: fi_result);
	  if (aggr_p)
	    {
	      /* Aggregate results are returned in memory, so read the
		 value through the result slot.  */
	      auto_vec<ce_s, 2> tem;
	      tem.quick_push (obj: rhs);
	      do_deref (constraints: &tem);
	      gcc_checking_assert (tem.length () == 1);
	      rhs = tem[0];
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (t: new_constraint (lhs: *lhsp, rhs));

	  /* If we pass the result decl by reference, honor that. */
	  if (aggr_p)
	    {
	      struct constraint_expr lhs;
	      struct constraint_expr *rhsp;

	      get_constraint_for_address_of (t: lhsop, results: &rhsc);
	      lhs = get_function_part_constraint (fi, part: fi_result);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (t: new_constraint (lhs, rhs: *rhsp));
	      rhsc.truncate (size: 0);
	    }
	}

      /* If we use a static chain, pass it along. */
      if (gimple_call_chain (gs: t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (t: gimple_call_chain (gs: t), results: &rhsc);
	  lhs = get_function_part_constraint (fi, part: fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (t: new_constraint (lhs, rhs: *rhsp));
	}
    }
}
5086 | |
/* Walk statement ORIGT of function FN setting up aliasing constraints
   according to the references found in it.  This function is the main
   part of the constraint builder.  It dispatches on the statement kind:
   PHIs, calls, assignments, returns and asms.  */

static void
find_func_aliases (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;

  /* Now build constraints expressions. */
  if (gimple_code (g: t) == GIMPLE_PHI)
    {
      /* For a phi node, assign all the arguments to
	 the result. */
      get_constraint_for (t: gimple_phi_result (gs: t), results: &lhsc);
      for (unsigned i = 0; i < gimple_phi_num_args (gs: t); i++)
	{
	  get_constraint_for_rhs (t: gimple_phi_arg_def (gs: t, index: i), results: &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  rhsc.truncate (size: 0);
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address. */
  else if (is_gimple_call (gs: t))
    find_func_aliases_for_call (fn, t: as_a <gcall *> (p: t));

  /* Otherwise, just a regular assignment statement. Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands. */
  else if (is_gimple_assign (gs: t))
    {
      /* Otherwise, just a regular assignment statement. */
      tree lhsop = gimple_assign_lhs (gs: t);
      /* RHSOP is only set for single-operand RHSs; multi-operand
	 expressions are accessed via gimple_assign_rhs1/2/3 below.  */
      tree rhsop = (gimple_num_ops (gs: t) == 2) ? gimple_assign_rhs1 (gs: t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS. */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (gs: t);

	  get_constraint_for (t: lhsop, results: &lhsc);

	  if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t),
					   offset: gimple_assign_rhs2 (gs: t), results: &rhsc);
	  else if (code == POINTER_DIFF_EXPR)
	    /* The result is not a pointer (part). */
	    ;
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer. Handle it by offsetting it by UNKNOWN. */
	      get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t),
					     NULL_TREE, results: &rhsc);
	    }
	  else if (code == TRUNC_DIV_EXPR
		   || code == CEIL_DIV_EXPR
		   || code == FLOOR_DIV_EXPR
		   || code == ROUND_DIV_EXPR
		   || code == EXACT_DIV_EXPR
		   || code == TRUNC_MOD_EXPR
		   || code == CEIL_MOD_EXPR
		   || code == FLOOR_MOD_EXPR
		   || code == ROUND_MOD_EXPR)
	    /* Division and modulo transfer the pointer from the LHS. */
	    get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t),
					   NULL_TREE, results: &rhsc);
	  else if (CONVERT_EXPR_CODE_P (code)
		   || gimple_assign_single_p (gs: t))
	    /* See through conversions, single RHS are handled by
	       get_constraint_for_rhs. */
	    get_constraint_for_rhs (t: rhsop, results: &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms. */
	      auto_vec<ce_s, 2> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (t: gimple_assign_rhs2 (gs: t), results: &rhsc);
	      get_constraint_for_rhs (t: gimple_assign_rhs3 (gs: t), results: &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (obj: *rhsp);
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts). Or at least
	       very unreasonable obfuscation of a part. */
	    ;
	  else
	    {
	      /* All other operations are possibly offsetting merges. */
	      auto_vec<ce_s, 4> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t),
					     NULL_TREE, results: &rhsc);
	      for (i = 2; i < gimple_num_ops (gs: t); ++i)
		{
		  get_constraint_for_ptr_offset (ptr: gimple_op (gs: t, i),
						 NULL_TREE, results: &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (obj: *rhsp);
		  tmp.truncate (size: 0);
		}
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes. */
      if ((lhsop = get_base_address (t: lhsop)) != NULL_TREE
	  && DECL_P (lhsop))
	{
	  varinfo_t vi = get_vi_for_tree (t: lhsop);
	  if ((! in_ipa_mode && vi->is_global_var)
	      || vi->is_ipa_escape_point)
	    make_escape_constraint (op: rhsop);
	}
    }
  /* Handle escapes through return. */
  else if (gimple_code (g: t) == GIMPLE_RETURN
	   && gimple_return_retval (gs: as_a <greturn *> (p: t)) != NULL_TREE)
    {
      greturn *return_stmt = as_a <greturn *> (p: t);
      tree retval = gimple_return_retval (gs: return_stmt);
      if (!in_ipa_mode)
	make_constraint_to (id: escaped_return_id, op: retval);
      else
	{
	  /* In IPA mode feed the return value into the function's
	     result variable instead of the escape solution.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  fi = lookup_vi_for_tree (t: fn->decl);
	  lhs = get_function_part_constraint (fi, part: fi_result);
	  get_constraint_for_rhs (t: retval, results: &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (t: new_constraint (lhs, rhs: *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything. */
  else if (gasm *asm_stmt = dyn_cast <gasm *> (p: t))
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (asm_stmt, index: i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape. */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory. */
	  if (op)
	    {
	      auto_vec<ce_s, 2> lhsc;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (t: op, results: &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (t: new_constraint (lhs: *lhsp, rhs: rhsc));
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	{
	  tree link = gimple_asm_input_op (asm_stmt, index: i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape. */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough. */
	  else if (op)
	    make_escape_constraint (op);
	}
    }
}
5301 | |
5302 | |
5303 | /* Create a constraint adding to the clobber set of FI the memory |
5304 | pointed to by PTR. */ |
5305 | |
5306 | static void |
5307 | process_ipa_clobber (varinfo_t fi, tree ptr) |
5308 | { |
5309 | vec<ce_s> ptrc = vNULL; |
5310 | struct constraint_expr *c, lhs; |
5311 | unsigned i; |
5312 | get_constraint_for_rhs (t: ptr, results: &ptrc); |
5313 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5314 | FOR_EACH_VEC_ELT (ptrc, i, c) |
5315 | process_constraint (t: new_constraint (lhs, rhs: *c)); |
5316 | ptrc.release (); |
5317 | } |
5318 | |
5319 | /* Walk statement T setting up clobber and use constraints according to the |
5320 | references found in T. This function is a main part of the |
5321 | IPA constraint builder. */ |
5322 | |
5323 | static void |
5324 | find_func_clobbers (struct function *fn, gimple *origt) |
5325 | { |
5326 | gimple *t = origt; |
5327 | auto_vec<ce_s, 16> lhsc; |
5328 | auto_vec<ce_s, 16> rhsc; |
5329 | varinfo_t fi; |
5330 | |
5331 | /* Add constraints for clobbered/used in IPA mode. |
5332 | We are not interested in what automatic variables are clobbered |
5333 | or used as we only use the information in the caller to which |
5334 | they do not escape. */ |
5335 | gcc_assert (in_ipa_mode); |
5336 | |
5337 | /* If the stmt refers to memory in any way it better had a VUSE. */ |
5338 | if (gimple_vuse (g: t) == NULL_TREE) |
5339 | return; |
5340 | |
5341 | /* We'd better have function information for the current function. */ |
5342 | fi = lookup_vi_for_tree (t: fn->decl); |
5343 | gcc_assert (fi != NULL); |
5344 | |
5345 | /* Account for stores in assignments and calls. */ |
5346 | if (gimple_vdef (g: t) != NULL_TREE |
5347 | && gimple_has_lhs (stmt: t)) |
5348 | { |
5349 | tree lhs = gimple_get_lhs (t); |
5350 | tree tem = lhs; |
5351 | while (handled_component_p (t: tem)) |
5352 | tem = TREE_OPERAND (tem, 0); |
5353 | if ((DECL_P (tem) |
5354 | && !auto_var_in_fn_p (tem, fn->decl)) |
5355 | || INDIRECT_REF_P (tem) |
5356 | || (TREE_CODE (tem) == MEM_REF |
5357 | && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR |
5358 | && auto_var_in_fn_p |
5359 | (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl)))) |
5360 | { |
5361 | struct constraint_expr lhsc, *rhsp; |
5362 | unsigned i; |
5363 | lhsc = get_function_part_constraint (fi, part: fi_clobbers); |
5364 | get_constraint_for_address_of (t: lhs, results: &rhsc); |
5365 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5366 | process_constraint (t: new_constraint (lhs: lhsc, rhs: *rhsp)); |
5367 | rhsc.truncate (size: 0); |
5368 | } |
5369 | } |
5370 | |
5371 | /* Account for uses in assigments and returns. */ |
5372 | if (gimple_assign_single_p (gs: t) |
5373 | || (gimple_code (g: t) == GIMPLE_RETURN |
5374 | && gimple_return_retval (gs: as_a <greturn *> (p: t)) != NULL_TREE)) |
5375 | { |
5376 | tree rhs = (gimple_assign_single_p (gs: t) |
5377 | ? gimple_assign_rhs1 (gs: t) |
5378 | : gimple_return_retval (gs: as_a <greturn *> (p: t))); |
5379 | tree tem = rhs; |
5380 | while (handled_component_p (t: tem)) |
5381 | tem = TREE_OPERAND (tem, 0); |
5382 | if ((DECL_P (tem) |
5383 | && !auto_var_in_fn_p (tem, fn->decl)) |
5384 | || INDIRECT_REF_P (tem) |
5385 | || (TREE_CODE (tem) == MEM_REF |
5386 | && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR |
5387 | && auto_var_in_fn_p |
5388 | (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl)))) |
5389 | { |
5390 | struct constraint_expr lhs, *rhsp; |
5391 | unsigned i; |
5392 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5393 | get_constraint_for_address_of (t: rhs, results: &rhsc); |
5394 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5395 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5396 | rhsc.truncate (size: 0); |
5397 | } |
5398 | } |
5399 | |
5400 | if (gcall *call_stmt = dyn_cast <gcall *> (p: t)) |
5401 | { |
5402 | varinfo_t cfi = NULL; |
5403 | tree decl = gimple_call_fndecl (gs: t); |
5404 | struct constraint_expr lhs, rhs; |
5405 | unsigned i, j; |
5406 | |
5407 | /* For builtins we do not have separate function info. For those |
5408 | we do not generate escapes for we have to generate clobbers/uses. */ |
5409 | if (gimple_call_builtin_p (t, BUILT_IN_NORMAL)) |
5410 | switch (DECL_FUNCTION_CODE (decl)) |
5411 | { |
5412 | /* The following functions use and clobber memory pointed to |
5413 | by their arguments. */ |
5414 | case BUILT_IN_STRCPY: |
5415 | case BUILT_IN_STRNCPY: |
5416 | case BUILT_IN_BCOPY: |
5417 | case BUILT_IN_MEMCPY: |
5418 | case BUILT_IN_MEMMOVE: |
5419 | case BUILT_IN_MEMPCPY: |
5420 | case BUILT_IN_STPCPY: |
5421 | case BUILT_IN_STPNCPY: |
5422 | case BUILT_IN_STRCAT: |
5423 | case BUILT_IN_STRNCAT: |
5424 | case BUILT_IN_STRCPY_CHK: |
5425 | case BUILT_IN_STRNCPY_CHK: |
5426 | case BUILT_IN_MEMCPY_CHK: |
5427 | case BUILT_IN_MEMMOVE_CHK: |
5428 | case BUILT_IN_MEMPCPY_CHK: |
5429 | case BUILT_IN_STPCPY_CHK: |
5430 | case BUILT_IN_STPNCPY_CHK: |
5431 | case BUILT_IN_STRCAT_CHK: |
5432 | case BUILT_IN_STRNCAT_CHK: |
5433 | { |
5434 | tree dest = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl) |
5435 | == BUILT_IN_BCOPY ? 1 : 0)); |
5436 | tree src = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl) |
5437 | == BUILT_IN_BCOPY ? 0 : 1)); |
5438 | unsigned i; |
5439 | struct constraint_expr *rhsp, *lhsp; |
5440 | get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc); |
5441 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5442 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
5443 | process_constraint (t: new_constraint (lhs, rhs: *lhsp)); |
5444 | get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc); |
5445 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5446 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5447 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5448 | return; |
5449 | } |
5450 | /* The following function clobbers memory pointed to by |
5451 | its argument. */ |
5452 | case BUILT_IN_MEMSET: |
5453 | case BUILT_IN_MEMSET_CHK: |
5454 | case BUILT_IN_POSIX_MEMALIGN: |
5455 | { |
5456 | tree dest = gimple_call_arg (gs: t, index: 0); |
5457 | unsigned i; |
5458 | ce_s *lhsp; |
5459 | get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc); |
5460 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5461 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
5462 | process_constraint (t: new_constraint (lhs, rhs: *lhsp)); |
5463 | return; |
5464 | } |
5465 | /* The following functions clobber their second and third |
5466 | arguments. */ |
5467 | case BUILT_IN_SINCOS: |
5468 | case BUILT_IN_SINCOSF: |
5469 | case BUILT_IN_SINCOSL: |
5470 | { |
5471 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 1)); |
5472 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 2)); |
5473 | return; |
5474 | } |
5475 | /* The following functions clobber their second argument. */ |
5476 | case BUILT_IN_FREXP: |
5477 | case BUILT_IN_FREXPF: |
5478 | case BUILT_IN_FREXPL: |
5479 | case BUILT_IN_LGAMMA_R: |
5480 | case BUILT_IN_LGAMMAF_R: |
5481 | case BUILT_IN_LGAMMAL_R: |
5482 | case BUILT_IN_GAMMA_R: |
5483 | case BUILT_IN_GAMMAF_R: |
5484 | case BUILT_IN_GAMMAL_R: |
5485 | case BUILT_IN_MODF: |
5486 | case BUILT_IN_MODFF: |
5487 | case BUILT_IN_MODFL: |
5488 | { |
5489 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 1)); |
5490 | return; |
5491 | } |
5492 | /* The following functions clobber their third argument. */ |
5493 | case BUILT_IN_REMQUO: |
5494 | case BUILT_IN_REMQUOF: |
5495 | case BUILT_IN_REMQUOL: |
5496 | { |
5497 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 2)); |
5498 | return; |
5499 | } |
5500 | /* The following functions neither read nor clobber memory. */ |
5501 | case BUILT_IN_ASSUME_ALIGNED: |
5502 | case BUILT_IN_FREE: |
5503 | return; |
5504 | /* Trampolines are of no interest to us. */ |
5505 | case BUILT_IN_INIT_TRAMPOLINE: |
5506 | case BUILT_IN_ADJUST_TRAMPOLINE: |
5507 | return; |
5508 | case BUILT_IN_VA_START: |
5509 | case BUILT_IN_VA_END: |
5510 | return; |
5511 | case BUILT_IN_GOMP_PARALLEL: |
5512 | case BUILT_IN_GOACC_PARALLEL: |
5513 | { |
5514 | unsigned int fnpos, argpos; |
5515 | unsigned int implicit_use_args[2]; |
5516 | unsigned int num_implicit_use_args = 0; |
5517 | switch (DECL_FUNCTION_CODE (decl)) |
5518 | { |
5519 | case BUILT_IN_GOMP_PARALLEL: |
5520 | /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */ |
5521 | fnpos = 0; |
5522 | argpos = 1; |
5523 | break; |
5524 | case BUILT_IN_GOACC_PARALLEL: |
5525 | /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs, |
5526 | sizes, kinds, ...). */ |
5527 | fnpos = 1; |
5528 | argpos = 3; |
5529 | implicit_use_args[num_implicit_use_args++] = 4; |
5530 | implicit_use_args[num_implicit_use_args++] = 5; |
5531 | break; |
5532 | default: |
5533 | gcc_unreachable (); |
5534 | } |
5535 | |
5536 | tree fnarg = gimple_call_arg (gs: t, index: fnpos); |
5537 | gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR); |
5538 | tree fndecl = TREE_OPERAND (fnarg, 0); |
5539 | if (fndecl_maybe_in_other_partition (fndecl)) |
5540 | /* Fallthru to general call handling. */ |
5541 | break; |
5542 | |
5543 | varinfo_t cfi = get_vi_for_tree (t: fndecl); |
5544 | |
5545 | tree arg = gimple_call_arg (gs: t, index: argpos); |
5546 | |
5547 | /* Parameter passed by value is used. */ |
5548 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5549 | struct constraint_expr *rhsp; |
5550 | get_constraint_for (t: arg, results: &rhsc); |
5551 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5552 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5553 | rhsc.truncate (size: 0); |
5554 | |
5555 | /* Handle parameters used by the call, but not used in cfi, as |
5556 | implicitly used by cfi. */ |
5557 | lhs = get_function_part_constraint (fi: cfi, part: fi_uses); |
5558 | for (unsigned i = 0; i < num_implicit_use_args; ++i) |
5559 | { |
5560 | tree arg = gimple_call_arg (gs: t, index: implicit_use_args[i]); |
5561 | get_constraint_for (t: arg, results: &rhsc); |
5562 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5563 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5564 | rhsc.truncate (size: 0); |
5565 | } |
5566 | |
5567 | /* The caller clobbers what the callee does. */ |
5568 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5569 | rhs = get_function_part_constraint (fi: cfi, part: fi_clobbers); |
5570 | process_constraint (t: new_constraint (lhs, rhs)); |
5571 | |
5572 | /* The caller uses what the callee does. */ |
5573 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5574 | rhs = get_function_part_constraint (fi: cfi, part: fi_uses); |
5575 | process_constraint (t: new_constraint (lhs, rhs)); |
5576 | |
5577 | return; |
5578 | } |
5579 | /* printf-style functions may have hooks to set pointers to |
5580 | point to somewhere into the generated string. Leave them |
5581 | for a later exercise... */ |
5582 | default: |
5583 | /* Fallthru to general call handling. */; |
5584 | } |
5585 | |
5586 | /* Parameters passed by value are used. */ |
5587 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5588 | for (i = 0; i < gimple_call_num_args (gs: t); i++) |
5589 | { |
5590 | struct constraint_expr *rhsp; |
5591 | tree arg = gimple_call_arg (gs: t, index: i); |
5592 | |
5593 | if (TREE_CODE (arg) == SSA_NAME |
5594 | || is_gimple_min_invariant (arg)) |
5595 | continue; |
5596 | |
5597 | get_constraint_for_address_of (t: arg, results: &rhsc); |
5598 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5599 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5600 | rhsc.truncate (size: 0); |
5601 | } |
5602 | |
5603 | /* Build constraints for propagating clobbers/uses along the |
5604 | callgraph edges. */ |
5605 | cfi = get_fi_for_callee (call: call_stmt); |
5606 | if (cfi->id == anything_id) |
5607 | { |
5608 | if (gimple_vdef (g: t)) |
5609 | make_constraint_from (vi: first_vi_for_offset (fi, fi_clobbers), |
5610 | from: anything_id); |
5611 | make_constraint_from (vi: first_vi_for_offset (fi, fi_uses), |
5612 | from: anything_id); |
5613 | return; |
5614 | } |
5615 | |
5616 | /* For callees without function info (that's external functions), |
5617 | ESCAPED is clobbered and used. */ |
5618 | if (cfi->decl |
5619 | && TREE_CODE (cfi->decl) == FUNCTION_DECL |
5620 | && !cfi->is_fn_info) |
5621 | { |
5622 | varinfo_t vi; |
5623 | |
5624 | if (gimple_vdef (g: t)) |
5625 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_clobbers), |
5626 | from: escaped_id); |
5627 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_uses), from: escaped_id); |
5628 | |
5629 | /* Also honor the call statement use/clobber info. */ |
5630 | if ((vi = lookup_call_clobber_vi (call: call_stmt)) != NULL) |
5631 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_clobbers), |
5632 | from: vi->id); |
5633 | if ((vi = lookup_call_use_vi (call: call_stmt)) != NULL) |
5634 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_uses), |
5635 | from: vi->id); |
5636 | return; |
5637 | } |
5638 | |
5639 | /* Otherwise the caller clobbers and uses what the callee does. |
5640 | ??? This should use a new complex constraint that filters |
5641 | local variables of the callee. */ |
5642 | if (gimple_vdef (g: t)) |
5643 | { |
5644 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5645 | rhs = get_function_part_constraint (fi: cfi, part: fi_clobbers); |
5646 | process_constraint (t: new_constraint (lhs, rhs)); |
5647 | } |
5648 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5649 | rhs = get_function_part_constraint (fi: cfi, part: fi_uses); |
5650 | process_constraint (t: new_constraint (lhs, rhs)); |
5651 | } |
5652 | else if (gimple_code (g: t) == GIMPLE_ASM) |
5653 | { |
5654 | /* ??? Ick. We can do better. */ |
5655 | if (gimple_vdef (g: t)) |
5656 | make_constraint_from (vi: first_vi_for_offset (fi, fi_clobbers), |
5657 | from: anything_id); |
5658 | make_constraint_from (vi: first_vi_for_offset (fi, fi_uses), |
5659 | from: anything_id); |
5660 | } |
5661 | } |
5662 | |
5663 | |
5664 | /* Find the first varinfo in the same variable as START that overlaps with |
5665 | OFFSET. Return NULL if we can't find one. */ |
5666 | |
5667 | static varinfo_t |
5668 | first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset) |
5669 | { |
5670 | /* If the offset is outside of the variable, bail out. */ |
5671 | if (offset >= start->fullsize) |
5672 | return NULL; |
5673 | |
5674 | /* If we cannot reach offset from start, lookup the first field |
5675 | and start from there. */ |
5676 | if (start->offset > offset) |
5677 | start = get_varinfo (n: start->head); |
5678 | |
5679 | while (start) |
5680 | { |
5681 | /* We may not find a variable in the field list with the actual |
5682 | offset when we have glommed a structure to a variable. |
5683 | In that case, however, offset should still be within the size |
5684 | of the variable. */ |
5685 | if (offset >= start->offset |
5686 | && (offset - start->offset) < start->size) |
5687 | return start; |
5688 | |
5689 | start = vi_next (vi: start); |
5690 | } |
5691 | |
5692 | return NULL; |
5693 | } |
5694 | |
5695 | /* Find the first varinfo in the same variable as START that overlaps with |
5696 | OFFSET. If there is no such varinfo the varinfo directly preceding |
5697 | OFFSET is returned. */ |
5698 | |
5699 | static varinfo_t |
5700 | first_or_preceding_vi_for_offset (varinfo_t start, |
5701 | unsigned HOST_WIDE_INT offset) |
5702 | { |
5703 | /* If we cannot reach offset from start, lookup the first field |
5704 | and start from there. */ |
5705 | if (start->offset > offset) |
5706 | start = get_varinfo (n: start->head); |
5707 | |
5708 | /* We may not find a variable in the field list with the actual |
5709 | offset when we have glommed a structure to a variable. |
5710 | In that case, however, offset should still be within the size |
5711 | of the variable. |
5712 | If we got beyond the offset we look for return the field |
5713 | directly preceding offset which may be the last field. */ |
5714 | while (start->next |
5715 | && offset >= start->offset |
5716 | && !((offset - start->offset) < start->size)) |
5717 | start = vi_next (vi: start); |
5718 | |
5719 | return start; |
5720 | } |
5721 | |
5722 | |
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if DECL_SIZE of the field is unknown or does not fit an
     unsigned HOST_WIDE_INT; SIZE is then meaningless (set to -1).  */
  unsigned has_unknown_size : 1;

  /* True if the field's type is known to contain pointers
     (see field_must_have_pointers).  */
  unsigned must_have_pointers : 1;

  /* Conservatively true if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a restrict-qualified pointer.  */
  unsigned only_restrict_pointers : 1;

  /* If ONLY_RESTRICT_POINTERS, the type the restrict pointer
     points to.  */
  tree restrict_pointed_type;
};
typedef struct fieldoff fieldoff_s;
5747 | |
5748 | |
5749 | /* qsort comparison function for two fieldoff's PA and PB */ |
5750 | |
5751 | static int |
5752 | fieldoff_compare (const void *pa, const void *pb) |
5753 | { |
5754 | const fieldoff_s *foa = (const fieldoff_s *)pa; |
5755 | const fieldoff_s *fob = (const fieldoff_s *)pb; |
5756 | unsigned HOST_WIDE_INT foasize, fobsize; |
5757 | |
5758 | if (foa->offset < fob->offset) |
5759 | return -1; |
5760 | else if (foa->offset > fob->offset) |
5761 | return 1; |
5762 | |
5763 | foasize = foa->size; |
5764 | fobsize = fob->size; |
5765 | if (foasize < fobsize) |
5766 | return -1; |
5767 | else if (foasize > fobsize) |
5768 | return 1; |
5769 | return 0; |
5770 | } |
5771 | |
/* Sort a fieldstack according to the field offset and sizes.  */
static void
sort_fieldstack (vec<fieldoff_s> &fieldstack)
{
  /* fieldoff_compare orders by offset first, then by size, so equal
     offsets end up adjacent (see check_for_overlaps).  */
  fieldstack.qsort (fieldoff_compare);
}
5778 | |
/* Return true if T is a type that can have subvars.  */

static inline bool
type_can_have_subvars (const_tree t)
{
  /* Aggregates without overlapping fields can have subvars.
     Unions and arrays are deliberately excluded: only RECORD_TYPEs
     qualify.  */
  return TREE_CODE (t) == RECORD_TYPE;
}
5787 | |
5788 | /* Return true if V is a tree that we can have subvars for. |
5789 | Normally, this is any aggregate type. Also complex |
5790 | types which are not gimple registers can have subvars. */ |
5791 | |
5792 | static inline bool |
5793 | var_can_have_subvars (const_tree v) |
5794 | { |
5795 | /* Volatile variables should never have subvars. */ |
5796 | if (TREE_THIS_VOLATILE (v)) |
5797 | return false; |
5798 | |
5799 | /* Non decls or memory tags can never have subvars. */ |
5800 | if (!DECL_P (v)) |
5801 | return false; |
5802 | |
5803 | return type_can_have_subvars (TREE_TYPE (v)); |
5804 | } |
5805 | |
5806 | /* Return true if T is a type that does contain pointers. */ |
5807 | |
5808 | static bool |
5809 | type_must_have_pointers (tree type) |
5810 | { |
5811 | if (POINTER_TYPE_P (type)) |
5812 | return true; |
5813 | |
5814 | if (TREE_CODE (type) == ARRAY_TYPE) |
5815 | return type_must_have_pointers (TREE_TYPE (type)); |
5816 | |
5817 | /* A function or method can have pointers as arguments, so track |
5818 | those separately. */ |
5819 | if (FUNC_OR_METHOD_TYPE_P (type)) |
5820 | return true; |
5821 | |
5822 | return false; |
5823 | } |
5824 | |
/* Return true if the type of field T must contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5830 | |
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs can have subvars (see type_can_have_subvars).  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= param_max_fields_for_field_sensitive, make
     sure this fails.  */
  if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (fdecl: field);
	tree field_type = TREE_TYPE (field);

	/* Push the field itself when it cannot be decomposed further
	   (unions are treated as opaque blobs); otherwise recurse into
	   it and only push it if the recursion produced nothing and the
	   field has nonzero size.  */
	if (!var_can_have_subvars (v: field)
	    || TREE_CODE (field_type) == QUAL_UNION_TYPE
	    || TREE_CODE (field_type) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (type: field_type, fieldstack, offset: offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		/* A padding entry covering [0, offset + foff) that may
		   have pointers but has no known restrict info.  */
		fieldoff_s e
		  = {.offset: 0, .size: offset + foff, .has_unknown_size: false, .must_have_pointers: false, .may_have_pointers: true, .only_restrict_pointers: false, NULL_TREE};
		pair = fieldstack->safe_push (obj: e);
	      }

	    if (!DECL_SIZE (field)
		|| !tree_fits_uhwi_p (DECL_SIZE (field)))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (t: field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* Extend the previous entry to cover this field too.  */
		pair->size += tree_to_uhwi (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = tree_to_uhwi (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (field_type)
		     && TYPE_RESTRICT (field_type));
		if (e.only_restrict_pointers)
		  e.restrict_pointed_type = TREE_TYPE (field_type);
		fieldstack->safe_push (obj: e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5935 | |
5936 | /* Count the number of arguments DECL has, and set IS_VARARGS to true |
5937 | if it is a varargs function. */ |
5938 | |
5939 | static unsigned int |
5940 | count_num_arguments (tree decl, bool *is_varargs) |
5941 | { |
5942 | unsigned int num = 0; |
5943 | tree t; |
5944 | |
5945 | /* Capture named arguments for K&R functions. They do not |
5946 | have a prototype and thus no TYPE_ARG_TYPES. */ |
5947 | for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t)) |
5948 | ++num; |
5949 | |
5950 | /* Check if the function has variadic arguments. */ |
5951 | for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t)) |
5952 | if (TREE_VALUE (t) == void_type_node) |
5953 | break; |
5954 | if (!t) |
5955 | *is_varargs = true; |
5956 | |
5957 | return num; |
5958 | } |
5959 | |
/* Creation function node for DECL, using NAME, and return the index
   of the variable we've created for the function.  If NONLOCAL_p, create
   initial constraints.  */

static varinfo_t
create_function_info_for (tree decl, const char *name, bool add_id,
			  bool nonlocal_p)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, is_varargs: &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (t: decl, name, add_id);
  vi->offset = 0;
  vi->size = 1;
  /* The function info is laid out as a sequence of sub-variables at
     the fi_* offsets (clobbers, uses, static chain, result) followed
     by one slot per parameter starting at fi_parm_base.  */
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    /* For varargs functions the number of parameters is unbounded.  */
    vi->fullsize = ~0;
  insert_vi_for_tree (t: vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
    {
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.clobber" , name);
      newname = ggc_strdup (tempname);
      free (ptr: tempname);

      clobbervi = new_var_info (NULL, name: newname, add_id: false);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;
      clobbervi->is_reg_var = true;

      /* Sub-variables are chained onto VI in increasing offset order.  */
      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi->id;
      prev_vi = clobbervi;

      tempname = xasprintf ("%s.use" , name);
      newname = ggc_strdup (tempname);
      free (ptr: tempname);

      usevi = new_var_info (NULL, name: newname, add_id: false);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;
      usevi->is_reg_var = true;

      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi->id;
      prev_vi = usevi;
    }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.chain" , name);
      newname = ggc_strdup (tempname);
      free (ptr: tempname);

      chainvi = new_var_info (t: fn->static_chain_decl, name: newname, add_id: false);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;

      insert_vi_for_tree (t: fn->static_chain_decl, vi: chainvi);

      if (nonlocal_p
	  && chainvi->may_have_pointers)
	make_constraint_from (vi: chainvi, from: nonlocal_id);

      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      tempname = xasprintf ("%s.result" , name);
      newname = ggc_strdup (tempname);
      free (ptr: tempname);

      resultvi = new_var_info (t: resultdecl, name: newname, add_id: false);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;

      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), vi: resultvi);

      /* A DECL_BY_REFERENCE result receives its storage from the
	 caller, so it may point to nonlocal memory.  */
      if (nonlocal_p
	  && DECL_RESULT (decl)
	  && DECL_BY_REFERENCE (DECL_RESULT (decl)))
	make_constraint_from (vi: resultvi, from: nonlocal_id);

      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
    }

  /* We also need to make function return values escape.  Nothing
     escapes by returning from main though.  */
  if (nonlocal_p
      && !MAIN_NAME_P (DECL_NAME (decl)))
    {
      varinfo_t fi, rvi;
      fi = lookup_vi_for_tree (t: decl);
      rvi = first_vi_for_offset (start: fi, offset: fi_result);
      if (rvi && rvi->offset == fi_result)
	make_copy_constraint (vi: get_varinfo (n: escaped_id), from: rvi->id);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      /* ARG can run out before NUM_ARGS for K&R functions; fall back
	 to DECL itself as the associated tree in that case.  */
      if (arg)
	argdecl = arg;

      tempname = xasprintf ("%s.arg%d" , name, i);
      newname = ggc_strdup (tempname);
      free (ptr: tempname);

      argvi = new_var_info (t: argdecl, name: newname, add_id: false);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;

      if (arg)
	insert_vi_for_tree (t: arg, vi: argvi);

      /* Incoming arguments of an externally visible function may
	 point to nonlocal memory.  */
      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (vi: argvi, from: nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      tempname = xasprintf ("%s.varargs" , name);
      newname = ggc_strdup (tempname);
      free (ptr: tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (t: decl, name: newname, add_id: false);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (vi: argvi, from: nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
    }

  return vi;
}
6177 | |
6178 | |
6179 | /* Return true if FIELDSTACK contains fields that overlap. |
6180 | FIELDSTACK is assumed to be sorted by offset. */ |
6181 | |
6182 | static bool |
6183 | check_for_overlaps (const vec<fieldoff_s> &fieldstack) |
6184 | { |
6185 | fieldoff_s *fo = NULL; |
6186 | unsigned int i; |
6187 | HOST_WIDE_INT lastoffset = -1; |
6188 | |
6189 | FOR_EACH_VEC_ELT (fieldstack, i, fo) |
6190 | { |
6191 | if (fo->offset == lastoffset) |
6192 | return true; |
6193 | lastoffset = fo->offset; |
6194 | } |
6195 | return false; |
6196 | } |
6197 | |
6198 | /* Create a varinfo structure for NAME and DECL, and add it to VARMAP. |
6199 | This will also create any varinfo structures necessary for fields |
6200 | of DECL. DECL is a function parameter if HANDLE_PARAM is set. |
6201 | HANDLED_STRUCT_TYPE is used to register struct types reached by following |
6202 | restrict pointers. This is needed to prevent infinite recursion. |
6203 | If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL |
6204 | does not advertise it. */ |
6205 | |
6206 | static varinfo_t |
6207 | create_variable_info_for_1 (tree decl, const char *name, bool add_id, |
6208 | bool handle_param, bitmap handled_struct_type, |
6209 | bool add_restrict = false) |
6210 | { |
6211 | varinfo_t vi, newvi; |
6212 | tree decl_type = TREE_TYPE (decl); |
6213 | tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type); |
6214 | auto_vec<fieldoff_s> fieldstack; |
6215 | fieldoff_s *fo; |
6216 | unsigned int i; |
6217 | |
6218 | if (!declsize |
6219 | || !tree_fits_uhwi_p (declsize)) |
6220 | { |
6221 | vi = new_var_info (t: decl, name, add_id); |
6222 | vi->offset = 0; |
6223 | vi->size = ~0; |
6224 | vi->fullsize = ~0; |
6225 | vi->is_unknown_size_var = true; |
6226 | vi->is_full_var = true; |
6227 | vi->may_have_pointers = true; |
6228 | return vi; |
6229 | } |
6230 | |
6231 | /* Collect field information. */ |
6232 | if (use_field_sensitive |
6233 | && var_can_have_subvars (v: decl) |
6234 | /* ??? Force us to not use subfields for globals in IPA mode. |
6235 | Else we'd have to parse arbitrary initializers. */ |
6236 | && !(in_ipa_mode |
6237 | && is_global_var (t: decl))) |
6238 | { |
6239 | fieldoff_s *fo = NULL; |
6240 | bool notokay = false; |
6241 | unsigned int i; |
6242 | |
6243 | push_fields_onto_fieldstack (type: decl_type, fieldstack: &fieldstack, offset: 0); |
6244 | |
6245 | for (i = 0; !notokay && fieldstack.iterate (ix: i, ptr: &fo); i++) |
6246 | if (fo->has_unknown_size |
6247 | || fo->offset < 0) |
6248 | { |
6249 | notokay = true; |
6250 | break; |
6251 | } |
6252 | |
6253 | /* We can't sort them if we have a field with a variable sized type, |
6254 | which will make notokay = true. In that case, we are going to return |
6255 | without creating varinfos for the fields anyway, so sorting them is a |
6256 | waste to boot. */ |
6257 | if (!notokay) |
6258 | { |
6259 | sort_fieldstack (fieldstack); |
6260 | /* Due to some C++ FE issues, like PR 22488, we might end up |
6261 | what appear to be overlapping fields even though they, |
6262 | in reality, do not overlap. Until the C++ FE is fixed, |
6263 | we will simply disable field-sensitivity for these cases. */ |
6264 | notokay = check_for_overlaps (fieldstack); |
6265 | } |
6266 | |
6267 | if (notokay) |
6268 | fieldstack.release (); |
6269 | } |
6270 | |
6271 | /* If we didn't end up collecting sub-variables create a full |
6272 | variable for the decl. */ |
6273 | if (fieldstack.length () == 0 |
6274 | || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive) |
6275 | { |
6276 | vi = new_var_info (t: decl, name, add_id); |
6277 | vi->offset = 0; |
6278 | vi->may_have_pointers = true; |
6279 | vi->fullsize = tree_to_uhwi (declsize); |
6280 | vi->size = vi->fullsize; |
6281 | vi->is_full_var = true; |
6282 | if (POINTER_TYPE_P (decl_type) |
6283 | && (TYPE_RESTRICT (decl_type) || add_restrict)) |
6284 | vi->only_restrict_pointers = 1; |
6285 | if (vi->only_restrict_pointers |
6286 | && !type_contains_placeholder_p (TREE_TYPE (decl_type)) |
6287 | && handle_param |
6288 | && !bitmap_bit_p (handled_struct_type, |
6289 | TYPE_UID (TREE_TYPE (decl_type)))) |
6290 | { |
6291 | varinfo_t rvi; |
6292 | tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type)); |
6293 | DECL_EXTERNAL (heapvar) = 1; |
6294 | if (var_can_have_subvars (v: heapvar)) |
6295 | bitmap_set_bit (handled_struct_type, |
6296 | TYPE_UID (TREE_TYPE (decl_type))); |
6297 | rvi = create_variable_info_for_1 (decl: heapvar, name: "PARM_NOALIAS" , add_id: true, |
6298 | handle_param: true, handled_struct_type); |
6299 | if (var_can_have_subvars (v: heapvar)) |
6300 | bitmap_clear_bit (handled_struct_type, |
6301 | TYPE_UID (TREE_TYPE (decl_type))); |
6302 | rvi->is_restrict_var = 1; |
6303 | insert_vi_for_tree (t: heapvar, vi: rvi); |
6304 | make_constraint_from (vi, from: rvi->id); |
6305 | make_param_constraints (rvi); |
6306 | } |
6307 | fieldstack.release (); |
6308 | return vi; |
6309 | } |
6310 | |
6311 | vi = new_var_info (t: decl, name, add_id); |
6312 | vi->fullsize = tree_to_uhwi (declsize); |
6313 | if (fieldstack.length () == 1) |
6314 | vi->is_full_var = true; |
6315 | for (i = 0, newvi = vi; |
6316 | fieldstack.iterate (ix: i, ptr: &fo); |
6317 | ++i, newvi = vi_next (vi: newvi)) |
6318 | { |
6319 | const char *newname = NULL; |
6320 | char *tempname; |
6321 | |
6322 | if (dump_file) |
6323 | { |
6324 | if (fieldstack.length () != 1) |
6325 | { |
6326 | tempname |
6327 | = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC |
6328 | "+" HOST_WIDE_INT_PRINT_DEC, name, |
6329 | fo->offset, fo->size); |
6330 | newname = ggc_strdup (tempname); |
6331 | free (ptr: tempname); |
6332 | } |
6333 | } |
6334 | else |
6335 | newname = "NULL" ; |
6336 | |
6337 | if (newname) |
6338 | newvi->name = newname; |
6339 | newvi->offset = fo->offset; |
6340 | newvi->size = fo->size; |
6341 | newvi->fullsize = vi->fullsize; |
6342 | newvi->may_have_pointers = fo->may_have_pointers; |
6343 | newvi->only_restrict_pointers = fo->only_restrict_pointers; |
6344 | if (handle_param |
6345 | && newvi->only_restrict_pointers |
6346 | && !type_contains_placeholder_p (fo->restrict_pointed_type) |
6347 | && !bitmap_bit_p (handled_struct_type, |
6348 | TYPE_UID (fo->restrict_pointed_type))) |
6349 | { |
6350 | varinfo_t rvi; |
6351 | tree heapvar = build_fake_var_decl (type: fo->restrict_pointed_type); |
6352 | DECL_EXTERNAL (heapvar) = 1; |
6353 | if (var_can_have_subvars (v: heapvar)) |
6354 | bitmap_set_bit (handled_struct_type, |
6355 | TYPE_UID (fo->restrict_pointed_type)); |
6356 | rvi = create_variable_info_for_1 (decl: heapvar, name: "PARM_NOALIAS" , add_id: true, |
6357 | handle_param: true, handled_struct_type); |
6358 | if (var_can_have_subvars (v: heapvar)) |
6359 | bitmap_clear_bit (handled_struct_type, |
6360 | TYPE_UID (fo->restrict_pointed_type)); |
6361 | rvi->is_restrict_var = 1; |
6362 | insert_vi_for_tree (t: heapvar, vi: rvi); |
6363 | make_constraint_from (vi: newvi, from: rvi->id); |
6364 | make_param_constraints (rvi); |
6365 | } |
6366 | if (i + 1 < fieldstack.length ()) |
6367 | { |
6368 | varinfo_t tem = new_var_info (t: decl, name, add_id: false); |
6369 | newvi->next = tem->id; |
6370 | tem->head = vi->id; |
6371 | } |
6372 | } |
6373 | |
6374 | return vi; |
6375 | } |
6376 | |
/* Create varinfo structure(s) for DECL named NAME, record the
   tree-to-varinfo mapping and, for global variables, generate the
   initial constraints implied by their initializers.  If ADD_ID is
   true the variable id is appended to NAME for dump purposes.
   Returns the id of the head varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name, bool add_id)
{
  /* First see if we are dealing with an ifunc resolver call and
     associate that with a call to the resolver function result.  */
  cgraph_node *node;
  if (in_ipa_mode
      && TREE_CODE (decl) == FUNCTION_DECL
      && (node = cgraph_node::get (decl))
      && node->ifunc_resolver)
    {
      /* Model the ifunc symbol as a register variable initialized from
	 whatever the resolver function returns.  */
      varinfo_t fi = get_vi_for_tree (t: node->get_alias_target ()->decl);
      constraint_expr rhs
	= get_function_part_constraint (fi, part: fi_result);
      fi = new_var_info (NULL_TREE, name: "ifuncres" , add_id: true);
      fi->is_reg_var = true;
      constraint_expr lhs;
      lhs.type = SCALAR;
      lhs.var = fi->id;
      lhs.offset = 0;
      process_constraint (t: new_constraint (lhs, rhs));
      insert_vi_for_tree (t: decl, vi: fi);
      return fi->id;
    }

  varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, handle_param: false, NULL);
  unsigned int id = vi->id;

  insert_vi_for_tree (t: decl, vi);

  /* Only VAR_DECLs get the global initializer handling below.  */
  if (!VAR_P (decl))
    return id;

  /* Create initial constraints for globals.  Walk all sub-fields of
     the (possibly field-split) variable.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  varinfo_t rvi
	    = make_constraint_from_global_restrict (lhs: vi, name: "GLOBAL_RESTRICT" ,
						    add_id: true);
	  /* ??? For now exclude reads from globals as restrict sources
	     if those are not (indirectly) from incoming parameters.  */
	  rvi->is_restrict_var = false;
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, from: nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  varpool_node *vnode = varpool_node::get (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!vnode->all_refs_explicit_p ())
	    make_copy_constraint (vi, from: nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  Each reference in the
	     initializer adds an address-of constraint.  */
	  ipa_ref *ref;
	  for (unsigned idx = 0; vnode->iterate_reference (i: idx, ref); ++idx)
	    {
	      auto_vec<ce_s> rhsc;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_address_of (t: ref->referred->decl, results: &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (t: new_constraint (lhs, rhs: *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!vnode->all_refs_explicit_p ())
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (t: new_constraint (lhs, rhs: *rhsp));
		}
	    }
	}
    }

  return id;
}
6476 | |
6477 | /* Print out the points-to solution for VAR to FILE. */ |
6478 | |
6479 | static void |
6480 | dump_solution_for_var (FILE *file, unsigned int var) |
6481 | { |
6482 | varinfo_t vi = get_varinfo (n: var); |
6483 | unsigned int i; |
6484 | bitmap_iterator bi; |
6485 | |
6486 | /* Dump the solution for unified vars anyway, this avoids difficulties |
6487 | in scanning dumps in the testsuite. */ |
6488 | fprintf (stream: file, format: "%s = { " , vi->name); |
6489 | vi = get_varinfo (n: find (node: var)); |
6490 | EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi) |
6491 | fprintf (stream: file, format: "%s " , get_varinfo (n: i)->name); |
6492 | fprintf (stream: file, format: "}" ); |
6493 | |
6494 | /* But note when the variable was unified. */ |
6495 | if (vi->id != var) |
6496 | fprintf (stream: file, format: " same as %s" , vi->name); |
6497 | |
6498 | fprintf (stream: file, format: "\n" ); |
6499 | } |
6500 | |
6501 | /* Print the points-to solution for VAR to stderr. */ |
6502 | |
6503 | DEBUG_FUNCTION void |
6504 | debug_solution_for_var (unsigned int var) |
6505 | { |
6506 | dump_solution_for_var (stderr, var); |
6507 | } |
6508 | |
6509 | /* Register the constraints for function parameter related VI. */ |
6510 | |
6511 | static void |
6512 | make_param_constraints (varinfo_t vi) |
6513 | { |
6514 | for (; vi; vi = vi_next (vi)) |
6515 | { |
6516 | if (vi->only_restrict_pointers) |
6517 | ; |
6518 | else if (vi->may_have_pointers) |
6519 | make_constraint_from (vi, from: nonlocal_id); |
6520 | |
6521 | if (vi->is_full_var) |
6522 | break; |
6523 | } |
6524 | } |
6525 | |
6526 | /* Create varinfo structures for all of the variables in the |
6527 | function for intraprocedural mode. */ |
6528 | |
6529 | static void |
6530 | intra_create_variable_infos (struct function *fn) |
6531 | { |
6532 | tree t; |
6533 | bitmap handled_struct_type = NULL; |
6534 | bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl); |
6535 | |
6536 | /* For each incoming pointer argument arg, create the constraint ARG |
6537 | = NONLOCAL or a dummy variable if it is a restrict qualified |
6538 | passed-by-reference argument. */ |
6539 | for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t)) |
6540 | { |
6541 | if (handled_struct_type == NULL) |
6542 | handled_struct_type = BITMAP_ALLOC (NULL); |
6543 | |
6544 | varinfo_t p |
6545 | = create_variable_info_for_1 (decl: t, name: alias_get_name (decl: t), add_id: false, handle_param: true, |
6546 | handled_struct_type, add_restrict: this_parm_in_ctor); |
6547 | insert_vi_for_tree (t, vi: p); |
6548 | |
6549 | make_param_constraints (vi: p); |
6550 | |
6551 | this_parm_in_ctor = false; |
6552 | } |
6553 | |
6554 | if (handled_struct_type != NULL) |
6555 | BITMAP_FREE (handled_struct_type); |
6556 | |
6557 | /* Add a constraint for a result decl that is passed by reference. */ |
6558 | if (DECL_RESULT (fn->decl) |
6559 | && DECL_BY_REFERENCE (DECL_RESULT (fn->decl))) |
6560 | { |
6561 | varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl)); |
6562 | |
6563 | for (p = result_vi; p; p = vi_next (vi: p)) |
6564 | make_constraint_from (vi: p, from: nonlocal_id); |
6565 | } |
6566 | |
6567 | /* Add a constraint for the incoming static chain parameter. */ |
6568 | if (fn->static_chain_decl != NULL_TREE) |
6569 | { |
6570 | varinfo_t p, chain_vi = get_vi_for_tree (t: fn->static_chain_decl); |
6571 | |
6572 | for (p = chain_vi; p; p = vi_next (vi: p)) |
6573 | make_constraint_from (vi: p, from: nonlocal_id); |
6574 | } |
6575 | } |
6576 | |
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The shared points-to variable bitmap.  */
  bitmap pt_vars;
  /* Hash of PT_VARS, computed once at insertion time.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6586 | |
/* Shared_bitmap hashtable helpers.  Entries are heap-allocated and
   freed by the free_ptr_hash base when the table is destroyed.  */

struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
{
  /* Return the cached hash code of an entry.  */
  static inline hashval_t hash (const shared_bitmap_info *);
  /* Compare two entries by bitmap contents.  */
  static inline bool equal (const shared_bitmap_info *,
			    const shared_bitmap_info *);
};
6595 | |
6596 | /* Hash function for a shared_bitmap_info_t */ |
6597 | |
6598 | inline hashval_t |
6599 | shared_bitmap_hasher::hash (const shared_bitmap_info *bi) |
6600 | { |
6601 | return bi->hashcode; |
6602 | } |
6603 | |
6604 | /* Equality function for two shared_bitmap_info_t's. */ |
6605 | |
6606 | inline bool |
6607 | shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1, |
6608 | const shared_bitmap_info *sbi2) |
6609 | { |
6610 | return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars); |
6611 | } |
6612 | |
/* Shared_bitmap hashtable.  Maps a points-to bitmap to its canonical
   shared instance so equal sets are stored only once.  */

static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6616 | |
6617 | /* Lookup a bitmap in the shared bitmap hashtable, and return an already |
6618 | existing instance if there is one, NULL otherwise. */ |
6619 | |
6620 | static bitmap |
6621 | shared_bitmap_lookup (bitmap pt_vars) |
6622 | { |
6623 | shared_bitmap_info **slot; |
6624 | struct shared_bitmap_info sbi; |
6625 | |
6626 | sbi.pt_vars = pt_vars; |
6627 | sbi.hashcode = bitmap_hash (pt_vars); |
6628 | |
6629 | slot = shared_bitmap_table->find_slot (value: &sbi, insert: NO_INSERT); |
6630 | if (!slot) |
6631 | return NULL; |
6632 | else |
6633 | return (*slot)->pt_vars; |
6634 | } |
6635 | |
6636 | |
6637 | /* Add a bitmap to the shared bitmap hashtable. */ |
6638 | |
6639 | static void |
6640 | shared_bitmap_add (bitmap pt_vars) |
6641 | { |
6642 | shared_bitmap_info **slot; |
6643 | shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info); |
6644 | |
6645 | sbi->pt_vars = pt_vars; |
6646 | sbi->hashcode = bitmap_hash (pt_vars); |
6647 | |
6648 | slot = shared_bitmap_table->find_slot (value: sbi, insert: INSERT); |
6649 | gcc_assert (!*slot); |
6650 | *slot = sbi; |
6651 | } |
6652 | |
6653 | |
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, and update the flags of PT (vars_contains_escaped, _nonlocal,
   _restrict, _interposable) accordingly.  FNDECL, if non-NULL, is the
   function the solution is computed for (used in IPA mode to classify
   other functions' locals as nonlocal).  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
		   tree fndecl)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (n: find (node: escaped_id));
  varinfo_t escaped_return_vi = get_varinfo (n: find (node: escaped_return_id));
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (n: i);

      /* Artificial vars (ESCAPED, NONLOCAL, ...) are translated to
	 pt_solution flags by the caller, not to uid bits.  */
      if (vi->is_artificial_var)
	continue;

      /* Record escape properties of the pointed-to variable.  */
      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap |= vi->is_heap_var;
	}
      if (escaped_return_vi->solution
	  && bitmap_bit_p (escaped_return_vi->solution, i))
	pt->vars_contains_escaped_heap |= vi->is_heap_var;

      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;

      if (VAR_P (vi->decl)
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var
	      /* In IPA mode the escaped_heap trick doesn't work as
		 ESCAPED is escaped from the unit but
		 pt_solution_includes_global needs to answer true for
		 all variables not automatic within a function.
		 For the same reason is_global_var is not the
		 correct flag to track - local variables from other
		 functions also need to be considered global.
		 Conveniently all HEAP vars are not put in function
		 scope.  */
	      || (in_ipa_mode
		  && fndecl
		  && ! auto_var_in_fn_p (vi->decl, fndecl)))
	    pt->vars_contains_nonlocal = true;

	  /* If we have a variable that is interposable record that fact
	     for pointer comparison simplification.  */
	  if (VAR_P (vi->decl)
	      && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
	      && ! decl_binds_to_current_def_p (vi->decl))
	    pt->vars_contains_interposable = true;

	  /* If this is a local variable we can have overlapping lifetime
	     of different function invocations through recursion duplicate
	     it with its shadow variable.  */
	  if (in_ipa_mode
	      && vi->shadow_var_uid != 0)
	    {
	      bitmap_set_bit (into, vi->shadow_var_uid);
	      pt->vars_contains_nonlocal = true;
	    }
	}

      else if (TREE_CODE (vi->decl) == FUNCTION_DECL
	       || TREE_CODE (vi->decl) == LABEL_DECL)
	{
	  /* Nothing should read/write from/to code so we can
	     save bits by not including them in the points-to bitmaps.
	     Still mark the points-to set as containing global memory
	     to make code-patching possible - see PR70128.  */
	  pt->vars_contains_nonlocal = true;
	}
    }
}
6745 | |
6746 | |
/* Compute the points-to solution *PT for the variable ORIG_VI in
   function FNDECL.  Solutions are cached in FINAL_SOLUTIONS and their
   variable bitmaps are shared via the shared bitmap table.  */

static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (n: find (node: orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  pt_solution **slot = &final_solutions->get_or_insert (k: vi);
  if (*slot != NULL)
    return **slot;

  /* Cache the freshly built solution; storage comes from the
     final_solutions obstack.  */
  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (s: pt, c: 0, n: sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (n: i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here.  */
	      varinfo_t evi = get_varinfo (n: find (node: escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (into: finished_solution, from: vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (pt_vars: finished_solution);
  if (!result)
    {
      /* First time we see this set - register it for sharing.  */
      shared_bitmap_add (pt_vars: finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* Reuse the existing shared copy and recycle ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6827 | |
6828 | /* Given a pointer variable P, fill in its points-to set. */ |
6829 | |
6830 | static void |
6831 | find_what_p_points_to (tree fndecl, tree p) |
6832 | { |
6833 | struct ptr_info_def *pi; |
6834 | tree lookup_p = p; |
6835 | varinfo_t vi; |
6836 | value_range vr; |
6837 | get_range_query (DECL_STRUCT_FUNCTION (fndecl))->range_of_expr (r&: vr, expr: p); |
6838 | bool nonnull = vr.nonzero_p (); |
6839 | |
6840 | /* For parameters, get at the points-to set for the actual parm |
6841 | decl. */ |
6842 | if (TREE_CODE (p) == SSA_NAME |
6843 | && SSA_NAME_IS_DEFAULT_DEF (p) |
6844 | && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL |
6845 | || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL)) |
6846 | lookup_p = SSA_NAME_VAR (p); |
6847 | |
6848 | vi = lookup_vi_for_tree (t: lookup_p); |
6849 | if (!vi) |
6850 | return; |
6851 | |
6852 | pi = get_ptr_info (p); |
6853 | pi->pt = find_what_var_points_to (fndecl, orig_vi: vi); |
6854 | /* Conservatively set to NULL from PTA (to true). */ |
6855 | pi->pt.null = 1; |
6856 | /* Preserve pointer nonnull globally computed. */ |
6857 | if (nonnull) |
6858 | set_ptr_nonnull (p); |
6859 | } |
6860 | |
6861 | |
/* Query statistics for points-to solutions.  */

static struct {
  /* pt_solution_includes queries that answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  /* pt_solution_includes queries that answered "no alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* pt_solutions_intersect queries that answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  /* pt_solutions_intersect queries that answered "no alias".  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6870 | |
6871 | void |
6872 | dump_pta_stats (FILE *s) |
6873 | { |
6874 | fprintf (stream: s, format: "\nPTA query stats:\n" ); |
6875 | fprintf (stream: s, format: " pt_solution_includes: " |
6876 | HOST_WIDE_INT_PRINT_DEC" disambiguations, " |
6877 | HOST_WIDE_INT_PRINT_DEC" queries\n" , |
6878 | pta_stats.pt_solution_includes_no_alias, |
6879 | pta_stats.pt_solution_includes_no_alias |
6880 | + pta_stats.pt_solution_includes_may_alias); |
6881 | fprintf (stream: s, format: " pt_solutions_intersect: " |
6882 | HOST_WIDE_INT_PRINT_DEC" disambiguations, " |
6883 | HOST_WIDE_INT_PRINT_DEC" queries\n" , |
6884 | pta_stats.pt_solutions_intersect_no_alias, |
6885 | pta_stats.pt_solutions_intersect_no_alias |
6886 | + pta_stats.pt_solutions_intersect_may_alias); |
6887 | } |
6888 | |
6889 | |
6890 | /* Reset the points-to solution *PT to a conservative default |
6891 | (point to anything). */ |
6892 | |
6893 | void |
6894 | pt_solution_reset (struct pt_solution *pt) |
6895 | { |
6896 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
6897 | pt->anything = true; |
6898 | pt->null = true; |
6899 | } |
6900 | |
6901 | /* Set the points-to solution *PT to point only to the variables |
6902 | in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains |
6903 | global variables and VARS_CONTAINS_RESTRICT specifies whether |
6904 | it contains restrict tag variables. */ |
6905 | |
6906 | void |
6907 | pt_solution_set (struct pt_solution *pt, bitmap vars, |
6908 | bool vars_contains_nonlocal) |
6909 | { |
6910 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
6911 | pt->vars = vars; |
6912 | pt->vars_contains_nonlocal = vars_contains_nonlocal; |
6913 | pt->vars_contains_escaped |
6914 | = (cfun->gimple_df->escaped.anything |
6915 | || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars)); |
6916 | } |
6917 | |
6918 | /* Set the points-to solution *PT to point only to the variable VAR. */ |
6919 | |
6920 | void |
6921 | pt_solution_set_var (struct pt_solution *pt, tree var) |
6922 | { |
6923 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
6924 | pt->vars = BITMAP_GGC_ALLOC (); |
6925 | bitmap_set_bit (pt->vars, DECL_PT_UID (var)); |
6926 | pt->vars_contains_nonlocal = is_global_var (t: var); |
6927 | pt->vars_contains_escaped |
6928 | = (cfun->gimple_df->escaped.anything |
6929 | || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var))); |
6930 | } |
6931 | |
6932 | /* Computes the union of the points-to solutions *DEST and *SRC and |
6933 | stores the result in *DEST. This changes the points-to bitmap |
6934 | of *DEST and thus may not be used if that might be shared. |
6935 | The points-to bitmap of *SRC and *DEST will not be shared after |
6936 | this function if they were not before. */ |
6937 | |
6938 | static void |
6939 | pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src) |
6940 | { |
6941 | dest->anything |= src->anything; |
6942 | if (dest->anything) |
6943 | { |
6944 | pt_solution_reset (pt: dest); |
6945 | return; |
6946 | } |
6947 | |
6948 | dest->nonlocal |= src->nonlocal; |
6949 | dest->escaped |= src->escaped; |
6950 | dest->ipa_escaped |= src->ipa_escaped; |
6951 | dest->null |= src->null; |
6952 | dest->vars_contains_nonlocal |= src->vars_contains_nonlocal; |
6953 | dest->vars_contains_escaped |= src->vars_contains_escaped; |
6954 | dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap; |
6955 | if (!src->vars) |
6956 | return; |
6957 | |
6958 | if (!dest->vars) |
6959 | dest->vars = BITMAP_GGC_ALLOC (); |
6960 | bitmap_ior_into (dest->vars, src->vars); |
6961 | } |
6962 | |
6963 | /* Return true if the points-to solution *PT is empty. */ |
6964 | |
6965 | bool |
6966 | pt_solution_empty_p (const pt_solution *pt) |
6967 | { |
6968 | if (pt->anything |
6969 | || pt->nonlocal) |
6970 | return false; |
6971 | |
6972 | if (pt->vars |
6973 | && !bitmap_empty_p (map: pt->vars)) |
6974 | return false; |
6975 | |
6976 | /* If the solution includes ESCAPED, check if that is empty. */ |
6977 | if (pt->escaped |
6978 | && !pt_solution_empty_p (pt: &cfun->gimple_df->escaped)) |
6979 | return false; |
6980 | |
6981 | /* If the solution includes ESCAPED, check if that is empty. */ |
6982 | if (pt->ipa_escaped |
6983 | && !pt_solution_empty_p (pt: &ipa_escaped_pt)) |
6984 | return false; |
6985 | |
6986 | return true; |
6987 | } |
6988 | |
6989 | /* Return true if the points-to solution *PT only point to a single var, and |
6990 | return the var uid in *UID. */ |
6991 | |
6992 | bool |
6993 | pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid) |
6994 | { |
6995 | if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped |
6996 | || pt->vars == NULL |
6997 | || !bitmap_single_bit_set_p (pt->vars)) |
6998 | return false; |
6999 | |
7000 | *uid = bitmap_first_set_bit (pt->vars); |
7001 | return true; |
7002 | } |
7003 | |
7004 | /* Return true if the points-to solution *PT includes global memory. |
7005 | If ESCAPED_LOCAL_P is true then escaped local variables are also |
7006 | considered global. */ |
7007 | |
7008 | bool |
7009 | pt_solution_includes_global (struct pt_solution *pt, bool escaped_local_p) |
7010 | { |
7011 | if (pt->anything |
7012 | || pt->nonlocal |
7013 | || pt->vars_contains_nonlocal |
7014 | /* The following is a hack to make the malloc escape hack work. |
7015 | In reality we'd need different sets for escaped-through-return |
7016 | and escaped-to-callees and passes would need to be updated. */ |
7017 | || pt->vars_contains_escaped_heap) |
7018 | return true; |
7019 | |
7020 | if (escaped_local_p && pt->vars_contains_escaped) |
7021 | return true; |
7022 | |
7023 | /* 'escaped' is also a placeholder so we have to look into it. */ |
7024 | if (pt->escaped) |
7025 | return pt_solution_includes_global (pt: &cfun->gimple_df->escaped, |
7026 | escaped_local_p); |
7027 | |
7028 | if (pt->ipa_escaped) |
7029 | return pt_solution_includes_global (pt: &ipa_escaped_pt, |
7030 | escaped_local_p); |
7031 | |
7032 | return false; |
7033 | } |
7034 | |
7035 | /* Return true if the points-to solution *PT includes the variable |
7036 | declaration DECL. */ |
7037 | |
7038 | static bool |
7039 | pt_solution_includes_1 (struct pt_solution *pt, const_tree decl) |
7040 | { |
7041 | if (pt->anything) |
7042 | return true; |
7043 | |
7044 | if (pt->nonlocal |
7045 | && is_global_var (t: decl)) |
7046 | return true; |
7047 | |
7048 | if (pt->vars |
7049 | && bitmap_bit_p (pt->vars, DECL_PT_UID (decl))) |
7050 | return true; |
7051 | |
7052 | /* If the solution includes ESCAPED, check it. */ |
7053 | if (pt->escaped |
7054 | && pt_solution_includes_1 (pt: &cfun->gimple_df->escaped, decl)) |
7055 | return true; |
7056 | |
7057 | /* If the solution includes ESCAPED, check it. */ |
7058 | if (pt->ipa_escaped |
7059 | && pt_solution_includes_1 (pt: &ipa_escaped_pt, decl)) |
7060 | return true; |
7061 | |
7062 | return false; |
7063 | } |
7064 | |
7065 | bool |
7066 | pt_solution_includes (struct pt_solution *pt, const_tree decl) |
7067 | { |
7068 | bool res = pt_solution_includes_1 (pt, decl); |
7069 | if (res) |
7070 | ++pta_stats.pt_solution_includes_may_alias; |
7071 | else |
7072 | ++pta_stats.pt_solution_includes_no_alias; |
7073 | return res; |
7074 | } |
7075 | |
7076 | /* Return true if both points-to solutions PT1 and PT2 have a non-empty |
7077 | intersection. */ |
7078 | |
7079 | static bool |
7080 | pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2) |
7081 | { |
7082 | if (pt1->anything || pt2->anything) |
7083 | return true; |
7084 | |
7085 | /* If either points to unknown global memory and the other points to |
7086 | any global memory they alias. */ |
7087 | if ((pt1->nonlocal |
7088 | && (pt2->nonlocal |
7089 | || pt2->vars_contains_nonlocal)) |
7090 | || (pt2->nonlocal |
7091 | && pt1->vars_contains_nonlocal)) |
7092 | return true; |
7093 | |
7094 | /* If either points to all escaped memory and the other points to |
7095 | any escaped memory they alias. */ |
7096 | if ((pt1->escaped |
7097 | && (pt2->escaped |
7098 | || pt2->vars_contains_escaped)) |
7099 | || (pt2->escaped |
7100 | && pt1->vars_contains_escaped)) |
7101 | return true; |
7102 | |
7103 | /* Check the escaped solution if required. |
7104 | ??? Do we need to check the local against the IPA escaped sets? */ |
7105 | if ((pt1->ipa_escaped || pt2->ipa_escaped) |
7106 | && !pt_solution_empty_p (pt: &ipa_escaped_pt)) |
7107 | { |
7108 | /* If both point to escaped memory and that solution |
7109 | is not empty they alias. */ |
7110 | if (pt1->ipa_escaped && pt2->ipa_escaped) |
7111 | return true; |
7112 | |
7113 | /* If either points to escaped memory see if the escaped solution |
7114 | intersects with the other. */ |
7115 | if ((pt1->ipa_escaped |
7116 | && pt_solutions_intersect_1 (pt1: &ipa_escaped_pt, pt2)) |
7117 | || (pt2->ipa_escaped |
7118 | && pt_solutions_intersect_1 (pt1: &ipa_escaped_pt, pt2: pt1))) |
7119 | return true; |
7120 | } |
7121 | |
7122 | /* Now both pointers alias if their points-to solution intersects. */ |
7123 | return (pt1->vars |
7124 | && pt2->vars |
7125 | && bitmap_intersect_p (pt1->vars, pt2->vars)); |
7126 | } |
7127 | |
7128 | bool |
7129 | pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2) |
7130 | { |
7131 | bool res = pt_solutions_intersect_1 (pt1, pt2); |
7132 | if (res) |
7133 | ++pta_stats.pt_solutions_intersect_may_alias; |
7134 | else |
7135 | ++pta_stats.pt_solutions_intersect_no_alias; |
7136 | return res; |
7137 | } |
7138 | |
7139 | /* Dump stats information to OUTFILE. */ |
7140 | |
7141 | static void |
7142 | dump_sa_stats (FILE *outfile) |
7143 | { |
7144 | fprintf (stream: outfile, format: "Points-to Stats:\n" ); |
7145 | fprintf (stream: outfile, format: "Total vars: %d\n" , stats.total_vars); |
7146 | fprintf (stream: outfile, format: "Non-pointer vars: %d\n" , |
7147 | stats.nonpointer_vars); |
7148 | fprintf (stream: outfile, format: "Statically unified vars: %d\n" , |
7149 | stats.unified_vars_static); |
7150 | fprintf (stream: outfile, format: "Dynamically unified vars: %d\n" , |
7151 | stats.unified_vars_dynamic); |
7152 | fprintf (stream: outfile, format: "Iterations: %d\n" , stats.iterations); |
7153 | fprintf (stream: outfile, format: "Number of edges: %d\n" , stats.num_edges); |
7154 | fprintf (stream: outfile, format: "Number of implicit edges: %d\n" , |
7155 | stats.num_implicit_edges); |
7156 | fprintf (stream: outfile, format: "Number of avoided edges: %d\n" , |
7157 | stats.num_avoided_edges); |
7158 | } |
7159 | |
7160 | /* Dump points-to information to OUTFILE. */ |
7161 | |
7162 | static void |
7163 | dump_sa_points_to_info (FILE *outfile) |
7164 | { |
7165 | fprintf (stream: outfile, format: "\nPoints-to sets\n\n" ); |
7166 | |
7167 | for (unsigned i = 1; i < varmap.length (); i++) |
7168 | { |
7169 | varinfo_t vi = get_varinfo (n: i); |
7170 | if (!vi->may_have_pointers) |
7171 | continue; |
7172 | dump_solution_for_var (file: outfile, var: i); |
7173 | } |
7174 | } |
7175 | |
7176 | |
/* Debug points-to information to stderr.  Convenience wrapper around
   dump_sa_points_to_info for use from the debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
7184 | |
7185 | |
7186 | /* Initialize the always-existing constraint variables for NULL |
7187 | ANYTHING, READONLY, and INTEGER */ |
7188 | |
7189 | static void |
7190 | init_base_vars (void) |
7191 | { |
7192 | struct constraint_expr lhs, rhs; |
7193 | varinfo_t var_anything; |
7194 | varinfo_t var_nothing; |
7195 | varinfo_t var_string; |
7196 | varinfo_t var_escaped; |
7197 | varinfo_t var_nonlocal; |
7198 | varinfo_t var_escaped_return; |
7199 | varinfo_t var_storedanything; |
7200 | varinfo_t var_integer; |
7201 | |
7202 | /* Variable ID zero is reserved and should be NULL. */ |
7203 | varmap.safe_push (NULL); |
7204 | |
7205 | /* Create the NULL variable, used to represent that a variable points |
7206 | to NULL. */ |
7207 | var_nothing = new_var_info (NULL_TREE, name: "NULL" , add_id: false); |
7208 | gcc_assert (var_nothing->id == nothing_id); |
7209 | var_nothing->is_artificial_var = 1; |
7210 | var_nothing->offset = 0; |
7211 | var_nothing->size = ~0; |
7212 | var_nothing->fullsize = ~0; |
7213 | var_nothing->is_special_var = 1; |
7214 | var_nothing->may_have_pointers = 0; |
7215 | var_nothing->is_global_var = 0; |
7216 | |
7217 | /* Create the ANYTHING variable, used to represent that a variable |
7218 | points to some unknown piece of memory. */ |
7219 | var_anything = new_var_info (NULL_TREE, name: "ANYTHING" , add_id: false); |
7220 | gcc_assert (var_anything->id == anything_id); |
7221 | var_anything->is_artificial_var = 1; |
7222 | var_anything->size = ~0; |
7223 | var_anything->offset = 0; |
7224 | var_anything->fullsize = ~0; |
7225 | var_anything->is_special_var = 1; |
7226 | |
7227 | /* Anything points to anything. This makes deref constraints just |
7228 | work in the presence of linked list and other p = *p type loops, |
7229 | by saying that *ANYTHING = ANYTHING. */ |
7230 | lhs.type = SCALAR; |
7231 | lhs.var = anything_id; |
7232 | lhs.offset = 0; |
7233 | rhs.type = ADDRESSOF; |
7234 | rhs.var = anything_id; |
7235 | rhs.offset = 0; |
7236 | |
7237 | /* This specifically does not use process_constraint because |
7238 | process_constraint ignores all anything = anything constraints, since all |
7239 | but this one are redundant. */ |
7240 | constraints.safe_push (obj: new_constraint (lhs, rhs)); |
7241 | |
7242 | /* Create the STRING variable, used to represent that a variable |
7243 | points to a string literal. String literals don't contain |
7244 | pointers so STRING doesn't point to anything. */ |
7245 | var_string = new_var_info (NULL_TREE, name: "STRING" , add_id: false); |
7246 | gcc_assert (var_string->id == string_id); |
7247 | var_string->is_artificial_var = 1; |
7248 | var_string->offset = 0; |
7249 | var_string->size = ~0; |
7250 | var_string->fullsize = ~0; |
7251 | var_string->is_special_var = 1; |
7252 | var_string->may_have_pointers = 0; |
7253 | |
7254 | /* Create the ESCAPED variable, used to represent the set of escaped |
7255 | memory. */ |
7256 | var_escaped = new_var_info (NULL_TREE, name: "ESCAPED" , add_id: false); |
7257 | gcc_assert (var_escaped->id == escaped_id); |
7258 | var_escaped->is_artificial_var = 1; |
7259 | var_escaped->offset = 0; |
7260 | var_escaped->size = ~0; |
7261 | var_escaped->fullsize = ~0; |
7262 | var_escaped->is_special_var = 0; |
7263 | |
7264 | /* Create the NONLOCAL variable, used to represent the set of nonlocal |
7265 | memory. */ |
7266 | var_nonlocal = new_var_info (NULL_TREE, name: "NONLOCAL" , add_id: false); |
7267 | gcc_assert (var_nonlocal->id == nonlocal_id); |
7268 | var_nonlocal->is_artificial_var = 1; |
7269 | var_nonlocal->offset = 0; |
7270 | var_nonlocal->size = ~0; |
7271 | var_nonlocal->fullsize = ~0; |
7272 | var_nonlocal->is_special_var = 1; |
7273 | |
7274 | /* Create the ESCAPED_RETURN variable, used to represent the set of escaped |
7275 | memory via a regular return stmt. */ |
7276 | var_escaped_return = new_var_info (NULL_TREE, name: "ESCAPED_RETURN" , add_id: false); |
7277 | gcc_assert (var_escaped_return->id == escaped_return_id); |
7278 | var_escaped_return->is_artificial_var = 1; |
7279 | var_escaped_return->offset = 0; |
7280 | var_escaped_return->size = ~0; |
7281 | var_escaped_return->fullsize = ~0; |
7282 | var_escaped_return->is_special_var = 0; |
7283 | |
7284 | /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */ |
7285 | lhs.type = SCALAR; |
7286 | lhs.var = escaped_id; |
7287 | lhs.offset = 0; |
7288 | rhs.type = DEREF; |
7289 | rhs.var = escaped_id; |
7290 | rhs.offset = 0; |
7291 | process_constraint (t: new_constraint (lhs, rhs)); |
7292 | |
7293 | /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the |
7294 | whole variable escapes. */ |
7295 | lhs.type = SCALAR; |
7296 | lhs.var = escaped_id; |
7297 | lhs.offset = 0; |
7298 | rhs.type = SCALAR; |
7299 | rhs.var = escaped_id; |
7300 | rhs.offset = UNKNOWN_OFFSET; |
7301 | process_constraint (t: new_constraint (lhs, rhs)); |
7302 | |
7303 | /* *ESCAPED = NONLOCAL. This is true because we have to assume |
7304 | everything pointed to by escaped points to what global memory can |
7305 | point to. */ |
7306 | lhs.type = DEREF; |
7307 | lhs.var = escaped_id; |
7308 | lhs.offset = 0; |
7309 | rhs.type = SCALAR; |
7310 | rhs.var = nonlocal_id; |
7311 | rhs.offset = 0; |
7312 | process_constraint (t: new_constraint (lhs, rhs)); |
7313 | |
7314 | /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because |
7315 | global memory may point to global memory and escaped memory. */ |
7316 | lhs.type = SCALAR; |
7317 | lhs.var = nonlocal_id; |
7318 | lhs.offset = 0; |
7319 | rhs.type = ADDRESSOF; |
7320 | rhs.var = nonlocal_id; |
7321 | rhs.offset = 0; |
7322 | process_constraint (t: new_constraint (lhs, rhs)); |
7323 | rhs.type = ADDRESSOF; |
7324 | rhs.var = escaped_id; |
7325 | rhs.offset = 0; |
7326 | process_constraint (t: new_constraint (lhs, rhs)); |
7327 | |
7328 | /* Transitively close ESCAPED_RETURN. |
7329 | ESCAPED_RETURN = ESCAPED_RETURN + UNKNOWN_OFFSET |
7330 | ESCAPED_RETURN = *ESCAPED_RETURN. */ |
7331 | lhs.type = SCALAR; |
7332 | lhs.var = escaped_return_id; |
7333 | lhs.offset = 0; |
7334 | rhs.type = SCALAR; |
7335 | rhs.var = escaped_return_id; |
7336 | rhs.offset = UNKNOWN_OFFSET; |
7337 | process_constraint (t: new_constraint (lhs, rhs)); |
7338 | lhs.type = SCALAR; |
7339 | lhs.var = escaped_return_id; |
7340 | lhs.offset = 0; |
7341 | rhs.type = DEREF; |
7342 | rhs.var = escaped_return_id; |
7343 | rhs.offset = 0; |
7344 | process_constraint (t: new_constraint (lhs, rhs)); |
7345 | |
7346 | /* Create the STOREDANYTHING variable, used to represent the set of |
7347 | variables stored to *ANYTHING. */ |
7348 | var_storedanything = new_var_info (NULL_TREE, name: "STOREDANYTHING" , add_id: false); |
7349 | gcc_assert (var_storedanything->id == storedanything_id); |
7350 | var_storedanything->is_artificial_var = 1; |
7351 | var_storedanything->offset = 0; |
7352 | var_storedanything->size = ~0; |
7353 | var_storedanything->fullsize = ~0; |
7354 | var_storedanything->is_special_var = 0; |
7355 | |
7356 | /* Create the INTEGER variable, used to represent that a variable points |
7357 | to what an INTEGER "points to". */ |
7358 | var_integer = new_var_info (NULL_TREE, name: "INTEGER" , add_id: false); |
7359 | gcc_assert (var_integer->id == integer_id); |
7360 | var_integer->is_artificial_var = 1; |
7361 | var_integer->size = ~0; |
7362 | var_integer->fullsize = ~0; |
7363 | var_integer->offset = 0; |
7364 | var_integer->is_special_var = 1; |
7365 | |
7366 | /* INTEGER = ANYTHING, because we don't know where a dereference of |
7367 | a random integer will point to. */ |
7368 | lhs.type = SCALAR; |
7369 | lhs.var = integer_id; |
7370 | lhs.offset = 0; |
7371 | rhs.type = ADDRESSOF; |
7372 | rhs.var = anything_id; |
7373 | rhs.offset = 0; |
7374 | process_constraint (t: new_constraint (lhs, rhs)); |
7375 | } |
7376 | |
7377 | /* Initialize things necessary to perform PTA */ |
7378 | |
7379 | static void |
7380 | init_alias_vars (void) |
7381 | { |
7382 | use_field_sensitive = (param_max_fields_for_field_sensitive > 1); |
7383 | |
7384 | bitmap_obstack_initialize (&pta_obstack); |
7385 | bitmap_obstack_initialize (&oldpta_obstack); |
7386 | bitmap_obstack_initialize (&predbitmap_obstack); |
7387 | |
7388 | constraints.create (nelems: 8); |
7389 | varmap.create (nelems: 8); |
7390 | vi_for_tree = new hash_map<tree, varinfo_t>; |
7391 | call_stmt_vars = new hash_map<gimple *, varinfo_t>; |
7392 | |
7393 | memset (s: &stats, c: 0, n: sizeof (stats)); |
7394 | shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511); |
7395 | init_base_vars (); |
7396 | |
7397 | gcc_obstack_init (&fake_var_decl_obstack); |
7398 | |
7399 | final_solutions = new hash_map<varinfo_t, pt_solution *>; |
7400 | gcc_obstack_init (&final_solutions_obstack); |
7401 | } |
7402 | |
7403 | /* Remove the REF and ADDRESS edges from GRAPH, as well as all the |
7404 | predecessor edges. */ |
7405 | |
7406 | static void |
7407 | remove_preds_and_fake_succs (constraint_graph_t graph) |
7408 | { |
7409 | unsigned int i; |
7410 | |
7411 | /* Clear the implicit ref and address nodes from the successor |
7412 | lists. */ |
7413 | for (i = 1; i < FIRST_REF_NODE; i++) |
7414 | { |
7415 | if (graph->succs[i]) |
7416 | bitmap_clear_range (graph->succs[i], FIRST_REF_NODE, |
7417 | FIRST_REF_NODE * 2); |
7418 | } |
7419 | |
7420 | /* Free the successor list for the non-ref nodes. */ |
7421 | for (i = FIRST_REF_NODE + 1; i < graph->size; i++) |
7422 | { |
7423 | if (graph->succs[i]) |
7424 | BITMAP_FREE (graph->succs[i]); |
7425 | } |
7426 | |
7427 | /* Now reallocate the size of the successor list as, and blow away |
7428 | the predecessor bitmaps. */ |
7429 | graph->size = varmap.length (); |
7430 | graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size); |
7431 | |
7432 | free (ptr: graph->implicit_preds); |
7433 | graph->implicit_preds = NULL; |
7434 | free (ptr: graph->preds); |
7435 | graph->preds = NULL; |
7436 | bitmap_obstack_release (&predbitmap_obstack); |
7437 | } |
7438 | |
7439 | /* Solve the constraint set. */ |
7440 | |
7441 | static void |
7442 | solve_constraints (void) |
7443 | { |
7444 | class scc_info *si; |
7445 | |
7446 | /* Sort varinfos so that ones that cannot be pointed to are last. |
7447 | This makes bitmaps more efficient. */ |
7448 | unsigned int *map = XNEWVEC (unsigned int, varmap.length ()); |
7449 | for (unsigned i = 0; i < integer_id + 1; ++i) |
7450 | map[i] = i; |
7451 | /* Start with address-taken vars, followed by not address-taken vars |
7452 | to move vars never appearing in the points-to solution bitmaps last. */ |
7453 | unsigned j = integer_id + 1; |
7454 | for (unsigned i = integer_id + 1; i < varmap.length (); ++i) |
7455 | if (varmap[varmap[i]->head]->address_taken) |
7456 | map[i] = j++; |
7457 | for (unsigned i = integer_id + 1; i < varmap.length (); ++i) |
7458 | if (! varmap[varmap[i]->head]->address_taken) |
7459 | map[i] = j++; |
7460 | /* Shuffle varmap according to map. */ |
7461 | for (unsigned i = integer_id + 1; i < varmap.length (); ++i) |
7462 | { |
7463 | while (map[varmap[i]->id] != i) |
7464 | std::swap (a&: varmap[i], b&: varmap[map[varmap[i]->id]]); |
7465 | gcc_assert (bitmap_empty_p (varmap[i]->solution)); |
7466 | varmap[i]->id = i; |
7467 | varmap[i]->next = map[varmap[i]->next]; |
7468 | varmap[i]->head = map[varmap[i]->head]; |
7469 | } |
7470 | /* Finally rewrite constraints. */ |
7471 | for (unsigned i = 0; i < constraints.length (); ++i) |
7472 | { |
7473 | constraints[i]->lhs.var = map[constraints[i]->lhs.var]; |
7474 | constraints[i]->rhs.var = map[constraints[i]->rhs.var]; |
7475 | } |
7476 | free (ptr: map); |
7477 | |
7478 | if (dump_file) |
7479 | fprintf (stream: dump_file, |
7480 | format: "\nCollapsing static cycles and doing variable " |
7481 | "substitution\n" ); |
7482 | |
7483 | init_graph (size: varmap.length () * 2); |
7484 | |
7485 | if (dump_file) |
7486 | fprintf (stream: dump_file, format: "Building predecessor graph\n" ); |
7487 | build_pred_graph (); |
7488 | |
7489 | if (dump_file) |
7490 | fprintf (stream: dump_file, format: "Detecting pointer and location " |
7491 | "equivalences\n" ); |
7492 | si = perform_var_substitution (graph); |
7493 | |
7494 | if (dump_file) |
7495 | fprintf (stream: dump_file, format: "Rewriting constraints and unifying " |
7496 | "variables\n" ); |
7497 | rewrite_constraints (graph, si); |
7498 | |
7499 | build_succ_graph (); |
7500 | |
7501 | free_var_substitution_info (si); |
7502 | |
7503 | /* Attach complex constraints to graph nodes. */ |
7504 | move_complex_constraints (graph); |
7505 | |
7506 | if (dump_file) |
7507 | fprintf (stream: dump_file, format: "Uniting pointer but not location equivalent " |
7508 | "variables\n" ); |
7509 | unite_pointer_equivalences (graph); |
7510 | |
7511 | if (dump_file) |
7512 | fprintf (stream: dump_file, format: "Finding indirect cycles\n" ); |
7513 | find_indirect_cycles (graph); |
7514 | |
7515 | /* Implicit nodes and predecessors are no longer necessary at this |
7516 | point. */ |
7517 | remove_preds_and_fake_succs (graph); |
7518 | |
7519 | if (dump_file && (dump_flags & TDF_GRAPH)) |
7520 | { |
7521 | fprintf (stream: dump_file, format: "\n\n// The constraint graph before solve-graph " |
7522 | "in dot format:\n" ); |
7523 | dump_constraint_graph (file: dump_file); |
7524 | fprintf (stream: dump_file, format: "\n\n" ); |
7525 | } |
7526 | |
7527 | if (dump_file) |
7528 | fprintf (stream: dump_file, format: "Solving graph\n" ); |
7529 | |
7530 | solve_graph (graph); |
7531 | |
7532 | if (dump_file && (dump_flags & TDF_GRAPH)) |
7533 | { |
7534 | fprintf (stream: dump_file, format: "\n\n// The constraint graph after solve-graph " |
7535 | "in dot format:\n" ); |
7536 | dump_constraint_graph (file: dump_file); |
7537 | fprintf (stream: dump_file, format: "\n\n" ); |
7538 | } |
7539 | } |
7540 | |
7541 | /* Create points-to sets for the current function. See the comments |
7542 | at the start of the file for an algorithmic overview. */ |
7543 | |
7544 | static void |
7545 | compute_points_to_sets (void) |
7546 | { |
7547 | basic_block bb; |
7548 | varinfo_t vi; |
7549 | |
7550 | timevar_push (tv: TV_TREE_PTA); |
7551 | |
7552 | init_alias_vars (); |
7553 | |
7554 | intra_create_variable_infos (cfun); |
7555 | |
7556 | /* Now walk all statements and build the constraint set. */ |
7557 | FOR_EACH_BB_FN (bb, cfun) |
7558 | { |
7559 | for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi); |
7560 | gsi_next (i: &gsi)) |
7561 | { |
7562 | gphi *phi = gsi.phi (); |
7563 | |
7564 | if (! virtual_operand_p (op: gimple_phi_result (gs: phi))) |
7565 | find_func_aliases (cfun, origt: phi); |
7566 | } |
7567 | |
7568 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); |
7569 | gsi_next (i: &gsi)) |
7570 | { |
7571 | gimple *stmt = gsi_stmt (i: gsi); |
7572 | |
7573 | find_func_aliases (cfun, origt: stmt); |
7574 | } |
7575 | } |
7576 | |
7577 | if (dump_file && (dump_flags & TDF_DETAILS)) |
7578 | { |
7579 | fprintf (stream: dump_file, format: "Points-to analysis\n\nConstraints:\n\n" ); |
7580 | dump_constraints (file: dump_file, from: 0); |
7581 | } |
7582 | |
7583 | /* From the constraints compute the points-to sets. */ |
7584 | solve_constraints (); |
7585 | |
7586 | if (dump_file && (dump_flags & TDF_STATS)) |
7587 | dump_sa_stats (outfile: dump_file); |
7588 | |
7589 | if (dump_file && (dump_flags & TDF_DETAILS)) |
7590 | dump_sa_points_to_info (outfile: dump_file); |
7591 | |
7592 | /* Compute the points-to set for ESCAPED used for call-clobber analysis. */ |
7593 | cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl, |
7594 | orig_vi: get_varinfo (n: escaped_id)); |
7595 | |
7596 | /* Make sure the ESCAPED solution (which is used as placeholder in |
7597 | other solutions) does not reference itself. This simplifies |
7598 | points-to solution queries. */ |
7599 | cfun->gimple_df->escaped.escaped = 0; |
7600 | |
7601 | /* The ESCAPED_RETURN solution is what contains all memory that needs |
7602 | to be considered global. */ |
7603 | cfun->gimple_df->escaped_return |
7604 | = find_what_var_points_to (cfun->decl, orig_vi: get_varinfo (n: escaped_return_id)); |
7605 | cfun->gimple_df->escaped_return.escaped = 1; |
7606 | |
7607 | /* Compute the points-to sets for pointer SSA_NAMEs. */ |
7608 | unsigned i; |
7609 | tree ptr; |
7610 | |
7611 | FOR_EACH_SSA_NAME (i, ptr, cfun) |
7612 | { |
7613 | if (POINTER_TYPE_P (TREE_TYPE (ptr))) |
7614 | find_what_p_points_to (cfun->decl, p: ptr); |
7615 | } |
7616 | |
7617 | /* Compute the call-used/clobbered sets. */ |
7618 | FOR_EACH_BB_FN (bb, cfun) |
7619 | { |
7620 | gimple_stmt_iterator gsi; |
7621 | |
7622 | for (gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi)) |
7623 | { |
7624 | gcall *stmt; |
7625 | struct pt_solution *pt; |
7626 | |
7627 | stmt = dyn_cast <gcall *> (p: gsi_stmt (i: gsi)); |
7628 | if (!stmt) |
7629 | continue; |
7630 | |
7631 | pt = gimple_call_use_set (call_stmt: stmt); |
7632 | if (gimple_call_flags (stmt) & ECF_CONST) |
7633 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7634 | else |
7635 | { |
7636 | bool uses_global_memory = true; |
7637 | bool reads_global_memory = true; |
7638 | |
7639 | determine_global_memory_access (stmt, NULL, |
7640 | reads_global_memory: &reads_global_memory, |
7641 | uses_global_memory: &uses_global_memory); |
7642 | if ((vi = lookup_call_use_vi (call: stmt)) != NULL) |
7643 | { |
7644 | *pt = find_what_var_points_to (cfun->decl, orig_vi: vi); |
7645 | /* Escaped (and thus nonlocal) variables are always |
7646 | implicitly used by calls. */ |
7647 | /* ??? ESCAPED can be empty even though NONLOCAL |
7648 | always escaped. */ |
7649 | if (uses_global_memory) |
7650 | { |
7651 | pt->nonlocal = 1; |
7652 | pt->escaped = 1; |
7653 | } |
7654 | } |
7655 | else if (uses_global_memory) |
7656 | { |
7657 | /* If there is nothing special about this call then |
7658 | we have made everything that is used also escape. */ |
7659 | *pt = cfun->gimple_df->escaped; |
7660 | pt->nonlocal = 1; |
7661 | } |
7662 | else |
7663 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7664 | } |
7665 | |
7666 | pt = gimple_call_clobber_set (call_stmt: stmt); |
7667 | if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS)) |
7668 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7669 | else |
7670 | { |
7671 | bool writes_global_memory = true; |
7672 | |
7673 | determine_global_memory_access (stmt, writes_global_memory: &writes_global_memory, |
7674 | NULL, NULL); |
7675 | |
7676 | if ((vi = lookup_call_clobber_vi (call: stmt)) != NULL) |
7677 | { |
7678 | *pt = find_what_var_points_to (cfun->decl, orig_vi: vi); |
7679 | /* Escaped (and thus nonlocal) variables are always |
7680 | implicitly clobbered by calls. */ |
7681 | /* ??? ESCAPED can be empty even though NONLOCAL |
7682 | always escaped. */ |
7683 | if (writes_global_memory) |
7684 | { |
7685 | pt->nonlocal = 1; |
7686 | pt->escaped = 1; |
7687 | } |
7688 | } |
7689 | else if (writes_global_memory) |
7690 | { |
7691 | /* If there is nothing special about this call then |
7692 | we have made everything that is used also escape. */ |
7693 | *pt = cfun->gimple_df->escaped; |
7694 | pt->nonlocal = 1; |
7695 | } |
7696 | else |
7697 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7698 | } |
7699 | } |
7700 | } |
7701 | |
7702 | timevar_pop (tv: TV_TREE_PTA); |
7703 | } |
7704 | |
7705 | |
7706 | /* Delete created points-to sets. */ |
7707 | |
7708 | static void |
7709 | delete_points_to_sets (void) |
7710 | { |
7711 | unsigned int i; |
7712 | |
7713 | delete shared_bitmap_table; |
7714 | shared_bitmap_table = NULL; |
7715 | if (dump_file && (dump_flags & TDF_STATS)) |
7716 | fprintf (stream: dump_file, format: "Points to sets created:%d\n" , |
7717 | stats.points_to_sets_created); |
7718 | |
7719 | delete vi_for_tree; |
7720 | delete call_stmt_vars; |
7721 | bitmap_obstack_release (&pta_obstack); |
7722 | constraints.release (); |
7723 | |
7724 | for (i = 0; i < graph->size; i++) |
7725 | graph->complex[i].release (); |
7726 | free (ptr: graph->complex); |
7727 | |
7728 | free (ptr: graph->rep); |
7729 | free (ptr: graph->succs); |
7730 | free (ptr: graph->pe); |
7731 | free (ptr: graph->pe_rep); |
7732 | free (ptr: graph->indirect_cycles); |
7733 | free (ptr: graph); |
7734 | |
7735 | varmap.release (); |
7736 | variable_info_pool.release (); |
7737 | constraint_pool.release (); |
7738 | |
7739 | obstack_free (&fake_var_decl_obstack, NULL); |
7740 | |
7741 | delete final_solutions; |
7742 | obstack_free (&final_solutions_obstack, NULL); |
7743 | } |
7744 | |
/* Data passed to the visit_loadstore callback.  */

struct vls_data
{
  /* The dependence clique to assign to "other" accesses.  */
  unsigned short clique;
  /* Whether a restrict variable is in the ESCAPED solution.  */
  bool escaped_p;
  /* The set of all (sub-)variables pointed to by restrict pointers.  */
  bitmap rvars;
};
7751 | |
7752 | /* Mark "other" loads and stores as belonging to CLIQUE and with |
7753 | base zero. */ |
7754 | |
7755 | static bool |
7756 | visit_loadstore (gimple *, tree base, tree ref, void *data) |
7757 | { |
7758 | unsigned short clique = ((vls_data *) data)->clique; |
7759 | bitmap rvars = ((vls_data *) data)->rvars; |
7760 | bool escaped_p = ((vls_data *) data)->escaped_p; |
7761 | if (TREE_CODE (base) == MEM_REF |
7762 | || TREE_CODE (base) == TARGET_MEM_REF) |
7763 | { |
7764 | tree ptr = TREE_OPERAND (base, 0); |
7765 | if (TREE_CODE (ptr) == SSA_NAME) |
7766 | { |
7767 | /* For parameters, get at the points-to set for the actual parm |
7768 | decl. */ |
7769 | if (SSA_NAME_IS_DEFAULT_DEF (ptr) |
7770 | && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL |
7771 | || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL)) |
7772 | ptr = SSA_NAME_VAR (ptr); |
7773 | |
7774 | /* We need to make sure 'ptr' doesn't include any of |
7775 | the restrict tags we added bases for in its points-to set. */ |
7776 | varinfo_t vi = lookup_vi_for_tree (t: ptr); |
7777 | if (! vi) |
7778 | return false; |
7779 | |
7780 | vi = get_varinfo (n: find (node: vi->id)); |
7781 | if (bitmap_intersect_p (rvars, vi->solution) |
7782 | || (escaped_p && bitmap_bit_p (vi->solution, escaped_id))) |
7783 | return false; |
7784 | } |
7785 | |
7786 | /* Do not overwrite existing cliques (that includes clique, base |
7787 | pairs we just set). */ |
7788 | if (MR_DEPENDENCE_CLIQUE (base) == 0) |
7789 | { |
7790 | MR_DEPENDENCE_CLIQUE (base) = clique; |
7791 | MR_DEPENDENCE_BASE (base) = 0; |
7792 | } |
7793 | } |
7794 | |
7795 | /* For plain decl accesses see whether they are accesses to globals |
7796 | and rewrite them to MEM_REFs with { clique, 0 }. */ |
7797 | if (VAR_P (base) |
7798 | && is_global_var (t: base) |
7799 | /* ??? We can't rewrite a plain decl with the walk_stmt_load_store |
7800 | ops callback. */ |
7801 | && base != ref) |
7802 | { |
7803 | tree *basep = &ref; |
7804 | while (handled_component_p (t: *basep)) |
7805 | basep = &TREE_OPERAND (*basep, 0); |
7806 | gcc_assert (VAR_P (*basep)); |
7807 | tree ptr = build_fold_addr_expr (*basep); |
7808 | tree zero = build_int_cst (TREE_TYPE (ptr), 0); |
7809 | *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero); |
7810 | MR_DEPENDENCE_CLIQUE (*basep) = clique; |
7811 | MR_DEPENDENCE_BASE (*basep) = 0; |
7812 | } |
7813 | |
7814 | return false; |
7815 | } |
7816 | |
/* Data passed to the maybe_set_dependence_info callback.  */

struct msdi_data {
  /* The restrict-qualified pointer whose dereferences we tag.  */
  tree ptr;
  /* In/out: current dependence clique; allocated lazily.  */
  unsigned short *clique;
  /* In/out: last restrict-var unique id handed out.  */
  unsigned short *last_ruid;
  /* The single restrict variable PTR points to.  */
  varinfo_t restrict_var;
};
7823 | |
7824 | /* If BASE is a MEM_REF then assign a clique, base pair to it, updating |
7825 | CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA. |
7826 | Return whether dependence info was assigned to BASE. */ |
7827 | |
7828 | static bool |
7829 | maybe_set_dependence_info (gimple *, tree base, tree, void *data) |
7830 | { |
7831 | tree ptr = ((msdi_data *)data)->ptr; |
7832 | unsigned short &clique = *((msdi_data *)data)->clique; |
7833 | unsigned short &last_ruid = *((msdi_data *)data)->last_ruid; |
7834 | varinfo_t restrict_var = ((msdi_data *)data)->restrict_var; |
7835 | if ((TREE_CODE (base) == MEM_REF |
7836 | || TREE_CODE (base) == TARGET_MEM_REF) |
7837 | && TREE_OPERAND (base, 0) == ptr) |
7838 | { |
7839 | /* Do not overwrite existing cliques. This avoids overwriting dependence |
7840 | info inlined from a function with restrict parameters inlined |
7841 | into a function with restrict parameters. This usually means we |
7842 | prefer to be precise in innermost loops. */ |
7843 | if (MR_DEPENDENCE_CLIQUE (base) == 0) |
7844 | { |
7845 | if (clique == 0) |
7846 | { |
7847 | if (cfun->last_clique == 0) |
7848 | cfun->last_clique = 1; |
7849 | clique = 1; |
7850 | } |
7851 | if (restrict_var->ruid == 0) |
7852 | restrict_var->ruid = ++last_ruid; |
7853 | MR_DEPENDENCE_CLIQUE (base) = clique; |
7854 | MR_DEPENDENCE_BASE (base) = restrict_var->ruid; |
7855 | return true; |
7856 | } |
7857 | } |
7858 | return false; |
7859 | } |
7860 | |
7861 | /* Clear dependence info for the clique DATA. */ |
7862 | |
7863 | static bool |
7864 | clear_dependence_clique (gimple *, tree base, tree, void *data) |
7865 | { |
7866 | unsigned short clique = (uintptr_t)data; |
7867 | if ((TREE_CODE (base) == MEM_REF |
7868 | || TREE_CODE (base) == TARGET_MEM_REF) |
7869 | && MR_DEPENDENCE_CLIQUE (base) == clique) |
7870 | { |
7871 | MR_DEPENDENCE_CLIQUE (base) = 0; |
7872 | MR_DEPENDENCE_BASE (base) = 0; |
7873 | } |
7874 | |
7875 | return false; |
7876 | } |
7877 | |
7878 | /* Compute the set of independend memory references based on restrict |
7879 | tags and their conservative propagation to the points-to sets. */ |
7880 | |
7881 | static void |
7882 | compute_dependence_clique (void) |
7883 | { |
7884 | /* First clear the special "local" clique. */ |
7885 | basic_block bb; |
7886 | if (cfun->last_clique != 0) |
7887 | FOR_EACH_BB_FN (bb, cfun) |
7888 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); |
7889 | !gsi_end_p (i: gsi); gsi_next (i: &gsi)) |
7890 | { |
7891 | gimple *stmt = gsi_stmt (i: gsi); |
7892 | walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1, |
7893 | clear_dependence_clique, |
7894 | clear_dependence_clique); |
7895 | } |
7896 | |
7897 | unsigned short clique = 0; |
7898 | unsigned short last_ruid = 0; |
7899 | bitmap rvars = BITMAP_ALLOC (NULL); |
7900 | bool escaped_p = false; |
7901 | for (unsigned i = 0; i < num_ssa_names; ++i) |
7902 | { |
7903 | tree ptr = ssa_name (i); |
7904 | if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr))) |
7905 | continue; |
7906 | |
7907 | /* Avoid all this when ptr is not dereferenced? */ |
7908 | tree p = ptr; |
7909 | if (SSA_NAME_IS_DEFAULT_DEF (ptr) |
7910 | && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL |
7911 | || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL)) |
7912 | p = SSA_NAME_VAR (ptr); |
7913 | varinfo_t vi = lookup_vi_for_tree (t: p); |
7914 | if (!vi) |
7915 | continue; |
7916 | vi = get_varinfo (n: find (node: vi->id)); |
7917 | bitmap_iterator bi; |
7918 | unsigned j; |
7919 | varinfo_t restrict_var = NULL; |
7920 | EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi) |
7921 | { |
7922 | varinfo_t oi = get_varinfo (n: j); |
7923 | if (oi->head != j) |
7924 | oi = get_varinfo (n: oi->head); |
7925 | if (oi->is_restrict_var) |
7926 | { |
7927 | if (restrict_var |
7928 | && restrict_var != oi) |
7929 | { |
7930 | if (dump_file && (dump_flags & TDF_DETAILS)) |
7931 | { |
7932 | fprintf (stream: dump_file, format: "found restrict pointed-to " |
7933 | "for " ); |
7934 | print_generic_expr (dump_file, ptr); |
7935 | fprintf (stream: dump_file, format: " but not exclusively\n" ); |
7936 | } |
7937 | restrict_var = NULL; |
7938 | break; |
7939 | } |
7940 | restrict_var = oi; |
7941 | } |
7942 | /* NULL is the only other valid points-to entry. */ |
7943 | else if (oi->id != nothing_id) |
7944 | { |
7945 | restrict_var = NULL; |
7946 | break; |
7947 | } |
7948 | } |
7949 | /* Ok, found that ptr must(!) point to a single(!) restrict |
7950 | variable. */ |
7951 | /* ??? PTA isn't really a proper propagation engine to compute |
7952 | this property. |
7953 | ??? We could handle merging of two restricts by unifying them. */ |
7954 | if (restrict_var) |
7955 | { |
7956 | /* Now look at possible dereferences of ptr. */ |
7957 | imm_use_iterator ui; |
7958 | gimple *use_stmt; |
7959 | bool used = false; |
7960 | msdi_data data = { .ptr: ptr, .clique: &clique, .last_ruid: &last_ruid, .restrict_var: restrict_var }; |
7961 | FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr) |
7962 | used |= walk_stmt_load_store_ops (use_stmt, &data, |
7963 | maybe_set_dependence_info, |
7964 | maybe_set_dependence_info); |
7965 | if (used) |
7966 | { |
7967 | /* Add all subvars to the set of restrict pointed-to set. */ |
7968 | for (unsigned sv = restrict_var->head; sv != 0; |
7969 | sv = get_varinfo (n: sv)->next) |
7970 | bitmap_set_bit (rvars, sv); |
7971 | varinfo_t escaped = get_varinfo (n: find (node: escaped_id)); |
7972 | if (bitmap_bit_p (escaped->solution, restrict_var->id)) |
7973 | escaped_p = true; |
7974 | } |
7975 | } |
7976 | } |
7977 | |
7978 | if (clique != 0) |
7979 | { |
7980 | /* Assign the BASE id zero to all accesses not based on a restrict |
7981 | pointer. That way they get disambiguated against restrict |
7982 | accesses but not against each other. */ |
7983 | /* ??? For restricts derived from globals (thus not incoming |
7984 | parameters) we can't restrict scoping properly thus the following |
7985 | is too aggressive there. For now we have excluded those globals from |
7986 | getting into the MR_DEPENDENCE machinery. */ |
7987 | vls_data data = { .clique: clique, .escaped_p: escaped_p, .rvars: rvars }; |
7988 | basic_block bb; |
7989 | FOR_EACH_BB_FN (bb, cfun) |
7990 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); |
7991 | !gsi_end_p (i: gsi); gsi_next (i: &gsi)) |
7992 | { |
7993 | gimple *stmt = gsi_stmt (i: gsi); |
7994 | walk_stmt_load_store_ops (stmt, &data, |
7995 | visit_loadstore, visit_loadstore); |
7996 | } |
7997 | } |
7998 | |
7999 | BITMAP_FREE (rvars); |
8000 | } |
8001 | |
8002 | /* Compute points-to information for every SSA_NAME pointer in the |
8003 | current function and compute the transitive closure of escaped |
8004 | variables to re-initialize the call-clobber states of local variables. */ |
8005 | |
8006 | unsigned int |
8007 | compute_may_aliases (void) |
8008 | { |
8009 | if (cfun->gimple_df->ipa_pta) |
8010 | { |
8011 | if (dump_file) |
8012 | { |
8013 | fprintf (stream: dump_file, format: "\nNot re-computing points-to information " |
8014 | "because IPA points-to information is available.\n\n" ); |
8015 | |
8016 | /* But still dump what we have remaining it. */ |
8017 | if (dump_flags & (TDF_DETAILS|TDF_ALIAS)) |
8018 | dump_alias_info (dump_file); |
8019 | } |
8020 | |
8021 | return 0; |
8022 | } |
8023 | |
8024 | /* For each pointer P_i, determine the sets of variables that P_i may |
8025 | point-to. Compute the reachability set of escaped and call-used |
8026 | variables. */ |
8027 | compute_points_to_sets (); |
8028 | |
8029 | /* Debugging dumps. */ |
8030 | if (dump_file && (dump_flags & (TDF_DETAILS|TDF_ALIAS))) |
8031 | dump_alias_info (dump_file); |
8032 | |
8033 | /* Compute restrict-based memory disambiguations. */ |
8034 | compute_dependence_clique (); |
8035 | |
8036 | /* Deallocate memory used by aliasing data structures and the internal |
8037 | points-to solution. */ |
8038 | delete_points_to_sets (); |
8039 | |
8040 | gcc_assert (!need_ssa_update_p (cfun)); |
8041 | |
8042 | return 0; |
8043 | } |
8044 | |
8045 | /* A dummy pass to cause points-to information to be computed via |
8046 | TODO_rebuild_alias. */ |
8047 | |
8048 | namespace { |
8049 | |
8050 | const pass_data pass_data_build_alias = |
8051 | { |
8052 | .type: GIMPLE_PASS, /* type */ |
8053 | .name: "alias" , /* name */ |
8054 | .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */ |
8055 | .tv_id: TV_NONE, /* tv_id */ |
8056 | .properties_required: ( PROP_cfg | PROP_ssa ), /* properties_required */ |
8057 | .properties_provided: 0, /* properties_provided */ |
8058 | .properties_destroyed: 0, /* properties_destroyed */ |
8059 | .todo_flags_start: 0, /* todo_flags_start */ |
8060 | TODO_rebuild_alias, /* todo_flags_finish */ |
8061 | }; |
8062 | |
8063 | class pass_build_alias : public gimple_opt_pass |
8064 | { |
8065 | public: |
8066 | pass_build_alias (gcc::context *ctxt) |
8067 | : gimple_opt_pass (pass_data_build_alias, ctxt) |
8068 | {} |
8069 | |
8070 | /* opt_pass methods: */ |
8071 | bool gate (function *) final override { return flag_tree_pta; } |
8072 | |
8073 | }; // class pass_build_alias |
8074 | |
8075 | } // anon namespace |
8076 | |
/* Create an instance of the "alias" dummy pass for the pass manager.  */

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
8082 | |
8083 | /* A dummy pass to cause points-to information to be computed via |
8084 | TODO_rebuild_alias. */ |
8085 | |
8086 | namespace { |
8087 | |
8088 | const pass_data pass_data_build_ealias = |
8089 | { |
8090 | .type: GIMPLE_PASS, /* type */ |
8091 | .name: "ealias" , /* name */ |
8092 | .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */ |
8093 | .tv_id: TV_NONE, /* tv_id */ |
8094 | .properties_required: ( PROP_cfg | PROP_ssa ), /* properties_required */ |
8095 | .properties_provided: 0, /* properties_provided */ |
8096 | .properties_destroyed: 0, /* properties_destroyed */ |
8097 | .todo_flags_start: 0, /* todo_flags_start */ |
8098 | TODO_rebuild_alias, /* todo_flags_finish */ |
8099 | }; |
8100 | |
8101 | class pass_build_ealias : public gimple_opt_pass |
8102 | { |
8103 | public: |
8104 | pass_build_ealias (gcc::context *ctxt) |
8105 | : gimple_opt_pass (pass_data_build_ealias, ctxt) |
8106 | {} |
8107 | |
8108 | /* opt_pass methods: */ |
8109 | bool gate (function *) final override { return flag_tree_pta; } |
8110 | |
8111 | }; // class pass_build_ealias |
8112 | |
8113 | } // anon namespace |
8114 | |
/* Create an instance of the "ealias" dummy pass for the pass manager.  */

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
8120 | |
8121 | |
8122 | /* IPA PTA solutions for ESCAPED. */ |
8123 | struct pt_solution ipa_escaped_pt |
8124 | = { .anything: true, .nonlocal: false, .escaped: false, .ipa_escaped: false, .null: false, |
8125 | .vars_contains_nonlocal: false, .vars_contains_escaped: false, .vars_contains_escaped_heap: false, .vars_contains_restrict: false, .vars_contains_interposable: false, NULL }; |
8126 | |
8127 | /* Associate node with varinfo DATA. Worker for |
8128 | cgraph_for_symbol_thunks_and_aliases. */ |
8129 | static bool |
8130 | associate_varinfo_to_alias (struct cgraph_node *node, void *data) |
8131 | { |
8132 | if ((node->alias |
8133 | || (node->thunk |
8134 | && ! node->inlined_to)) |
8135 | && node->analyzed |
8136 | && !node->ifunc_resolver) |
8137 | insert_vi_for_tree (t: node->decl, vi: (varinfo_t)data); |
8138 | return false; |
8139 | } |
8140 | |
8141 | /* Dump varinfo VI to FILE. */ |
8142 | |
8143 | static void |
8144 | dump_varinfo (FILE *file, varinfo_t vi) |
8145 | { |
8146 | if (vi == NULL) |
8147 | return; |
8148 | |
8149 | fprintf (stream: file, format: "%u: %s\n" , vi->id, vi->name); |
8150 | |
8151 | const char *sep = " " ; |
8152 | if (vi->is_artificial_var) |
8153 | fprintf (stream: file, format: "%sartificial" , sep); |
8154 | if (vi->is_special_var) |
8155 | fprintf (stream: file, format: "%sspecial" , sep); |
8156 | if (vi->is_unknown_size_var) |
8157 | fprintf (stream: file, format: "%sunknown-size" , sep); |
8158 | if (vi->is_full_var) |
8159 | fprintf (stream: file, format: "%sfull" , sep); |
8160 | if (vi->is_heap_var) |
8161 | fprintf (stream: file, format: "%sheap" , sep); |
8162 | if (vi->may_have_pointers) |
8163 | fprintf (stream: file, format: "%smay-have-pointers" , sep); |
8164 | if (vi->only_restrict_pointers) |
8165 | fprintf (stream: file, format: "%sonly-restrict-pointers" , sep); |
8166 | if (vi->is_restrict_var) |
8167 | fprintf (stream: file, format: "%sis-restrict-var" , sep); |
8168 | if (vi->is_global_var) |
8169 | fprintf (stream: file, format: "%sglobal" , sep); |
8170 | if (vi->is_ipa_escape_point) |
8171 | fprintf (stream: file, format: "%sipa-escape-point" , sep); |
8172 | if (vi->is_fn_info) |
8173 | fprintf (stream: file, format: "%sfn-info" , sep); |
8174 | if (vi->ruid) |
8175 | fprintf (stream: file, format: "%srestrict-uid:%u" , sep, vi->ruid); |
8176 | if (vi->next) |
8177 | fprintf (stream: file, format: "%snext:%u" , sep, vi->next); |
8178 | if (vi->head != vi->id) |
8179 | fprintf (stream: file, format: "%shead:%u" , sep, vi->head); |
8180 | if (vi->offset) |
8181 | fprintf (stream: file, format: "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset); |
8182 | if (vi->size != ~HOST_WIDE_INT_0U) |
8183 | fprintf (stream: file, format: "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size); |
8184 | if (vi->fullsize != ~HOST_WIDE_INT_0U && vi->fullsize != vi->size) |
8185 | fprintf (stream: file, format: "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep, |
8186 | vi->fullsize); |
8187 | fprintf (stream: file, format: "\n" ); |
8188 | |
8189 | if (vi->solution && !bitmap_empty_p (map: vi->solution)) |
8190 | { |
8191 | bitmap_iterator bi; |
8192 | unsigned i; |
8193 | fprintf (stream: file, format: " solution: {" ); |
8194 | EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi) |
8195 | fprintf (stream: file, format: " %u" , i); |
8196 | fprintf (stream: file, format: " }\n" ); |
8197 | } |
8198 | |
8199 | if (vi->oldsolution && !bitmap_empty_p (map: vi->oldsolution) |
8200 | && !bitmap_equal_p (vi->solution, vi->oldsolution)) |
8201 | { |
8202 | bitmap_iterator bi; |
8203 | unsigned i; |
8204 | fprintf (stream: file, format: " oldsolution: {" ); |
8205 | EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi) |
8206 | fprintf (stream: file, format: " %u" , i); |
8207 | fprintf (stream: file, format: " }\n" ); |
8208 | } |
8209 | } |
8210 | |
/* Dump varinfo VI to stderr.  Debugger convenience wrapper around
   dump_varinfo.  */

DEBUG_FUNCTION void
debug_varinfo (varinfo_t vi)
{
  dump_varinfo (stderr, vi);
}
8218 | |
8219 | /* Dump varmap to FILE. */ |
8220 | |
8221 | static void |
8222 | dump_varmap (FILE *file) |
8223 | { |
8224 | if (varmap.length () == 0) |
8225 | return; |
8226 | |
8227 | fprintf (stream: file, format: "variables:\n" ); |
8228 | |
8229 | for (unsigned int i = 0; i < varmap.length (); ++i) |
8230 | { |
8231 | varinfo_t vi = get_varinfo (n: i); |
8232 | dump_varinfo (file, vi); |
8233 | } |
8234 | |
8235 | fprintf (stream: file, format: "\n" ); |
8236 | } |
8237 | |
/* Dump varmap to stderr.  Debugger convenience wrapper around
   dump_varmap.  */

DEBUG_FUNCTION void
debug_varmap (void)
{
  dump_varmap (stderr);
}
8245 | |
8246 | /* Compute whether node is refered to non-locally. Worker for |
8247 | cgraph_for_symbol_thunks_and_aliases. */ |
8248 | static bool |
8249 | refered_from_nonlocal_fn (struct cgraph_node *node, void *data) |
8250 | { |
8251 | bool *nonlocal_p = (bool *)data; |
8252 | *nonlocal_p |= (node->used_from_other_partition |
8253 | || DECL_EXTERNAL (node->decl) |
8254 | || TREE_PUBLIC (node->decl) |
8255 | || node->force_output |
8256 | || lookup_attribute (attr_name: "noipa" , DECL_ATTRIBUTES (node->decl))); |
8257 | return false; |
8258 | } |
8259 | |
8260 | /* Same for varpool nodes. */ |
8261 | static bool |
8262 | refered_from_nonlocal_var (struct varpool_node *node, void *data) |
8263 | { |
8264 | bool *nonlocal_p = (bool *)data; |
8265 | *nonlocal_p |= (node->used_from_other_partition |
8266 | || DECL_EXTERNAL (node->decl) |
8267 | || TREE_PUBLIC (node->decl) |
8268 | || node->force_output); |
8269 | return false; |
8270 | } |
8271 | |
/* Execute the driver for IPA PTA.  Phases: (1) generate constraints
   for every defined function and global variable in the unit,
   (2) solve the combined constraint system, (3) allocate shadow
   variable uids for locals that may leak into recursive invocations
   of their containing function, and (4) write the solutions back
   into each function's SSA names and call use/clobber sets.  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  varpool_node *var;
  /* Index of the first constraint not yet dumped; only used for
     incremental dumping.  */
  unsigned int from = 0;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      symtab->dump (f: dump_file);
      fprintf (stream: dump_file, format: "\n" );
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (stream: dump_file, format: "Generating generic constraints\n\n" );
      dump_constraints (file: dump_file, from);
      fprintf (stream: dump_file, format: "\n" );
      from = constraints.length ();
    }

  /* Build the constraints.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body in this partition are not interesting.
	 Especially do not visit clones at this point for now - we
	 get duplicate decls there for inline clones at least.  */
      if (!node->has_gimple_body_p ()
	  || node->in_other_partition
	  || node->inlined_to)
	continue;
      node->get_body ();

      gcc_assert (!node->clone_of);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      bool nonlocal_p = (node->used_from_other_partition
			 || DECL_EXTERNAL (node->decl)
			 || TREE_PUBLIC (node->decl)
			 || node->force_output
			 || lookup_attribute (attr_name: "noipa" ,
					      DECL_ATTRIBUTES (node->decl)));
      /* Also consider all aliases/thunks of the function.  */
      node->call_for_symbol_thunks_and_aliases (callback: refered_from_nonlocal_fn,
						data: &nonlocal_p, include_overwritable: true);

      vi = create_function_info_for (decl: node->decl,
				     name: alias_get_name (decl: node->decl), add_id: false,
				     nonlocal_p);
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && from != constraints.length ())
	{
	  fprintf (stream: dump_file,
		   format: "Generating initial constraints for %s" ,
		   node->dump_name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (stream: dump_file, format: " (%s)" ,
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (stream: dump_file, format: "\n\n" );
	  dump_constraints (file: dump_file, from);
	  fprintf (stream: dump_file, format: "\n" );

	  from = constraints.length ();
	}

      /* Let aliases and thunks share the function's varinfo.  */
      node->call_for_symbol_thunks_and_aliases
	(callback: associate_varinfo_to_alias, data: vi, include_overwritable: true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      /* Analyzed aliases resolve to their target; skip them here.  */
      if (var->alias && var->analyzed)
	continue;

      varinfo_t vi = get_vi_for_tree (t: var->decl);

      /* For the purpose of IPA PTA unit-local globals are not
	 escape points.  */
      bool nonlocal_p = (DECL_EXTERNAL (var->decl)
			 || TREE_PUBLIC (var->decl)
			 || var->used_from_other_partition
			 || var->force_output);
      var->call_for_symbol_and_aliases (callback: refered_from_nonlocal_var,
					data: &nonlocal_p, include_overwritable: true);
      if (nonlocal_p)
	vi->is_ipa_escape_point = true;
    }

  if (dump_file && (dump_flags & TDF_DETAILS)
      && from != constraints.length ())
    {
      fprintf (stream: dump_file,
	       format: "Generating constraints for global initializers\n\n" );
      dump_constraints (file: dump_file, from);
      fprintf (stream: dump_file, format: "\n" );
      from = constraints.length ();
    }

  /* Generate constraints for each function body.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body in this partition are not interesting.  */
      if (!node->has_gimple_body_p ()
	  || node->in_other_partition
	  || node->clone_of)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (stream: dump_file,
		   format: "Generating constraints for %s" , node->dump_name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (stream: dump_file, format: " (%s)" ,
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (stream: dump_file, format: "\n" );
	}

      func = DECL_STRUCT_FUNCTION (node->decl);
      /* We work on FUNC explicitly and never push it as cfun.  */
      gcc_assert (cfun == NULL);

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  /* PHIs first; virtual operand PHIs carry no points-to info.  */
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi);
	       gsi_next (i: &gsi))
	    {
	      gphi *phi = gsi.phi ();

	      if (! virtual_operand_p (op: gimple_phi_result (gs: phi)))
		find_func_aliases (fn: func, origt: phi);
	    }

	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi);
	       gsi_next (i: &gsi))
	    {
	      gimple *stmt = gsi_stmt (i: gsi);

	      find_func_aliases (fn: func, origt: stmt);
	      find_func_clobbers (fn: func, origt: stmt);
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (stream: dump_file, format: "\n" );
	  dump_constraints (file: dump_file, from);
	  fprintf (stream: dump_file, format: "\n" );
	  from = constraints.length ();
	}
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  if (dump_file && (dump_flags & TDF_STATS))
    dump_sa_stats (outfile: dump_file);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_sa_points_to_info (outfile: dump_file);

  /* Now post-process solutions to handle locals from different
     runtime instantiations coming in through recursive invocations.  */
  unsigned shadow_var_cnt = 0;
  for (unsigned i = 1; i < varmap.length (); ++i)
    {
      varinfo_t fi = get_varinfo (n: i);
      if (fi->is_fn_info
	  && fi->decl)
	/* Automatic variables pointed to by their containing functions
	   parameters need this treatment.  */
	for (varinfo_t ai = first_vi_for_offset (start: fi, offset: fi_parm_base);
	     ai; ai = vi_next (vi: ai))
	  {
	    varinfo_t vi = get_varinfo (n: find (node: ai->id));
	    bitmap_iterator bi;
	    unsigned j;
	    EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	      {
		varinfo_t pt = get_varinfo (n: j);
		if (pt->shadow_var_uid == 0
		    && pt->decl
		    && auto_var_in_fn_p (pt->decl, fi->decl))
		  {
		    pt->shadow_var_uid = allocate_decl_uid ();
		    shadow_var_cnt++;
		  }
	      }
	  }
      /* As well as global variables which are another way of passing
	 arguments to recursive invocations.  */
      else if (fi->is_global_var)
	{
	  for (varinfo_t ai = fi; ai; ai = vi_next (vi: ai))
	    {
	      varinfo_t vi = get_varinfo (n: find (node: ai->id));
	      bitmap_iterator bi;
	      unsigned j;
	      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
		{
		  varinfo_t pt = get_varinfo (n: j);
		  if (pt->shadow_var_uid == 0
		      && pt->decl
		      && auto_var_p (pt->decl))
		    {
		      pt->shadow_var_uid = allocate_decl_uid ();
		      shadow_var_cnt++;
		    }
		}
	    }
	}
    }
  if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (stream: dump_file, format: "Allocated %u shadow variables for locals "
	     "maybe leaking into recursive invocations of their containing "
	     "functions\n" , shadow_var_cnt);

  /* Compute the global points-to sets for ESCAPED.
     ??? Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (NULL, orig_vi: get_varinfo (n: escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      basic_block bb;

      /* Nodes without a body in this partition are not interesting.  */
      if (!node->has_gimple_body_p ()
	  || node->in_other_partition
	  || node->clone_of)
	continue;

      fn = DECL_STRUCT_FUNCTION (node->decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (fndecl: node->decl, p: ptr);
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
	    {
	      gcall *stmt;
	      struct pt_solution *pt;
	      varinfo_t vi, fi;
	      tree decl;

	      stmt = dyn_cast <gcall *> (p: gsi_stmt (i: gsi));
	      if (!stmt)
		continue;

	      /* Handle direct calls to functions with body.  */
	      decl = gimple_call_fndecl (gs: stmt);

	      {
		tree called_decl = NULL_TREE;
		/* For GOMP/GOACC parallel launches the function actually
		   invoked is passed as an argument; use it as the callee
		   when it cannot live in another LTO partition.  */
		if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
		  called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
		else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
		  called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);

		if (called_decl != NULL_TREE
		    && !fndecl_maybe_in_other_partition (fndecl: called_decl))
		  decl = called_decl;
	      }

	      if (decl
		  && (fi = lookup_vi_for_tree (t: decl))
		  && fi->is_fn_info)
		{
		  *gimple_call_clobber_set (call_stmt: stmt)
		    = find_what_var_points_to
			(fndecl: node->decl, orig_vi: first_vi_for_offset (start: fi, offset: fi_clobbers));
		  *gimple_call_use_set (call_stmt: stmt)
		    = find_what_var_points_to
			(fndecl: node->decl, orig_vi: first_vi_for_offset (start: fi, offset: fi_uses));
		}
	      /* Handle direct calls to external functions.  */
	      else if (decl && (!fi || fi->decl))
		{
		  pt = gimple_call_use_set (call_stmt: stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (s: pt, c: 0, n: sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (call: stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (fndecl: node->decl, orig_vi: vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (call_stmt: stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (s: pt, c: 0, n: sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (call: stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (fndecl: node->decl, orig_vi: vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}
	      /* Handle indirect calls.  */
	      else if ((fi = get_fi_for_callee (call: stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (n: find (node: fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (pt: gimple_call_clobber_set (call_stmt: stmt));
		      pt_solution_reset (pt: gimple_call_use_set (call_stmt: stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (call_stmt: stmt);
		      clobbers = gimple_call_clobber_set (call_stmt: stmt);
		      memset (s: uses, c: 0, n: sizeof (struct pt_solution));
		      memset (s: clobbers, c: 0, n: sizeof (struct pt_solution));
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (n: i);
			  if (!vi->is_fn_info)
			    {
			      /* ??? We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  /* Once a set contains "anything", further IORs
			     cannot change it; skip the work.  */
			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (fndecl: node->decl,
				       orig_vi: first_vi_for_offset (start: vi, offset: fi_uses));
			      pt_solution_ior_into (dest: uses, src: &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (fndecl: node->decl,
				       orig_vi: first_vi_for_offset (start: vi, offset: fi_clobbers));
			      pt_solution_ior_into (dest: clobbers, src: &sol);
			    }
			}
		    }
		}
	      else
		gcc_unreachable ();
	    }
	}

      fn->gimple_df->ipa_pta = true;

      /* We have to re-set the final-solution cache after each function
	 because what is a "global" is dependent on function context.  */
      final_solutions->empty ();
      obstack_free (&final_solutions_obstack, NULL);
      gcc_obstack_init (&final_solutions_obstack);
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
8701 | |
8702 | namespace { |
8703 | |
8704 | const pass_data pass_data_ipa_pta = |
8705 | { |
8706 | .type: SIMPLE_IPA_PASS, /* type */ |
8707 | .name: "pta" , /* name */ |
8708 | .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */ |
8709 | .tv_id: TV_IPA_PTA, /* tv_id */ |
8710 | .properties_required: 0, /* properties_required */ |
8711 | .properties_provided: 0, /* properties_provided */ |
8712 | .properties_destroyed: 0, /* properties_destroyed */ |
8713 | .todo_flags_start: 0, /* todo_flags_start */ |
8714 | .todo_flags_finish: 0, /* todo_flags_finish */ |
8715 | }; |
8716 | |
8717 | class pass_ipa_pta : public simple_ipa_opt_pass |
8718 | { |
8719 | public: |
8720 | pass_ipa_pta (gcc::context *ctxt) |
8721 | : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt) |
8722 | {} |
8723 | |
8724 | /* opt_pass methods: */ |
8725 | bool gate (function *) final override |
8726 | { |
8727 | return (optimize |
8728 | && flag_ipa_pta |
8729 | /* Don't bother doing anything if the program has errors. */ |
8730 | && !seen_error ()); |
8731 | } |
8732 | |
8733 | opt_pass * clone () final override { return new pass_ipa_pta (m_ctxt); } |
8734 | |
8735 | unsigned int execute (function *) final override |
8736 | { |
8737 | return ipa_pta_execute (); |
8738 | } |
8739 | |
8740 | }; // class pass_ipa_pta |
8741 | |
8742 | } // anon namespace |
8743 | |
/* Create an instance of the IPA points-to pass for the pass manager.  */

simple_ipa_opt_pass *
make_pass_ipa_pta (gcc::context *ctxt)
{
  return new pass_ipa_pta (ctxt);
}
8749 | |