/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "cgraph.h"
#include "gimplify.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "sreal.h"
#include "ipa-cp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "dbgcnt.h"
#include "debug.h"
#include "attribs.h"

/* Return true when NODE has an ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Return true when NODE can be a target of an indirect call.  */

static bool
is_indirect_call_target_p (struct cgraph_node *node, void *)
{
  return node->indirect_call_target;
}

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->inlined_to)
      {
	e->callee->inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued when they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
	      hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}
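
/* For illustration, a minimal sketch (not used anywhere in this file) of the
   queue encoding assumed by enqueue_node: after pushing N1, N2 and N3 onto an
   empty queue, the AUX chain is

     first -> N3 -> N2 -> N1 -> (symtab_node *) (void *) 1

   and a pop is simply

     symtab_node *node = first;
     first = (symtab_node *) node->aux;

   AUX equal to (void *) 2 marks a node that was already popped while in the
   boundary; such a node is pushed again only once it is proven reachable.  */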

/* Return true if NODE may get inlined later.
   This is used to keep DECL_EXTERNAL function bodies around long enough
   so the inliner can process them.  */

static bool
possible_inline_candidate_p (symtab_node *node)
{
  if (symtab->state >= IPA_SSA_AFTER_INLINING)
    return false;
  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
  if (!cnode)
    return false;
  if (DECL_UNINLINABLE (cnode->decl))
    return false;
  if (opt_for_fn (cnode->decl, optimize))
    return true;
  if (symtab->state >= IPA_SSA)
    return false;
  return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
}

/* Process references.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (possible_inline_candidate_p (node)
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (VAR_P (node->decl)
		      && (flag_wpa
			  || flag_incremental_link
			     == INCREMENTAL_LINK_LTO)
		      && dyn_cast <varpool_node *> (node)
			   ->ctor_useable_for_folding_p ()))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && symtab->state < IPA_SSA_AFTER_INLINING)
	    reachable->add (body);
	  reachable->add (node);
	}
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to perform trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  if (cache_token != NULL && !reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  n->indirect_call_target = true;
	  symtab_node *body = n->function_symbol ();

	  /* Prior to inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (possible_inline_candidate_p (body)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && symtab->state < IPA_SSA_AFTER_INLINING)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::get_create (builtin_decl_unreachable ());

	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }
	  edge = cgraph_edge::make_direct (edge, target);
	  if (ipa_fn_summaries)
	    ipa_update_overall_fn_summary (node->inlined_to
					   ? node->inlined_to : node);
	  else if (edge->call_stmt)
	    cgraph_edge::redirect_call_stmt_to_callee (edge);
	}
    }
}
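
/* For illustration (a hypothetical C++ input, not part of this file): with a
   "final" class the polymorphic call below has exactly one possible target,
   so the code above can make the edge direct:

     struct S final { virtual int f () { return 1; } };
     int call (S *s) { return s->f (); }   // becomes a direct call to S::f

   When the target vector is provably empty, the call is instead redirected to
   the declaration returned by builtin_decl_unreachable.  */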

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; their bodies
     need to stay in memory until inlining in the hope that they will be
     inlined.  After inlining we release their bodies and turn them into
     unanalyzed nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem
     unreachable, in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones need the bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is otherwise
     unreachable.  To avoid origins from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain the set of function declarations whose bodies need to stay in
     body_needed_for_cloning.

     Inline clones represent a special case: their declarations match the
     declarations of their origins and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list via AUX pointers terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn a definition into a declaration, but we may keep the function
   body around based on body_needed_for_cloning.

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.  */
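
/* For illustration (a hypothetical GNU89-style C input, not part of this
   file): an extern inline function such as

     extern inline int f (void) { return 1; }

   is DECL_EXTERNAL, so no out-of-line copy is ever emitted for it, yet its
   body must stay in the boundary until inlining is done; this is what
   possible_inline_candidate_p arranges.  Once symtab->state reaches
   IPA_SSA_AFTER_INLINING, the body is released even if calls remain.  */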

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_cloning;
  hash_set<void *> reachable_call_targets;

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
	gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
	gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
	  && !node->inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *) first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *) 2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
		= cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
		  origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any non-external and non-local symbol in a comdat group is
	     reachable, force all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group
	      && node->externally_visible
	      && !DECL_EXTERNAL (node->decl))
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !DECL_EXTERNAL (next->decl)
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || possible_inline_candidate_p (e->callee)))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && symtab->state < IPA_SSA_AFTER_INLINING)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  else if (e->callee->declare_variant_alt
			   && !e->callee->in_other_partition)
		    reachable.add (e->callee);
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When an inline clone exists, mark its body to be preserved so
		 that when removing the offline copy of the function we don't
		 kill it.  */
	      if (cnode->inlined_to)
		body_needed_for_cloning.add (cnode->decl);

	      /* For non-inline clones, force their origins to the boundary and ensure
		 that body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_cloning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see a constructor of an external variable, keep referred
	 nodes in the boundary.  This will also hold initializers of the
	 external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s", node->dump_name ());
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk)
	    ;
	  else if (!body_needed_for_cloning.contains (node->decl))
	    {
	      /* Make the node a non-clone so that we do not attempt to
		 materialize it later.  */
	      if (node->clone_of)
		node->remove_from_clone_tree ();
	      node->release_body ();
	    }
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk)
	    {
	      if (file)
		fprintf (file, " %s", node->dump_name ());
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->transparent_alias = false;
	      node->thunk = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly lookup
	     the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s", ref->referred->dump_name ());
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s", vnode->dump_name ());
	  vnext = next_variable (vnode);
	  /* Signal removal to the debug machinery.  */
	  if (!flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	    {
	      vnode->definition = false;
	      (*debug_hooks->late_global_decl) (vnode->decl);
	    }
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->dump_name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
	      || ((init = ctor_for_folding (vnode->decl)) == error_mark_node))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	      (has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", node->dump_name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ()
		/* Virtual functions may be kept in cgraph just because
		   of possible later devirtualization.  Do not mark them as
		   local too early so we won't optimize them out before
		   we are done with polymorphic call analysis.  */
		&& (symtab->state >= IPA_SSA_AFTER_INLINING
		    || !node->call_for_symbol_and_aliases
			  (is_indirect_call_target_p, NULL, true)))
	      {
		node->local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      }
}

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize || in_lto_p)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
	{
	  if (vnode->num_references ())
	    *(bool *) data = true;
	  vnode->remove_all_references ();
	}
    }
  return false;
}

/* Clear addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables whose address is no longer taken or that are read-only
   or write-only, and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since the
   readonly flag plays a role in what is shared and what is not.  Currently we
   do this transformation as part of whole program visibility and re-do it at
   the ipa-reference pass (to take cloning into account), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_variable_flags (void)
{
  if (!flag_ipa_reference_addressable)
    return false;

  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)",
		       vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
						true);
	  }
	if (!address_taken && !written
	    /* Making a variable in an explicit section readonly can cause
	       a section type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->dump_name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
						true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
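
/* For illustration (a hypothetical C input, not part of this file): in

     static int flag;
     void set_flag (void) { flag = 1; }

   FLAG is only ever stored to, is never read and never has its address taken,
   so the loop above marks it write-only and set_writeonly_bit drops its
   initializer and references, allowing the stores to be deleted as dead.  A
   variable that is only read would instead get TREE_READONLY, unless it lives
   in an explicit section.  */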

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).
   BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static tree
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final,
			     tree optimization,
			     tree target)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (!targetm.have_ctors_dtors && final)
    {
      sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
      name = get_file_function_name (which_buf);
    }
  else
    {
      /* Produce a sane name but one not recognizable by collect2, just for the
	 case we fail to inline the function.  */
      sprintf (which_buf, "_sub_%c_%.5d_%d", which, priority, counter++);
      name = get_identifier (which_buf);
    }

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) = optimization;
  DECL_FUNCTION_SPECIFIC_TARGET (decl) = target;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
  return decl;
}
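
/* For illustration (hedged; the exact symbol depends on
   get_file_function_name and the input file name): a final constructor with
   WHICH 'I', PRIORITY 65 and COUNTER 0 encodes the buffer "I_00065_0" and
   receives a public name along the lines of _GLOBAL__I_00065_0_<file id>,
   which collect2 recognizes and sequences by the embedded priority.  The
   non-final variant is just the local identifier "_sub_I_00065_0", invisible
   to collect2.  */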

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).
   BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* FIXME: We should be able to
     gcc_assert (!in_lto_p);
     because at LTO time the global options are not safe to use.
     Unfortunately ASAN finish_file will produce constructors late and they
     may lead to surprises.  */
  cgraph_build_static_cdtor_1 (which, body, priority, false,
			       optimization_default_node,
			       target_option_default_node);
}

/* When the target does not have ctors and dtors, we call all constructors
   and destructors from special initialization/destruction functions
   recognized by collect2.

   When we are going to build these functions, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    ctors->safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    dtors->safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructors/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      j = i;
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Otherwise, build a body calling the whole batch.  */
      for (; i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true,
				   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (cdtors[0]),
				   DECL_FUNCTION_SPECIFIC_TARGET (cdtors[0]));
    }
}
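
/* For illustration (hypothetical input functions, not part of this file):
   given constructors f1 and f2 with priority 200 and f3 with priority 300,
   the loop above emits two wrapper constructors roughly equivalent to

     void wrapper_200 (void) { f1 (); f2 (); }   -- emitted with WHICH 'I'
     void wrapper_300 (void) { f3 (); }

   and clears DECL_STATIC_CONSTRUCTOR on f1..f3, so they become ordinary
   functions that are only called explicitly.  */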

/* Helper functions for build_cxa_dtor_registrations ().
   Build a decl for __cxa_atexit ().  */

static tree
build_cxa_atexit_decl ()
{
  /* The first parameter of "__cxa_atexit" is "void (*)(void *)".  */
  tree fn_type = build_function_type_list (void_type_node,
					   ptr_type_node, NULL_TREE);
  tree fn_ptr_type = build_pointer_type (fn_type);
  /* The declaration for `__cxa_atexit' is:
     int __cxa_atexit (void (*)(void *), void *, void *).  */
  const char *name = "__cxa_atexit";
  tree cxa_name = get_identifier (name);
  fn_type = build_function_type_list (integer_type_node, fn_ptr_type,
				      ptr_type_node, ptr_type_node, NULL_TREE);
  tree atexit_fndecl = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
				   cxa_name, fn_type);
  SET_DECL_ASSEMBLER_NAME (atexit_fndecl, cxa_name);
  DECL_VISIBILITY (atexit_fndecl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (atexit_fndecl) = true;
  set_call_expr_flags (atexit_fndecl, ECF_LEAF | ECF_NOTHROW);
  TREE_PUBLIC (atexit_fndecl) = true;
  DECL_EXTERNAL (atexit_fndecl) = true;
  DECL_ARTIFICIAL (atexit_fndecl) = true;
  return atexit_fndecl;
}

/* Build a decl for __dso_handle.  */

static tree
build_dso_handle_decl ()
{
  /* Declare the __dso_handle variable.  */
  tree dso_handle_decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
				     get_identifier ("__dso_handle"),
				     ptr_type_node);
  TREE_PUBLIC (dso_handle_decl) = true;
  DECL_EXTERNAL (dso_handle_decl) = true;
  DECL_ARTIFICIAL (dso_handle_decl) = true;
#ifdef HAVE_GAS_HIDDEN
  if (dso_handle_decl != error_mark_node)
    {
      DECL_VISIBILITY (dso_handle_decl) = VISIBILITY_HIDDEN;
      DECL_VISIBILITY_SPECIFIED (dso_handle_decl) = true;
    }
#endif
  return dso_handle_decl;
}

/* This builds one or more constructor functions that register DTORs with
   __cxa_atexit ().  Within a priority level, DTORs are registered in TU
   order - which means that they will run in reverse TU order from
   __cxa_atexit ().  This is the same behavior as using a .fini /
   .mod_term_funcs section.  As the functions are built, they are appended
   to the CTORs vector.  */

static void
build_cxa_dtor_registrations (const vec<tree> &dtors, vec<tree> *ctors)
{
  size_t i, j;
  size_t len = dtors.length ();

  location_t sav_loc = input_location;
  input_location = UNKNOWN_LOCATION;

  tree atexit_fndecl = build_cxa_atexit_decl ();
  tree dso_handle_decl = build_dso_handle_decl ();

  /* We want &__dso_handle.  */
  tree dso_ptr = build1_loc (UNKNOWN_LOCATION, ADDR_EXPR,
			     ptr_type_node, dso_handle_decl);

  i = 0;
  while (i < len)
    {
      priority_type priority = 0;
      tree body = NULL_TREE;
      /* Find the next batch of destructors with the same fini priority.  */
      j = i;
      do
	{
	  priority_type p;
	  tree fn = dtors[j];
	  p = DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* Register this batch of destructors with __cxa_atexit ().  */
      for (; i < j; i++)
	{
	  tree fn = dtors[i];
	  DECL_STATIC_DESTRUCTOR (fn) = 0;
	  tree dtor_ptr = build1_loc (UNKNOWN_LOCATION, ADDR_EXPR,
				      ptr_type_node, fn);
	  tree call_cxa_atexit
	    = build_call_expr_loc (UNKNOWN_LOCATION, atexit_fndecl, 3,
				   dtor_ptr, null_pointer_node, dso_ptr);
	  TREE_SIDE_EFFECTS (call_cxa_atexit) = 1;
	  append_to_statement_list (call_cxa_atexit, &body);
	}

      gcc_assert (body != NULL_TREE);
      /* Generate a function to register the DTORs at this priority.  */
      tree new_ctor
	= cgraph_build_static_cdtor_1 ('I', body, priority, true,
				       DECL_FUNCTION_SPECIFIC_OPTIMIZATION (dtors[0]),
				       DECL_FUNCTION_SPECIFIC_TARGET (dtors[0]));
      /* Add this to the list of ctors.  */
      ctors->safe_push (new_ctor);
    }
  input_location = sav_loc;
}
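
/* For illustration (hypothetical destructors d1 and d2 at one priority, not
   part of this file): the registration constructor generated above behaves
   like the C fragment

     extern void *__dso_handle;
     static void register_dtors (void)
     {
       __cxa_atexit (d1, 0, &__dso_handle);
       __cxa_atexit (d2, 0, &__dso_handle);
     }

   so at program exit the runtime runs d2 before d1, i.e. in reverse
   registration (and hence reverse TU) order.  */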

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backward
       order so that LTO initializes libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort - into TU order.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Comparison function for qsort.  P1 and P2 are of type "tree *" and point to
   a pair of static constructors or destructors.  We first sort on the basis of
   priority and then into TU order (on the strict assumption that DECL_UIDs are
   ordered in the same way as the original functions).  ???: this seems quite
   fragile.  */

static int
compare_cdtor_tu_order (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  /* We process the DTORs first, and then remove their flag, so this order
     allows for functions that are declared as both CTOR and DTOR.  */
  if (DECL_STATIC_DESTRUCTOR (f1))
    {
      gcc_checking_assert (DECL_STATIC_DESTRUCTOR (f2));
      priority1 = DECL_FINI_PRIORITY (f1);
      priority2 = DECL_FINI_PRIORITY (f2);
    }
  else
    {
      priority1 = DECL_INIT_PRIORITY (f1);
      priority2 = DECL_INIT_PRIORITY (f2);
    }

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* For equal priority, sort into the order of definition in the TU.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}

/* Generate new CTORs to register static destructors with __cxa_atexit and add
   them to the existing list of CTORs; we then process the revised CTORs list.

   We sort the DTORs into priority and then TU order; this means that they are
   registered in that order with __cxa_atexit () and therefore will be run in
   the reverse order.

   Likewise, CTORs are sorted into priority and then TU order, which means that
   they will run in that order.

   This matches the behavior of using init/fini or mod_init_func/mod_term_func
   sections.  */

static void
build_cxa_atexit_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  if (!dtors->is_empty ())
    {
      gcc_assert (targetm.dtors_from_cxa_atexit);
      dtors->qsort (compare_cdtor_tu_order);
      build_cxa_dtor_registrations (*dtors, ctors);
    }

  if (!ctors->is_empty ())
    {
      gcc_assert (targetm.dtors_from_cxa_atexit);
      ctors->qsort (compare_cdtor_tu_order);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into a single function to improve code locality and
   reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node, &ctors, &dtors);
  if (targetm.dtors_from_cxa_atexit)
    build_cxa_atexit_fns (&ctors, &dtors);
  else
    build_cdtor_fns (&ctors, &dtors);
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate (function *) final override;
  unsigned int execute (function *) final override
  {
    return ipa_cdtor_merge ();
  }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p || targetm.dtors_from_cxa_atexit;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}
/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate variables with a single function that may access
   them.

   FUNCTION is the current single user of a variable; VAR is a variable that
   refers to it.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
   - TOP by no entry in SINGLE_USER_MAP
   - BOTTOM by BOTTOM in AUX pointer (to save lookups)
   - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}
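
/* For illustration, MEET above implements the usual three-level lattice,
   where TOP means "no known user yet" and BOTTOM "more than one user".
   With F and G distinct functions, and the second operand standing for
   VAR's current value:

     meet (F, TOP)    == F        (VAR not yet in SINGLE_USER_MAP)
     meet (F, F)      == F
     meet (F, G)      == BOTTOM
     meet (F, BOTTOM) == BOTTOM   (VAR->aux == BOTTOM)  */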

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are used by a single function
   FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  if (cnode->inlined_to)
	    cnode = cnode->inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}

/* Pass setting used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
	var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *) first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
		&& ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

	  gcc_checking_assert (single_user_map.get (var));

	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s is used by single function\n",
		       var->dump_name ());
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}