1 | /* Write and read the cgraph to the memory mapped representation of a |
2 | .o file. |
3 | |
4 | Copyright (C) 2009-2024 Free Software Foundation, Inc. |
5 | Contributed by Kenneth Zadeck <zadeck@naturalbridge.com> |
6 | |
7 | This file is part of GCC. |
8 | |
9 | GCC is free software; you can redistribute it and/or modify it under |
10 | the terms of the GNU General Public License as published by the Free |
11 | Software Foundation; either version 3, or (at your option) any later |
12 | version. |
13 | |
14 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
15 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
17 | for more details. |
18 | |
19 | You should have received a copy of the GNU General Public License |
20 | along with GCC; see the file COPYING3. If not see |
21 | <http://www.gnu.org/licenses/>. */ |
22 | |
23 | #include "config.h" |
24 | #include "system.h" |
25 | #include "coretypes.h" |
26 | #include "backend.h" |
27 | #include "rtl.h" |
28 | #include "tree.h" |
29 | #include "gimple.h" |
30 | #include "predict.h" |
31 | #include "stringpool.h" |
32 | #include "tree-streamer.h" |
33 | #include "cgraph.h" |
34 | #include "tree-pass.h" |
35 | #include "profile.h" |
36 | #include "context.h" |
37 | #include "pass_manager.h" |
38 | #include "ipa-utils.h" |
39 | #include "omp-offload.h" |
40 | #include "omp-general.h" |
41 | #include "stringpool.h" |
42 | #include "attribs.h" |
43 | #include "alloc-pool.h" |
44 | #include "symbol-summary.h" |
45 | #include "symtab-thunks.h" |
46 | #include "symtab-clones.h" |
47 | |
48 | /* True when asm nodes has been output. */ |
49 | bool asm_nodes_output = false; |
50 | |
51 | static void output_cgraph_opt_summary (void); |
52 | static void input_cgraph_opt_summary (vec<symtab_node *> nodes); |
53 | |
54 | /* Number of LDPR values known to GCC. */ |
55 | #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1) |
56 | |
/* Cgraph streaming is organized as set of record whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  /* Edge whose callee is unknown (indirect call).  */
  LTO_symtab_indirect_edge,
  /* Varpool node record.  */
  LTO_symtab_variable,
  /* NOTE(review): not referenced in this file; presumably an indirect
     function record — confirm against the reader in lto/.  */
  LTO_symtab_indirect_function,
  /* Sentinel; used only as the enum bound for streamer_write_enum.  */
  LTO_symtab_last_tag
};
74 | |
75 | /* Create a new symtab encoder. |
76 | if FOR_INPUT, the encoder allocate only datastructures needed |
77 | to read the symtab. */ |
78 | |
79 | lto_symtab_encoder_t |
80 | lto_symtab_encoder_new (bool for_input) |
81 | { |
82 | lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d); |
83 | |
84 | if (!for_input) |
85 | encoder->map = new hash_map<symtab_node *, size_t>; |
86 | encoder->nodes.create (nelems: 0); |
87 | return encoder; |
88 | } |
89 | |
90 | |
91 | /* Delete ENCODER and its components. */ |
92 | |
93 | void |
94 | lto_symtab_encoder_delete (lto_symtab_encoder_t encoder) |
95 | { |
96 | encoder->nodes.release (); |
97 | if (encoder->map) |
98 | delete encoder->map; |
99 | free (ptr: encoder); |
100 | } |
101 | |
102 | |
103 | /* Return the existing reference number of NODE in the symtab encoder in |
104 | output block OB. Assign a new reference if this is the first time |
105 | NODE is encoded. */ |
106 | |
107 | int |
108 | lto_symtab_encoder_encode (lto_symtab_encoder_t encoder, |
109 | symtab_node *node) |
110 | { |
111 | int ref; |
112 | |
113 | if (!encoder->map) |
114 | { |
115 | lto_encoder_entry entry = {.node: node, .in_partition: false, .body: false, .initializer: false}; |
116 | |
117 | ref = encoder->nodes.length (); |
118 | encoder->nodes.safe_push (obj: entry); |
119 | return ref; |
120 | } |
121 | |
122 | size_t *slot = encoder->map->get (k: node); |
123 | if (!slot || !*slot) |
124 | { |
125 | lto_encoder_entry entry = {.node: node, .in_partition: false, .body: false, .initializer: false}; |
126 | ref = encoder->nodes.length (); |
127 | if (!slot) |
128 | encoder->map->put (k: node, v: ref + 1); |
129 | encoder->nodes.safe_push (obj: entry); |
130 | } |
131 | else |
132 | ref = *slot - 1; |
133 | |
134 | return ref; |
135 | } |
136 | |
/* Remove NODE from encoder.  Return true when NODE was actually
   present; false when it was never encoded.  */

bool
lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
				symtab_node *node)
{
  int index;
  lto_encoder_entry last_node;

  /* Map values are biased by one; a missing or zero slot means NODE
     was never encoded.  */
  size_t *slot = encoder->map->get (k: node);
  if (slot == NULL || !*slot)
    return false;

  index = *slot - 1;
  gcc_checking_assert (encoder->nodes[index].node == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = encoder->nodes.pop ();
  if (last_node.node != node)
    {
      /* Unlike gcc_checking_assert, gcc_assert always evaluates its
	 argument, so the map update (put returns true when it replaces
	 an existing entry) is never compiled away.  */
      gcc_assert (encoder->map->put (last_node.node, index + 1));

      /* Move the last element to the original spot of NODE.  */
      encoder->nodes[index] = last_node;
    }

  /* Remove element from hash table.  */
  encoder->map->remove (k: node);
  return true;
}
168 | |
169 | |
170 | /* Return TRUE if we should encode the body of NODE (if any). */ |
171 | |
172 | bool |
173 | lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder, |
174 | struct cgraph_node *node) |
175 | { |
176 | int index = lto_symtab_encoder_lookup (encoder, node); |
177 | return encoder->nodes[index].body; |
178 | } |
179 | |
180 | /* Specify that we encode the body of NODE in this partition. */ |
181 | |
182 | static void |
183 | lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder, |
184 | struct cgraph_node *node) |
185 | { |
186 | int index = lto_symtab_encoder_encode (encoder, node); |
187 | gcc_checking_assert (encoder->nodes[index].node == node); |
188 | encoder->nodes[index].body = true; |
189 | } |
190 | |
191 | /* Return TRUE if we should encode initializer of NODE (if any). */ |
192 | |
193 | bool |
194 | lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder, |
195 | varpool_node *node) |
196 | { |
197 | int index = lto_symtab_encoder_lookup (encoder, node); |
198 | if (index == LCC_NOT_FOUND) |
199 | return false; |
200 | return encoder->nodes[index].initializer; |
201 | } |
202 | |
203 | /* Specify that we should encode initializer of NODE (if any). */ |
204 | |
205 | static void |
206 | lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder, |
207 | varpool_node *node) |
208 | { |
209 | int index = lto_symtab_encoder_lookup (encoder, node); |
210 | encoder->nodes[index].initializer = true; |
211 | } |
212 | |
213 | /* Return TRUE if NODE is in this partition. */ |
214 | |
215 | bool |
216 | lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder, |
217 | symtab_node *node) |
218 | { |
219 | int index = lto_symtab_encoder_lookup (encoder, node); |
220 | if (index == LCC_NOT_FOUND) |
221 | return false; |
222 | return encoder->nodes[index].in_partition; |
223 | } |
224 | |
225 | /* Specify that NODE is in this partition. */ |
226 | |
227 | void |
228 | lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder, |
229 | symtab_node *node) |
230 | { |
231 | int index = lto_symtab_encoder_encode (encoder, node); |
232 | encoder->nodes[index].in_partition = true; |
233 | } |
234 | |
/* Output the cgraph EDGE to OB using ENCODER.  The record layout (tag,
   caller ref, optional callee ref, count, bitpack) must stay in sync
   with the reader side.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* The tag tells the reader whether a callee reference follows.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  /* The caller must already have been encoded.  */
  ref = lto_symtab_encoder_lookup (encoder, node: edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Only direct edges have a known callee to reference.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, node: edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  edge->count.stream_out (ob->main_stream);

  bp = bitpack_create (s: ob->main_stream);
  /* Statement uids are biased by 1 so that 0 means "no call statement";
     in that case the previously recorded lto_stmt_uid is reused.  */
  uid = !edge->call_stmt ? edge->lto_stmt_uid
	: gimple_uid (g: edge->call_stmt) + 1;
  bp_pack_enum (&bp, cgraph_inline_failed_t,
		CIF_N_REASONS, edge->inline_failed);
  gcc_checking_assert (uid || edge->caller->thunk);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_value (bp: &bp, val: edge->speculative_id, nbits: 16);
  bp_pack_value (bp: &bp, val: edge->indirect_inlining_edge, nbits: 1);
  bp_pack_value (bp: &bp, val: edge->speculative, nbits: 1);
  bp_pack_value (bp: &bp, val: edge->call_stmt_cannot_inline_p, nbits: 1);
  gcc_assert (!edge->call_stmt_cannot_inline_p
	      || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
  bp_pack_value (bp: &bp, val: edge->can_throw_external, nbits: 1);
  bp_pack_value (bp: &bp, val: edge->in_polymorphic_cdtor, nbits: 1);
  if (edge->indirect_unknown_callee)
    {
      /* ECF flags are streamed as individual bits for indirect edges.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (bp: &bp, val: (flags & ECF_CONST) != 0, nbits: 1);
      bp_pack_value (bp: &bp, val: (flags & ECF_PURE) != 0, nbits: 1);
      bp_pack_value (bp: &bp, val: (flags & ECF_NORETURN) != 0, nbits: 1);
      bp_pack_value (bp: &bp, val: (flags & ECF_MALLOC) != 0, nbits: 1);
      bp_pack_value (bp: &bp, val: (flags & ECF_NOTHROW) != 0, nbits: 1);
      bp_pack_value (bp: &bp, val: (flags & ECF_RETURNS_TWICE) != 0, nbits: 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));

      bp_pack_value (bp: &bp, val: edge->indirect_info->num_speculative_call_targets,
		     nbits: 16);
    }
  streamer_write_bitpack (bp: &bp);
}
301 | |
302 | /* Return if NODE contain references from other partitions. */ |
303 | |
304 | bool |
305 | referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder) |
306 | { |
307 | int i; |
308 | struct ipa_ref *ref = NULL; |
309 | |
310 | for (i = 0; node->iterate_referring (i, ref); i++) |
311 | { |
312 | /* Ignore references from non-offloadable nodes while streaming NODE into |
313 | offload LTO section. */ |
314 | if (!ref->referring->need_lto_streaming) |
315 | continue; |
316 | |
317 | if (ref->referring->in_other_partition |
318 | || !lto_symtab_encoder_in_partition_p (encoder, node: ref->referring)) |
319 | return true; |
320 | } |
321 | return false; |
322 | } |
323 | |
324 | /* Return true when node is reachable from other partition. */ |
325 | |
326 | bool |
327 | reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder) |
328 | { |
329 | struct cgraph_edge *e; |
330 | if (!node->definition) |
331 | return false; |
332 | if (node->inlined_to) |
333 | return false; |
334 | for (e = node->callers; e; e = e->next_caller) |
335 | { |
336 | /* Ignore references from non-offloadable nodes while streaming NODE into |
337 | offload LTO section. */ |
338 | if (!e->caller->need_lto_streaming) |
339 | continue; |
340 | |
341 | if (e->caller->in_other_partition |
342 | || !lto_symtab_encoder_in_partition_p (encoder, node: e->caller)) |
343 | return true; |
344 | } |
345 | return false; |
346 | } |
347 | |
348 | /* Return if NODE contain references from other partitions. */ |
349 | |
350 | bool |
351 | referenced_from_this_partition_p (symtab_node *node, |
352 | lto_symtab_encoder_t encoder) |
353 | { |
354 | int i; |
355 | struct ipa_ref *ref = NULL; |
356 | |
357 | for (i = 0; node->iterate_referring (i, ref); i++) |
358 | if (lto_symtab_encoder_in_partition_p (encoder, node: ref->referring)) |
359 | return true; |
360 | return false; |
361 | } |
362 | |
363 | /* Return true when node is reachable from other partition. */ |
364 | |
365 | bool |
366 | reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder) |
367 | { |
368 | struct cgraph_edge *e; |
369 | for (e = node->callers; e; e = e->next_caller) |
370 | if (lto_symtab_encoder_in_partition_p (encoder, node: e->caller)) |
371 | return true; |
372 | return false; |
373 | } |
374 | |
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to.  SET is the set of nodes we
   are writing to the current file.  If NODE is not in SET, then NODE
   is a boundary of a cgraph_node_set and we pretend NODE just has a
   decl and no callees.  WRITTEN_DECLS is the set of FUNCTION_DECLs
   that have had their callgraph node written so far.  This is used to
   determine if NODE is a clone of a previously written node.

   The record layout (tag, order, clone ref, decl ref, counts,
   transforms, inlined_to ref, comdat, bitpack, section, ...) must stay
   in sync with the reader side.  */

static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  ipa_opt_pass_d *pass;
  int i;
  const char *comdat;
  const char *section;
  tree group;

  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);

  /* Aliases and non-inlined thunks keep the "analyzed" tag even on the
     boundary; everything else on the boundary is streamed as a bare
     declaration.  */
  if (node->analyzed && (!boundary_p || node->alias
			 || (node->thunk && !node->inlined_to)))
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes: These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->analyzed
      && node->get_partitioning_class () == SYMBOL_PARTITION)
    {
      /* Inline clones cannot be part of boundary.
	 gcc_assert (!node->inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promplty done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }
  else if (UNLIKELY (lto_stream_offload_p
		     && lookup_attribute ("omp target device_ancestor_host" ,
					  DECL_ATTRIBUTES (node->decl))))
    /* This symbol is only used as argument to IFN_GOMP_TARGET_REV; this IFN
       is ignored on ACCEL_COMPILER.  Thus, mark it as in_other_partition to silence
       verify_node_partition diagnostic.  */
    in_other_partition = 1;

  /* Walk up the clone tree until we find an origin that is itself in
     the encoder; that is the reference we stream.  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, node: clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing only
     an declaration and we do not need to pass down clone tree.  */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, node: ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);


  lto_output_fn_decl_ref (ob->decl_state, ob->main_stream, node->decl);
  node->count.stream_out (ob->main_stream);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Pending IPA transforms are streamed as a length-prefixed list of
     static pass numbers.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      if (node->inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, node: node->inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Comdat group name is streamed as a NUL-terminated string; empty
     string means no comdat group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "" ;
  streamer_write_data_stream (ob->main_stream, comdat, strlen (s: comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  /* Stream the first member of the same_comdat_group ring that
	     is present in the encoder (if any).  */
	  ref = LCC_NOT_FOUND;
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, node: n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "" ;

  streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);

  /* Flag bits; order must match the reader exactly.  */
  bp = bitpack_create (s: ob->main_stream);
  bp_pack_value (bp: &bp, val: node->local, nbits: 1);
  bp_pack_value (bp: &bp, val: node->externally_visible, nbits: 1);
  bp_pack_value (bp: &bp, val: node->no_reorder, nbits: 1);
  bp_pack_value (bp: &bp, val: node->definition, nbits: 1);
  bp_pack_value (bp: &bp, val: node->versionable, nbits: 1);
  bp_pack_value (bp: &bp, val: node->can_change_signature, nbits: 1);
  bp_pack_value (bp: &bp, val: node->redefined_extern_inline, nbits: 1);
  bp_pack_value (bp: &bp, val: node->force_output, nbits: 1);
  bp_pack_value (bp: &bp, val: node->forced_by_abi, nbits: 1);
  bp_pack_value (bp: &bp, val: node->unique_name, nbits: 1);
  bp_pack_value (bp: &bp, val: node->body_removed, nbits: 1);
  bp_pack_value (bp: &bp, val: node->semantic_interposition, nbits: 1);
  bp_pack_value (bp: &bp, val: node->implicit_section, nbits: 1);
  bp_pack_value (bp: &bp, val: node->address_taken, nbits: 1);
  /* "used from other partition" bit, computed on the fly.  */
  bp_pack_value (bp: &bp, val: tag == LTO_symtab_analyzed_node
		 && node->get_partitioning_class () == SYMBOL_PARTITION
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (node, encoder)), nbits: 1);
  bp_pack_value (bp: &bp, val: node->lowered, nbits: 1);
  bp_pack_value (bp: &bp, val: in_other_partition, nbits: 1);
  bp_pack_value (bp: &bp, val: node->alias, nbits: 1);
  bp_pack_value (bp: &bp, val: node->transparent_alias, nbits: 1);
  bp_pack_value (bp: &bp, val: node->weakref, nbits: 1);
  bp_pack_value (bp: &bp, val: node->symver, nbits: 1);
  bp_pack_value (bp: &bp, val: node->frequency, nbits: 2);
  bp_pack_value (bp: &bp, val: node->only_called_at_startup, nbits: 1);
  bp_pack_value (bp: &bp, val: node->only_called_at_exit, nbits: 1);
  bp_pack_value (bp: &bp, val: node->tm_clone, nbits: 1);
  bp_pack_value (bp: &bp, val: node->calls_comdat_local, nbits: 1);
  bp_pack_value (bp: &bp, val: node->icf_merged, nbits: 1);
  bp_pack_value (bp: &bp, val: node->nonfreeing_fn, nbits: 1);
  bp_pack_value (bp: &bp, val: node->merged_comdat, nbits: 1);
  bp_pack_value (bp: &bp, val: node->merged_extern_inline, nbits: 1);
  bp_pack_value (bp: &bp, val: node->thunk, nbits: 1);
  bp_pack_value (bp: &bp, val: node->parallelized_function, nbits: 1);
  bp_pack_value (bp: &bp, val: node->declare_variant_alt, nbits: 1);
  bp_pack_value (bp: &bp, val: node->calls_declare_variant_alt, nbits: 1);

  /* Stream thunk info always because we use it in
     ipa_polymorphic_call_context::ipa_polymorphic_call_context
     to properly interpret THIS pointers for thunks that has been converted
     to Gimple.  */
  struct thunk_info *thunk = node->definition ? thunk_info::get (node) : NULL;

  bp_pack_value (bp: &bp, val: thunk != NULL, nbits: 1);

  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
		LDPR_NUM_KNOWN,
		/* When doing incremental link, we will get new resolution
		   info next time we process the file.  */
		flag_incremental_link == INCREMENTAL_LINK_LTO
		? LDPR_UNKNOWN : node->resolution);
  bp_pack_value (bp: &bp, val: node->split_part, nbits: 1);
  streamer_write_bitpack (bp: &bp);
  streamer_write_data_stream (ob->main_stream, section, strlen (s: section) + 1);

  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
  streamer_write_hwi_stream (ob->main_stream, node->unit_id);
  /* Ctor/dtor priorities are streamed only when the corresponding bit
     is discoverable from the decl on the reader side.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());

  if (thunk)
    thunk_info::get (node)->stream_out (ob);
}
583 | |
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  The record layout
   (tag, order, decl ref, bitpack, comdat, section, resolution) must
   stay in sync with the reader side.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
  bool encode_initializer_p
    = (node->definition
       && lto_symtab_encoder_encode_initializer_p (encoder, node));
  struct bitpack_d bp;
  int ref;
  const char *comdat;
  const char *section;
  tree group;

  gcc_assert (!encode_initializer_p || node->definition);
  gcc_assert (boundary_p || encode_initializer_p);

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_ref (ob->decl_state, ob->main_stream, node->decl);
  /* Flag bits; order must match the reader exactly.  */
  bp = bitpack_create (s: ob->main_stream);
  bp_pack_value (bp: &bp, val: node->externally_visible, nbits: 1);
  bp_pack_value (bp: &bp, val: node->no_reorder, nbits: 1);
  bp_pack_value (bp: &bp, val: node->force_output, nbits: 1);
  bp_pack_value (bp: &bp, val: node->forced_by_abi, nbits: 1);
  bp_pack_value (bp: &bp, val: node->unique_name, nbits: 1);
  /* body_removed: also set when we deliberately drop the initializer
     of a non-alias definition that is not streamed.  */
  bp_pack_value (bp: &bp,
		 val: node->body_removed
		 || (!encode_initializer_p && !node->alias && node->definition),
		 nbits: 1);
  bp_pack_value (bp: &bp, val: node->semantic_interposition, nbits: 1);
  bp_pack_value (bp: &bp, val: node->implicit_section, nbits: 1);
  bp_pack_value (bp: &bp, val: node->writeonly, nbits: 1);
  /* "definition" as seen by the reader: only when the initializer or
     alias info actually follows.  */
  bp_pack_value (bp: &bp, val: node->definition && (encode_initializer_p || node->alias),
		 nbits: 1);
  bp_pack_value (bp: &bp, val: node->alias, nbits: 1);
  bp_pack_value (bp: &bp, val: node->transparent_alias, nbits: 1);
  bp_pack_value (bp: &bp, val: node->weakref, nbits: 1);
  bp_pack_value (bp: &bp, val: node->symver, nbits: 1);
  bp_pack_value (bp: &bp, val: node->analyzed && (!boundary_p || node->alias), nbits: 1);
  gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (node->get_partitioning_class () != SYMBOL_PARTITION)
    {
      bp_pack_value (bp: &bp, val: 0, nbits: 1); /* used_from_other_parition.  */
      bp_pack_value (bp: &bp, val: 0, nbits: 1); /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (bp: &bp, val: node->definition
		     && referenced_from_other_partition_p (node, encoder), nbits: 1);
      bp_pack_value (bp: &bp, val: node->analyzed
		     && boundary_p && !DECL_EXTERNAL (node->decl), nbits: 1);
	  /* in_other_partition.  */
    }
  bp_pack_value (bp: &bp, val: node->tls_model, nbits: 3);
  bp_pack_value (bp: &bp, val: node->used_by_single_function, nbits: 1);
  bp_pack_value (bp: &bp, val: node->dynamically_initialized, nbits: 1);
  streamer_write_bitpack (bp: &bp);

  /* Comdat group name; empty string means none.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "" ;
  streamer_write_data_stream (ob->main_stream, comdat, strlen (s: comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  /* Stream the first member of the same_comdat_group ring that
	     is present in the encoder (if any).  */
	  ref = LCC_NOT_FOUND;
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, node: n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "" ;
  streamer_write_data_stream (ob->main_stream, section, strlen (s: section) + 1);

  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
679 | |
/* Output the IPA reference REF to OB using ENCODER.
   (The previous header comment, copied from the varpool writer, was
   inaccurate.)  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_symtab_encoder_t encoder)
{
  struct bitpack_d bp;
  int nref;
  /* Statement uids are biased by 1; 0 means "no statement" and the
     saved lto_stmt_uid is used instead.  */
  int uid = !ref->stmt ? ref->lto_stmt_uid : gimple_uid (g: ref->stmt) + 1;
  struct cgraph_node *node;

  bp = bitpack_create (s: ob->main_stream);
  bp_pack_value (bp: &bp, val: ref->use, nbits: 3);
  bp_pack_value (bp: &bp, val: ref->speculative, nbits: 1);
  streamer_write_bitpack (bp: &bp);
  nref = lto_symtab_encoder_lookup (encoder, node: ref->referred);
  gcc_assert (nref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, nref);

  /* Only references coming from cgraph nodes carry a statement uid and
     speculative id.  */
  node = dyn_cast <cgraph_node *> (p: ref->referring);
  if (node)
    {
      if (ref->stmt)
	uid = gimple_uid (g: ref->stmt) + 1;
      streamer_write_hwi_stream (ob->main_stream, uid);
      /* BP was flushed above; packing continues into a fresh word that
	 is written by the second streamer_write_bitpack.  */
      bp_pack_value (bp: &bp, val: ref->speculative_id, nbits: 16);
      streamer_write_bitpack (bp: &bp);
    }
}
710 | |
711 | /* Stream out profile_summary to OB. */ |
712 | |
713 | static void |
714 | output_profile_summary (struct lto_simple_output_block *ob) |
715 | { |
716 | if (profile_info) |
717 | { |
718 | /* We do not output num and run_max, they are not used by |
719 | GCC profile feedback and they are difficult to merge from multiple |
720 | units. */ |
721 | unsigned runs = (profile_info->runs); |
722 | streamer_write_uhwi_stream (ob->main_stream, runs); |
723 | |
724 | /* IPA-profile computes hot bb threshold based on cumulated |
725 | whole program profile. We need to stream it down to ltrans. */ |
726 | if (flag_wpa) |
727 | streamer_write_gcov_count_stream (ob->main_stream, |
728 | get_hot_bb_threshold ()); |
729 | } |
730 | else |
731 | streamer_write_uhwi_stream (ob->main_stream, 0); |
732 | } |
733 | |
734 | /* Output all callees or indirect outgoing edges. EDGE must be the first such |
735 | edge. */ |
736 | |
737 | static void |
738 | output_outgoing_cgraph_edges (struct cgraph_edge *edge, |
739 | struct lto_simple_output_block *ob, |
740 | lto_symtab_encoder_t encoder) |
741 | { |
742 | if (!edge) |
743 | return; |
744 | |
745 | /* Output edges in backward direction, so the reconstructed callgraph match |
746 | and it is easy to associate call sites in the IPA pass summaries. */ |
747 | while (edge->next_callee) |
748 | edge = edge->next_callee; |
749 | for (; edge; edge = edge->prev_callee) |
750 | lto_output_edge (ob, edge, encoder); |
751 | } |
752 | |
/* Output the IPA reference lists of all nodes in ENCODER into the
   LTO_section_refs section.  A trailing zero terminates the section.  */

static void
output_refs (lto_symtab_encoder_t encoder)
{
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;

  ob = lto_create_simple_output_block (LTO_section_refs);

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, ref: i);

      /* IPA_REF_ALIAS references are always preserved
	 in the boundary.  Alias node can't have other references and
	 can be always handled as if it's not in the boundary.  */
      if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
	continue;

      /* Each block is: nonzero reference count, node index, then the
	 references themselves.  */
      count = node->ref_list.nreferences ();
      if (count)
	{
	  streamer_write_gcov_count_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_symtab_encoder_lookup (encoder, node));
	  /* NOTE: this inner I shadows the outer loop counter.  */
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    lto_output_ref (ob, ref, encoder);
	}
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (p: node))
	if (cnode->declare_variant_alt)
	  omp_lto_output_declare_variant_alt (ob, cnode, encoder);
    }

  /* Zero count terminates the section.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
792 | |
793 | /* Add NODE into encoder as well as nodes it is cloned from. |
794 | Do it in a way so clones appear first. */ |
795 | |
796 | static void |
797 | add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node, |
798 | bool include_body) |
799 | { |
800 | if (node->clone_of) |
801 | add_node_to (encoder, node: node->clone_of, include_body); |
802 | if (include_body) |
803 | lto_set_symtab_encoder_encode_body (encoder, node); |
804 | lto_symtab_encoder_encode (encoder, node); |
805 | } |
806 | |
807 | /* Add all references in NODE to encoders. */ |
808 | |
809 | static void |
810 | create_references (lto_symtab_encoder_t encoder, symtab_node *node) |
811 | { |
812 | int i; |
813 | struct ipa_ref *ref = NULL; |
814 | for (i = 0; node->iterate_reference (i, ref); i++) |
815 | if (is_a <cgraph_node *> (p: ref->referred)) |
816 | add_node_to (encoder, node: dyn_cast <cgraph_node *> (p: ref->referred), include_body: false); |
817 | else |
818 | lto_symtab_encoder_encode (encoder, node: ref->referred); |
819 | } |
820 | |
821 | /* Select what needs to be streamed out. In regular lto mode stream everything. |
822 | In offload lto mode stream only nodes marked as offloadable. */ |
823 | void |
824 | select_what_to_stream (void) |
825 | { |
826 | struct symtab_node *snode; |
827 | FOR_EACH_SYMBOL (snode) |
828 | snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable; |
829 | } |
830 | |
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;
  /* Cache tokens of polymorphic-call target sets already added to the
     boundary, so each set is expanded only once.  */
  hash_set<void *> reachable_call_targets;

  encoder = lto_symtab_encoder_new (for_input: false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER.  At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (encoder: in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (lsei: &lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      if (!node->need_lto_streaming)
	continue;
      add_node_to (encoder, node, include_body: true);
      lto_set_symtab_encoder_in_partition (encoder, node);
      create_references (encoder, node);
    }
  for (lsei = lsei_start_variable_in_partition (encoder: in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (lsei: &lsei))
    {
      varpool_node *vnode = lsei_varpool_node (lsei);

      if (!vnode->need_lto_streaming)
	continue;
      lto_set_symtab_encoder_in_partition (encoder, node: vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, node: vnode);
      create_references (encoder, node: vnode);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, ref: i);
      if (varpool_node *vnode = dyn_cast <varpool_node *> (p: node))
	{
	  /* Virtual tables are streamed only when devirtualization in
	     the LTRANS stage is enabled (or we are not doing WPA at
	     all), since otherwise their initializers are unused.  */
	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
							node: vnode)
	      && (((vnode->ctor_useable_for_folding_p ()
		    && (!DECL_VIRTUAL_P (vnode->decl)
			|| !flag_wpa
			|| flag_ltrans_devirtualize)))))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, node: vnode);
	      create_references (encoder, node: vnode);
	    }
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (lsei: &lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, node: callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->inlined_to);
	      add_node_to (encoder, node: callee, include_body: false);
	    }
	}
      /* Add all possible targets for late devirtualization.  */
      if (flag_ltrans_devirtualize || !flag_wpa)
	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
	  if (edge->indirect_info->polymorphic)
	    {
	      unsigned int i;
	      void *cache_token;
	      bool final;
	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets
		    (e: edge, completep: &final, cache_token: &cache_token);
	      /* A NULL cache token means the target list is not cached;
	         a previously seen token means this set was already
	         expanded into the boundary.  */
	      if (cache_token != NULL
		  && !reachable_call_targets.add (k: cache_token))
		{
		  for (i = 0; i < targets.length (); i++)
		    {
		      struct cgraph_node *callee = targets[i];

		      /* Adding an external declarations into the unit serves
			 no purpose and just increases its boundary.  */
		      if (callee->definition
			  && !lto_symtab_encoder_in_partition_p
			       (encoder, node: callee))
			{
			  gcc_assert (!callee->inlined_to);
			  add_node_to (encoder, node: callee, include_body: false);
			}
		    }
		}
	    }
    }
  /* Be sure to also insert alias targets and thunk callees.  These need
     to stay to aid local calling conventions.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, ref: i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (p: node);

      if (node->alias && node->analyzed)
	create_references (encoder, node);
      if (cnode
	  && cnode->thunk && !cnode->inlined_to)
	add_node_to (encoder, node: cnode->callees->callee, include_body: false);
      /* Follow chains of transparent aliases all the way to the
         ultimate target, encoding every node on the way.  */
      while (node->transparent_alias && node->analyzed)
	{
	  node = node->get_alias_target ();
	  if (is_a <cgraph_node *> (p: node))
	    add_node_to (encoder, node: dyn_cast <cgraph_node *> (p: node),
			 include_body: false);
	  else
	    lto_symtab_encoder_encode (encoder, node);
	}
    }
  lto_symtab_encoder_delete (encoder: in_encoder);
  return encoder;
}
967 | |
/* Output the part of the symtab in SET and VSET.

   Stream layout written here (and read back by input_cgraph_1):
   profile summary, then one record per encoded node, then edge records
   for partition/thunk nodes, a zero terminator, optional toplevel asms,
   and finally the reference lists.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;

  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, ref: i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (p: node))
	lto_output_node (ob, node: cnode, encoder);
      else
	lto_output_varpool_node (ob, node: dyn_cast<varpool_node *> (p: node), encoder);
    }

  /* Go over the nodes in SET again to write edges.  Thunks outside the
     partition also get their edges streamed, as their callees matter for
     local calling conventions.  */
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      node = dyn_cast <cgraph_node *> (p: lto_symtab_encoder_deref (encoder, ref: i));
      if (node
	  && ((node->thunk && !node->inlined_to)
	      || lto_symtab_encoder_in_partition_p (encoder, node)))
	{
	  output_outgoing_cgraph_edges (edge: node->callees, ob, encoder);
	  output_outgoing_cgraph_edges (edge: node->indirect_calls, ob, encoder);
	}
    }

  /* Zero tag terminates the node/edge stream.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output && !lto_stream_offload_p)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
1032 | |
1033 | /* Return identifier encoded in IB as a plain string. */ |
1034 | |
1035 | static tree |
1036 | read_identifier (class lto_input_block *ib) |
1037 | { |
1038 | unsigned int len = strnlen (string: ib->data + ib->p, maxlen: ib->len - ib->p - 1); |
1039 | tree id; |
1040 | |
1041 | if (ib->data[ib->p + len]) |
1042 | lto_section_overrun (ib); |
1043 | if (!len) |
1044 | { |
1045 | ib->p++; |
1046 | return NULL; |
1047 | } |
1048 | id = get_identifier (ib->data + ib->p); |
1049 | ib->p += len + 1; |
1050 | return id; |
1051 | } |
1052 | |
1053 | /* Return string encoded in IB, NULL if string is empty. */ |
1054 | |
1055 | static const char * |
1056 | read_string (class lto_input_block *ib) |
1057 | { |
1058 | unsigned int len = strnlen (string: ib->data + ib->p, maxlen: ib->len - ib->p - 1); |
1059 | const char *str; |
1060 | |
1061 | if (ib->data[ib->p + len]) |
1062 | lto_section_overrun (ib); |
1063 | if (!len) |
1064 | { |
1065 | ib->p++; |
1066 | return NULL; |
1067 | } |
1068 | str = ib->data + ib->p; |
1069 | ib->p += len + 1; |
1070 | return str; |
1071 | } |
1072 | |
/* Output function/variable tables that will allow libgomp to look up offload
   target code.
   OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
   varpool_node::get_create.  In WHOPR (partitioned) mode during the WPA stage
   both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables.

   Each table entry is streamed as an LTO_symtab_tags value followed by a
   decl reference; LTO_symtab_edge is reused to carry the OpenMP "requires"
   mask.  A zero tag terminates the section.  */

void
output_offload_tables (void)
{
  bool output_requires = (flag_openmp
			  && (omp_requires_mask & OMP_REQUIRES_TARGET_USED) != 0);
  if (vec_safe_is_empty (v: offload_funcs) && vec_safe_is_empty (v: offload_vars)
      && !output_requires)
    return;

  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_offload_table);

  for (unsigned i = 0; i < vec_safe_length (v: offload_funcs); i++)
    {
      symtab_node *node = symtab_node::get (decl: (*offload_funcs)[i]);
      if (!node)
	continue;
      /* Force output so the offload entry cannot be removed as unused.  */
      node->force_output = true;
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
      lto_output_fn_decl_ref (ob->decl_state, ob->main_stream,
			      (*offload_funcs)[i]);
    }

  for (unsigned i = 0; i < vec_safe_length (v: offload_vars); i++)
    {
      symtab_node *node = symtab_node::get (decl: (*offload_vars)[i]);
      if (!node)
	continue;
      node->force_output = true;
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_variable);
      lto_output_var_decl_ref (ob->decl_state, ob->main_stream,
			       (*offload_vars)[i]);
    }

  for (unsigned i = 0; i < vec_safe_length (v: offload_ind_funcs); i++)
    {
      symtab_node *node = symtab_node::get (decl: (*offload_ind_funcs)[i]);
      if (!node)
	continue;
      node->force_output = true;
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_indirect_function);
      lto_output_fn_decl_ref (ob->decl_state, ob->main_stream,
			      (*offload_ind_funcs)[i]);
    }

  if (output_requires)
    {
      HOST_WIDE_INT val = ((HOST_WIDE_INT) omp_requires_mask
			   & (OMP_REQUIRES_UNIFIED_ADDRESS
			      | OMP_REQUIRES_UNIFIED_SHARED_MEMORY
			      | OMP_REQUIRES_REVERSE_OFFLOAD
			      | OMP_REQUIRES_TARGET_USED));
      /* (Mis)use LTO_symtab_edge for this variable.  */
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_edge);
      streamer_write_hwi_stream (ob->main_stream, val);
    }

  /* Zero tag terminates the table.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);
  lto_destroy_simple_output_block (ob);

  /* In WHOPR mode during the WPA stage the joint offload tables need to be
     streamed to one partition only.  That's why we free offload_funcs and
     offload_vars after the first call of output_offload_tables.  */
  if (flag_wpa)
    {
      vec_free (v&: offload_funcs);
      vec_free (v&: offload_vars);
      vec_free (v&: offload_ind_funcs);
    }
}
1153 | |
/* Verify the partitioning of NODE.  On the host compiler, check that no
   cross-partition references exist when not in LTRANS mode.  On the
   accelerator (offload) compiler, diagnose symbols that were referenced
   in offloaded code but not marked for inclusion in it.  */

static inline void
verify_node_partition (symtab_node *node)
{
  /* In LTRANS mode cross-partition references are expected.  */
  if (flag_ltrans)
    return;

#ifdef ACCEL_COMPILER
  if (node->in_other_partition)
    {
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	{
	  /* Reverse-offload host ancestors are legitimately absent from
	     the device image; do not diagnose them.  */
	  if (lookup_attribute ("omp target device_ancestor_host" ,
				DECL_ATTRIBUTES (node->decl)) != NULL)
	    return;
	  error_at (DECL_SOURCE_LOCATION (node->decl),
		    "function %qs has been referenced in offloaded code but"
		    " hasn%'t been marked to be included in the offloaded code" ,
		    node->name ());
	}
      else if (VAR_P (node->decl))
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "variable %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code" ,
		  node->name ());
      else
	gcc_unreachable ();
    }
#else
  gcc_assert (!node->in_other_partition
	      && !node->used_from_other_partition);
#endif
}
1188 | |
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.  Initialize HAS_THUNK_INFO to indicate if thunk info should
   be streamed in.

   NOTE: the sequence and bit widths of the bp_unpack_value calls below
   must mirror exactly the packing done on the output side; reordering
   any of them corrupts the stream.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp, bool *has_thunk_info)
{
  /* Stash the tag in AUX; input_cgraph_1 uses it to detect nodes read
     more than once and clears it during fixup.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  node->local = bp_unpack_value (bp, nbits: 1);
  node->externally_visible = bp_unpack_value (bp, nbits: 1);
  node->no_reorder = bp_unpack_value (bp, nbits: 1);
  node->definition = bp_unpack_value (bp, nbits: 1);
  node->versionable = bp_unpack_value (bp, nbits: 1);
  node->can_change_signature = bp_unpack_value (bp, nbits: 1);
  node->redefined_extern_inline = bp_unpack_value (bp, nbits: 1);
  node->force_output = bp_unpack_value (bp, nbits: 1);
  node->forced_by_abi = bp_unpack_value (bp, nbits: 1);
  node->unique_name = bp_unpack_value (bp, nbits: 1);
  node->body_removed = bp_unpack_value (bp, nbits: 1);
  node->semantic_interposition = bp_unpack_value (bp, nbits: 1);
  node->implicit_section = bp_unpack_value (bp, nbits: 1);
  node->address_taken = bp_unpack_value (bp, nbits: 1);
  node->used_from_other_partition = bp_unpack_value (bp, nbits: 1);
  node->lowered = bp_unpack_value (bp, nbits: 1);
  /* Analyzed-ness is encoded in the node tag rather than the bitpack.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, nbits: 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in. */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The definition lives elsewhere; treat the local copy as an
	 external declaration.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, nbits: 1);
  node->transparent_alias = bp_unpack_value (bp, nbits: 1);
  node->weakref = bp_unpack_value (bp, nbits: 1);
  node->symver = bp_unpack_value (bp, nbits: 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, nbits: 2);
  node->only_called_at_startup = bp_unpack_value (bp, nbits: 1);
  node->only_called_at_exit = bp_unpack_value (bp, nbits: 1);
  node->tm_clone = bp_unpack_value (bp, nbits: 1);
  node->calls_comdat_local = bp_unpack_value (bp, nbits: 1);
  node->icf_merged = bp_unpack_value (bp, nbits: 1);
  node->nonfreeing_fn = bp_unpack_value (bp, nbits: 1);
  node->merged_comdat = bp_unpack_value (bp, nbits: 1);
  node->merged_extern_inline = bp_unpack_value (bp, nbits: 1);
  node->thunk = bp_unpack_value (bp, nbits: 1);
  node->parallelized_function = bp_unpack_value (bp, nbits: 1);
  node->declare_variant_alt = bp_unpack_value (bp, nbits: 1);
  node->calls_declare_variant_alt = bp_unpack_value (bp, nbits: 1);
  *has_thunk_info = bp_unpack_value (bp, nbits: 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->split_part = bp_unpack_value (bp, nbits: 1);
  verify_node_partition (node);
}
1261 | |
1262 | /* Return string alias is alias of. */ |
1263 | |
1264 | static tree |
1265 | get_alias_symbol (tree decl) |
1266 | { |
1267 | tree alias = lookup_attribute (attr_name: "alias" , DECL_ATTRIBUTES (decl)); |
1268 | return get_identifier (TREE_STRING_POINTER |
1269 | (TREE_VALUE (TREE_VALUE (alias)))); |
1270 | } |
1271 | |
/* Read a cgraph node record from input_block IB using the info in
   FILE_DATA.  TAG is the node's tag just read; NODES maps stream
   indices to previously read nodes (used to resolve the clone origin).
   Return the node read or overwritten.  The read order here must match
   the write order in lto_output_node exactly.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    class lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* Orders are rebased per input file so they stay unique globally.  */
  order = streamer_read_hwi (ib) + file_data->order_base;
  clone_ref = streamer_read_hwi (ib);
  bool has_thunk_info;

  fn_decl = lto_input_fn_decl_ref (ib, file_data);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* The origin was streamed earlier (clones always follow their
	 masters), so it is already present in NODES.  */
      node = dyn_cast<cgraph_node *> (p: nodes[clone_ref])->create_clone (decl: fn_decl,
	count: profile_count::uninitialized (), update_original: false,
	redirect_callers: vNULL, call_duplication_hook: false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      if (lookup_attribute (attr_name: "ifunc" , DECL_ATTRIBUTES (fn_decl)))
	node->ifunc_resolver = 1;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = profile_count::stream_in (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied to this
     node's body at LTRANS time.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push (obj: (ipa_opt_pass_d *) pass);
    }

  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !fndecl_built_in_p (node: node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d" , node->get_uid ());

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, bp: &bp, has_thunk_info: &has_thunk_info);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  /* For unanalyzed weakrefs the alias target only exists as an
     attribute on the decl; recover it from there.  */
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (decl: node->decl);
  node->profile_id = streamer_read_hwi (ib);
  /* Unit ids are rebased per input file, like orders above.  */
  node->unit_id = streamer_read_hwi (ib) + file_data->unit_base;
  if (symtab->max_unit < node->unit_id)
    symtab->max_unit = node->unit_id;
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  if (has_thunk_info)
    thunk_info::get_create (node)->stream_in (ib);

  return node;
}
1386 | |
1387 | /* Read a node from input_block IB. TAG is the node's tag just read. |
1388 | Return the node read or overwriten. */ |
1389 | |
1390 | static varpool_node * |
1391 | input_varpool_node (struct lto_file_decl_data *file_data, |
1392 | class lto_input_block *ib) |
1393 | { |
1394 | tree var_decl; |
1395 | varpool_node *node; |
1396 | struct bitpack_d bp; |
1397 | int ref = LCC_NOT_FOUND; |
1398 | int order; |
1399 | tree group; |
1400 | const char *section; |
1401 | |
1402 | order = streamer_read_hwi (ib) + file_data->order_base; |
1403 | var_decl = lto_input_var_decl_ref (ib, file_data); |
1404 | |
1405 | /* Declaration of functions can be already merged with a declaration |
1406 | from other input file. We keep cgraph unmerged until after streaming |
1407 | of ipa passes is done. Alays forcingly create a fresh node. */ |
1408 | node = varpool_node::create_empty (); |
1409 | node->decl = var_decl; |
1410 | node->register_symbol (); |
1411 | |
1412 | node->order = order; |
1413 | if (order >= symtab->order) |
1414 | symtab->order = order + 1; |
1415 | node->lto_file_data = file_data; |
1416 | |
1417 | bp = streamer_read_bitpack (ib); |
1418 | node->externally_visible = bp_unpack_value (bp: &bp, nbits: 1); |
1419 | node->no_reorder = bp_unpack_value (bp: &bp, nbits: 1); |
1420 | node->force_output = bp_unpack_value (bp: &bp, nbits: 1); |
1421 | node->forced_by_abi = bp_unpack_value (bp: &bp, nbits: 1); |
1422 | node->unique_name = bp_unpack_value (bp: &bp, nbits: 1); |
1423 | node->body_removed = bp_unpack_value (bp: &bp, nbits: 1); |
1424 | node->semantic_interposition = bp_unpack_value (bp: &bp, nbits: 1); |
1425 | node->implicit_section = bp_unpack_value (bp: &bp, nbits: 1); |
1426 | node->writeonly = bp_unpack_value (bp: &bp, nbits: 1); |
1427 | node->definition = bp_unpack_value (bp: &bp, nbits: 1); |
1428 | node->alias = bp_unpack_value (bp: &bp, nbits: 1); |
1429 | node->transparent_alias = bp_unpack_value (bp: &bp, nbits: 1); |
1430 | node->weakref = bp_unpack_value (bp: &bp, nbits: 1); |
1431 | node->symver = bp_unpack_value (bp: &bp, nbits: 1); |
1432 | node->analyzed = bp_unpack_value (bp: &bp, nbits: 1); |
1433 | node->used_from_other_partition = bp_unpack_value (bp: &bp, nbits: 1); |
1434 | node->in_other_partition = bp_unpack_value (bp: &bp, nbits: 1); |
1435 | if (node->in_other_partition) |
1436 | { |
1437 | DECL_EXTERNAL (node->decl) = 1; |
1438 | TREE_STATIC (node->decl) = 0; |
1439 | } |
1440 | if (node->alias && !node->analyzed && node->weakref) |
1441 | node->alias_target = get_alias_symbol (decl: node->decl); |
1442 | node->tls_model = (enum tls_model)bp_unpack_value (bp: &bp, nbits: 3); |
1443 | node->used_by_single_function = (enum tls_model)bp_unpack_value (bp: &bp, nbits: 1); |
1444 | node->dynamically_initialized = bp_unpack_value (bp: &bp, nbits: 1); |
1445 | group = read_identifier (ib); |
1446 | if (group) |
1447 | { |
1448 | node->set_comdat_group (group); |
1449 | ref = streamer_read_hwi (ib); |
1450 | /* Store a reference for now, and fix up later to be a pointer. */ |
1451 | node->same_comdat_group = (symtab_node *) (intptr_t) ref; |
1452 | } |
1453 | else |
1454 | node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND; |
1455 | section = read_string (ib); |
1456 | if (section) |
1457 | node->set_section_for_node (section); |
1458 | node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution, |
1459 | LDPR_NUM_KNOWN); |
1460 | verify_node_partition (node); |
1461 | return node; |
1462 | } |
1463 | |
1464 | /* Read a node from input_block IB. TAG is the node's tag just read. |
1465 | Return the node read or overwriten. */ |
1466 | |
1467 | static void |
1468 | input_ref (class lto_input_block *ib, |
1469 | symtab_node *referring_node, |
1470 | vec<symtab_node *> nodes) |
1471 | { |
1472 | symtab_node *node = NULL; |
1473 | struct bitpack_d bp; |
1474 | enum ipa_ref_use use; |
1475 | bool speculative; |
1476 | struct ipa_ref *ref; |
1477 | |
1478 | bp = streamer_read_bitpack (ib); |
1479 | use = (enum ipa_ref_use) bp_unpack_value (bp: &bp, nbits: 3); |
1480 | speculative = (enum ipa_ref_use) bp_unpack_value (bp: &bp, nbits: 1); |
1481 | node = nodes[streamer_read_hwi (ib)]; |
1482 | ref = referring_node->create_reference (referred_node: node, use_type: use); |
1483 | ref->speculative = speculative; |
1484 | if (is_a <cgraph_node *> (p: referring_node)) |
1485 | { |
1486 | ref->lto_stmt_uid = streamer_read_hwi (ib); |
1487 | bp = streamer_read_bitpack (ib); |
1488 | ref->speculative_id = bp_unpack_value (bp: &bp, nbits: 16); |
1489 | } |
1490 | } |
1491 | |
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).

   The read order (count, bitpack fields, ECF flags) must mirror
   lto_output_edge exactly.  */

static void
input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id, speculative_id;
  profile_count count;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  caller = dyn_cast<cgraph_node *> (p: nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge" );

  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (p: nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge" );
    }
  else
    callee = NULL;

  count = profile_count::stream_in (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  speculative_id = bp_unpack_value (bp: &bp, nbits: 16);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, ecf_flags: 0, count);
  else
    edge = caller->create_edge (callee, NULL, count);

  edge->indirect_inlining_edge = bp_unpack_value (bp: &bp, nbits: 1);
  edge->speculative = bp_unpack_value (bp: &bp, nbits: 1);
  edge->lto_stmt_uid = stmt_id;
  edge->speculative_id = speculative_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (bp: &bp, nbits: 1);
  edge->can_throw_external = bp_unpack_value (bp: &bp, nbits: 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (bp: &bp, nbits: 1);
  /* Indirect edges additionally carry the callee's ECF flags (unknown
     callee, so they cannot be recomputed from a decl) and the number of
     speculative call targets.  */
  if (indirect)
    {
      if (bp_unpack_value (bp: &bp, nbits: 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (bp: &bp, nbits: 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (bp: &bp, nbits: 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (bp: &bp, nbits: 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (bp: &bp, nbits: 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (bp: &bp, nbits: 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;

      edge->indirect_info->num_speculative_call_targets
	= bp_unpack_value (bp: &bp, nbits: 16);
    }
}
1562 | |
1563 | |
/* Read a cgraph from IB using the info in FILE_DATA.  Returns the vector
   of all nodes read, in stream order; edge records reference nodes by
   their index into this vector.  After reading, inlined_to and
   same_comdat_group fields — streamed as integer indices — are fixed up
   into real pointers.  */

static vec<symtab_node *>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		class lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node *> nodes = vNULL;
  symtab_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  /* Rebase orders and unit ids so symbols from this file do not collide
     with those already read from other files.  */
  file_data->order_base = symtab->order;
  file_data->unit_base = symtab->max_unit + 1;
  /* A zero tag terminates the stream.  */
  while (tag)
    {
      if (tag == LTO_symtab_edge)
	input_edge (ib, nodes, indirect: false);
      else if (tag == LTO_symtab_indirect_edge)
	input_edge (ib, nodes, indirect: true);
      else if (tag == LTO_symtab_variable)
	{
	  node = input_varpool_node (file_data, ib);
	  nodes.safe_push (obj: node);
	  lto_symtab_encoder_encode (encoder: file_data->symtab_node_encoder, node);
	}
      else
	{
	  node = input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node" );
	  nodes.safe_push (obj: node);
	  lto_symtab_encoder_encode (encoder: file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, file_data->order_base);

  /* AUX pointers should be all non-zero for function nodes read from the stream.  */
  if (flag_checking)
    {
      FOR_EACH_VEC_ELT (nodes, i, node)
	gcc_assert (node->aux || !is_a <cgraph_node *> (node));
    }
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (p: node))
	{
	  ref = (int) (intptr_t) cnode->inlined_to;

	  /* We share declaration of builtins, so we may read same node twice.  */
	  if (!node->aux)
	    continue;
	  node->aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    dyn_cast<cgraph_node *> (p: node)->inlined_to
	      = dyn_cast<cgraph_node *> (p: nodes[ref]);
	  else
	    cnode->inlined_to = NULL;
	}

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = nodes[ref];
      else
	node->same_comdat_group = NULL;
    }
  /* Re-mark function nodes in AUX so later stream-in stages can tell
     them apart from variables cheaply.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->aux = is_a <cgraph_node *> (p: node) ? (void *)1 : NULL;
  return nodes;
}
1642 | |
1643 | /* Input ipa_refs. */ |
1644 | |
1645 | static void |
1646 | input_refs (class lto_input_block *ib, |
1647 | vec<symtab_node *> nodes) |
1648 | { |
1649 | int count; |
1650 | int idx; |
1651 | while (true) |
1652 | { |
1653 | symtab_node *node; |
1654 | count = streamer_read_uhwi (ib); |
1655 | if (!count) |
1656 | break; |
1657 | idx = streamer_read_uhwi (ib); |
1658 | node = nodes[idx]; |
1659 | while (count) |
1660 | { |
1661 | input_ref (ib, referring_node: node, nodes); |
1662 | count--; |
1663 | } |
1664 | if (cgraph_node *cnode = dyn_cast <cgraph_node *> (p: node)) |
1665 | if (cnode->declare_variant_alt) |
1666 | omp_lto_input_declare_variant_alt (ib, cnode, nodes); |
1667 | } |
1668 | } |
1669 | |
1670 | /* Input profile_info from IB. */ |
1671 | static void |
1672 | input_profile_summary (class lto_input_block *ib, |
1673 | struct lto_file_decl_data *file_data) |
1674 | { |
1675 | unsigned int runs = streamer_read_uhwi (ib); |
1676 | if (runs) |
1677 | { |
1678 | file_data->profile_info.runs = runs; |
1679 | |
1680 | /* IPA-profile computes hot bb threshold based on cumulated |
1681 | whole program profile. We need to stream it down to ltrans. */ |
1682 | if (flag_ltrans) |
1683 | set_hot_bb_threshold (streamer_read_gcov_count (ib)); |
1684 | } |
1685 | |
1686 | } |
1687 | |
1688 | /* Rescale profile summaries to the same number of runs in the whole unit. */ |
1689 | |
1690 | static void |
1691 | merge_profile_summaries (struct lto_file_decl_data **file_data_vec) |
1692 | { |
1693 | struct lto_file_decl_data *file_data; |
1694 | unsigned int j; |
1695 | gcov_unsigned_t max_runs = 0; |
1696 | struct cgraph_node *node; |
1697 | struct cgraph_edge *edge; |
1698 | |
1699 | /* Find unit with maximal number of runs. If we ever get serious about |
1700 | roundoff errors, we might also consider computing smallest common |
1701 | multiply. */ |
1702 | for (j = 0; (file_data = file_data_vec[j]) != NULL; j++) |
1703 | if (max_runs < file_data->profile_info.runs) |
1704 | max_runs = file_data->profile_info.runs; |
1705 | |
1706 | if (!max_runs) |
1707 | return; |
1708 | |
1709 | /* Simple overflow check. We probably don't need to support that many train |
1710 | runs. Such a large value probably imply data corruption anyway. */ |
1711 | if (max_runs > INT_MAX / REG_BR_PROB_BASE) |
1712 | { |
1713 | sorry ("At most %i profile runs is supported. Perhaps corrupted profile?" , |
1714 | INT_MAX / REG_BR_PROB_BASE); |
1715 | return; |
1716 | } |
1717 | |
1718 | profile_info = XCNEW (gcov_summary); |
1719 | profile_info->runs = max_runs; |
1720 | |
1721 | /* If merging already happent at WPA time, we are done. */ |
1722 | if (flag_ltrans) |
1723 | return; |
1724 | |
1725 | /* Now compute count_materialization_scale of each node. |
1726 | During LTRANS we already have values of count_materialization_scale |
1727 | computed, so just update them. */ |
1728 | FOR_EACH_FUNCTION (node) |
1729 | if (node->lto_file_data |
1730 | && node->lto_file_data->profile_info.runs) |
1731 | { |
1732 | int scale; |
1733 | |
1734 | scale = RDIV (node->count_materialization_scale * max_runs, |
1735 | node->lto_file_data->profile_info.runs); |
1736 | node->count_materialization_scale = scale; |
1737 | if (scale < 0) |
1738 | fatal_error (input_location, "Profile information in %s corrupted" , |
1739 | file_data->file_name); |
1740 | |
1741 | if (scale == REG_BR_PROB_BASE) |
1742 | continue; |
1743 | for (edge = node->callees; edge; edge = edge->next_callee) |
1744 | if (edge->count.ipa ().nonzero_p ()) |
1745 | edge->count = edge->count.apply_scale (num: scale, REG_BR_PROB_BASE); |
1746 | for (edge = node->indirect_calls; edge; edge = edge->next_callee) |
1747 | if (edge->count.ipa ().nonzero_p ()) |
1748 | edge->count = edge->count.apply_scale (num: scale, REG_BR_PROB_BASE); |
1749 | if (node->count.ipa ().nonzero_p ()) |
1750 | node->count = node->count.apply_scale (num: scale, REG_BR_PROB_BASE); |
1751 | } |
1752 | } |
1753 | |
1754 | /* Input and merge the symtab from each of the .o files passed to |
1755 | lto1. */ |
1756 | |
1757 | void |
1758 | input_symtab (void) |
1759 | { |
1760 | struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data (); |
1761 | struct lto_file_decl_data *file_data; |
1762 | unsigned int j = 0; |
1763 | struct cgraph_node *node; |
1764 | |
1765 | while ((file_data = file_data_vec[j++])) |
1766 | { |
1767 | const char *data; |
1768 | size_t len; |
1769 | class lto_input_block *ib; |
1770 | vec<symtab_node *> nodes; |
1771 | |
1772 | ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes, |
1773 | &data, &len); |
1774 | if (!ib) |
1775 | fatal_error (input_location, |
1776 | "cannot find LTO cgraph in %s" , file_data->file_name); |
1777 | input_profile_summary (ib, file_data); |
1778 | file_data->symtab_node_encoder = lto_symtab_encoder_new (for_input: true); |
1779 | nodes = input_cgraph_1 (file_data, ib); |
1780 | lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes, |
1781 | ib, data, len); |
1782 | |
1783 | ib = lto_create_simple_input_block (file_data, LTO_section_refs, |
1784 | &data, &len); |
1785 | if (!ib) |
1786 | fatal_error (input_location, "cannot find LTO section refs in %s" , |
1787 | file_data->file_name); |
1788 | input_refs (ib, nodes); |
1789 | lto_destroy_simple_input_block (file_data, LTO_section_refs, |
1790 | ib, data, len); |
1791 | if (flag_ltrans) |
1792 | input_cgraph_opt_summary (nodes); |
1793 | nodes.release (); |
1794 | } |
1795 | |
1796 | merge_profile_summaries (file_data_vec); |
1797 | |
1798 | /* Clear out the aux field that was used to store enough state to |
1799 | tell which nodes should be overwritten. */ |
1800 | FOR_EACH_FUNCTION (node) |
1801 | { |
1802 | /* Some nodes may have been created by cgraph_node. This |
1803 | happens when the callgraph contains nested functions. If the |
1804 | node for the parent function was never emitted to the gimple |
1805 | file, cgraph_node will create a node for it when setting the |
1806 | context of the nested function. */ |
1807 | if (node->lto_file_data) |
1808 | node->aux = NULL; |
1809 | } |
1810 | } |
1811 | |
1812 | static void |
1813 | omp_requires_to_name (char *buf, size_t size, HOST_WIDE_INT requires_mask) |
1814 | { |
1815 | char *end = buf + size, *p = buf; |
1816 | if (requires_mask & GOMP_REQUIRES_UNIFIED_ADDRESS) |
1817 | p += snprintf (s: p, maxlen: end - p, format: "unified_address" ); |
1818 | if (requires_mask & GOMP_REQUIRES_UNIFIED_SHARED_MEMORY) |
1819 | p += snprintf (s: p, maxlen: end - p, format: "%sunified_shared_memory" , |
1820 | (p == buf ? "" : ", " )); |
1821 | if (requires_mask & GOMP_REQUIRES_REVERSE_OFFLOAD) |
1822 | p += snprintf (s: p, maxlen: end - p, format: "%sreverse_offload" , |
1823 | (p == buf ? "" : ", " )); |
1824 | } |
1825 | |
1826 | /* Input function/variable tables that will allow libgomp to look up offload |
1827 | target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS. */ |
1828 | |
1829 | void |
1830 | input_offload_tables (bool do_force_output) |
1831 | { |
1832 | struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data (); |
1833 | struct lto_file_decl_data *file_data; |
1834 | unsigned int j = 0; |
1835 | const char *requires_fn = NULL; |
1836 | tree requires_decl = NULL_TREE; |
1837 | |
1838 | omp_requires_mask = (omp_requires) 0; |
1839 | |
1840 | while ((file_data = file_data_vec[j++])) |
1841 | { |
1842 | const char *data; |
1843 | size_t len; |
1844 | class lto_input_block *ib |
1845 | = lto_create_simple_input_block (file_data, LTO_section_offload_table, |
1846 | &data, &len); |
1847 | if (!ib) |
1848 | continue; |
1849 | |
1850 | tree tmp_decl = NULL_TREE; |
1851 | enum LTO_symtab_tags tag |
1852 | = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag); |
1853 | while (tag) |
1854 | { |
1855 | if (tag == LTO_symtab_unavail_node) |
1856 | { |
1857 | tree fn_decl |
1858 | = lto_input_fn_decl_ref (ib, file_data); |
1859 | vec_safe_push (v&: offload_funcs, obj: fn_decl); |
1860 | |
1861 | /* Prevent IPA from removing fn_decl as unreachable, since there |
1862 | may be no refs from the parent function to child_fn in offload |
1863 | LTO mode. */ |
1864 | if (do_force_output) |
1865 | cgraph_node::get (decl: fn_decl)->mark_force_output (); |
1866 | tmp_decl = fn_decl; |
1867 | } |
1868 | else if (tag == LTO_symtab_variable) |
1869 | { |
1870 | tree var_decl |
1871 | = lto_input_var_decl_ref (ib, file_data); |
1872 | vec_safe_push (v&: offload_vars, obj: var_decl); |
1873 | |
1874 | /* Prevent IPA from removing var_decl as unused, since there |
1875 | may be no refs to var_decl in offload LTO mode. */ |
1876 | if (do_force_output) |
1877 | varpool_node::get (decl: var_decl)->force_output = 1; |
1878 | tmp_decl = var_decl; |
1879 | } |
1880 | else if (tag == LTO_symtab_indirect_function) |
1881 | { |
1882 | tree fn_decl |
1883 | = lto_input_fn_decl_ref (ib, file_data); |
1884 | vec_safe_push (v&: offload_ind_funcs, obj: fn_decl); |
1885 | |
1886 | /* Prevent IPA from removing fn_decl as unreachable, since there |
1887 | may be no refs from the parent function to child_fn in offload |
1888 | LTO mode. */ |
1889 | if (do_force_output) |
1890 | cgraph_node::get (decl: fn_decl)->mark_force_output (); |
1891 | tmp_decl = fn_decl; |
1892 | } |
1893 | else if (tag == LTO_symtab_edge) |
1894 | { |
1895 | static bool error_emitted = false; |
1896 | HOST_WIDE_INT val = streamer_read_hwi (ib); |
1897 | |
1898 | if (omp_requires_mask == 0) |
1899 | { |
1900 | omp_requires_mask = (omp_requires) val; |
1901 | requires_decl = tmp_decl; |
1902 | requires_fn = file_data->file_name; |
1903 | } |
1904 | else if (omp_requires_mask != val && !error_emitted) |
1905 | { |
1906 | const char *fn1 = requires_fn; |
1907 | if (requires_decl != NULL_TREE) |
1908 | { |
1909 | while (DECL_CONTEXT (requires_decl) != NULL_TREE |
1910 | && TREE_CODE (requires_decl) != TRANSLATION_UNIT_DECL) |
1911 | requires_decl = DECL_CONTEXT (requires_decl); |
1912 | if (requires_decl != NULL_TREE) |
1913 | fn1 = IDENTIFIER_POINTER (DECL_NAME (requires_decl)); |
1914 | } |
1915 | |
1916 | const char *fn2 = file_data->file_name; |
1917 | if (tmp_decl != NULL_TREE) |
1918 | { |
1919 | while (DECL_CONTEXT (tmp_decl) != NULL_TREE |
1920 | && TREE_CODE (tmp_decl) != TRANSLATION_UNIT_DECL) |
1921 | tmp_decl = DECL_CONTEXT (tmp_decl); |
1922 | if (tmp_decl != NULL_TREE) |
1923 | fn2 = IDENTIFIER_POINTER (DECL_NAME (tmp_decl)); |
1924 | } |
1925 | if (fn1 == fn2) |
1926 | { |
1927 | fn1 = requires_fn; |
1928 | fn2 = file_data->file_name; |
1929 | } |
1930 | |
1931 | char buf1[sizeof ("unified_address, unified_shared_memory, " |
1932 | "reverse_offload" )]; |
1933 | char buf2[sizeof ("unified_address, unified_shared_memory, " |
1934 | "reverse_offload" )]; |
1935 | omp_requires_to_name (buf: buf2, size: sizeof (buf2), |
1936 | requires_mask: val != OMP_REQUIRES_TARGET_USED |
1937 | ? val |
1938 | : (HOST_WIDE_INT) omp_requires_mask); |
1939 | if (val != OMP_REQUIRES_TARGET_USED |
1940 | && omp_requires_mask != OMP_REQUIRES_TARGET_USED) |
1941 | { |
1942 | omp_requires_to_name (buf: buf1, size: sizeof (buf1), |
1943 | requires_mask: omp_requires_mask); |
1944 | error ("OpenMP %<requires%> directive with non-identical " |
1945 | "clauses in multiple compilation units: %qs vs. " |
1946 | "%qs" , buf1, buf2); |
1947 | inform (UNKNOWN_LOCATION, "%qs has %qs" , fn1, buf1); |
1948 | inform (UNKNOWN_LOCATION, "%qs has %qs" , fn2, buf2); |
1949 | } |
1950 | else |
1951 | { |
1952 | error ("OpenMP %<requires%> directive with %qs specified " |
1953 | "only in some compilation units" , buf2); |
1954 | inform (UNKNOWN_LOCATION, "%qs has %qs" , |
1955 | val != OMP_REQUIRES_TARGET_USED ? fn2 : fn1, |
1956 | buf2); |
1957 | inform (UNKNOWN_LOCATION, "but %qs has not" , |
1958 | val != OMP_REQUIRES_TARGET_USED ? fn1 : fn2); |
1959 | } |
1960 | error_emitted = true; |
1961 | } |
1962 | } |
1963 | else |
1964 | fatal_error (input_location, |
1965 | "invalid offload table in %s" , file_data->file_name); |
1966 | |
1967 | tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag); |
1968 | } |
1969 | |
1970 | lto_destroy_simple_input_block (file_data, LTO_section_offload_table, |
1971 | ib, data, len); |
1972 | } |
1973 | #ifdef ACCEL_COMPILER |
1974 | char *omp_requires_file = getenv ("GCC_OFFLOAD_OMP_REQUIRES_FILE" ); |
1975 | if (omp_requires_file == NULL || omp_requires_file[0] == '\0') |
1976 | fatal_error (input_location, "GCC_OFFLOAD_OMP_REQUIRES_FILE unset" ); |
1977 | FILE *f = fopen (omp_requires_file, "wb" ); |
1978 | if (!f) |
1979 | fatal_error (input_location, "Cannot open omp_requires file %qs" , |
1980 | omp_requires_file); |
1981 | uint32_t req_mask = omp_requires_mask; |
1982 | fwrite (&req_mask, sizeof (req_mask), 1, f); |
1983 | fclose (f); |
1984 | #endif |
1985 | } |
1986 | |
1987 | /* True when we need optimization summary for NODE. */ |
1988 | |
1989 | static int |
1990 | output_cgraph_opt_summary_p (struct cgraph_node *node) |
1991 | { |
1992 | if (node->clone_of || node->former_clone_of) |
1993 | return true; |
1994 | clone_info *info = clone_info::get (node); |
1995 | return info && (info->tree_map || info->param_adjustments); |
1996 | } |
1997 | |
/* Output optimization summary for EDGE to OB.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: no per-edge optimization data is currently
     streamed.  Kept as a hook so output_node_opt_summary can walk all
     edges symmetrically with input_edge_opt_summary.  */
}
2004 | |
2005 | /* Output optimization summary for NODE to OB. */ |
2006 | |
2007 | static void |
2008 | output_node_opt_summary (struct output_block *ob, |
2009 | struct cgraph_node *node, |
2010 | lto_symtab_encoder_t encoder) |
2011 | { |
2012 | struct ipa_replace_map *map; |
2013 | int i; |
2014 | struct cgraph_edge *e; |
2015 | |
2016 | /* TODO: Should this code be moved to ipa-param-manipulation? */ |
2017 | struct bitpack_d bp; |
2018 | bp = bitpack_create (s: ob->main_stream); |
2019 | clone_info *info = clone_info::get (node); |
2020 | |
2021 | bp_pack_value (bp: &bp, val: (info && info->param_adjustments != NULL), nbits: 1); |
2022 | streamer_write_bitpack (bp: &bp); |
2023 | if (ipa_param_adjustments *adjustments |
2024 | = info ? info->param_adjustments : NULL) |
2025 | { |
2026 | streamer_write_uhwi (ob, vec_safe_length (v: adjustments->m_adj_params)); |
2027 | ipa_adjusted_param *adj; |
2028 | FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj) |
2029 | { |
2030 | bp = bitpack_create (s: ob->main_stream); |
2031 | bp_pack_value (bp: &bp, val: adj->base_index, IPA_PARAM_MAX_INDEX_BITS); |
2032 | bp_pack_value (bp: &bp, val: adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS); |
2033 | bp_pack_value (bp: &bp, val: adj->op, nbits: 2); |
2034 | bp_pack_value (bp: &bp, val: adj->param_prefix_index, nbits: 2); |
2035 | bp_pack_value (bp: &bp, val: adj->prev_clone_adjustment, nbits: 1); |
2036 | bp_pack_value (bp: &bp, val: adj->reverse, nbits: 1); |
2037 | bp_pack_value (bp: &bp, val: adj->user_flag, nbits: 1); |
2038 | streamer_write_bitpack (bp: &bp); |
2039 | if (adj->op == IPA_PARAM_OP_SPLIT |
2040 | || adj->op == IPA_PARAM_OP_NEW) |
2041 | { |
2042 | stream_write_tree (ob, adj->type, true); |
2043 | if (adj->op == IPA_PARAM_OP_SPLIT) |
2044 | { |
2045 | stream_write_tree (ob, adj->alias_ptr_type, true); |
2046 | streamer_write_uhwi (ob, adj->unit_offset); |
2047 | } |
2048 | } |
2049 | } |
2050 | streamer_write_hwi (ob, adjustments->m_always_copy_start); |
2051 | bp = bitpack_create (s: ob->main_stream); |
2052 | bp_pack_value (bp: &bp, val: info->param_adjustments->m_skip_return, nbits: 1); |
2053 | streamer_write_bitpack (bp: &bp); |
2054 | } |
2055 | |
2056 | streamer_write_uhwi (ob, info ? vec_safe_length (v: info->tree_map) : 0); |
2057 | if (info) |
2058 | FOR_EACH_VEC_SAFE_ELT (info->tree_map, i, map) |
2059 | { |
2060 | streamer_write_uhwi (ob, map->parm_num); |
2061 | gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION); |
2062 | stream_write_tree (ob, map->new_tree, true); |
2063 | } |
2064 | |
2065 | if (lto_symtab_encoder_in_partition_p (encoder, node)) |
2066 | { |
2067 | for (e = node->callees; e; e = e->next_callee) |
2068 | output_edge_opt_summary (ob, edge: e); |
2069 | for (e = node->indirect_calls; e; e = e->next_callee) |
2070 | output_edge_opt_summary (ob, edge: e); |
2071 | } |
2072 | } |
2073 | |
2074 | /* Output optimization summaries stored in callgraph. |
2075 | At the moment it is the clone info structure. */ |
2076 | |
2077 | static void |
2078 | output_cgraph_opt_summary (void) |
2079 | { |
2080 | int i, n_nodes; |
2081 | lto_symtab_encoder_t encoder; |
2082 | struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum); |
2083 | unsigned count = 0; |
2084 | |
2085 | ob->symbol = NULL; |
2086 | encoder = ob->decl_state->symtab_node_encoder; |
2087 | n_nodes = lto_symtab_encoder_size (encoder); |
2088 | for (i = 0; i < n_nodes; i++) |
2089 | { |
2090 | symtab_node *node = lto_symtab_encoder_deref (encoder, ref: i); |
2091 | cgraph_node *cnode = dyn_cast <cgraph_node *> (p: node); |
2092 | if (cnode && output_cgraph_opt_summary_p (node: cnode)) |
2093 | count++; |
2094 | } |
2095 | streamer_write_uhwi (ob, count); |
2096 | for (i = 0; i < n_nodes; i++) |
2097 | { |
2098 | symtab_node *node = lto_symtab_encoder_deref (encoder, ref: i); |
2099 | cgraph_node *cnode = dyn_cast <cgraph_node *> (p: node); |
2100 | if (cnode && output_cgraph_opt_summary_p (node: cnode)) |
2101 | { |
2102 | streamer_write_uhwi (ob, i); |
2103 | output_node_opt_summary (ob, node: cnode, encoder); |
2104 | } |
2105 | } |
2106 | produce_asm (ob, NULL); |
2107 | destroy_output_block (ob); |
2108 | } |
2109 | |
/* Input optimisation summary of EDGE.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			class lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: mirrors output_edge_opt_summary, which streams
     no per-edge data at present.  */
}
2117 | |
2118 | /* Input optimisation summary of NODE. */ |
2119 | |
2120 | static void |
2121 | input_node_opt_summary (struct cgraph_node *node, |
2122 | class lto_input_block *ib_main, |
2123 | class data_in *data_in) |
2124 | { |
2125 | int i; |
2126 | int count; |
2127 | struct cgraph_edge *e; |
2128 | |
2129 | /* TODO: Should this code be moved to ipa-param-manipulation? */ |
2130 | struct bitpack_d bp; |
2131 | bp = streamer_read_bitpack (ib: ib_main); |
2132 | bool have_adjustments = bp_unpack_value (bp: &bp, nbits: 1); |
2133 | clone_info *info = clone_info::get_create (node); |
2134 | |
2135 | if (have_adjustments) |
2136 | { |
2137 | count = streamer_read_uhwi (ib_main); |
2138 | vec<ipa_adjusted_param, va_gc> *new_params = NULL; |
2139 | for (i = 0; i < count; i++) |
2140 | { |
2141 | ipa_adjusted_param adj; |
2142 | memset (s: &adj, c: 0, n: sizeof (adj)); |
2143 | bp = streamer_read_bitpack (ib: ib_main); |
2144 | adj.base_index = bp_unpack_value (bp: &bp, IPA_PARAM_MAX_INDEX_BITS); |
2145 | adj.prev_clone_index |
2146 | = bp_unpack_value (bp: &bp, IPA_PARAM_MAX_INDEX_BITS); |
2147 | adj.op = (enum ipa_parm_op) bp_unpack_value (bp: &bp, nbits: 2); |
2148 | adj.param_prefix_index = bp_unpack_value (bp: &bp, nbits: 2); |
2149 | adj.prev_clone_adjustment = bp_unpack_value (bp: &bp, nbits: 1); |
2150 | adj.reverse = bp_unpack_value (bp: &bp, nbits: 1); |
2151 | adj.user_flag = bp_unpack_value (bp: &bp, nbits: 1); |
2152 | if (adj.op == IPA_PARAM_OP_SPLIT |
2153 | || adj.op == IPA_PARAM_OP_NEW) |
2154 | { |
2155 | adj.type = stream_read_tree (ib_main, data_in); |
2156 | if (adj.op == IPA_PARAM_OP_SPLIT) |
2157 | { |
2158 | adj.alias_ptr_type = stream_read_tree (ib_main, data_in); |
2159 | adj.unit_offset = streamer_read_uhwi (ib_main); |
2160 | } |
2161 | } |
2162 | vec_safe_push (v&: new_params, obj: adj); |
2163 | } |
2164 | int always_copy_start = streamer_read_hwi (ib_main); |
2165 | bp = streamer_read_bitpack (ib: ib_main); |
2166 | bool skip_return = bp_unpack_value (bp: &bp, nbits: 1); |
2167 | info->param_adjustments |
2168 | = (new (ggc_alloc <ipa_param_adjustments> ()) |
2169 | ipa_param_adjustments (new_params, always_copy_start, skip_return)); |
2170 | } |
2171 | |
2172 | count = streamer_read_uhwi (ib_main); |
2173 | for (i = 0; i < count; i++) |
2174 | { |
2175 | struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> (); |
2176 | |
2177 | vec_safe_push (v&: info->tree_map, obj: map); |
2178 | map->parm_num = streamer_read_uhwi (ib_main); |
2179 | map->new_tree = stream_read_tree (ib_main, data_in); |
2180 | } |
2181 | for (e = node->callees; e; e = e->next_callee) |
2182 | input_edge_opt_summary (edge: e, ib_main); |
2183 | for (e = node->indirect_calls; e; e = e->next_callee) |
2184 | input_edge_opt_summary (edge: e, ib_main); |
2185 | } |
2186 | |
2187 | /* Read section in file FILE_DATA of length LEN with data DATA. */ |
2188 | |
2189 | static void |
2190 | input_cgraph_opt_section (struct lto_file_decl_data *file_data, |
2191 | const char *data, size_t len, |
2192 | vec<symtab_node *> nodes) |
2193 | { |
2194 | const struct lto_function_header * = |
2195 | (const struct lto_function_header *) data; |
2196 | const int cfg_offset = sizeof (struct lto_function_header); |
2197 | const int main_offset = cfg_offset + header->cfg_size; |
2198 | const int string_offset = main_offset + header->main_size; |
2199 | class data_in *data_in; |
2200 | unsigned int i; |
2201 | unsigned int count; |
2202 | |
2203 | lto_input_block ib_main ((const char *) data + main_offset, |
2204 | header->main_size, file_data); |
2205 | |
2206 | data_in = |
2207 | lto_data_in_create (file_data, (const char *) data + string_offset, |
2208 | header->string_size, vNULL); |
2209 | count = streamer_read_uhwi (&ib_main); |
2210 | |
2211 | for (i = 0; i < count; i++) |
2212 | { |
2213 | int ref = streamer_read_uhwi (&ib_main); |
2214 | input_node_opt_summary (node: dyn_cast<cgraph_node *> (p: nodes[ref]), |
2215 | ib_main: &ib_main, data_in); |
2216 | } |
2217 | lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data, |
2218 | len); |
2219 | lto_data_in_delete (data_in); |
2220 | } |
2221 | |
2222 | /* Input optimization summary of cgraph. */ |
2223 | |
2224 | static void |
2225 | input_cgraph_opt_summary (vec<symtab_node *> nodes) |
2226 | { |
2227 | struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data (); |
2228 | struct lto_file_decl_data *file_data; |
2229 | unsigned int j = 0; |
2230 | |
2231 | while ((file_data = file_data_vec[j++])) |
2232 | { |
2233 | size_t len; |
2234 | const char *data |
2235 | = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum, |
2236 | &len); |
2237 | if (data) |
2238 | input_cgraph_opt_section (file_data, data, len, nodes); |
2239 | } |
2240 | } |
2241 | |