1 | /* Top-level LTO routines. |
2 | Copyright (C) 2009-2024 Free Software Foundation, Inc. |
3 | Contributed by CodeSourcery, Inc. |
4 | |
5 | This file is part of GCC. |
6 | |
7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free |
9 | Software Foundation; either version 3, or (at your option) any later |
10 | version. |
11 | |
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
15 | for more details. |
16 | |
17 | You should have received a copy of the GNU General Public License |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ |
20 | |
21 | #include "config.h" |
22 | #include "system.h" |
23 | #include "coretypes.h" |
24 | #include "tm.h" |
25 | #include "function.h" |
26 | #include "bitmap.h" |
27 | #include "basic-block.h" |
28 | #include "tree.h" |
29 | #include "gimple.h" |
30 | #include "cfghooks.h" |
31 | #include "alloc-pool.h" |
32 | #include "tree-pass.h" |
33 | #include "tree-streamer.h" |
34 | #include "cgraph.h" |
35 | #include "opts.h" |
36 | #include "toplev.h" |
37 | #include "stor-layout.h" |
38 | #include "symbol-summary.h" |
39 | #include "tree-vrp.h" |
40 | #include "sreal.h" |
41 | #include "ipa-cp.h" |
42 | #include "ipa-prop.h" |
43 | #include "common.h" |
44 | #include "debug.h" |
45 | #include "lto.h" |
46 | #include "lto-section-names.h" |
47 | #include "splay-tree.h" |
48 | #include "lto-partition.h" |
49 | #include "context.h" |
50 | #include "pass_manager.h" |
51 | #include "ipa-fnsummary.h" |
52 | #include "ipa-utils.h" |
53 | #include "gomp-constants.h" |
54 | #include "lto-symtab.h" |
55 | #include "stringpool.h" |
56 | #include "fold-const.h" |
57 | #include "attribs.h" |
58 | #include "builtins.h" |
59 | #include "lto-common.h" |
60 | #include "tree-pretty-print.h" |
61 | #include "print-tree.h" |
62 | |
/* True when no new types are going to be streamed from the global stream.  */

static bool type_streaming_finished = false;

/* NOTE(review): presumably the personality routine of the first function
   read; kept as a GC/PCH root via GTY — confirm against users of this
   variable elsewhere in the file.  */
GTY(()) tree first_personality_decl;
68 | |
69 | /* Returns a hash code for P. */ |
70 | |
71 | static hashval_t |
72 | hash_name (const void *p) |
73 | { |
74 | const struct lto_section_slot *ds = (const struct lto_section_slot *) p; |
75 | return (hashval_t) htab_hash_string (ds->name); |
76 | } |
77 | |
78 | |
79 | /* Returns nonzero if P1 and P2 are equal. */ |
80 | |
81 | static int |
82 | eq_name (const void *p1, const void *p2) |
83 | { |
84 | const struct lto_section_slot *s1 |
85 | = (const struct lto_section_slot *) p1; |
86 | const struct lto_section_slot *s2 |
87 | = (const struct lto_section_slot *) p2; |
88 | |
89 | return strcmp (s1: s1->name, s2: s2->name) == 0; |
90 | } |
91 | |
92 | /* Free lto_section_slot. */ |
93 | |
94 | static void |
95 | free_with_string (void *arg) |
96 | { |
97 | struct lto_section_slot *s = (struct lto_section_slot *)arg; |
98 | |
99 | free (CONST_CAST (char *, s->name)); |
100 | free (ptr: arg); |
101 | } |
102 | |
103 | /* Create section hash table. */ |
104 | |
105 | htab_t |
106 | lto_obj_create_section_hash_table (void) |
107 | { |
108 | return htab_create (37, hash_name, eq_name, free_with_string); |
109 | } |
110 | |
111 | /* Delete an allocated integer KEY in the splay tree. */ |
112 | |
113 | static void |
114 | lto_splay_tree_delete_id (splay_tree_key key) |
115 | { |
116 | free (ptr: (void *) key); |
117 | } |
118 | |
119 | /* Compare splay tree node ids A and B. */ |
120 | |
121 | static int |
122 | lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b) |
123 | { |
124 | unsigned HOST_WIDE_INT ai; |
125 | unsigned HOST_WIDE_INT bi; |
126 | |
127 | ai = *(unsigned HOST_WIDE_INT *) a; |
128 | bi = *(unsigned HOST_WIDE_INT *) b; |
129 | |
130 | if (ai < bi) |
131 | return -1; |
132 | else if (ai > bi) |
133 | return 1; |
134 | return 0; |
135 | } |
136 | |
137 | /* Look up splay tree node by ID in splay tree T. */ |
138 | |
139 | static splay_tree_node |
140 | lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id) |
141 | { |
142 | return splay_tree_lookup (t, (splay_tree_key) &id); |
143 | } |
144 | |
145 | /* Check if KEY has ID. */ |
146 | |
147 | static bool |
148 | lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id) |
149 | { |
150 | return *(unsigned HOST_WIDE_INT *) key == id; |
151 | } |
152 | |
153 | /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value. |
154 | The ID is allocated separately because we need HOST_WIDE_INTs which may |
155 | be wider than a splay_tree_key. */ |
156 | |
157 | static void |
158 | lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id, |
159 | struct lto_file_decl_data *file_data) |
160 | { |
161 | unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT); |
162 | *idp = id; |
163 | splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data); |
164 | } |
165 | |
166 | /* Create a splay tree. */ |
167 | |
168 | static splay_tree |
169 | lto_splay_tree_new (void) |
170 | { |
171 | return splay_tree_new (lto_splay_tree_compare_ids, |
172 | lto_splay_tree_delete_id, |
173 | NULL); |
174 | } |
175 | |
176 | /* Decode the content of memory pointed to by DATA in the in decl |
177 | state object STATE. DATA_IN points to a data_in structure for |
178 | decoding. Return the address after the decoded object in the |
179 | input. */ |
180 | |
181 | static const uint32_t * |
182 | lto_read_in_decl_state (class data_in *data_in, const uint32_t *data, |
183 | struct lto_in_decl_state *state) |
184 | { |
185 | uint32_t ix; |
186 | tree decl; |
187 | uint32_t i, j; |
188 | |
189 | ix = *data++; |
190 | state->compressed = ix & 1; |
191 | ix /= 2; |
192 | decl = streamer_tree_cache_get_tree (cache: data_in->reader_cache, ix); |
193 | if (!VAR_OR_FUNCTION_DECL_P (decl)) |
194 | { |
195 | gcc_assert (decl == void_type_node); |
196 | decl = NULL_TREE; |
197 | } |
198 | state->fn_decl = decl; |
199 | |
200 | for (i = 0; i < LTO_N_DECL_STREAMS; i++) |
201 | { |
202 | uint32_t size = *data++; |
203 | vec<tree, va_gc> *decls = NULL; |
204 | vec_alloc (v&: decls, nelems: size); |
205 | |
206 | for (j = 0; j < size; j++) |
207 | vec_safe_push (v&: decls, |
208 | obj: streamer_tree_cache_get_tree (cache: data_in->reader_cache, |
209 | ix: data[j])); |
210 | |
211 | state->streams[i] = decls; |
212 | data += size; |
213 | } |
214 | |
215 | return data; |
216 | } |
217 | |
218 | |
/* Global canonical type table.  */
static htab_t gimple_canonical_types;
/* Cache of already-computed canonical type hash values, keyed by type.  */
static hash_map<const_tree, hashval_t> *canonical_type_hash_cache;
/* Statistics counters for the cache above.  */
static unsigned long num_canonical_type_hash_entries;
static unsigned long num_canonical_type_hash_queries;

/* Types postponed for registration to the canonical type table.
   During streaming we postpone all TYPE_CXX_ODR_P types so we can later
   decide whether there is conflict with non-ODR type or not.  */
static GTY(()) vec<tree, va_gc> *types_to_register = NULL;

static void iterative_hash_canonical_type (tree type, inchash::hash &hstate);
static hashval_t gimple_canonical_type_hash (const void *p);
static hashval_t gimple_register_canonical_type_1 (tree t, hashval_t hash);
233 | |
234 | /* Returning a hash value for gimple type TYPE. |
235 | |
236 | The hash value returned is equal for types considered compatible |
237 | by gimple_canonical_types_compatible_p. */ |
238 | |
239 | static hashval_t |
240 | hash_canonical_type (tree type) |
241 | { |
242 | inchash::hash hstate; |
243 | enum tree_code code; |
244 | |
245 | /* We compute alias sets only for types that needs them. |
246 | Be sure we do not recurse to something else as we cannot hash incomplete |
247 | types in a way they would have same hash value as compatible complete |
248 | types. */ |
249 | gcc_checking_assert (type_with_alias_set_p (type)); |
250 | |
251 | /* Combine a few common features of types so that types are grouped into |
252 | smaller sets; when searching for existing matching types to merge, |
253 | only existing types having the same features as the new type will be |
254 | checked. */ |
255 | code = tree_code_for_canonical_type_merging (TREE_CODE (type)); |
256 | hstate.add_int (v: code); |
257 | hstate.add_int (TYPE_MODE (type)); |
258 | |
259 | /* Incorporate common features of numerical types. */ |
260 | if (INTEGRAL_TYPE_P (type) |
261 | || SCALAR_FLOAT_TYPE_P (type) |
262 | || FIXED_POINT_TYPE_P (type) |
263 | || TREE_CODE (type) == OFFSET_TYPE |
264 | || POINTER_TYPE_P (type)) |
265 | { |
266 | hstate.add_int (TYPE_PRECISION (type)); |
267 | if (!type_with_interoperable_signedness (type)) |
268 | hstate.add_int (TYPE_UNSIGNED (type)); |
269 | } |
270 | |
271 | if (VECTOR_TYPE_P (type)) |
272 | { |
273 | hstate.add_poly_int (v: TYPE_VECTOR_SUBPARTS (node: type)); |
274 | hstate.add_int (TYPE_UNSIGNED (type)); |
275 | } |
276 | |
277 | if (TREE_CODE (type) == COMPLEX_TYPE) |
278 | hstate.add_int (TYPE_UNSIGNED (type)); |
279 | |
280 | /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be |
281 | interoperable with "signed char". Unless all frontends are revisited to |
282 | agree on these types, we must ignore the flag completely. */ |
283 | |
284 | /* Fortran standard define C_PTR type that is compatible with every |
285 | C pointer. For this reason we need to glob all pointers into one. |
286 | Still pointers in different address spaces are not compatible. */ |
287 | if (POINTER_TYPE_P (type)) |
288 | hstate.add_int (TYPE_ADDR_SPACE (TREE_TYPE (type))); |
289 | |
290 | /* For array types hash the domain bounds and the string flag. */ |
291 | if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type)) |
292 | { |
293 | hstate.add_int (TYPE_STRING_FLAG (type)); |
294 | /* OMP lowering can introduce error_mark_node in place of |
295 | random local decls in types. */ |
296 | if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node) |
297 | inchash::add_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), hstate); |
298 | if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node) |
299 | inchash::add_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), hstate); |
300 | } |
301 | |
302 | /* Recurse for aggregates with a single element type. */ |
303 | if (TREE_CODE (type) == ARRAY_TYPE |
304 | || TREE_CODE (type) == COMPLEX_TYPE |
305 | || TREE_CODE (type) == VECTOR_TYPE) |
306 | iterative_hash_canonical_type (TREE_TYPE (type), hstate); |
307 | |
308 | /* Incorporate function return and argument types. */ |
309 | if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE) |
310 | { |
311 | unsigned na; |
312 | tree p; |
313 | |
314 | iterative_hash_canonical_type (TREE_TYPE (type), hstate); |
315 | |
316 | for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p)) |
317 | { |
318 | iterative_hash_canonical_type (TREE_VALUE (p), hstate); |
319 | na++; |
320 | } |
321 | |
322 | hstate.add_int (v: na); |
323 | } |
324 | |
325 | if (RECORD_OR_UNION_TYPE_P (type)) |
326 | { |
327 | unsigned nf; |
328 | tree f; |
329 | |
330 | for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f)) |
331 | if (TREE_CODE (f) == FIELD_DECL |
332 | && (! DECL_SIZE (f) |
333 | || ! integer_zerop (DECL_SIZE (f)))) |
334 | { |
335 | iterative_hash_canonical_type (TREE_TYPE (f), hstate); |
336 | nf++; |
337 | } |
338 | |
339 | hstate.add_int (v: nf); |
340 | } |
341 | |
342 | return hstate.end(); |
343 | } |
344 | |
345 | /* Returning a hash value for gimple type TYPE combined with VAL. */ |
346 | |
347 | static void |
348 | iterative_hash_canonical_type (tree type, inchash::hash &hstate) |
349 | { |
350 | hashval_t v; |
351 | |
352 | /* All type variants have same TYPE_CANONICAL. */ |
353 | type = TYPE_MAIN_VARIANT (type); |
354 | |
355 | if (!canonical_type_used_p (t: type)) |
356 | v = hash_canonical_type (type); |
357 | /* An already processed type. */ |
358 | else if (TYPE_CANONICAL (type)) |
359 | { |
360 | type = TYPE_CANONICAL (type); |
361 | v = gimple_canonical_type_hash (p: type); |
362 | } |
363 | else |
364 | { |
365 | /* Canonical types should not be able to form SCCs by design, this |
366 | recursion is just because we do not register canonical types in |
367 | optimal order. To avoid quadratic behavior also register the |
368 | type here. */ |
369 | v = hash_canonical_type (type); |
370 | v = gimple_register_canonical_type_1 (t: type, hash: v); |
371 | } |
372 | hstate.merge_hash (other: v); |
373 | } |
374 | |
375 | /* Returns the hash for a canonical type P. */ |
376 | |
377 | static hashval_t |
378 | gimple_canonical_type_hash (const void *p) |
379 | { |
380 | num_canonical_type_hash_queries++; |
381 | hashval_t *slot = canonical_type_hash_cache->get (k: (const_tree) p); |
382 | gcc_assert (slot != NULL); |
383 | return *slot; |
384 | } |
385 | |
386 | |
387 | |
388 | /* Returns nonzero if P1 and P2 are equal. */ |
389 | |
390 | static int |
391 | gimple_canonical_type_eq (const void *p1, const void *p2) |
392 | { |
393 | const_tree t1 = (const_tree) p1; |
394 | const_tree t2 = (const_tree) p2; |
395 | return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1), |
396 | CONST_CAST_TREE (t2)); |
397 | } |
398 | |
399 | /* Main worker for gimple_register_canonical_type. */ |
400 | |
401 | static hashval_t |
402 | gimple_register_canonical_type_1 (tree t, hashval_t hash) |
403 | { |
404 | void **slot; |
405 | |
406 | gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t) |
407 | && type_with_alias_set_p (t) |
408 | && canonical_type_used_p (t)); |
409 | |
410 | /* ODR types for which there is no ODR violation and we did not record |
411 | structurally equivalent non-ODR type can be treated as unique by their |
412 | name. |
413 | |
414 | hash passed to gimple_register_canonical_type_1 is a structural hash |
415 | that we can use to lookup structurally equivalent non-ODR type. |
416 | In case we decide to treat type as unique ODR type we recompute hash based |
417 | on name and let TBAA machinery know about our decision. */ |
418 | if (RECORD_OR_UNION_TYPE_P (t) && odr_type_p (t) |
419 | && TYPE_CXX_ODR_P (t) && !odr_type_violation_reported_p (type: t)) |
420 | { |
421 | /* Anonymous namespace types never conflict with non-C++ types. */ |
422 | if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t)) |
423 | slot = NULL; |
424 | else |
425 | { |
426 | /* Here we rely on fact that all non-ODR types was inserted into |
427 | canonical type hash and thus we can safely detect conflicts between |
428 | ODR types and interoperable non-ODR types. */ |
429 | gcc_checking_assert (type_streaming_finished |
430 | && TYPE_MAIN_VARIANT (t) == t); |
431 | slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, |
432 | NO_INSERT); |
433 | } |
434 | if (slot && !TYPE_CXX_ODR_P (*(tree *)slot)) |
435 | { |
436 | tree nonodr = *(tree *)slot; |
437 | gcc_checking_assert (!flag_ltrans); |
438 | if (symtab->dump_file) |
439 | { |
440 | fprintf (stream: symtab->dump_file, |
441 | format: "ODR and non-ODR type conflict: " ); |
442 | print_generic_expr (symtab->dump_file, t); |
443 | fprintf (stream: symtab->dump_file, format: " and " ); |
444 | print_generic_expr (symtab->dump_file, nonodr); |
445 | fprintf (stream: symtab->dump_file, format: " mangled:%s\n" , |
446 | IDENTIFIER_POINTER |
447 | (DECL_ASSEMBLER_NAME (TYPE_NAME (t)))); |
448 | } |
449 | /* Set canonical for T and all other ODR equivalent duplicates |
450 | including incomplete structures. */ |
451 | set_type_canonical_for_odr_type (type: t, canonical: nonodr); |
452 | } |
453 | else |
454 | { |
455 | tree prevail = prevailing_odr_type (type: t); |
456 | |
457 | if (symtab->dump_file) |
458 | { |
459 | fprintf (stream: symtab->dump_file, |
460 | format: "New canonical ODR type: " ); |
461 | print_generic_expr (symtab->dump_file, t); |
462 | fprintf (stream: symtab->dump_file, format: " mangled:%s\n" , |
463 | IDENTIFIER_POINTER |
464 | (DECL_ASSEMBLER_NAME (TYPE_NAME (t)))); |
465 | } |
466 | /* Set canonical for T and all other ODR equivalent duplicates |
467 | including incomplete structures. */ |
468 | set_type_canonical_for_odr_type (type: t, canonical: prevail); |
469 | enable_odr_based_tbaa (type: t); |
470 | if (!type_in_anonymous_namespace_p (t)) |
471 | hash = htab_hash_string (IDENTIFIER_POINTER |
472 | (DECL_ASSEMBLER_NAME |
473 | (TYPE_NAME (t)))); |
474 | else |
475 | hash = TYPE_UID (t); |
476 | |
477 | /* All variants of t now have TYPE_CANONICAL set to prevail. |
478 | Update canonical type hash cache accordingly. */ |
479 | num_canonical_type_hash_entries++; |
480 | bool existed_p = canonical_type_hash_cache->put (k: prevail, v: hash); |
481 | gcc_checking_assert (!existed_p); |
482 | } |
483 | return hash; |
484 | } |
485 | |
486 | slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT); |
487 | if (*slot) |
488 | { |
489 | tree new_type = (tree)(*slot); |
490 | gcc_checking_assert (new_type != t); |
491 | TYPE_CANONICAL (t) = new_type; |
492 | } |
493 | else |
494 | { |
495 | TYPE_CANONICAL (t) = t; |
496 | *slot = (void *) t; |
497 | /* Cache the just computed hash value. */ |
498 | num_canonical_type_hash_entries++; |
499 | bool existed_p = canonical_type_hash_cache->put (k: t, v: hash); |
500 | gcc_assert (!existed_p); |
501 | } |
502 | return hash; |
503 | } |
504 | |
505 | /* Register type T in the global type table gimple_types and set |
506 | TYPE_CANONICAL of T accordingly. |
507 | This is used by LTO to merge structurally equivalent types for |
508 | type-based aliasing purposes across different TUs and languages. |
509 | |
510 | ??? This merging does not exactly match how the tree.cc middle-end |
511 | functions will assign TYPE_CANONICAL when new types are created |
512 | during optimization (which at least happens for pointer and array |
513 | types). */ |
514 | |
515 | static void |
516 | gimple_register_canonical_type (tree t) |
517 | { |
518 | if (TYPE_CANONICAL (t) || !type_with_alias_set_p (t) |
519 | || !canonical_type_used_p (t)) |
520 | return; |
521 | |
522 | /* Canonical types are same among all complete variants. */ |
523 | if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (t))) |
524 | TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_MAIN_VARIANT (t)); |
525 | else |
526 | { |
527 | hashval_t h = hash_canonical_type (TYPE_MAIN_VARIANT (t)); |
528 | gimple_register_canonical_type_1 (TYPE_MAIN_VARIANT (t), hash: h); |
529 | TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_MAIN_VARIANT (t)); |
530 | } |
531 | } |
532 | |
533 | /* Re-compute TYPE_CANONICAL for NODE and related types. */ |
534 | |
535 | static void |
536 | lto_register_canonical_types (tree node, bool first_p) |
537 | { |
538 | if (!node |
539 | || !TYPE_P (node)) |
540 | return; |
541 | |
542 | if (first_p) |
543 | TYPE_CANONICAL (node) = NULL_TREE; |
544 | |
545 | if (POINTER_TYPE_P (node) |
546 | || TREE_CODE (node) == COMPLEX_TYPE |
547 | || TREE_CODE (node) == ARRAY_TYPE) |
548 | lto_register_canonical_types (TREE_TYPE (node), first_p); |
549 | |
550 | if (!first_p) |
551 | gimple_register_canonical_type (t: node); |
552 | } |
553 | |
/* Finish canonical type calculation: after all units have been streamed in
   we can check if given ODR type structurally conflicts with a non-ODR
   type.  In the first case we set type canonical according to the canonical
   type hash.  In the second case we use type names.  */

static void
lto_register_canonical_types_for_odr_types ()
{
  tree t;
  unsigned int i;

  if (!types_to_register)
    return;

  type_streaming_finished = true;

  /* Be sure that no types derived from ODR types were inserted into
     the hash table, i.e. their TYPE_CANONICAL is still unset.  */
  if (flag_checking)
    FOR_EACH_VEC_ELT (*types_to_register, i, t)
      gcc_assert (!TYPE_CANONICAL (t));

  /* Register all remaining types.  */
  FOR_EACH_VEC_ELT (*types_to_register, i, t)
    {
      /* For pre-streamed types like va-arg it is possible that main variant
	 is !CXX_ODR_P while the variant (which is streamed) is.
	 Copy CXX_ODR_P to make type verifier happy.  This is safe because
	 in canonical type calculation we only consider main variants.
	 However we can not change this flag before streaming is finished
	 to not affect tree merging.  */
      TYPE_CXX_ODR_P (t) = TYPE_CXX_ODR_P (TYPE_MAIN_VARIANT (t));
      if (!TYPE_CANONICAL (t))
	gimple_register_canonical_type (t);
    }
}
590 | |
591 | |
/* Remember trees that contain references to declarations.  */
vec <tree, va_gc> *tree_with_vars;

/* If TT is a public or external VAR_DECL/FUNCTION_DECL, make the
   enclosing mentions_vars_p_* function return true (the tree needs
   later fixup).  Note the hidden "return" inside the macro.  */
#define CHECK_VAR(tt) \
  do \
    { \
      if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
	  && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
	return true; \
    } while (0)

/* Assert (in checking builds only) that field TT never points to a
   VAR_DECL or FUNCTION_DECL; no-op otherwise.  */
#define CHECK_NO_VAR(tt) \
  gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
605 | |
/* Check presence of pointers to decls in fields of a tree_typed T.
   Always returns false; CHECK_NO_VAR only asserts that TREE_TYPE
   never needs fixup.  */

static inline bool
mentions_vars_p_typed (tree t)
{
  CHECK_NO_VAR (TREE_TYPE (t));
  return false;
}
614 | |
/* Check presence of pointers to decls in fields of a tree_common T.  */

static inline bool
mentions_vars_p_common (tree t)
{
  if (mentions_vars_p_typed (t))
    return true;
  CHECK_NO_VAR (TREE_CHAIN (t));
  return false;
}
625 | |
/* Check presence of pointers to decls in fields of a decl_minimal T.
   CHECK_VAR may return true from this function (see macro).  */

static inline bool
mentions_vars_p_decl_minimal (tree t)
{
  if (mentions_vars_p_common (t))
    return true;
  CHECK_NO_VAR (DECL_NAME (t));
  CHECK_VAR (DECL_CONTEXT (t));
  return false;
}
637 | |
/* Check presence of pointers to decls in fields of a decl_common T.  */

static inline bool
mentions_vars_p_decl_common (tree t)
{
  if (mentions_vars_p_decl_minimal (t))
    return true;
  CHECK_VAR (DECL_SIZE (t));
  CHECK_VAR (DECL_SIZE_UNIT (t));
  CHECK_VAR (DECL_INITIAL (t));
  CHECK_NO_VAR (DECL_ATTRIBUTES (t));
  CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
  return false;
}
652 | |
/* Check presence of pointers to decls in fields of a decl_with_vis T.  */

static inline bool
mentions_vars_p_decl_with_vis (tree t)
{
  if (mentions_vars_p_decl_common (t))
    return true;

  /* Accessor macro has side-effects, use field-name here.  */
  CHECK_NO_VAR (DECL_ASSEMBLER_NAME_RAW (t));
  return false;
}
665 | |
/* Check presence of pointers to decls in fields of a decl_non_common T.  */

static inline bool
mentions_vars_p_decl_non_common (tree t)
{
  if (mentions_vars_p_decl_with_vis (t))
    return true;
  CHECK_NO_VAR (DECL_RESULT_FLD (t));
  return false;
}
676 | |
/* Check presence of pointers to decls in fields of a FUNCTION_DECL T.  */

static bool
mentions_vars_p_function (tree t)
{
  if (mentions_vars_p_decl_non_common (t))
    return true;
  CHECK_NO_VAR (DECL_ARGUMENTS (t));
  CHECK_NO_VAR (DECL_VINDEX (t));
  CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
  return false;
}
689 | |
/* Check presence of pointers to decls in fields of a field_decl T.  */

static bool
mentions_vars_p_field_decl (tree t)
{
  if (mentions_vars_p_decl_common (t))
    return true;
  CHECK_VAR (DECL_FIELD_OFFSET (t));
  CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
  CHECK_NO_VAR (DECL_QUALIFIER (t));
  CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
  CHECK_NO_VAR (DECL_FCONTEXT (t));
  return false;
}
704 | |
/* Check presence of pointers to decls in fields of a type T.  */

static bool
mentions_vars_p_type (tree t)
{
  if (mentions_vars_p_common (t))
    return true;
  CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
  CHECK_VAR (TYPE_SIZE (t));
  CHECK_VAR (TYPE_SIZE_UNIT (t));
  CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
  CHECK_NO_VAR (TYPE_NAME (t));

  CHECK_VAR (TYPE_MIN_VALUE_RAW (t));
  CHECK_VAR (TYPE_MAX_VALUE_RAW (t));

  /* Accessor is for derived node types only.  */
  CHECK_NO_VAR (TYPE_LANG_SLOT_1 (t));

  CHECK_VAR (TYPE_CONTEXT (t));
  CHECK_NO_VAR (TYPE_CANONICAL (t));
  CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
  CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
  return false;
}
730 | |
/* Check presence of pointers to decls in fields of a BINFO T.  */

static bool
mentions_vars_p_binfo (tree t)
{
  unsigned HOST_WIDE_INT i, n;

  if (mentions_vars_p_common (t))
    return true;
  CHECK_VAR (BINFO_VTABLE (t));
  CHECK_NO_VAR (BINFO_OFFSET (t));
  CHECK_NO_VAR (BINFO_VIRTUALS (t));
  CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
  n = vec_safe_length (BINFO_BASE_ACCESSES (t));
  for (i = 0; i < n; i++)
    CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
  /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
     and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
  n = BINFO_N_BASE_BINFOS (t);
  for (i = 0; i < n; i++)
    CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
  return false;
}
754 | |
755 | /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */ |
756 | |
757 | static bool |
758 | mentions_vars_p_constructor (tree t) |
759 | { |
760 | unsigned HOST_WIDE_INT idx; |
761 | constructor_elt *ce; |
762 | |
763 | if (mentions_vars_p_typed (t)) |
764 | return true; |
765 | |
766 | for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), ix: idx, ptr: &ce); idx++) |
767 | { |
768 | CHECK_NO_VAR (ce->index); |
769 | CHECK_VAR (ce->value); |
770 | } |
771 | return false; |
772 | } |
773 | |
/* Check presence of pointers to decls in fields of an expression tree T.  */

static bool
mentions_vars_p_expr (tree t)
{
  int i;
  if (mentions_vars_p_typed (t))
    return true;
  /* Walk all operands; CHECK_VAR returns true on the first hit.  */
  for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
    CHECK_VAR (TREE_OPERAND (t, i));
  return false;
}
786 | |
/* Check presence of pointers to decls in fields of an OMP_CLAUSE T.  */

static bool
mentions_vars_p_omp_clause (tree t)
{
  int i;
  if (mentions_vars_p_common (t))
    return true;
  /* Walk the clause's operands; their count depends on the clause code.  */
  for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
    CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
  return false;
}
799 | |
/* Check presence of pointers to decls that needs later fixup in T.
   Dispatches to the appropriate mentions_vars_p_* worker based on
   TREE_CODE.  Returns true iff T references a public/external
   VAR_DECL or FUNCTION_DECL that must be fixed up after merging.  */

static bool
mentions_vars_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case IDENTIFIER_NODE:
      break;

    case TREE_LIST:
      CHECK_VAR (TREE_VALUE (t));
      CHECK_VAR (TREE_PURPOSE (t));
      CHECK_NO_VAR (TREE_CHAIN (t));
      break;

    case FIELD_DECL:
      return mentions_vars_p_field_decl (t);

    case LABEL_DECL:
    case CONST_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case IMPORTED_DECL:
    case NAMESPACE_DECL:
    case NAMELIST_DECL:
      return mentions_vars_p_decl_common (t);

    case VAR_DECL:
      return mentions_vars_p_decl_with_vis (t);

    case TYPE_DECL:
      return mentions_vars_p_decl_non_common (t);

    case FUNCTION_DECL:
      return mentions_vars_p_function (t);

    case TREE_BINFO:
      return mentions_vars_p_binfo (t);

    case PLACEHOLDER_EXPR:
      return mentions_vars_p_common (t);

    case BLOCK:
    case TRANSLATION_UNIT_DECL:
    case OPTIMIZATION_NODE:
    case TARGET_OPTION_NODE:
      break;

    case CONSTRUCTOR:
      return mentions_vars_p_constructor (t);

    case OMP_CLAUSE:
      return mentions_vars_p_omp_clause (t);

    default:
      /* Remaining codes are handled by tree class.  */
      if (TYPE_P (t))
	{
	  if (mentions_vars_p_type (t))
	    return true;
	}
      else if (EXPR_P (t))
	{
	  if (mentions_vars_p_expr (t))
	    return true;
	}
      else if (CONSTANT_CLASS_P (t))
	CHECK_NO_VAR (TREE_TYPE (t));
      else
	gcc_unreachable ();
    }
  return false;
}
873 | |
874 | |
875 | /* Return the resolution for the decl with index INDEX from DATA_IN. */ |
876 | |
877 | static enum ld_plugin_symbol_resolution |
878 | get_resolution (class data_in *data_in, unsigned index) |
879 | { |
880 | if (data_in->globals_resolution.exists ()) |
881 | { |
882 | ld_plugin_symbol_resolution_t ret; |
883 | /* We can have references to not emitted functions in |
884 | DECL_FUNCTION_PERSONALITY at least. So we can and have |
885 | to indeed return LDPR_UNKNOWN in some cases. */ |
886 | if (data_in->globals_resolution.length () <= index) |
887 | return LDPR_UNKNOWN; |
888 | ret = data_in->globals_resolution[index]; |
889 | return ret; |
890 | } |
891 | else |
892 | /* Delay resolution finding until decl merging. */ |
893 | return LDPR_UNKNOWN; |
894 | } |
895 | |
896 | /* We need to record resolutions until symbol table is read. */ |
897 | static void |
898 | register_resolution (struct lto_file_decl_data *file_data, tree decl, |
899 | enum ld_plugin_symbol_resolution resolution) |
900 | { |
901 | bool existed; |
902 | if (resolution == LDPR_UNKNOWN) |
903 | return; |
904 | if (!file_data->resolution_map) |
905 | file_data->resolution_map |
906 | = new hash_map<tree, ld_plugin_symbol_resolution>; |
907 | ld_plugin_symbol_resolution_t &res |
908 | = file_data->resolution_map->get_or_insert (k: decl, existed: &existed); |
909 | if (!existed |
910 | || resolution == LDPR_PREVAILING_DEF_IRONLY |
911 | || resolution == LDPR_PREVAILING_DEF |
912 | || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP) |
913 | res = resolution; |
914 | } |
915 | |
916 | /* Register DECL with the global symbol table and change its |
917 | name if necessary to avoid name clashes for static globals across |
918 | different files. */ |
919 | |
920 | static void |
921 | lto_register_var_decl_in_symtab (class data_in *data_in, tree decl, |
922 | unsigned ix) |
923 | { |
924 | tree context; |
925 | |
926 | /* Variable has file scope, not local. */ |
927 | if (!TREE_PUBLIC (decl) |
928 | && !((context = decl_function_context (decl)) |
929 | && auto_var_in_fn_p (decl, context))) |
930 | rest_of_decl_compilation (decl, 1, 0); |
931 | |
932 | /* If this variable has already been declared, queue the |
933 | declaration for merging. */ |
934 | if (TREE_PUBLIC (decl)) |
935 | register_resolution (file_data: data_in->file_data, |
936 | decl, resolution: get_resolution (data_in, index: ix)); |
937 | } |
938 | |
939 | |
940 | /* Register DECL with the global symbol table and change its |
941 | name if necessary to avoid name clashes for static globals across |
942 | different files. DATA_IN contains descriptors and tables for the |
943 | file being read. */ |
944 | |
945 | static void |
946 | lto_register_function_decl_in_symtab (class data_in *data_in, tree decl, |
947 | unsigned ix) |
948 | { |
949 | /* If this variable has already been declared, queue the |
950 | declaration for merging. */ |
951 | if (TREE_PUBLIC (decl) && !DECL_ABSTRACT_P (decl)) |
952 | register_resolution (file_data: data_in->file_data, |
953 | decl, resolution: get_resolution (data_in, index: ix)); |
954 | } |
955 | |
/* Check whether T is a decl and, if so, register its resolution info.  */
957 | |
958 | static void |
959 | lto_maybe_register_decl (class data_in *data_in, tree t, unsigned ix) |
960 | { |
961 | if (VAR_P (t)) |
962 | lto_register_var_decl_in_symtab (data_in, decl: t, ix); |
963 | else if (TREE_CODE (t) == FUNCTION_DECL |
964 | && !fndecl_built_in_p (node: t)) |
965 | lto_register_function_decl_in_symtab (data_in, decl: t, ix); |
966 | } |
967 | |
968 | |
969 | /* For the type T re-materialize it in the type variant list and |
970 | the pointer/reference-to chains. */ |
971 | |
static void
lto_fixup_prevailing_type (tree t)
{
  /* The following re-creates proper variant lists while fixing up
     the variant leaders.  We do not stream TYPE_NEXT_VARIANT so the
     variant list state before fixup is broken.  */

  /* If we are not our own variant leader link us into our new leaders
     variant list.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    {
      /* Prepend T to the leader's variant chain.  */
      tree mv = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
      TYPE_NEXT_VARIANT (mv) = t;
    }
  else if (!TYPE_ATTRIBUTES (t))
    {
      /* The following reconstructs the pointer chains
	 of the new pointed-to type if we are a main variant.  We do
	 not stream those so they are broken before fixup.
	 Don't add it if despite being main variant it has
	 attributes (then it was created with build_distinct_type_copy).
	 Similarly don't add TYPE_REF_IS_RVALUE REFERENCE_TYPEs.
	 Don't add it if there is something in the chain already.  */
      if (TREE_CODE (t) == POINTER_TYPE)
	{
	  /* Prepend T to the pointed-to type's pointer-to chain.  */
	  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
	  TYPE_POINTER_TO (TREE_TYPE (t)) = t;
	}
      else if (TREE_CODE (t) == REFERENCE_TYPE && !TYPE_REF_IS_RVALUE (t))
	{
	  /* Likewise for the reference-to chain.  */
	  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
	  TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
	}
    }
}
1008 | |
1009 | |
1010 | /* We keep prevailing tree SCCs in a hashtable with manual collision |
1011 | handling (in case all hashes compare the same) and keep the colliding |
1012 | entries in the tree_scc->next chain. */ |
1013 | |
struct tree_scc
{
  /* Chain of SCCs whose hashes collided in tree_scc_hash.  */
  tree_scc *next;
  /* Hash of the whole SCC.  */
  hashval_t hash;
  /* Number of trees in the SCC.  */
  unsigned len;
  /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
     which share the same individual tree hash).  */
  unsigned entry_len;
  /* The members of the SCC.
     We only need to remember the first entry node candidate for prevailing
     SCCs (but of course have access to all entries for SCCs we are
     processing).
     ??? For prevailing SCCs we really only need hash and the first
     entry candidate, but that's too awkward to implement.
     Declared with size 1; the real storage is over-allocated
     (see the alloca/obstack allocations in unify_scc).  */
  tree entries[1];
};
1032 | |
/* Hash traits for tree_scc pointers stored in tree_scc_hash; entries
   live on an obstack so the table must not free them.  */
struct tree_scc_hasher : nofree_ptr_hash <tree_scc>
{
  static inline hashval_t hash (const tree_scc *);
  static inline bool equal (const tree_scc *, const tree_scc *);
};
1038 | |
/* Return the cached hash of SCC; it was computed when streaming in.  */

hashval_t
tree_scc_hasher::hash (const tree_scc *scc)
{
  return scc->hash;
}
1044 | |
1045 | bool |
1046 | tree_scc_hasher::equal (const tree_scc *scc1, const tree_scc *scc2) |
1047 | { |
1048 | if (scc1->hash != scc2->hash |
1049 | || scc1->len != scc2->len |
1050 | || scc1->entry_len != scc2->entry_len) |
1051 | return false; |
1052 | return true; |
1053 | } |
1054 | |
/* Table of all prevailing tree SCCs, keyed by their hash; entries with
   equal hashes are chained through tree_scc::next.  */
static hash_table<tree_scc_hasher> *tree_scc_hash;
/* Obstack the prevailing tree_scc records are allocated from.  */
static struct obstack tree_scc_hash_obstack;

/* Statistics counters for the tree merging machinery.  */
static unsigned long num_merged_types;
static unsigned long num_prevailing_types;
static unsigned long num_type_scc_trees;
static unsigned long total_scc_size;
static unsigned long num_sccs_read;
static unsigned long num_unshared_trees_read;
static unsigned long total_scc_size_merged;
static unsigned long num_sccs_merged;
static unsigned long num_scc_compares;
static unsigned long num_scc_compare_collisions;
1068 | |
1069 | |
1070 | /* Compare the two entries T1 and T2 of two SCCs that are possibly equal, |
1071 | recursing through in-SCC tree edges. Returns true if the SCCs entered |
1072 | through T1 and T2 are equal and fills in *MAP with the pairs of |
1073 | SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */ |
1074 | |
static bool
compare_tree_sccs_1 (tree t1, tree t2, tree **map)
{
  enum tree_code code;

  /* Mark already visited nodes.  */
  TREE_ASM_WRITTEN (t2) = 1;

  /* Push the pair onto map.  MAP advances two slots per visited pair;
     the caller allocated 2 * scc->len slots for it.  */
  (*map)[0] = t1;
  (*map)[1] = t2;
  *map = *map + 2;

  /* Compare value-fields.  */
#define compare_values(X) \
  do { \
    if (X(t1) != X(t2)) \
      return false; \
  } while (0)

  compare_values (TREE_CODE);
  code = TREE_CODE (t1);

  /* If we end up comparing translation unit decls we either forgot to mark
     some SCC as local or we compare too much.  */
  gcc_checking_assert (code != TRANSLATION_UNIT_DECL);

  /* Compare the common flag bits; which flags are meaningful depends
     on whether the node is a type, a decl or an expression.  */
  if (!TYPE_P (t1))
    {
      compare_values (TREE_SIDE_EFFECTS);
      compare_values (TREE_CONSTANT);
      compare_values (TREE_READONLY);
      compare_values (TREE_PUBLIC);
    }
  compare_values (TREE_ADDRESSABLE);
  compare_values (TREE_THIS_VOLATILE);
  if (DECL_P (t1))
    compare_values (DECL_UNSIGNED);
  else if (TYPE_P (t1))
    compare_values (TYPE_UNSIGNED);
  if (TYPE_P (t1))
    compare_values (TYPE_ARTIFICIAL);
  else
    compare_values (TREE_NO_WARNING);
  compare_values (TREE_NOTHROW);
  compare_values (TREE_STATIC);
  if (code != TREE_BINFO)
    compare_values (TREE_PRIVATE);
  compare_values (TREE_PROTECTED);
  compare_values (TREE_DEPRECATED);
  if (TYPE_P (t1))
    {
      if (AGGREGATE_TYPE_P (t1))
	compare_values (TYPE_REVERSE_STORAGE_ORDER);
      else
	compare_values (TYPE_SATURATING);
      compare_values (TYPE_ADDR_SPACE);
    }
  else if (code == SSA_NAME)
    compare_values (SSA_NAME_IS_DEFAULT_DEF);

  /* Compare constant payloads.  */
  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    {
      if (wi::to_wide (t: t1) != wi::to_wide (t: t2))
	return false;
    }

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      /* ??? No suitable compare routine available.  */
      REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
      REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
      if (r1.cl != r2.cl
	  || r1.decimal != r2.decimal
	  || r1.sign != r2.sign
	  || r1.signalling != r2.signalling
	  || r1.canonical != r2.canonical
	  || r1.uexp != r2.uexp)
	return false;
      for (unsigned i = 0; i < SIGSZ; ++i)
	if (r1.sig[i] != r2.sig[i])
	  return false;
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    if (!fixed_compare (EQ_EXPR,
			TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      compare_values (VECTOR_CST_LOG2_NPATTERNS);
      compare_values (VECTOR_CST_NELTS_PER_PATTERN);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      compare_values (DECL_MODE);
      compare_values (DECL_NONLOCAL);
      compare_values (DECL_VIRTUAL_P);
      compare_values (DECL_IGNORED_P);
      compare_values (DECL_ABSTRACT_P);
      compare_values (DECL_ARTIFICIAL);
      compare_values (DECL_USER_ALIGN);
      compare_values (DECL_PRESERVE_P);
      compare_values (DECL_EXTERNAL);
      compare_values (DECL_NOT_GIMPLE_REG_P);
      compare_values (DECL_ALIGN);
      if (code == LABEL_DECL)
	{
	  compare_values (EH_LANDING_PAD_NR);
	  compare_values (LABEL_DECL_UID);
	}
      else if (code == FIELD_DECL)
	{
	  compare_values (DECL_PACKED);
	  compare_values (DECL_NONADDRESSABLE_P);
	  compare_values (DECL_PADDING_P);
	  compare_values (DECL_FIELD_ABI_IGNORED);
	  compare_values (DECL_FIELD_CXX_ZERO_WIDTH_BIT_FIELD);
	  compare_values (DECL_OFFSET_ALIGN);
	  compare_values (DECL_NOT_FLEXARRAY);
	}
      else if (code == VAR_DECL)
	{
	  compare_values (DECL_HAS_DEBUG_EXPR_P);
	  compare_values (DECL_NONLOCAL_FRAME);
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  compare_values (DECL_BY_REFERENCE);
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    compare_values (DECL_HAS_VALUE_EXPR_P);
	}
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    compare_values (DECL_REGISTER);

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      compare_values (DECL_COMMON);
      compare_values (DECL_DLLIMPORT_P);
      compare_values (DECL_WEAK);
      compare_values (DECL_SEEN_IN_BIND_EXPR_P);
      compare_values (DECL_COMDAT);
      compare_values (DECL_VISIBILITY);
      compare_values (DECL_VISIBILITY_SPECIFIED);
      if (code == VAR_DECL)
	{
	  compare_values (DECL_HARD_REGISTER);
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  compare_values (DECL_IN_CONSTANT_POOL);
	}
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      compare_values (DECL_BUILT_IN_CLASS);
      compare_values (DECL_STATIC_CONSTRUCTOR);
      compare_values (DECL_STATIC_DESTRUCTOR);
      compare_values (DECL_UNINLINABLE);
      compare_values (DECL_POSSIBLY_INLINED);
      compare_values (DECL_IS_NOVOPS);
      compare_values (DECL_IS_RETURNS_TWICE);
      compare_values (DECL_IS_MALLOC);
      compare_values (FUNCTION_DECL_DECL_TYPE);
      compare_values (DECL_DECLARED_INLINE_P);
      compare_values (DECL_STATIC_CHAIN);
      compare_values (DECL_NO_INLINE_WARNING_P);
      compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
      compare_values (DECL_NO_LIMIT_STACK);
      compare_values (DECL_DISREGARD_INLINE_LIMITS);
      compare_values (DECL_PURE_P);
      compare_values (DECL_LOOPING_CONST_OR_PURE_P);
      compare_values (DECL_IS_REPLACEABLE_OPERATOR);
      compare_values (DECL_FINAL_P);
      compare_values (DECL_CXX_CONSTRUCTOR_P);
      compare_values (DECL_CXX_DESTRUCTOR_P);
      if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
	compare_values (DECL_UNCHECKED_FUNCTION_CODE);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      compare_values (TYPE_MODE);
      compare_values (TYPE_NEEDS_CONSTRUCTING);
      if (RECORD_OR_UNION_TYPE_P (t1))
	{
	  compare_values (TYPE_TRANSPARENT_AGGR);
	  compare_values (TYPE_FINAL_P);
	  compare_values (TYPE_CXX_ODR_P);
	}
      else if (code == ARRAY_TYPE)
	compare_values (TYPE_NONALIASED_COMPONENT);
      if (code == ARRAY_TYPE || code == INTEGER_TYPE)
	compare_values (TYPE_STRING_FLAG);
      if (AGGREGATE_TYPE_P (t1))
	compare_values (TYPE_TYPELESS_STORAGE);
      compare_values (TYPE_EMPTY_P);
      if (FUNC_OR_METHOD_TYPE_P (t1))
	compare_values (TYPE_NO_NAMED_ARGS_STDARG_P);
      if (RECORD_OR_UNION_TYPE_P (t1))
	compare_values (TYPE_INCLUDES_FLEXARRAY);
      compare_values (TYPE_PACKED);
      compare_values (TYPE_RESTRICT);
      compare_values (TYPE_USER_ALIGN);
      compare_values (TYPE_READONLY);
      compare_values (TYPE_PRECISION_RAW);
      compare_values (TYPE_ALIGN);
      /* Do not compare TYPE_ALIAS_SET.  Doing so introduce ordering issues
	 with calls to get_alias_set which may initialize it for streamed
	 in types.  */
    }

  /* We don't want to compare locations, so there is nothing do compare
     for TS_EXP.  */

  /* BLOCKs are function local and we don't merge anything there, so
     simply refuse to merge.  */
  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    return false;

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
		TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    if (!cl_target_option_eq (TREE_TARGET_OPTION (t1), TREE_TARGET_OPTION (t2)))
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    if (!cl_optimization_option_eq (TREE_OPTIMIZATION (t1),
				    TREE_OPTIMIZATION (t2)))
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
	!= vec_safe_length (BINFO_BASE_ACCESSES (t2)))
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      compare_values (CLOBBER_KIND);
      compare_values (CONSTRUCTOR_NELTS);
    }

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
	|| memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
		   IDENTIFIER_LENGTH (t1)) != 0)
      return false;

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
	|| memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
		   TREE_STRING_LENGTH (t1)) != 0)
      return false;

  if (code == OMP_CLAUSE)
    {
      compare_values (OMP_CLAUSE_CODE);
      switch (OMP_CLAUSE_CODE (t1))
	{
	case OMP_CLAUSE_DEFAULT:
	  compare_values (OMP_CLAUSE_DEFAULT_KIND);
	  break;
	case OMP_CLAUSE_SCHEDULE:
	  compare_values (OMP_CLAUSE_SCHEDULE_KIND);
	  break;
	case OMP_CLAUSE_DEPEND:
	  compare_values (OMP_CLAUSE_DEPEND_KIND);
	  break;
	case OMP_CLAUSE_MAP:
	  compare_values (OMP_CLAUSE_MAP_KIND);
	  break;
	case OMP_CLAUSE_PROC_BIND:
	  compare_values (OMP_CLAUSE_PROC_BIND_KIND);
	  break;
	case OMP_CLAUSE_REDUCTION:
	  compare_values (OMP_CLAUSE_REDUCTION_CODE);
	  compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
	  compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
	  break;
	default:
	  break;
	}
    }

#undef compare_values


  /* Compare pointer fields.  */

  /* Recurse.  Search & Replaced from DFS_write_tree_body.
     Folding the early checks into the compare_tree_edges recursion
     macro makes debugging way quicker as you are able to break on
     compare_tree_sccs_1 and simply finish until a call returns false
     to spot the SCC members with the difference.  */
#define compare_tree_edges(E1, E2) \
  do { \
    tree t1_ = (E1), t2_ = (E2); \
    if (t1_ != t2_ \
	&& (!t1_ || !t2_ \
	    || !TREE_VISITED (t2_) \
	    || (!TREE_ASM_WRITTEN (t2_) \
		&& !compare_tree_sccs_1 (t1_, t2_, map)))) \
      return false; \
    /* Only non-NULL trees outside of the SCC may compare equal.  */ \
    gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
  } while (0)

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (code != IDENTIFIER_NODE)
	compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      /* Note that the number of elements for EXPR has already been emitted
	 in EXPR's header (see streamer_write_tree_header).  */
      unsigned int count = vector_cst_encoded_nelts (t: t1);
      for (unsigned int i = 0; i < count; ++i)
	compare_tree_edges (VECTOR_CST_ENCODED_ELT (t1, i),
			    VECTOR_CST_ENCODED_ELT (t2, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
      compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
      /* ??? Global decls from different TUs have non-matching
	 TRANSLATION_UNIT_DECLs.  Only consider a small set of
	 decls equivalent, we should not end up merging others.  */
      if ((code == TYPE_DECL
	   || code == NAMESPACE_DECL
	   || code == IMPORTED_DECL
	   || code == CONST_DECL
	   || (VAR_OR_FUNCTION_DECL_P (t1)
	       && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
	  && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
	;
      else
	compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
      compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
      compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
      compare_tree_edges (DECL_ABSTRACT_ORIGIN (t1), DECL_ABSTRACT_ORIGIN (t2));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t1))
	compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t1))
	compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
      /* LTO specific edges.  */
      if (code != FUNCTION_DECL
	  && code != TRANSLATION_UNIT_DECL)
	compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == FUNCTION_DECL)
	{
	  /* NOTE(review): if one argument list is a strict prefix of the
	     other the loop passes a NULL to compare_tree_edges, which
	     returns false via its NULL check rather than dereferencing.  */
	  tree a1, a2;
	  for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
	       a1 || a2;
	       a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
	    compare_tree_edges (a1, a2);
	  compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
	}
      else if (code == TYPE_DECL)
	compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (t1))
	compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
			    DECL_ASSEMBLER_NAME (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
      compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
      compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
			  DECL_BIT_FIELD_REPRESENTATIVE (t2));
      compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
			  DECL_FIELD_BIT_OFFSET (t2));
      compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
			  DECL_FUNCTION_PERSONALITY (t2));
      compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
      compare_tree_edges (DECL_FUNCTION_SPECIFIC_TARGET (t1),
			  DECL_FUNCTION_SPECIFIC_TARGET (t2));
      compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
			  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
      compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
      compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
      compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
      /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
      /* ??? Global types from different TUs have non-matching
	 TRANSLATION_UNIT_DECLs.  Still merge them if they are otherwise
	 equal.  */
      if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
	;
      else
	compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
      /* TYPE_CANONICAL is re-computed during type merging, so do not
	 compare it here.  */
      compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ARRAY_TYPE)
	compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
      else if (RECORD_OR_UNION_TYPE_P (t1))
	{
	  tree f1, f2;
	  for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	       f1 || f2;
	       f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	    compare_tree_edges (f1, f2);
	}
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));

      if (!POINTER_TYPE_P (t1))
	compare_tree_edges (TYPE_MIN_VALUE_RAW (t1), TYPE_MIN_VALUE_RAW (t2));
      compare_tree_edges (TYPE_MAX_VALUE_RAW (t1), TYPE_MAX_VALUE_RAW (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
      compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
      compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
      compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
	compare_tree_edges (TREE_OPERAND (t1, i),
			    TREE_OPERAND (t2, i));

      /* BLOCKs are function local and we don't merge anything there.  */
      if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
	return false;
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;
      /* Lengths have already been compared above.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
	compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
	compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
      compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
      compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
      compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      /* Lengths have already been compared above.  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
	{
	  compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
	  compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;

      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
	compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
			    OMP_CLAUSE_OPERAND (t2, i));
      compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
    }

#undef compare_tree_edges

  return true;
}
1603 | |
1604 | /* Compare the tree scc SCC to the prevailing candidate PSCC, filling |
1605 | out MAP if they are equal. */ |
1606 | |
static bool
compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
		   tree *map)
{
  /* Assume SCC entry hashes are sorted after their cardinality.  Which
     means we can simply take the first n-tuple of equal hashes
     (which is recorded as entry_len) and do n SCC entry candidate
     comparisons.  */
  for (unsigned i = 0; i < pscc->entry_len; ++i)
    {
      /* Start each attempt with a fresh cursor into MAP.  */
      tree *mapp = map;
      num_scc_compare_collisions++;
      if (compare_tree_sccs_1 (t1: pscc->entries[0], t2: scc->entries[i], map: &mapp))
	{
	  /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
	     on the scc as all trees will be freed.  */
	  return true;
	}
      /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
	 the SCC prevails.  */
      for (unsigned j = 0; j < scc->len; ++j)
	TREE_ASM_WRITTEN (scc->entries[j]) = 0;
    }

  return false;
}
1633 | |
/* qsort comparison function that orders an array of pointer pairs
   by the second pointer of each pair.  */
1636 | |
1637 | static int |
1638 | cmp_tree (const void *p1_, const void *p2_) |
1639 | { |
1640 | tree *p1 = (tree *)(const_cast<void *>(p1_)); |
1641 | tree *p2 = (tree *)(const_cast<void *>(p2_)); |
1642 | if (p1[1] == p2[1]) |
1643 | return 0; |
1644 | return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1; |
1645 | } |
1646 | |
1647 | /* New scc of size 1 containing T was streamed in from DATA_IN and not merged. |
1648 | Register it to reader cache at index FROM. */ |
1649 | |
1650 | static void |
1651 | process_dref (class data_in *data_in, tree t, unsigned from) |
1652 | { |
1653 | struct streamer_tree_cache_d *cache = data_in->reader_cache; |
1654 | /* If we got a debug reference queued, see if the prevailing |
1655 | tree has a debug reference and if not, register the one |
1656 | for the tree we are about to throw away. */ |
1657 | if (dref_queue.length () == 1) |
1658 | { |
1659 | dref_entry e = dref_queue.pop (); |
1660 | gcc_assert (e.decl |
1661 | == streamer_tree_cache_get_tree (cache, from)); |
1662 | const char *sym; |
1663 | unsigned HOST_WIDE_INT off; |
1664 | if (!debug_hooks->die_ref_for_decl (t, &sym, &off)) |
1665 | debug_hooks->register_external_die (t, e.sym, e.off); |
1666 | } |
1667 | } |
1668 | |
1669 | /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and |
1670 | hash value SCC_HASH with an already recorded SCC. Return true if |
1671 | that was successful, otherwise return false. */ |
1672 | |
static bool
unify_scc (class data_in *data_in, unsigned from,
	   unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
{
  bool unified_p = false;
  struct streamer_tree_cache_d *cache = data_in->reader_cache;
  /* Build a temporary tree_scc on the stack; entries[] is
     over-allocated to hold LEN trees (hence "len - 1").  */
  tree_scc *scc
    = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
  scc->next = NULL;
  scc->hash = scc_hash;
  scc->len = len;
  scc->entry_len = scc_entry_len;
  for (unsigned i = 0; i < len; ++i)
    {
      tree t = streamer_tree_cache_get_tree (cache, ix: from + i);
      scc->entries[i] = t;
      /* These types should be streamed as unshared.  */
      gcc_checking_assert
	(!(TREE_CODE (t) == TRANSLATION_UNIT_DECL
	   || (VAR_OR_FUNCTION_DECL_P (t)
	       && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
	   || TREE_CODE (t) == LABEL_DECL
	   || (TREE_CODE (t) == NAMESPACE_DECL && !DECL_NAME (t))
	   || (TYPE_P (t)
	       && type_with_linkage_p (TYPE_MAIN_VARIANT (t))
	       && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t)))));
    }

  /* Look for the list of candidate SCCs to compare against.  */
  tree_scc **slot;
  slot = tree_scc_hash->find_slot_with_hash (comparable: scc, hash: scc_hash, insert: INSERT);
  if (*slot)
    {
      /* Try unifying against each candidate.  */
      num_scc_compares++;

      /* Set TREE_VISITED on the scc so we can easily identify tree nodes
	 outside of the scc when following tree edges.  Make sure
	 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
	 to track whether we visited the SCC member during the compare.
	 We cannot use TREE_VISITED on the pscc members as the extended
	 scc and pscc can overlap.  */
      for (unsigned i = 0; i < scc->len; ++i)
	{
	  TREE_VISITED (scc->entries[i]) = 1;
	  gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
	}

      /* MAP receives (prevailing, duplicate) pairs filled in by
	 compare_tree_sccs.  */
      tree *map = XALLOCAVEC (tree, 2 * len);
      for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
	{
	  if (!compare_tree_sccs (pscc, scc, map))
	    continue;

	  /* Found an equal SCC.  */
	  unified_p = true;
	  num_scc_compare_collisions--;
	  num_sccs_merged++;
	  total_scc_size_merged += len;

	  if (flag_checking)
	    for (unsigned i = 0; i < len; ++i)
	      {
		tree t = map[2*i+1];
		enum tree_code code = TREE_CODE (t);
		/* IDENTIFIER_NODEs should be singletons and are merged by the
		   streamer.  The others should be singletons, too, and we
		   should not merge them in any way.  */
		gcc_assert (code != TRANSLATION_UNIT_DECL
			    && code != IDENTIFIER_NODE);
	      }

	  /* Fixup the streamer cache with the prevailing nodes according
	     to the tree node mapping computed by compare_tree_sccs.  */
	  if (len == 1)
	    {
	      process_dref (data_in, t: pscc->entries[0], from);
	      lto_maybe_register_decl (data_in, t: pscc->entries[0], ix: from);
	      streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
	    }
	  else
	    {
	      /* Pair each duplicate with its cache index, then sort both
		 arrays by the duplicate pointer so corresponding entries
		 line up after the qsorts.  */
	      tree *map2 = XALLOCAVEC (tree, 2 * len);
	      for (unsigned i = 0; i < len; ++i)
		{
		  map2[i*2] = (tree)(uintptr_t)(from + i);
		  map2[i*2+1] = scc->entries[i];
		}
	      qsort (map2, len, 2 * sizeof (tree), cmp_tree);
	      qsort (map, len, 2 * sizeof (tree), cmp_tree);
	      for (unsigned i = 0; i < len; ++i)
		{
		  lto_maybe_register_decl (data_in, t: map[2*i],
					   ix: (uintptr_t)map2[2*i]);
		  streamer_tree_cache_replace_tree (cache, map[2*i],
						    (uintptr_t)map2[2*i]);
		}
	    }

	  /* Free the tree nodes from the read SCC.  */
	  data_in->location_cache.revert_location_cache ();
	  for (unsigned i = 0; i < len; ++i)
	    {
	      if (TYPE_P (scc->entries[i]))
		num_merged_types++;
	      free_node (scc->entries[i]);
	    }

	  /* Drop DIE references.
	     ??? Do as in the size-one SCC case which involves sorting
	     the queue.  */
	  dref_queue.truncate (size: 0);

	  break;
	}

      /* Reset TREE_VISITED if we didn't unify the SCC with another.  */
      if (!unified_p)
	for (unsigned i = 0; i < scc->len; ++i)
	  TREE_VISITED (scc->entries[i]) = 0;
    }

  /* If we didn't unify it to any candidate duplicate the relevant
     pieces to permanent storage and link it into the chain.  */
  if (!unified_p)
    {
      /* NOTE(review): only sizeof (tree_scc) bytes are copied, i.e.
	 hash/len/entry_len and the first entry - presumably all a
	 prevailing SCC needs (see the ??? in struct tree_scc).  */
      tree_scc *pscc
	= XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
      memcpy (dest: pscc, src: scc, n: sizeof (tree_scc));
      pscc->next = (*slot);
      *slot = pscc;
    }
  return unified_p;
}
1807 | |
1808 | typedef int_hash<unsigned, 0, UINT_MAX> code_id_hash; |
1809 | |
/* Do the registering necessary once a new tree T is fully streamed in
   (including all trees it refers to).  INDEX is T's position in the
   streamer cache of DATA_IN.  HM accumulates per-tree-code frequency
   counts and TOTAL the overall number of types seen; both are only
   touched when -flto-dump-type-stats is in effect.  */

static void
process_new_tree (tree t, hash_map <code_id_hash, unsigned> *hm,
		  unsigned index, unsigned *total, class data_in *data_in)
{
  /* Reconstruct the type variant and pointer-to/reference-to
     chains.  */
  if (TYPE_P (t))
    {
      /* Map the tree types to their frequencies.  */
      if (flag_lto_dump_type_stats)
	{
	  unsigned key = (unsigned) TREE_CODE (t);
	  unsigned *countp = hm->get (k: key);
	  hm->put (k: key, v: countp ? (*countp) + 1 : 1);
	  (*total)++;
	}

      num_prevailing_types++;
      lto_fixup_prevailing_type (t);

      /* Compute the canonical type of all non-ODR types.
	 Delay ODR types for the end of merging process - the canonical
	 type for those can be computed using the (unique) name however
	 we want to do this only if units in other languages do not
	 contain structurally equivalent type.

	 Because SCC components are streamed in random (hash) order
	 we may have encountered the type before while registering
	 type canonical of a derived type in the same SCC.  */
      if (!TYPE_CANONICAL (t))
	{
	  if (!RECORD_OR_UNION_TYPE_P (t)
	      || !TYPE_CXX_ODR_P (t))
	    gimple_register_canonical_type (t);
	  else if (COMPLETE_TYPE_P (t))
	    vec_safe_push (v&: types_to_register, obj: t);
	}
      /* Register the type with the ODR machinery if it is its own
	 main variant.  */
      if (TYPE_MAIN_VARIANT (t) == t && odr_type_p (t))
	register_odr_type (t);
    }
  /* Link shared INTEGER_CSTs into TYPE_CACHED_VALUEs of its
     type which is also member of this SCC.  */
  if (TREE_CODE (t) == INTEGER_CST
      && !TREE_OVERFLOW (t))
    cache_integer_cst (t);
  if (!flag_ltrans)
    {
      lto_maybe_register_decl (data_in, t, ix: index);
      /* Scan the tree for references to global functions or
	 variables and record those for later fixup.  */
      if (mentions_vars_p (t))
	vec_safe_push (v&: tree_with_vars, obj: t);
    }
}
1867 | |
1868 | /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA. |
1869 | RESOLUTIONS is the set of symbols picked by the linker (read from the |
1870 | resolution file when the linker plugin is being used). */ |
1871 | |
1872 | static void |
1873 | lto_read_decls (struct lto_file_decl_data *decl_data, const void *data, |
1874 | vec<ld_plugin_symbol_resolution_t> resolutions) |
1875 | { |
1876 | const struct lto_decl_header * = (const struct lto_decl_header *) data; |
1877 | const int decl_offset = sizeof (struct lto_decl_header); |
1878 | const int main_offset = decl_offset + header->decl_state_size; |
1879 | const int string_offset = main_offset + header->main_size; |
1880 | class data_in *data_in; |
1881 | unsigned int i; |
1882 | const uint32_t *data_ptr, *data_end; |
1883 | uint32_t num_decl_states; |
1884 | |
1885 | lto_input_block ib_main ((const char *) data + main_offset, |
1886 | header->main_size, decl_data); |
1887 | |
1888 | data_in = lto_data_in_create (decl_data, (const char *) data + string_offset, |
1889 | header->string_size, resolutions); |
1890 | |
1891 | /* We do not uniquify the pre-loaded cache entries, those are middle-end |
1892 | internal types that should not be merged. */ |
1893 | |
1894 | hash_map <code_id_hash, unsigned> hm; |
1895 | unsigned total = 0; |
1896 | |
1897 | /* Read the global declarations and types. */ |
1898 | while (ib_main.p < ib_main.len) |
1899 | { |
1900 | tree t; |
1901 | unsigned from = data_in->reader_cache->nodes.length (); |
1902 | /* Read and uniquify SCCs as in the input stream. */ |
1903 | enum LTO_tags tag = streamer_read_record_start (ib: &ib_main); |
1904 | if (tag == LTO_tree_scc || tag == LTO_trees) |
1905 | { |
1906 | unsigned len_; |
1907 | unsigned scc_entry_len; |
1908 | |
1909 | /* Because we stream in SCC order we know that all unshared trees |
1910 | are now fully streamed. Process them. */ |
1911 | hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_, |
1912 | &scc_entry_len, |
1913 | tag == LTO_tree_scc); |
1914 | unsigned len = data_in->reader_cache->nodes.length () - from; |
1915 | gcc_assert (len == len_); |
1916 | |
1917 | if (tag == LTO_tree_scc) |
1918 | { |
1919 | total_scc_size += len; |
1920 | num_sccs_read++; |
1921 | } |
1922 | else |
1923 | num_unshared_trees_read += len; |
1924 | |
1925 | /* We have the special case of size-1 SCCs that are pre-merged |
1926 | by means of identifier and string sharing for example. |
1927 | ??? Maybe we should avoid streaming those as SCCs. */ |
1928 | tree first = streamer_tree_cache_get_tree (cache: data_in->reader_cache, |
1929 | ix: from); |
1930 | /* Identifier and integers are shared specially, they should never |
1931 | go by the tree merging path. */ |
1932 | gcc_checking_assert ((TREE_CODE (first) != IDENTIFIER_NODE |
1933 | && (TREE_CODE (first) != INTEGER_CST |
1934 | || TREE_OVERFLOW (first))) |
1935 | || len != 1); |
1936 | |
1937 | /* Try to unify the SCC with already existing ones. */ |
1938 | if (!flag_ltrans && tag != LTO_trees |
1939 | && unify_scc (data_in, from, |
1940 | len, scc_entry_len, scc_hash)) |
1941 | continue; |
1942 | |
1943 | /* Tree merging failed, mark entries in location cache as |
1944 | permanent. */ |
1945 | data_in->location_cache.accept_location_cache (); |
1946 | |
1947 | bool seen_type = false; |
1948 | for (unsigned i = 0; i < len; ++i) |
1949 | { |
1950 | tree t = streamer_tree_cache_get_tree (cache: data_in->reader_cache, |
1951 | ix: from + i); |
1952 | process_new_tree (t, hm: &hm, index: from + i, total: &total, data_in); |
1953 | if (TYPE_P (t)) |
1954 | seen_type = true; |
1955 | } |
1956 | |
1957 | /* Register DECLs with the debuginfo machinery. */ |
1958 | while (!dref_queue.is_empty ()) |
1959 | { |
1960 | dref_entry e = dref_queue.pop (); |
1961 | debug_hooks->register_external_die (e.decl, e.sym, e.off); |
1962 | } |
1963 | |
1964 | if (seen_type) |
1965 | num_type_scc_trees += len; |
1966 | } |
1967 | else |
1968 | { |
1969 | t = lto_input_tree_1 (&ib_main, data_in, tag, hash: 0); |
1970 | gcc_assert (data_in->reader_cache->nodes.length () == from + 1); |
1971 | num_unshared_trees_read++; |
1972 | data_in->location_cache.accept_location_cache (); |
1973 | process_dref (data_in, t, from); |
1974 | if (TREE_CODE (t) == IDENTIFIER_NODE |
1975 | || (TREE_CODE (t) == INTEGER_CST |
1976 | && !TREE_OVERFLOW (t))) |
1977 | ; |
1978 | else |
1979 | { |
1980 | lto_maybe_register_decl (data_in, t, ix: from); |
1981 | process_new_tree (t, hm: &hm, index: from, total: &total, data_in); |
1982 | } |
1983 | } |
1984 | } |
1985 | |
1986 | /* Dump type statistics. */ |
1987 | if (flag_lto_dump_type_stats) |
1988 | { |
1989 | fprintf (stdout, format: " Type Frequency Percentage\n\n" ); |
1990 | for (hash_map<code_id_hash, unsigned>::iterator itr = hm.begin (); |
1991 | itr != hm.end (); |
1992 | ++itr) |
1993 | { |
1994 | std::pair<unsigned, unsigned> p = *itr; |
1995 | enum tree_code code = (enum tree_code) p.first; |
1996 | fprintf (stdout, format: "%14s %6d %12.2f\n" , get_tree_code_name (code), |
1997 | p.second, float (p.second)/total*100); |
1998 | } |
1999 | } |
2000 | |
2001 | data_in->location_cache.apply_location_cache (); |
2002 | |
2003 | /* Read in lto_in_decl_state objects. */ |
2004 | data_ptr = (const uint32_t *) ((const char*) data + decl_offset); |
2005 | data_end |
2006 | = (const uint32_t *) ((const char*) data_ptr + header->decl_state_size); |
2007 | num_decl_states = *data_ptr++; |
2008 | |
2009 | gcc_assert (num_decl_states > 0); |
2010 | decl_data->global_decl_state = lto_new_in_decl_state (); |
2011 | data_ptr = lto_read_in_decl_state (data_in, data: data_ptr, |
2012 | state: decl_data->global_decl_state); |
2013 | |
2014 | /* Read in per-function decl states and enter them in hash table. */ |
2015 | decl_data->function_decl_states |
2016 | = hash_table<decl_state_hasher>::create_ggc (n: 37); |
2017 | |
2018 | for (i = 1; i < num_decl_states; i++) |
2019 | { |
2020 | struct lto_in_decl_state *state = lto_new_in_decl_state (); |
2021 | |
2022 | data_ptr = lto_read_in_decl_state (data_in, data: data_ptr, state); |
2023 | lto_in_decl_state **slot |
2024 | = decl_data->function_decl_states->find_slot (value: state, insert: INSERT); |
2025 | gcc_assert (*slot == NULL); |
2026 | *slot = state; |
2027 | } |
2028 | |
2029 | if (data_ptr != data_end) |
2030 | internal_error ("bytecode stream: garbage at the end of symbols section" ); |
2031 | |
2032 | /* Set the current decl state to be the global state. */ |
2033 | decl_data->current_decl_state = decl_data->global_decl_state; |
2034 | |
2035 | lto_data_in_delete (data_in); |
2036 | } |
2037 | |
2038 | /* Custom version of strtoll, which is not portable. */ |
2039 | |
2040 | static int64_t |
2041 | lto_parse_hex (const char *p) |
2042 | { |
2043 | int64_t ret = 0; |
2044 | |
2045 | for (; *p != '\0'; ++p) |
2046 | { |
2047 | char c = *p; |
2048 | unsigned char part; |
2049 | ret <<= 4; |
2050 | if (c >= '0' && c <= '9') |
2051 | part = c - '0'; |
2052 | else if (c >= 'a' && c <= 'f') |
2053 | part = c - 'a' + 10; |
2054 | else if (c >= 'A' && c <= 'F') |
2055 | part = c - 'A' + 10; |
2056 | else |
2057 | internal_error ("could not parse hex number" ); |
2058 | ret |= part; |
2059 | } |
2060 | |
2061 | return ret; |
2062 | } |
2063 | |
/* Read the linker resolution for FILE from the RESOLUTION stream and
   attach the per-symbol resolutions to the sub-file decl datas looked
   up in FILE_IDS.  A NULL RESOLUTION means no resolution file was
   given and nothing is done.  */

static void
lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
{
  /* We require that objects in the resolution file are in the same
     order as the lto1 command line.  */
  unsigned int name_len;
  char *obj_name;
  unsigned int num_symbols;
  unsigned int i;
  struct lto_file_decl_data *file_data;
  splay_tree_node nd = NULL;

  if (!resolution)
    return;

  /* The record starts with the object file name; check it matches the
     file we are processing.  */
  name_len = strlen (s: file->filename);
  obj_name = XNEWVEC (char, name_len + 1);
  fscanf (stream: resolution, format: " ");	/* Read white space.  */

  fread (ptr: obj_name, size: sizeof (char), n: name_len, stream: resolution);
  obj_name[name_len] = '\0';
  if (filename_cmp (s1: obj_name, s2: file->filename) != 0)
    internal_error ("unexpected file name %s in linker resolution file. "
		    "Expected %s", obj_name, file->filename);
  /* For a member at a non-zero offset the resolution file also records
     "@0x<offset>"; verify it agrees with FILE.  */
  if (file->offset != 0)
    {
      int t;
      char offset_p[17];
      int64_t offset;
      t = fscanf (stream: resolution, format: "@0x%16s", offset_p);
      if (t != 1)
	internal_error ("could not parse file offset");
      offset = lto_parse_hex (p: offset_p);
      if (offset != file->offset)
	internal_error ("unexpected offset");
    }

  free (ptr: obj_name);

  fscanf (stream: resolution, format: "%u", &num_symbols);

  /* Then one line per symbol: "<index> <sub-id> <resolution> ...".  */
  for (i = 0; i < num_symbols; i++)
    {
      int t;
      unsigned index;
      unsigned HOST_WIDE_INT id;
      char r_str[27];
      enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
      unsigned int j;
      unsigned int lto_resolution_str_len = ARRAY_SIZE (lto_resolution_str);
      res_pair rp;

      t = fscanf (stream: resolution, format: "%u " HOST_WIDE_INT_PRINT_HEX_PURE
		  " %26s %*[^\n]\n", &index, &id, r_str);
      if (t != 3)
	internal_error ("invalid line in the resolution file");

      /* Translate the textual resolution name to the enum value.  */
      for (j = 0; j < lto_resolution_str_len; j++)
	{
	  if (strcmp (s1: lto_resolution_str[j], s2: r_str) == 0)
	    {
	      r = (enum ld_plugin_symbol_resolution) j;
	      /* Incremental linking together with -fwhole-program may seem
		 somewhat contradictory (as the point of incremental linking
		 is to allow re-linking with more symbols later) but it is
		 used to build an LTO kernel.  We want to hide all symbols
		 that are not explicitly marked as exported and thus turn
		 LDPR_PREVAILING_DEF_IRONLY_EXP
		 to LDPR_PREVAILING_DEF_IRONLY.  */
	      if (flag_whole_program
		  && flag_incremental_link == INCREMENTAL_LINK_NOLTO
		  && r == LDPR_PREVAILING_DEF_IRONLY_EXP)
		r = LDPR_PREVAILING_DEF_IRONLY;
	      break;
	    }
	}
      if (j == lto_resolution_str_len)
	internal_error ("invalid resolution in the resolution file");

      /* Cache the last looked-up splay tree node; consecutive symbols
	 usually share the same sub id.  */
      if (!(nd && lto_splay_tree_id_equal_p (key: nd->key, id)))
	{
	  nd = lto_splay_tree_lookup (t: file_ids, id);
	  if (nd == NULL)
	    internal_error ("resolution sub id %wx not in object file", id);
	}

      file_data = (struct lto_file_decl_data *)nd->value;
      /* The indexes are very sparse.  To save memory save them in a compact
	 format that is only unpacked later when the subfile is processed.  */
      rp.res = r;
      rp.index = index;
      file_data->respairs.safe_push (obj: rp);
      if (file_data->max_index < index)
	file_data->max_index = index;
    }
}
2163 | |
/* List of file_decl_datas.  */
struct file_data_list
{
  /* Head and tail of the chain, linked through lto_file_decl_data's
     next pointer and maintained in linker order.  */
  struct lto_file_decl_data *first, *last;
};
2169 | |
2170 | /* Is the name for a id'ed LTO section? */ |
2171 | |
2172 | static int |
2173 | lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id) |
2174 | { |
2175 | const char *s; |
2176 | |
2177 | if (strncmp (s1: name, s2: section_name_prefix, n: strlen (s: section_name_prefix))) |
2178 | return 0; |
2179 | s = strrchr (s: name, c: '.'); |
2180 | if (!s) |
2181 | return 0; |
2182 | /* If the section is not suffixed with an ID return. */ |
2183 | if ((size_t)(s - name) == strlen (s: section_name_prefix)) |
2184 | return 0; |
2185 | return sscanf (s: s, format: "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1; |
2186 | } |
2187 | |
/* Create file_data of each sub file id.  LS is one section of the
   combined object; FILE_IDS maps sub module ids to lto_file_decl_data
   and LIST chains those in linker order.  Always returns 1 so the
   caller's traversal continues.  */

static int
create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
			    struct file_data_list *list)
{
  struct lto_section_slot s_slot, *new_slot;
  unsigned HOST_WIDE_INT id;
  splay_tree_node nd;
  void **hash_slot;
  char *new_name;
  struct lto_file_decl_data *file_data;

  /* Sections without an id suffix are not per-sub-module; skip.  */
  if (!lto_section_with_id (name: ls->name, id: &id))
    return 1;

  /* Find hash table of sub module id.  */
  nd = lto_splay_tree_lookup (t: file_ids, id);
  if (nd != NULL)
    {
      file_data = (struct lto_file_decl_data *)nd->value;
    }
  else
    {
      /* First section seen for this sub module: create and register
	 its file data.  */
      file_data = ggc_alloc<lto_file_decl_data> ();
      memset(s: file_data, c: 0, n: sizeof (struct lto_file_decl_data));
      file_data->id = id;
      file_data->section_hash_table = lto_obj_create_section_hash_table ();
      lto_splay_tree_insert (t: file_ids, id, file_data);

      /* Maintain list in linker order.  */
      if (!list->first)
	list->first = file_data;
      if (list->last)
	list->last->next = file_data;

      list->last = file_data;
    }

  /* Copy section into sub module hash table.  */
  new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
  s_slot.name = new_name;
  hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
  gcc_assert (*hash_slot == NULL);

  new_slot = XDUP (struct lto_section_slot, ls);
  new_slot->name = new_name;
  *hash_slot = new_slot;
  return 1;
}
2238 | |
/* Read declarations and other initializations for a FILE_DATA.
   FILE is the object it came from and ORDER its position among the
   input files.  */

static void
lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file,
		   int order)
{
  const char *data;
  size_t len;
  vec<ld_plugin_symbol_resolution_t>
	resolutions = vNULL;
  int i;
  res_pair *rp;

  /* Create vector for fast access of resolution.  We do this lazily
     to save memory.  */
  resolutions.safe_grow_cleared (len: file_data->max_index + 1, exact: true);
  for (i = 0; file_data->respairs.iterate (ix: i, ptr: &rp); i++)
    resolutions[rp->index] = rp->res;
  file_data->respairs.release ();

  file_data->renaming_hash_table = lto_create_renaming_table ();
  file_data->file_name = file->filename;
  file_data->order = order;

  /* Read and verify LTO section.  */
  data = lto_get_summary_section_data (file_data, LTO_section_lto, &len);
  if (data == NULL)
    {
      fatal_error (input_location, "bytecode stream in file %qs generated "
		   "with GCC compiler older than 10.0", file_data->file_name);
      return;
    }

  memcpy (dest: &file_data->lto_section_header, src: data, n: sizeof (lto_section));
  lto_check_version (file_data->lto_section_header.major_version,
		     file_data->lto_section_header.minor_version,
		     file_data->file_name);

  /* The machine-mode table is only streamed for the offload (accel)
     compiler.  */
#ifdef ACCEL_COMPILER
  lto_input_mode_table (file_data);
#else
  file_data->mode_table = NULL;
  file_data->mode_bits = ceil_log2 (x: MAX_MACHINE_MODE);
#endif

  data = lto_get_summary_section_data (file_data, LTO_section_decls, &len);
  if (data == NULL)
    {
      internal_error ("cannot read %<LTO_section_decls%> from %s",
		      file_data->file_name);
      return;
    }
  /* Frees resolutions.  */
  lto_read_decls (decl_data: file_data, data, resolutions);
  lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
}
2295 | |
2296 | /* Finalize FILE_DATA in FILE and increase COUNT. */ |
2297 | |
2298 | static int |
2299 | lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data, |
2300 | int *count, int order) |
2301 | { |
2302 | lto_file_finalize (file_data, file, order); |
2303 | if (symtab->dump_file) |
2304 | fprintf (stream: symtab->dump_file, |
2305 | format: "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n" , |
2306 | file_data->file_name, file_data->id); |
2307 | (*count)++; |
2308 | return 0; |
2309 | } |
2310 | |
/* Generate a TREE representation for all types and external decls
   entities in FILE.

   Read all of the globals out of the file.  Then read the cgraph
   and process the .o index into the cgraph nodes so that it can open
   the .o file to load the functions and ipa information.

   RESOLUTION_FILE, possibly NULL, provides the linker plugin symbol
   resolutions.  *COUNT is incremented once per sub file.  Returns the
   head of the chain of sub-file decl datas, in linker order.  */

static struct lto_file_decl_data *
lto_file_read (lto_file *file, FILE *resolution_file, int *count)
{
  struct lto_file_decl_data *file_data = NULL;
  splay_tree file_ids;
  htab_t section_hash_table;
  struct lto_section_slot *section;
  struct file_data_list file_list;
  struct lto_section_list section_list;

  memset (s: &section_list, c: 0, n: sizeof (struct lto_section_list));
  section_hash_table = lto_obj_build_section_table (file, list: &section_list);

  /* Dump the details of LTO objects.  */
  if (flag_lto_dump_objects)
    {
      int i=0;
      fprintf (stdout, format: "\n LTO Object Name: %s\n", file->filename);
      fprintf (stdout, format: "\nNo. Offset Size Section Name\n\n");
      for (section = section_list.first; section != NULL; section = section->next)
	fprintf (stdout, format: "%2d %8" PRId64 " %8" PRIu64 " %s\n",
		 ++i, (int64_t) section->start, (uint64_t) section->len,
		 section->name);
    }

  /* Find all sub modules in the object and put their sections into new hash
     tables in a splay tree.  */
  file_ids = lto_splay_tree_new ();
  memset (s: &file_list, c: 0, n: sizeof (struct file_data_list));
  for (section = section_list.first; section != NULL; section = section->next)
    create_subid_section_table (ls: section, file_ids, list: &file_list);

  /* Add resolutions to file ids.  */
  lto_resolution_read (file_ids, resolution: resolution_file, file);

  /* Finalize each lto file for each submodule in the merged object.  */
  int order = 0;
  for (file_data = file_list.first; file_data != NULL;
       file_data = file_data->next)
    lto_create_files_from_ids (file, file_data, count, order: order++);

  splay_tree_delete (file_ids);
  htab_delete (section_hash_table);

  return file_list.first;
}
2364 | |
/* Use mmap for section I/O when the host supports it; sysconf with
   _SC_PAGE_SIZE is needed to round offsets to page boundaries.  */
#if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
#define LTO_MMAP_IO 1
#endif

#if LTO_MMAP_IO
/* Page size of machine is used for mmap and munmap calls.  */
static size_t page_mask;
#endif
2373 | |
/* Get the section data of length LEN from FILE_DATA's file starting at
   OFFSET.  The data segment must be freed by the caller when the
   caller is finished.  Returns NULL if all was not well (in practice
   failures go through fatal_error).  */

static char *
lto_read_section_data (struct lto_file_decl_data *file_data,
		       intptr_t offset, size_t len)
{
  char *result;
  /* Single-entry cache of the currently open file descriptor and its
     name; persists across calls.  */
  static int fd = -1;
  static char *fd_name;
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

  /* Keep a single-entry file-descriptor cache.  The last file we
     touched will get closed at exit.
     ??? Eventually we want to add a more sophisticated larger cache
     or rather fix function body streaming to not stream them in
     practically random order.  */
  if (fd != -1
      && filename_cmp (s1: fd_name, s2: file_data->file_name) != 0)
    {
      free (ptr: fd_name);
      close (fd: fd);
      fd = -1;
    }
  if (fd == -1)
    {
      fd = open (file: file_data->file_name, O_RDONLY|O_BINARY);
      if (fd == -1)
	{
	  fatal_error (input_location, "Cannot open %s", file_data->file_name);
	  return NULL;
	}
      fd_name = xstrdup (file_data->file_name);
    }

#if LTO_MMAP_IO
  /* Compute the page mask lazily on first use.  */
  if (!page_mask)
    {
      size_t page_size = sysconf (_SC_PAGE_SIZE);
      page_mask = ~(page_size - 1);
    }

  /* mmap requires a page-aligned offset; map from the enclosing page
     boundary and return a pointer adjusted by the difference.  */
  computed_offset = offset & page_mask;
  diff = offset - computed_offset;
  computed_len = len + diff;

  result = (char *) mmap (NULL, len: computed_len, PROT_READ, MAP_PRIVATE,
			  fd: fd, offset: computed_offset);
  if (result == MAP_FAILED)
    {
      fatal_error (input_location, "Cannot map %s", file_data->file_name);
      return NULL;
    }

  return result + diff;
#else
  result = (char *) xmalloc (len);
  if (lseek (fd, offset, SEEK_SET) != offset
      || read (fd, result, len) != (ssize_t) len)
    {
      free (result);
      fatal_error (input_location, "Cannot read %s", file_data->file_name);
      result = NULL;
    }
#ifdef __MINGW32__
  /* Native windows doesn't supports delayed unlink on opened file.  So
     we close file here again.  This produces higher I/O load, but at least
     it prevents to have dangling file handles preventing unlink.  */
  free (fd_name);
  fd_name = NULL;
  close (fd);
  fd = -1;
#endif
  return result;
#endif
}
2455 | |
2456 | |
2457 | /* Get the section data from FILE_DATA of SECTION_TYPE with NAME. |
2458 | NAME will be NULL unless the section type is for a function |
2459 | body. */ |
2460 | |
2461 | static const char * |
2462 | get_section_data (struct lto_file_decl_data *file_data, |
2463 | enum lto_section_type section_type, |
2464 | const char *name, int order, |
2465 | size_t *len) |
2466 | { |
2467 | htab_t section_hash_table = file_data->section_hash_table; |
2468 | struct lto_section_slot *f_slot; |
2469 | struct lto_section_slot s_slot; |
2470 | const char *section_name = lto_get_section_name (section_type, name, |
2471 | order, file_data); |
2472 | char *data = NULL; |
2473 | |
2474 | *len = 0; |
2475 | s_slot.name = section_name; |
2476 | f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot); |
2477 | if (f_slot) |
2478 | { |
2479 | data = lto_read_section_data (file_data, offset: f_slot->start, len: f_slot->len); |
2480 | *len = f_slot->len; |
2481 | } |
2482 | |
2483 | free (CONST_CAST (char *, section_name)); |
2484 | return data; |
2485 | } |
2486 | |
2487 | |
/* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
   starts at OFFSET and has LEN bytes.  OFFSET is the data pointer
   previously returned by get_section_data; with mmap I/O it is
   translated back to the enclosing page mapping.  */

static void
free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
		   enum lto_section_type section_type ATTRIBUTE_UNUSED,
		   const char *name ATTRIBUTE_UNUSED,
		   const char *offset, size_t len ATTRIBUTE_UNUSED)
{
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

#if LTO_MMAP_IO
  /* Recompute the page-aligned mapping that lto_read_section_data
     created and unmap it in full.  */
  computed_offset = ((intptr_t) offset) & page_mask;
  diff = (intptr_t) offset - computed_offset;
  computed_len = len + diff;

  munmap (addr: (caddr_t) computed_offset, len: computed_len);
#else
  free (CONST_CAST(char *, offset));
#endif
}
2513 | |
2514 | static lto_file *current_lto_file; |
2515 | |
/* If TT is a variable or function decl replace it with its
   prevailing variant.  Sets the local variable FIXED (which must be
   in scope at the expansion site) when a replacement happened.  */
#define LTO_SET_PREVAIL(tt) \
  do {\
    if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
	&& (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
      { \
	tt = lto_symtab_prevailing_decl (tt); \
	fixed = true; \
      } \
  } while (0)

/* Ensure that TT isn't a replaceable var or function decl.  */
#define LTO_NO_PREVAIL(tt) \
  gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2531 | |
/* Given a tree T replace all fields referring to variables or functions
   with their prevailing variant.  Asserts (with checking) that at least
   one field was replaced, since T was recorded by mentions_vars_p.  */
static void
lto_fixup_prevailing_decls (tree t)
{
  enum tree_code code = TREE_CODE (t);
  bool fixed = false;

  gcc_checking_assert (code != TREE_BINFO);
  LTO_NO_PREVAIL (TREE_TYPE (t));
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON)
      /* lto_symtab_prevailing_decl uses TREE_CHAIN to link to the
	 prevailing decl, so if T is a prevailed declaration we would
	 ICE here.  */
      && !VAR_OR_FUNCTION_DECL_P (t))
    LTO_NO_PREVAIL (TREE_CHAIN (t));
  if (DECL_P (t))
    {
      /* Walk every decl field that may hold a var or function.  */
      LTO_NO_PREVAIL (DECL_NAME (t));
      LTO_SET_PREVAIL (DECL_CONTEXT (t));
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  LTO_SET_PREVAIL (DECL_SIZE (t));
	  LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
	  LTO_SET_PREVAIL (DECL_INITIAL (t));
	  LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
	  LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
	{
	  LTO_NO_PREVAIL (DECL_ASSEMBLER_NAME_RAW (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
	{
	  LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
	{
	  LTO_NO_PREVAIL (DECL_ARGUMENTS (t));
	  LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
	  LTO_NO_PREVAIL (DECL_VINDEX (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
	{
	  LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
	  LTO_NO_PREVAIL (DECL_QUALIFIER (t));
	  LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_FCONTEXT (t));
	}
    }
  else if (TYPE_P (t))
    {
      /* Type fields that may refer to decls (sizes, bounds, context);
	 the structural links must already point to non-decls.  */
      LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
      LTO_SET_PREVAIL (TYPE_SIZE (t));
      LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
      LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
      LTO_NO_PREVAIL (TYPE_NAME (t));

      LTO_SET_PREVAIL (TYPE_MIN_VALUE_RAW (t));
      LTO_SET_PREVAIL (TYPE_MAX_VALUE_RAW (t));
      LTO_NO_PREVAIL (TYPE_LANG_SLOT_1 (t));

      LTO_SET_PREVAIL (TYPE_CONTEXT (t));

      LTO_NO_PREVAIL (TYPE_CANONICAL (t));
      LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
      LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
    }
  else if (EXPR_P (t))
    {
      /* Any operand of an expression may be a prevailed decl.  */
      int i;
      for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	LTO_SET_PREVAIL (TREE_OPERAND (t, i));
    }
  else if (TREE_CODE (t) == CONSTRUCTOR)
    {
      unsigned i;
      tree val;
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
	LTO_SET_PREVAIL (val);
    }
  else
    {
      switch (code)
	{
	case TREE_LIST:
	  LTO_SET_PREVAIL (TREE_VALUE (t));
	  LTO_SET_PREVAIL (TREE_PURPOSE (t));
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* If we fixed nothing, then we missed something seen by
     mentions_vars_p.  */
  gcc_checking_assert (fixed);
}
2629 | #undef LTO_SET_PREVAIL |
2630 | #undef LTO_NO_PREVAIL |
2631 | |
2632 | /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE, |
2633 | replaces var and function decls with the corresponding prevailing def. */ |
2634 | |
2635 | static void |
2636 | lto_fixup_state (struct lto_in_decl_state *state) |
2637 | { |
2638 | unsigned i, si; |
2639 | |
2640 | /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs, |
2641 | we still need to walk from all DECLs to find the reachable |
2642 | FUNCTION_DECLs and VAR_DECLs. */ |
2643 | for (si = 0; si < LTO_N_DECL_STREAMS; si++) |
2644 | { |
2645 | vec<tree, va_gc> *trees = state->streams[si]; |
2646 | for (i = 0; i < vec_safe_length (v: trees); i++) |
2647 | { |
2648 | tree t = (*trees)[i]; |
2649 | if (flag_checking && TYPE_P (t)) |
2650 | verify_type (t); |
2651 | if (VAR_OR_FUNCTION_DECL_P (t) |
2652 | && (TREE_PUBLIC (t) || DECL_EXTERNAL (t))) |
2653 | (*trees)[i] = lto_symtab_prevailing_decl (decl: t); |
2654 | } |
2655 | } |
2656 | } |
2657 | |
2658 | /* Fix the decls from all FILES. Replaces each decl with the corresponding |
2659 | prevailing one. */ |
2660 | |
2661 | static void |
2662 | lto_fixup_decls (struct lto_file_decl_data **files) |
2663 | { |
2664 | unsigned int i; |
2665 | tree t; |
2666 | |
2667 | if (tree_with_vars) |
2668 | FOR_EACH_VEC_ELT ((*tree_with_vars), i, t) |
2669 | lto_fixup_prevailing_decls (t); |
2670 | |
2671 | for (i = 0; files[i]; i++) |
2672 | { |
2673 | struct lto_file_decl_data *file = files[i]; |
2674 | struct lto_in_decl_state *state = file->global_decl_state; |
2675 | lto_fixup_state (state); |
2676 | |
2677 | hash_table<decl_state_hasher>::iterator iter; |
2678 | lto_in_decl_state *elt; |
2679 | FOR_EACH_HASH_TABLE_ELEMENT (*file->function_decl_states, elt, |
2680 | lto_in_decl_state *, iter) |
2681 | lto_fixup_state (state: elt); |
2682 | } |
2683 | } |
2684 | |
/* Flattened, NULL-terminated array of per-file decl data; the GTY length
   expression keeps it in sync with lto_stats.num_input_files.  */
static GTY((length ("lto_stats.num_input_files + 1" ))) struct lto_file_decl_data **all_file_decl_data;
2686 | |
2687 | /* Turn file datas for sub files into a single array, so that they look |
2688 | like separate files for further passes. */ |
2689 | |
2690 | static void |
2691 | lto_flatten_files (struct lto_file_decl_data **orig, int count, |
2692 | int last_file_ix) |
2693 | { |
2694 | struct lto_file_decl_data *n, *next; |
2695 | int i, k; |
2696 | |
2697 | lto_stats.num_input_files = count; |
2698 | all_file_decl_data |
2699 | = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (c: count + 1); |
2700 | /* Set the hooks so that all of the ipa passes can read in their data. */ |
2701 | lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data); |
2702 | for (i = 0, k = 0; i < last_file_ix; i++) |
2703 | { |
2704 | for (n = orig[i]; n != NULL; n = next) |
2705 | { |
2706 | all_file_decl_data[k++] = n; |
2707 | next = n->next; |
2708 | n->next = NULL; |
2709 | } |
2710 | } |
2711 | all_file_decl_data[k] = NULL; |
2712 | gcc_assert (k == count); |
2713 | } |
2714 | |
/* Input file data before flattening (i.e. splitting them to subfiles to
   support incremental linking).  */
/* Number of input files read, excluding the NULL array terminator.  */
static int real_file_count;
/* NULL-terminated array of per-file decl data, one entry per input file
   as read; freed and replaced by all_file_decl_data after flattening.  */
static GTY((length ("real_file_count + 1" ))) struct lto_file_decl_data **real_file_decl_data;
2719 | |
/* Read all the symbols from the input files FNAMES.  NFILES is the
   number of files requested in the command line.  Instantiate a
   global call graph by aggregating all the sub-graphs found in each
   file.  */

void
read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
{
  unsigned int i, last_file_ix;
  FILE *resolution;
  unsigned resolution_objects = 0;
  int count = 0;
  struct lto_file_decl_data **decl_data;
  symtab_node *snode;

  symtab->initialize ();

  timevar_push (tv: TV_IPA_LTO_DECL_IN);

#ifdef ACCEL_COMPILER
  /* The offload compiler streams from the offload variants of the LTO
     sections rather than the host ones.  */
  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
  lto_stream_offload_p = true;
#endif

  real_file_decl_data
    = decl_data = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (c: nfiles + 1);
  real_file_count = nfiles;

  /* Read the resolution file.  */
  resolution = NULL;
  if (resolution_file_name)
    {
      int t;

      resolution = fopen (filename: resolution_file_name, modes: "r" );
      if (resolution == NULL)
	fatal_error (input_location,
		     "could not open symbol resolution file: %m" );

      /* The resolution file starts with the number of object files it
	 covers; this is checked against NFILES after all files are read.  */
      t = fscanf (stream: resolution, format: "%u" , &resolution_objects);
      gcc_assert (t == 1);
    }
  symtab->state = LTO_STREAMING;

  canonical_type_hash_cache = new hash_map<const_tree, hashval_t> (251);
  gimple_canonical_types = htab_create (16381, gimple_canonical_type_hash,
					gimple_canonical_type_eq, NULL);
  gcc_obstack_init (&tree_scc_hash_obstack);
  tree_scc_hash = new hash_table<tree_scc_hasher> (4096);

  /* Register the common node types with the canonical type machinery so
     we properly share alias-sets across languages and TUs.  Do not
     expose the common nodes as type merge target - those that should be
     are already exposed so by pre-loading the LTO streamer caches.
     Do two passes - first clear TYPE_CANONICAL and then re-compute it.  */
  for (i = 0; i < itk_none; ++i)
    lto_register_canonical_types (node: integer_types[i], first_p: true);
  for (i = 0; i < stk_type_kind_last; ++i)
    lto_register_canonical_types (node: sizetype_tab[i], first_p: true);
  for (i = 0; i < TI_MAX; ++i)
    lto_register_canonical_types (node: global_trees[i], first_p: true);
  for (i = 0; i < itk_none; ++i)
    lto_register_canonical_types (node: integer_types[i], first_p: false);
  for (i = 0; i < stk_type_kind_last; ++i)
    lto_register_canonical_types (node: sizetype_tab[i], first_p: false);
  for (i = 0; i < TI_MAX; ++i)
    lto_register_canonical_types (node: global_trees[i], first_p: false);

  if (!quiet_flag)
    fprintf (stderr, format: "Reading object files:" );

  /* Read all of the object files specified on the command line.  */
  for (i = 0, last_file_ix = 0; i < nfiles; ++i)
    {
      struct lto_file_decl_data *file_data = NULL;
      if (!quiet_flag)
	{
	  fprintf (stderr, format: " %s" , fnames[i]);
	  fflush (stderr);
	}

      current_lto_file = lto_obj_file_open (filename: fnames[i], writable: false);
      if (!current_lto_file)
	break;

      file_data = lto_file_read (file: current_lto_file, resolution_file: resolution, count: &count);
      if (!file_data)
	{
	  lto_obj_file_close (file: current_lto_file);
	  free (ptr: current_lto_file);
	  current_lto_file = NULL;
	  break;
	}

      decl_data[last_file_ix++] = file_data;

      lto_obj_file_close (file: current_lto_file);
      free (ptr: current_lto_file);
      current_lto_file = NULL;
    }

  lto_flatten_files (orig: decl_data, count, last_file_ix);
  lto_stats.num_input_files = count;
  /* The flattened copy in all_file_decl_data now holds the file datas;
     release the pre-flattening array.  */
  ggc_free(decl_data);
  real_file_decl_data = NULL;

  lto_register_canonical_types_for_odr_types ();

  if (resolution_file_name)
    {
      /* True, since the plugin splits the archives.  */
      gcc_assert (resolution_objects == nfiles);
      fclose (stream: resolution);
    }

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
    print_lto_report_1 ();

  /* Free gimple type merging datastructures.  */
  delete tree_scc_hash;
  tree_scc_hash = NULL;
  obstack_free (&tree_scc_hash_obstack, NULL);
  htab_delete (gimple_canonical_types);
  gimple_canonical_types = NULL;
  delete canonical_type_hash_cache;
  canonical_type_hash_cache = NULL;

  /* At this stage we know that majority of GGC memory is reachable.
     Growing the limits prevents unnecesary invocation of GGC.  */
  ggc_grow ();
  report_heap_memory_use ();

  /* Set the hooks so that all of the ipa passes can read in their data.  */
  lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);

  timevar_pop (tv: TV_IPA_LTO_DECL_IN);

  if (!quiet_flag)
    fprintf (stderr, format: "\nReading the symbol table:" );

  timevar_push (tv: TV_IPA_LTO_CGRAPH_IO);
  /* Read the symtab.  */
  input_symtab ();

  input_offload_tables (!flag_ltrans);

  /* Store resolutions into the symbol table.  */

  /* Built-in functions and hard-register variables are skipped: they do
     not go through the plugin resolution machinery.  */
  FOR_EACH_SYMBOL (snode)
    if (snode->externally_visible && snode->real_symbol_p ()
	&& snode->lto_file_data && snode->lto_file_data->resolution_map
	&& !(TREE_CODE (snode->decl) == FUNCTION_DECL
	     && fndecl_built_in_p (node: snode->decl))
	&& !(VAR_P (snode->decl) && DECL_HARD_REGISTER (snode->decl)))
      {
	ld_plugin_symbol_resolution_t *res;

	res = snode->lto_file_data->resolution_map->get (k: snode->decl);
	if (!res || *res == LDPR_UNKNOWN)
	  {
	    if (snode->output_to_lto_symbol_table_p ())
	      fatal_error (input_location, "missing resolution data for %s" ,
			   IDENTIFIER_POINTER
			   (DECL_ASSEMBLER_NAME (snode->decl)));
	  }
	/* Symbol versions are always used externally, but linker does not
	   report that correctly.
	   This is binutils PR25924.  */
	else if (snode->symver && *res == LDPR_PREVAILING_DEF_IRONLY)
	  snode->resolution = LDPR_PREVAILING_DEF_IRONLY_EXP;
	else
	  snode->resolution = *res;
      }
  /* The per-file resolution maps are no longer needed once the
     resolutions are stored in the symtab nodes.  */
  for (i = 0; all_file_decl_data[i]; i++)
    if (all_file_decl_data[i]->resolution_map)
      {
	delete all_file_decl_data[i]->resolution_map;
	all_file_decl_data[i]->resolution_map = NULL;
      }

  timevar_pop (tv: TV_IPA_LTO_CGRAPH_IO);

  if (!quiet_flag)
    fprintf (stderr, format: "\nMerging declarations:" );

  timevar_push (tv: TV_IPA_LTO_DECL_MERGE);
  /* Merge global decls.  In ltrans mode we read merged cgraph, we do not
     need to care about resolving symbols again, we only need to replace
     duplicated declarations read from the callgraph and from function
     sections.  */
  if (!flag_ltrans)
    {
      lto_symtab_merge_decls ();

      /* If there were errors during symbol merging bail out, we have no
	 good way to recover here.  */
      if (seen_error ())
	fatal_error (input_location,
		     "errors during merging of translation units" );

      /* Fixup all decls.  */
      lto_fixup_decls (files: all_file_decl_data);
    }
  if (tree_with_vars)
    ggc_free (tree_with_vars);
  tree_with_vars = NULL;
  /* During WPA we want to prevent ggc collecting by default.  Grow limits
     until after the IPA summaries are streamed in.  Basically all IPA memory
     is explcitly managed by ggc_free and ggc collect is not useful.
     Exception are the merged declarations.  */
  ggc_grow ();
  report_heap_memory_use ();

  timevar_pop (tv: TV_IPA_LTO_DECL_MERGE);
  /* Each pass will set the appropriate timer.  */

  if (!quiet_flag)
    fprintf (stderr, format: "\nReading summaries:" );

  /* Read the IPA summary data.  */
  if (flag_ltrans)
    ipa_read_optimization_summaries ();
  else
    ipa_read_summaries ();

  ggc_grow ();

  /* The symtab encoders and decl states have served their purpose during
     streaming; free them per file.  */
  for (i = 0; all_file_decl_data[i]; i++)
    {
      gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
      lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
      all_file_decl_data[i]->symtab_node_encoder = NULL;
      lto_in_decl_state *global_decl_state
	= all_file_decl_data[i]->global_decl_state;
      lto_free_function_in_decl_state (global_decl_state);
      all_file_decl_data[i]->global_decl_state = NULL;
      all_file_decl_data[i]->current_decl_state = NULL;
    }

  if (!flag_ltrans)
    {
      /* Finally merge the cgraph according to the decl merging decisions.  */
      timevar_push (tv: TV_IPA_LTO_CGRAPH_MERGE);

      if (!quiet_flag)
	fprintf (stderr, format: "\nMerging symbols:" );

      gcc_assert (!dump_file);
      dump_file = dump_begin (lto_link_dump_id, NULL);

      if (dump_file)
	{
	  fprintf (stream: dump_file, format: "Before merging:\n" );
	  symtab->dump (f: dump_file);
	}
      lto_symtab_merge_symbols ();
      /* Removal of unreachable symbols is needed to make verify_symtab to pass;
	 we are still having duplicated comdat groups containing local statics.
	 We could also just remove them while merging.  */
      symtab->remove_unreachable_nodes (file: dump_file);
      ggc_collect ();
      report_heap_memory_use ();

      if (dump_file)
	dump_end (lto_link_dump_id, dump_file);
      dump_file = NULL;
      timevar_pop (tv: TV_IPA_LTO_CGRAPH_MERGE);
    }
  symtab->state = IPA_SSA;
  /* All node removals happening here are useless, because
     WPA should not stream them.  Still always perform remove_unreachable_nodes
     because we may reshape clone tree, get rid of dead masters of inline
     clones and remove symbol entries for read-only variables we keep around
     only to be able to constant fold them.  */
  if (flag_ltrans)
    {
      if (symtab->dump_file)
	symtab->dump (f: symtab->dump_file);
      symtab->remove_unreachable_nodes (file: symtab->dump_file);
    }

  /* Indicate that the cgraph is built and ready.  */
  symtab->function_flags_ready = true;

  ggc_free (all_file_decl_data);
  all_file_decl_data = NULL;
}
3008 | |
3009 | |
3010 | |
/* Show various memory usage statistics related to LTO.  */
void
print_lto_report_1 (void)
{
  /* Tag every line with the stage that produced the report.  */
  const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS" ;
  fprintf (stderr, format: "%s statistics\n" , pfx);

  fprintf (stderr, format: "[%s] read %lu unshared trees\n" ,
	   pfx, num_unshared_trees_read);
  fprintf (stderr, format: "[%s] read %lu mergeable SCCs of average size %f\n" ,
	   pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
  fprintf (stderr, format: "[%s] %lu tree bodies read in total\n" , pfx,
	   total_scc_size + num_unshared_trees_read);
  /* The SCC table details are only meaningful during WPA and only when
     at least one SCC was read (also avoids dividing by zero below).  */
  if (flag_wpa && tree_scc_hash && num_sccs_read)
    {
      fprintf (stderr, format: "[%s] tree SCC table: size " HOST_SIZE_T_PRINT_DEC ", "
	       HOST_SIZE_T_PRINT_DEC " elements, collision ratio: %f\n" , pfx,
	       (fmt_size_t) tree_scc_hash->size (),
	       (fmt_size_t) tree_scc_hash->elements (),
	       tree_scc_hash->collisions ());
      hash_table<tree_scc_hasher>::iterator hiter;
      tree_scc *scc, *max_scc = NULL;
      unsigned max_length = 0;
      /* Walk each bucket's chain of SCCs to find the longest one, as a
	 measure of the worst hash-collision case.  */
      FOR_EACH_HASH_TABLE_ELEMENT (*tree_scc_hash, scc, x, hiter)
	{
	  unsigned length = 0;
	  tree_scc *s = scc;
	  for (; s; s = s->next)
	    length++;
	  if (length > max_length)
	    {
	      max_length = length;
	      max_scc = scc;
	    }
	}
      fprintf (stderr, format: "[%s] tree SCC max chain length %u (size %u)\n" ,
	       pfx, max_length, max_scc->len);
      fprintf (stderr, format: "[%s] Compared %lu SCCs, %lu collisions (%f)\n" , pfx,
	       num_scc_compares, num_scc_compare_collisions,
	       num_scc_compare_collisions / (double) num_scc_compares);
      fprintf (stderr, format: "[%s] Merged %lu SCCs\n" , pfx, num_sccs_merged);
      fprintf (stderr, format: "[%s] Merged %lu tree bodies\n" , pfx,
	       total_scc_size_merged);
      fprintf (stderr, format: "[%s] Merged %lu types\n" , pfx, num_merged_types);
      fprintf (stderr, format: "[%s] %lu types prevailed (%lu associated trees)\n" ,
	       pfx, num_prevailing_types, num_type_scc_trees);
      fprintf (stderr, format: "[%s] GIMPLE canonical type table: size "
	       HOST_SIZE_T_PRINT_DEC ", " HOST_SIZE_T_PRINT_DEC
	       " elements, %d searches, %d collisions (ratio: %f)\n" , pfx,
	       (fmt_size_t) htab_size (gimple_canonical_types),
	       (fmt_size_t) htab_elements (gimple_canonical_types),
	       gimple_canonical_types->searches,
	       gimple_canonical_types->collisions,
	       htab_collisions (gimple_canonical_types));
      fprintf (stderr, format: "[%s] GIMPLE canonical type pointer-map: "
	       "%lu elements, %ld searches\n" , pfx,
	       num_canonical_type_hash_entries,
	       num_canonical_type_hash_queries);
    }

  print_lto_report (pfx);
}
3073 | |
/* Cached personality function decl, computed lazily by
   lto_eh_personality.  */
GTY(()) tree lto_eh_personality_decl;
3075 | |
3076 | /* Return the LTO personality function decl. */ |
3077 | |
3078 | tree |
3079 | lto_eh_personality (void) |
3080 | { |
3081 | if (!lto_eh_personality_decl) |
3082 | { |
3083 | /* Use the first personality DECL for our personality if we don't |
3084 | support multiple ones. This ensures that we don't artificially |
3085 | create the need for them in a single-language program. */ |
3086 | if (first_personality_decl && !dwarf2out_do_cfi_asm ()) |
3087 | lto_eh_personality_decl = first_personality_decl; |
3088 | else |
3089 | lto_eh_personality_decl = lhd_gcc_personality (); |
3090 | } |
3091 | |
3092 | return lto_eh_personality_decl; |
3093 | } |
3094 | |
3095 | /* Set the process name based on the LTO mode. */ |
3096 | |
3097 | static void |
3098 | lto_process_name (void) |
3099 | { |
3100 | if (flag_lto) |
3101 | setproctitle (flag_incremental_link == INCREMENTAL_LINK_LTO |
3102 | ? "lto1-inclink" : "lto1-lto" ); |
3103 | if (flag_wpa) |
3104 | setproctitle ("lto1-wpa" ); |
3105 | if (flag_ltrans) |
3106 | setproctitle ("lto1-ltrans" ); |
3107 | } |
3108 | |
3109 | |
3110 | /* Initialize the LTO front end. */ |
3111 | |
3112 | void |
3113 | lto_fe_init (void) |
3114 | { |
3115 | lto_process_name (); |
3116 | lto_streamer_hooks_init (); |
3117 | lto_reader_init (); |
3118 | lto_set_in_hooks (NULL, get_section_data, free_section_data); |
3119 | memset (s: <o_stats, c: 0, n: sizeof (lto_stats)); |
3120 | bitmap_obstack_initialize (NULL); |
3121 | gimple_register_cfg_hooks (); |
3122 | } |
3123 | |
3124 | #include "gt-lto-lto-common.h" |
3125 | |